text stringlengths 1 1.05M |
|---|
import numpy as np
from sklearn.model_selection import KFold

# Create a toy dataset: 6 samples with 2 features each.
X = np.array([[1, 2], [3, 4], [5, 6], [7, 8], [9, 10], [11, 12]])
y = np.array([1, 2, 3, 4, 5, 6])

# Create the cross-validation splitter (5 folds over 6 samples).
kf = KFold(n_splits=5)

# Iterate through the folds. FIX: the loop body below had lost its
# indentation and was a syntax error; each split yields two index arrays
# that select the train/test rows of X and y.
for train_index, test_index in kf.split(X):
    print("Train Index: ", train_index, "\n")
    print("Test Index: ", test_index)
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]
"""Init pytest fixtures."""
|
// Build a small in-memory employees DataFrame with columns (id, name, age, salary).
val employees = spark.sqlContext.createDataFrame(Seq(
  (1, "Bob", 21, 3000),
  (2, "Rob", 25, 4000),
  (3, "Roy", 27, 8000),
  (4, "Tom", 32, 5000)
)).toDF("id","name","age","salary")
// Average salary: collect() brings the single aggregate row to the driver,
// then getDouble(0) extracts the value of the only column.
val avg_salary = employees.select(avg("salary")).collect()(0).getDouble(0)
<gh_stars>0
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
/**
 * Reads one line from stdin and prints the most frequent letter
 * (case-insensitive), or "?" when the maximum count is tied.
 */
public class Main {
    public static void main(String[] args) throws NumberFormatException, IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(System.out));

        // Count occurrences of each letter after folding to upper case.
        int[] counts = new int[26];
        String word = reader.readLine().toUpperCase();
        for (int i = 0; i < word.length(); i++) {
            counts[word.charAt(i) - 'A']++;
        }

        // Track the index of the current maximum and whether it is tied.
        int best = 0;
        boolean tied = false;
        for (int c = 1; c < 26; c++) {
            if (counts[c] > counts[best]) {
                best = c;
                tied = false;
            } else if (counts[c] == counts[best]) {
                tied = true;
            }
        }

        writer.write(tied ? "?\n" : (char) (best + 'A') + "\n");
        writer.flush();
        writer.close();
    }
}
TERMUX_PKG_HOMEPAGE=https://github.com/mikebrady/shairport-sync
TERMUX_PKG_DESCRIPTION="An AirPlay audio player"
TERMUX_PKG_LICENSE="custom"
TERMUX_PKG_LICENSE_FILE="LICENSES"
TERMUX_PKG_MAINTAINER="@termux"
# Cannot simply be updated to a newer version due to `pthread_cancel` being used
TERMUX_PKG_VERSION=3.1.2
TERMUX_PKG_SRCURL=https://github.com/mikebrady/shairport-sync/archive/refs/tags/${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=8c13f7ebbd417e8cab07ea9f74392ced0f54315d8697d4513580f472859a9c65
TERMUX_PKG_DEPENDS="libconfig, libdaemon, libpopt, libsoxr, openssl, pulseaudio"
# Build directly inside the extracted source tree (autotools in-source build).
TERMUX_PKG_BUILD_IN_SRC=true
# Enable the PulseAudio backend, soxr resampling, and OpenSSL crypto.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
--with-pa
--with-soxr
--with-ssl=openssl
"
termux_step_pre_configure() {
	# Regenerate the configure script from configure.ac.
	autoreconf -fi
	# -fcommon: tolerate duplicate tentative definitions (newer GCC defaults to -fno-common).
	CFLAGS+=" -fcommon"
	# Create empty dummy static archives for libpthread/librt when they do not
	# exist, so that -lpthread/-lrt link flags succeed — presumably because the
	# target libc bundles these symbols (confirm against the Termux toolchain).
	# The flags below remember which dummies we created so the post-install
	# step can delete them again.
	_NEED_DUMMY_LIBPTHREAD_A=
	_LIBPTHREAD_A=$TERMUX_PREFIX/lib/libpthread.a
	if [ ! -e $_LIBPTHREAD_A ]; then
		_NEED_DUMMY_LIBPTHREAD_A=true
		# '!<arch>' is the magic header of an empty `ar` archive.
		echo '!<arch>' > $_LIBPTHREAD_A
	fi
	_NEED_DUMMY_LIBRT_A=
	_LIBRT_A=$TERMUX_PREFIX/lib/librt.a
	if [ ! -e $_LIBRT_A ]; then
		_NEED_DUMMY_LIBRT_A=true
		echo '!<arch>' > $_LIBRT_A
	fi
}
termux_step_post_make_install() {
	# Remove only the dummy archives this build created in pre_configure;
	# real libpthread.a/librt.a files (if any) are left alone.
	if [ $_NEED_DUMMY_LIBPTHREAD_A ]; then
		rm -f $_LIBPTHREAD_A
	fi
	if [ $_NEED_DUMMY_LIBRT_A ]; then
		rm -f $_LIBRT_A
	fi
}
|
import hashlib
class FileIntegrityChecker:
    """Verifies files against a known SHA-256 digest."""

    # Stream files in fixed-size chunks so large files never have to fit
    # in memory (the original read the whole file at once).
    _CHUNK_SIZE = 65536

    def __init__(self, reference_hash: bytes):
        """Store the expected SHA-256 digest (raw bytes, not a hex string)."""
        self.reference_hash = reference_hash

    def calculate_hash(self, file_path: str) -> bytes:
        """Return the SHA-256 digest of the file at *file_path*."""
        digest = hashlib.sha256()
        with open(file_path, 'rb') as file:
            # iter() with a sentinel stops cleanly at EOF (b'').
            for chunk in iter(lambda: file.read(self._CHUNK_SIZE), b''):
                digest.update(chunk)
        return digest.digest()

    def check_integrity(self, file_path: str) -> bool:
        """Return True when the file's digest equals the reference digest."""
        return self.calculate_hash(file_path) == self.reference_hash
package com.leetcode;
public class Solution_5717 {
    /**
     * Returns the minimum number of +1 increments needed to make nums
     * strictly increasing. The array is adjusted in place as a side effect.
     */
    public int minOperations(int[] nums) {
        if (nums == null || nums.length < 2) return 0;
        int ops = 0;
        for (int i = 1; i < nums.length; i++) {
            // Each element must exceed its predecessor by at least 1.
            int required = nums[i - 1] + 1;
            if (nums[i] < required) {
                ops += required - nums[i];
                nums[i] = required;
            }
        }
        return ops;
    }
}
|
#!/usr/bin/env bash
set -eu -o pipefail
# -e: exits if a command fails
# -u: errors if an variable is referenced before being set
# -o pipefail: causes a pipeline to produce a failure return code if any command errors
# Absolute path to the repository root (parent of this script's directory).
readonly RULES_NODEJS_DIR=$(cd $(dirname "$0")/..; pwd)
# Echo a command before executing it, for traceable output.
echo_and_run() { echo "+ $@" ; "$@" ; }
# sedi makes `sed -i` work on both OSX & Linux
# See https://stackoverflow.com/questions/2320564/i-need-my-sed-i-command-for-in-place-editing-to-work-with-both-gnu-sed-and-bsd
sedi () {
  # BSD sed requires an explicit (possibly empty) backup suffix after -i;
  # GNU sed takes bare -i.
  case $(uname) in
    Darwin*) sedi=('-i' '') ;;
    *) sedi='-i' ;;
  esac
  sed "${sedi[@]}" "$@"
}
# Replaces "file://..." with absolute path to generated npm package under /dist/npm_bazel_foobar$RANDOM
# back to "bazel://@npm_bazel_foobar//:npm_package"
echo_and_run sedi "s#\"file://${RULES_NODEJS_DIR}/dist/npm_bazel_\([a-z_]*\)\$*[0-9]*\"#\"bazel://@npm_bazel_\1//:npm_package\"#" package.json
|
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2020 Xilinx, Inc. All Rights Reserved.
#
# NOTE(review): this script was generated on Windows — the PATH values below
# are Windows-style. The unconditional `exit` after the echos is deliberate:
# it guards against running with unfixed paths and must only be removed after
# PATH/LD_LIBRARY_PATH have been corrected for this machine.
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
exit
if [ -z "$PATH" ]; then
  PATH=C:/Xilinx/Vitis/2020.2/bin;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2020.2/bin
else
  PATH=C:/Xilinx/Vitis/2020.2/bin;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2020.2/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2020.2/bin:$PATH
fi
export PATH
if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=
else
  LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH
# Working directory of the synthesis run; all output goes to runme.log there.
HD_PWD='C:/Users/albert.poblador/ip_lab3/ip_repo/edit_VGA_Controller_v1_0.runs/synth_1'
cd "$HD_PWD"
HD_LOG=runme.log
/bin/touch $HD_LOG
ISEStep="./ISEWrap.sh"
# Runs one tool step through the ISE wrapper, appending stdout/stderr to the
# log, and aborts the whole script if the step fails.
EAStep()
{
  $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
  if [ $? -ne 0 ]
  then
    exit
  fi
}
EAStep vivado -log VGA_Controller_v1_0.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source VGA_Controller_v1_0.tcl
|
from flask import Flask, render_template, request

app = Flask(__name__)


@app.route('/')
def index():
    """Render the landing page template."""
    return render_template('index.html')


@app.route('/post', methods=['POST'])
def post_data():
    """Echo the submitted form data back to the client.

    NOTE(review): ``request.form`` is a werkzeug MultiDict (a dict subclass);
    returning it relies on Flask's dict-to-JSON conversion and repeated form
    keys may not round-trip as expected — confirm this is acceptable.
    """
    data = request.form
    return data


if __name__ == '__main__':
    app.run()
#!/bin/bash
##################################################################
#### Author: Blaine McDonnell (blaine@armoin.com)             ####
#### Usage:  ./hiveid_get_newbin                              ####
#### Description: Gets the latest and greatest binary file    ####
#### Version: 0.1                                             ####
##################################################################
# Picks the last Auto*.bin in reverse lexical order.
# NOTE(review): `sort -r` is lexicographic — if file names embed multi-digit
# version numbers, version sort (`sort -rV`) may be required; confirm the
# firmware naming scheme.
find /usr/local/hiveid-ap/ota/Auto*.bin | sort -r | head -1
#!/bin/bash
set -eux

# Pinned Bazel version and the official GitHub release download location.
BAZEL_VERSION='0.28.0'
BAZEL_BASE_URL='https://github.com/bazelbuild/bazel/releases/download'
BAZEL_SH="bazel-${BAZEL_VERSION}-installer-linux-x86_64.sh"
BAZEL_URL="${BAZEL_BASE_URL}/${BAZEL_VERSION}/${BAZEL_SH}"

# -q: quiet; -nc: do not re-download if the installer already exists.
wget -q -nc "${BAZEL_URL}"
chmod +x "${BAZEL_SH}"
# FIX: quote the expansion so the path survives word splitting.
./"${BAZEL_SH}"
# FIX: the installer is a single regular file — plain -f suffices (was -rf).
rm -f "${BAZEL_SH}"
package com.example.demo.controller;
import com.example.demo.entity.*;
import com.example.demo.repository.*;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import yahoofinance.Stock;
import yahoofinance.YahooFinance;
import yahoofinance.histquotes.HistoricalQuote;
import yahoofinance.histquotes.Interval;
import java.io.IOException;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
@RestController
public class GeneralController {

    final Logger log = LoggerFactory.getLogger(GeneralController.class);

    @Autowired
    AccountRepo accountRepo;
    @Autowired
    StockInfoRepo stockInfoRepo;
    @Autowired
    StockPriceRepo stockPriceRepo;
    @Autowired
    StockGroupRepo stockGroupRepo;
    @Autowired
    ArticleRepo articleRepo;

    /*
     * Account endpoints.
     */

    /** Creates and persists a new account. */
    @PostMapping("/account")
    Account createAccount(@RequestBody Account newAccount) {
        return accountRepo.save(newAccount);
    }

    /** Updates the account with {@code id}, or creates it under that id if missing. */
    @PutMapping("/account/{id}")
    Account replaceAccount(@RequestBody Account newAccount, @PathVariable Long id) {
        return accountRepo.findById(id)
                .map(account -> {
                    account.setPassword(newAccount.getPassword());
                    account.setEmail(newAccount.getEmail());
                    return accountRepo.save(account);
                })
                .orElseGet(() -> {
                    newAccount.setId(id);
                    return accountRepo.save(newAccount);
                });
    }

    /** Returns all accounts, or only those matching {@code email} when given. */
    @RequestMapping("/account")
    @ResponseBody
    public List<Account> account(@RequestParam(required = false) String email) {
        log.info("account");
        log.info(email);
        // FIX: the original tested `email == ""`, which compares object
        // identity in Java and is effectively never true for request
        // parameters; isEmpty() performs the intended content check.
        if (email == null || email.isEmpty()) {
            log.info("returning all accounts");
            return accountRepo.findAll();
        }
        return accountRepo.findByEmail(email);
    }

    /** Deletes the account with the given id. */
    @DeleteMapping("/account/{id}")
    void deleteAccount(@PathVariable Long id) {
        accountRepo.deleteById(id);
    }

    /*
     * Article endpoints.
     */

    /** Creates and persists a new article. */
    @PostMapping("/article")
    Article newArticle(@RequestBody Article newArticle) {
        return articleRepo.save(newArticle);
    }

    /** Returns all articles, or only those matching {@code title} when given. */
    @RequestMapping("/article")
    @ResponseBody
    public List<Article> article(@RequestParam(required = false) String title) {
        log.info("article");
        log.info(title);
        // FIX: was `title == ""` (identity comparison, see account()).
        if (title == null || title.isEmpty()) {
            log.info("returning all articles");
            return articleRepo.findAll();
        }
        return articleRepo.findByTitle(title);
    }

    /** Updates the article with {@code id}, or creates it under that id if missing. */
    @PutMapping("/article/{id}")
    Article replaceArticle(@RequestBody Article newArticle, @PathVariable Long id) {
        return articleRepo.findById(id)
                .map(article -> {
                    article.setTitle(newArticle.getTitle());
                    article.setText(newArticle.getText());
                    article.setArticleDate(newArticle.getArticleDate());
                    article.setPositivity(newArticle.getPositivity());
                    return articleRepo.save(article);
                })
                .orElseGet(() -> {
                    newArticle.setId(id);
                    return articleRepo.save(newArticle);
                });
    }

    /** Deletes the article with the given id. */
    @DeleteMapping("/article/{id}")
    void deleteArticle(@PathVariable Long id) {
        articleRepo.deleteById(id);
    }

    /*
     * Stock price endpoints.
     */

    /** Creates and persists a new stock price row. */
    @PostMapping("/stockprice")
    StockPrice newStockPrice(@RequestBody StockPrice newStockPrice) {
        return stockPriceRepo.save(newStockPrice);
    }

    /** Returns prices for {@code ticker}; a blank ticker returns everything. */
    @RequestMapping("/stockprice/{ticker}")
    @ResponseBody
    public List<StockPrice> stockPrice(@PathVariable("ticker") String ticker) throws IOException {
        log.info("stock");
        log.info(ticker);
        // FIX: was `ticker == ""` (identity comparison, see account()).
        if (ticker == null || ticker.isEmpty()) {
            log.info("returning all stocks");
            return stockPriceRepo.findAll();
        }
        return stockPriceRepo.findByTicker(ticker);
    }

    /**
     * Returns prices for a yyyy-MM-dd date; a blank or unparsable date
     * falls back to returning all rows (original best-effort behavior).
     */
    @RequestMapping("/stockprice-date/{date}")
    @ResponseBody
    public List<StockPrice> stockPriceByDate(@PathVariable("date") String date) throws IOException {
        log.info("stock by date");
        log.info(date);
        // FIX: was `date == ""` (identity comparison, see account()).
        if (date == null || date.isEmpty()) {
            log.info("returning all stocks");
            return stockPriceRepo.findAll();
        }
        SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
        try {
            return stockPriceRepo.findByDate(formatter.parse(date));
        } catch (ParseException e) {
            return stockPriceRepo.findAll();
        }
    }

    /** Updates the stock price with {@code id}, or saves the new row as-is if missing. */
    @PutMapping("/stockprice/{id}")
    StockPrice replaceStockPrice(@RequestBody StockPrice newStockPrice, @PathVariable Long id) {
        return stockPriceRepo.findById(id)
                .map(stockPrice -> {
                    stockPrice.setTicker(newStockPrice.getTicker());
                    stockPrice.setClose(newStockPrice.getClose());
                    stockPrice.setDate(newStockPrice.getDate());
                    stockPrice.setHigh(newStockPrice.getHigh());
                    stockPrice.setLow(newStockPrice.getLow());
                    stockPrice.setOpen(newStockPrice.getOpen());
                    stockPrice.setVolume(newStockPrice.getVolume());
                    return stockPriceRepo.save(stockPrice);
                })
                // NOTE(review): unlike the other replace endpoints this does not
                // force the path id onto the new entity — confirm intended.
                .orElseGet(() -> stockPriceRepo.save(newStockPrice));
    }

    /** Deletes the stock price with the given id. */
    @DeleteMapping("/stock/{id}")
    void deleteStockPrice(@PathVariable Long id) {
        stockPriceRepo.deleteById(id);
    }

    /** Removes the stock info with {@code ticker} from the stock group {@code id}. */
    @GetMapping("/deleteStock")
    void deleteStockInfo(@RequestParam Long id, @RequestParam String ticker) {
        Optional<StockGroup> osg = stockGroupRepo.findById(id);
        if (osg.isPresent()) {
            StockGroup sg = osg.get();
            List<StockInfo> stockInfos = sg.getOpenStockInfos();
            for (StockInfo s : stockInfos) {
                if (s.getTicker().equals(ticker)) {
                    stockInfos.remove(s);
                    sg.setOpenStockInfos(stockInfos);
                    // NOTE(review): delete-then-save re-creates the group,
                    // presumably to flush the join rows — confirm the mapping
                    // actually requires this.
                    stockGroupRepo.delete(sg);
                    stockGroupRepo.save(sg);
                    // Break immediately: removing during for-each iteration is
                    // only safe because we never advance the iterator again.
                    break;
                }
            }
        }
    }

    /*
     * Stock group endpoints.
     */

    /** Returns all stock groups, or only those owned by {@code email} when given. */
    @RequestMapping("/stockGroup")
    @ResponseBody
    public List<StockGroup> stockGroup(@RequestParam(required = false) String email) {
        log.info("stockGroup");
        log.info(email);
        // FIX: was `email == ""` (identity comparison, see account()).
        if (email == null || email.isEmpty()) {
            log.info("returning all users");
            return stockGroupRepo.findAll();
        }
        return stockGroupRepo.findByAccountEmail(email);
    }

    /** Deletes the stock group with the given id. */
    @DeleteMapping("/stockGroup/{id}")
    void deleteStockGroup(@PathVariable Long id) {
        stockGroupRepo.deleteById(id);
    }

    /** Adds the stock info identified by {@code ticker} to stock group {@code id}. */
    @GetMapping("/addToStockGroup/{id}")
    void addStockInfoToStockGroup(@PathVariable Long id, @RequestParam String ticker) {
        log.info("add stock info to stock group");
        StockInfo si = stockInfoRepo.findByTicker(ticker);
        Optional<StockGroup> sg = stockGroupRepo.findById(id);
        if (si != null && sg.isPresent()) {
            List<StockInfo> stockInfos = sg.get().getOpenStockInfos();
            stockInfos.add(si);
            sg.get().setOpenStockInfos(stockInfos);
            stockGroupRepo.save(sg.get());
            log.info("stock group updated");
        }
    }

    /** Updates the corporate name stored for {@code ticker}, if it exists. */
    @GetMapping("/updateCorporateName/{ticker}")
    void updateCorporateName(@PathVariable String ticker, @RequestParam String name) {
        log.info("update corporate name");
        StockInfo si = stockInfoRepo.findByTicker(ticker);
        if (si != null) {
            si.setCorporateName(name);
            stockInfoRepo.save(si);
            log.info("updated name");
        }
    }

    /*
     * Authentication.
     */

    /** Returns true when an account with the given email/password pair exists. */
    @RequestMapping("/validate")
    @ResponseBody
    public Boolean validate(@RequestParam(name = "email", required = true) String email,
                            @RequestParam(name = "password", required = true) String password) {
        return accountRepo.findByEmailAndPassword(email, password) != null;
    }
}
|
# Workload definition consumed by the load-test harness (values are exported
# as environment variables).
# NOTE(review): the original header said "2 events per second for 1 second",
# which does not match CONCURRENT=10 / START=100 below — confirm the intended
# load profile.
export WORKLOAD_ID=10-100-1
export CONCURRENT=10
export START=100
#export INC=2
export DURATION=1
export TEST_DURATION=10
|
#!/bin/sh
# Load environment settings (defines ROOT_PATH and PYTHON), then run the
# `doit` task runner from the project root, replacing this shell process.
. $(dirname -- "$0")/env.sh
cd $ROOT_PATH
exec $PYTHON -m doit
// Load .env so REDIS_URL can be supplied locally during development.
require('dotenv').config()
const redis = require('redis')
/*=== creates and initializes redis instance that is heroku friendly ===*/
// On Heroku REDIS_URL is a full redis:// connection URL; locally we fall back
// to the bare default port 6379 (node_redis v2/v3 accept a port number here).
// NOTE(review): redis v4+ requires createClient({ url: ... }) instead —
// confirm the installed client version before upgrading.
const redis_url = process.env.REDIS_URL || 6379
const client = redis.createClient(redis_url)
// Log connection errors instead of letting them crash the process.
client.on('error', err => console.error(err))
module.exports = client
package ltm.service;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;
/**
 * Binder handed to clients from {@code ServiceLTM.onBind()}; exposes a
 * trivial demo method so bound clients can call into the service.
 */
class LocalBinder extends Binder {
    String helloService() {
        return "hello service";
    }
}
/**
 * Demo started/bound Service that logs and toasts each lifecycle callback
 * and posts a notification when a client binds.
 */
public class ServiceLTM extends Service {

    /** Single notification id used by both notify() and cancel(). */
    private static final int NOTIFICATION_ID = 10000;

    private final LocalBinder mBinder = new LocalBinder();
    private NotificationManager mNM;

    @Override
    public void onCreate() {
        mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
        Log.v("ltm", "onCreate");
        super.onCreate();
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        Toast.makeText(this, "onStartCommand", Toast.LENGTH_SHORT).show();
        Log.v("ltm", "onStartCommand");
        // START_STICKY: ask the system to recreate the service if it is killed.
        return START_STICKY;
    }

    @Override
    public void onDestroy() {
        mNM.cancel(NOTIFICATION_ID);
        Toast.makeText(this, "onDestroy", Toast.LENGTH_SHORT).show();
        Log.v("ltm", "onDestroy");
        // FIX: the original omitted the chained framework teardown.
        super.onDestroy();
    }

    @Override
    public IBinder onBind(Intent arg0) {
        Toast.makeText(this, "onBind", Toast.LENGTH_SHORT).show();
        Log.v("ltm", "onBind");
        showNotification();
        return mBinder;
    }

    /** Posts a notification that taps through to {@code ActivityLaunched}. */
    private void showNotification() {
        final NotificationManager manager = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
        final Intent launchNotificationIntent = new Intent(this, ActivityLaunched.class);
        // NOTE(review): targeting API 31+ additionally requires
        // FLAG_IMMUTABLE or FLAG_MUTABLE on PendingIntents — confirm target SDK.
        final PendingIntent pendingIntent = PendingIntent.getActivity(this, 0,
                launchNotificationIntent, PendingIntent.FLAG_ONE_SHOT);
        Notification.Builder builder = new Notification.Builder(this)
                .setWhen(System.currentTimeMillis())
                .setTicker("title")
                .setSmallIcon(R.drawable.icon)
                .setContentTitle("titre")
                .setContentText("desc")
                .setContentIntent(pendingIntent);
        builder.setAutoCancel(true);
        manager.notify(NOTIFICATION_ID, builder.build());
    }
}
|
<filename>Libraries/RadioLib/src/modules/ESP8266/ESP8266.h
// Guard also checks ESP8266: this driver is excluded when compiling *on* an
// ESP8266 platform (where the macro is predefined by the toolchain).
#if !defined(_RADIOLIB_ESP8266_H) && !defined(ESP8266)
#define _RADIOLIB_ESP8266_H

#include "../../Module.h"
#include "../../protocols/TransportLayer/TransportLayer.h"

/*!
  \class ESP8266
  \brief Control class for %ESP8266 module. Implements TransportLayer methods.
*/
class ESP8266: public TransportLayer {
  public:
    /*!
      \brief Default constructor.
      \param module Instance of Module that will be used to communicate with the radio.
    */
    ESP8266(Module* module);

    // basic methods

    /*!
      \brief Initialization method.
      \param speed Baud rate to use for UART interface.
      \returns \ref status_codes
    */
    int16_t begin(long speed);

    /*!
      \brief Resets module using AT command.
      \returns \ref status_codes
    */
    int16_t reset();

    /*!
      \brief Joins access point.
      \param ssid Access point SSID.
      \param password Access point password.
      \returns \ref status_codes
    */
    int16_t join(const char* ssid, const char* password);

    // transport layer methods (implementations of purely virtual methods in TransportMethod class)
    int16_t openTransportConnection(const char* host, const char* protocol, uint16_t port, uint16_t tcpKeepAlive = 0);
    int16_t closeTransportConnection();
    int16_t send(const char* data);
    int16_t send(uint8_t* data, uint32_t len);
    size_t receive(uint8_t* data, size_t len, uint32_t timeout = 10000);
    size_t getNumBytes(uint32_t timeout = 10000, size_t minBytes = 10);

#ifndef RADIOLIB_GODMODE
  private:
#endif
    // Underlying transport used to exchange AT commands with the module.
    Module* _mod;
};

#endif
|
package com.touch.air.mall.ware.vo;
import lombok.Data;
/**
 * Result reported back when a single purchase-order item is completed.
 *
 * @author: bin.wang
 * @date: 2021/1/2 11:54
 */
@Data
public class PurchaseItemDoneVo {
    /**
     * Purchase item id.
     */
    private Long itemId;
    // Completion status code — semantics defined by the purchase workflow; confirm valid values.
    private Integer status;
    // Free-text reason/remark attached to the completion result.
    private String reason;
}
|
public class MathFunctions {
    /**
     * Returns {@code base} raised to {@code exponent} by repeated
     * multiplication. By convention 0^0 evaluates to 1.
     *
     * @throws IllegalArgumentException if {@code exponent} is negative
     */
    public int power(int base, int exponent) {
        if (exponent < 0) {
            throw new IllegalArgumentException("Exponent cannot be negative");
        }
        int product = 1;
        int remaining = exponent;
        while (remaining > 0) {
            product *= base;
            remaining--;
        }
        return product;
    }
}
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.*;
public class MathFunctionsTest {
@Test
public void testPowerFunction() {
MathFunctions mathFunctions = new MathFunctions();
// Test power function for positive exponent
assertEquals(8, mathFunctions.power(2, 3));
// Test power function for exponent of 0
assertEquals(1, mathFunctions.power(5, 0));
// Test power function for base of 0 and positive exponent
assertEquals(0, mathFunctions.power(0, 4));
// Test power function for base of 0 and exponent of 0
assertEquals(1, mathFunctions.power(0, 0));
// Test power function for negative exponent
assertThrows(IllegalArgumentException.class, () -> mathFunctions.power(2, -3));
}
} |
/* Algorithm to exchange two numbers without a temporary variable.
 *
 * FIX: the original used addition/subtraction (*a = *a + *b; ...), but signed
 * integer overflow is undefined behavior in C, so large operands could
 * misbehave. XOR is well defined for all values. The aliasing guard is
 * required because when a and b point to the same object the XOR sequence
 * would zero it instead of leaving it unchanged.
 */
void SwapWithoutTemp( int* a, int* b )
{
    if (a == b) {
        return;  /* same object: nothing to swap, and XOR would destroy it */
    }
    *a ^= *b;
    *b ^= *a;
    *a ^= *b;
}

/* Driver code (example usage):
 *   int a = 5, b = 7;
 *   SwapWithoutTemp( &a, &b );
 * Output: a = 7, b = 5
 */
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.dx.dex.file;
import com.android.dx.rop.cst.CstType;
import com.android.dx.rop.cst.CstUtf8;
import com.android.dx.rop.type.Prototype;
import com.android.dx.rop.type.StdTypeList;
import com.android.dx.rop.type.Type;
import com.android.dx.util.AnnotatedOutput;
import com.android.dx.util.Hex;
/**
 * Representation of a method prototype reference inside a Dalvik file.
 */
public final class ProtoIdItem extends IndexedItem {
    /** size of instances when written out to a file, in bytes */
    public static final int WRITE_SIZE = 12;

    /** {@code non-null;} the wrapped prototype */
    private final Prototype prototype;

    /** {@code non-null;} the short-form of the prototype */
    private final CstUtf8 shortForm;

    /**
     * {@code null-ok;} the list of parameter types or {@code null} if this
     * prototype has no parameters
     */
    private TypeListItem parameterTypes;

    /**
     * Constructs an instance.
     *
     * @param prototype {@code non-null;} the constant for the prototype
     */
    public ProtoIdItem(Prototype prototype) {
        if (prototype == null) {
            throw new NullPointerException("prototype == null");
        }

        this.prototype = prototype;
        this.shortForm = makeShortForm(prototype);

        StdTypeList parameters = prototype.getParameterTypes();
        // An empty parameter list is canonically represented as null,
        // which writeTo() later encodes as offset 0.
        this.parameterTypes = (parameters.size() == 0) ? null
            : new TypeListItem(parameters);
    }

    /**
     * Creates the short-form of the given prototype.
     *
     * @param prototype {@code non-null;} the prototype
     * @return {@code non-null;} the short form
     */
    private static CstUtf8 makeShortForm(Prototype prototype) {
        StdTypeList parameters = prototype.getParameterTypes();
        int size = parameters.size();
        // One char for the return type plus one per parameter.
        StringBuilder sb = new StringBuilder(size + 1);

        sb.append(shortFormCharFor(prototype.getReturnType()));

        for (int i = 0; i < size; i++) {
            sb.append(shortFormCharFor(parameters.getType(i)));
        }

        return new CstUtf8(sb.toString());
    }

    /**
     * Gets the short-form character for the given type.
     *
     * @param type {@code non-null;} the type
     * @return the corresponding short-form character
     */
    private static char shortFormCharFor(Type type) {
        char descriptorChar = type.getDescriptor().charAt(0);

        // Array types ('[') collapse to 'L' (object) in the shorty form.
        if (descriptorChar == '[') {
            return 'L';
        }

        return descriptorChar;
    }

    /** {@inheritDoc} */
    @Override
    public ItemType itemType() {
        return ItemType.TYPE_PROTO_ID_ITEM;
    }

    /** {@inheritDoc} */
    @Override
    public int writeSize() {
        return WRITE_SIZE;
    }

    /** {@inheritDoc} */
    @Override
    public void addContents(DexFile file) {
        StringIdsSection stringIds = file.getStringIds();
        TypeIdsSection typeIds = file.getTypeIds();
        MixedItemSection typeLists = file.getTypeLists();

        // Intern every constant this item refers to so indices/offsets
        // exist by the time writeTo() runs.
        typeIds.intern(prototype.getReturnType());
        stringIds.intern(shortForm);

        if (parameterTypes != null) {
            parameterTypes = typeLists.intern(parameterTypes);
        }
    }

    /** {@inheritDoc} */
    @Override
    public void writeTo(DexFile file, AnnotatedOutput out) {
        int shortyIdx = file.getStringIds().indexOf(shortForm);
        int returnIdx = file.getTypeIds().indexOf(prototype.getReturnType());
        // 0 when there are no parameters (parameterTypes == null).
        int paramsOff = OffsettedItem.getAbsoluteOffsetOr0(parameterTypes);

        if (out.annotates()) {
            StringBuilder sb = new StringBuilder();
            sb.append(prototype.getReturnType().toHuman());
            sb.append(" proto(");

            StdTypeList params = prototype.getParameterTypes();
            int size = params.size();

            for (int i = 0; i < size; i++) {
                if (i != 0) {
                    sb.append(", ");
                }
                sb.append(params.getType(i).toHuman());
            }

            sb.append(")");
            out.annotate(0, indexString() + ' ' + sb.toString());
            out.annotate(4, "  shorty_idx:      " + Hex.u4(shortyIdx) +
                    " // " + shortForm.toQuoted());
            out.annotate(4, "  return_type_idx: " + Hex.u4(returnIdx) +
                    " // " + prototype.getReturnType().toHuman());
            out.annotate(4, "  parameters_off:  " + Hex.u4(paramsOff));
        }

        // Three u4 fields — this is exactly WRITE_SIZE (12) bytes.
        out.writeInt(shortyIdx);
        out.writeInt(returnIdx);
        out.writeInt(paramsOff);
    }
}
|
<reponame>vampire-studios/Obsidian<gh_stars>1-10
package io.github.vampirestudios.obsidian.api.obsidian.item;
import io.github.vampirestudios.obsidian.Obsidian;
import io.github.vampirestudios.obsidian.api.obsidian.NameInformation;
import net.minecraft.client.util.ModelIdentifier;
import net.minecraft.item.ItemGroup;
import net.minecraft.util.Identifier;
import net.minecraft.util.Rarity;
public class ItemInformation {
public String rarity = "common";
public Identifier item_group;
public Integer max_count = 64;
public NameInformation name;
public boolean has_glint = false;
public boolean is_enchantable = false;
public int enchantability = 5;
public boolean hand_equipped = false;
public int use_duration = 5;
public boolean can_place_block = false;
public Identifier placable_block;
public boolean wearable = false;
public boolean dyeable = false;
public int defaultColor = 16579836;
public String wearableSlot;
public boolean customRenderMode = false;
public RenderModeModel[] renderModeModels;
public Rarity getRarity() {
return switch (rarity) {
default -> Rarity.COMMON;
case "uncommon" -> Rarity.UNCOMMON;
case "rare" -> Rarity.RARE;
case "epic" -> Rarity.EPIC;
};
}
public ItemGroup getItemGroup() {
return Obsidian.ITEM_GROUP_REGISTRY.get(item_group);
}
public static class RenderModeModel {
public ModelIdentifier model;
public String[] modes;
}
} |
<reponame>woonsan/incubator-freemarker
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package freemarker.template.utility;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.regex.Pattern;
import freemarker.core.Environment;
import freemarker.core.ParseException;
import freemarker.template.Template;
import freemarker.template.Version;
/**
* Some text related utilities.
*/
public class StringUtil {
private static final char[] ESCAPES = createEscapes();
private static final char[] LT = new char[] { '&', 'l', 't', ';' };
private static final char[] GT = new char[] { '&', 'g', 't', ';' };
private static final char[] AMP = new char[] { '&', 'a', 'm', 'p', ';' };
private static final char[] QUOT = new char[] { '&', 'q', 'u', 'o', 't', ';' };
private static final char[] HTML_APOS = new char[] { '&', '#', '3', '9', ';' };
private static final char[] XML_APOS = new char[] { '&', 'a', 'p', 'o', 's', ';' };
/*
* For better performance most methods are folded down. Don't you scream... :)
*/
/**
 * HTML encoding (does not convert line breaks and apostrophe-quote).
 * Replaces all '>' '<' '&' and '"' with entity reference, but not "'" (apostrophe-quote).
 * The last is not escaped as back then when this was written some user agents didn't understood
 * "&apos;" nor "&#39;".
 *
 * @deprecated Use {@link #XHTMLEnc(String)} instead, because it escapes apostrophe-quote too.
 */
@Deprecated
public static String HTMLEnc(String s) {
    // Identical to XML encoding with apostrophe escaping disabled.
    return XMLEncNA(s);
}
/**
 * XML Encoding.
 * Replaces all '>' '<' '&', "'" and '"' with entity reference
 */
public static String XMLEnc(String s) {
    // escGT and escQuot both on; apostrophes become the XML "&apos;" entity.
    return XMLOrHTMLEnc(s, true, true, XML_APOS);
}
/**
 * Like {@link #XMLEnc(String)}, but writes the result into a {@link Writer}.
 *
 * @since 2.3.24
 */
public static void XMLEnc(String s, Writer out) throws IOException {
    // Streaming variant; escapes the same character set as XMLEnc(String).
    XMLOrHTMLEnc(s, XML_APOS, out);
}
/**
 * XHTML Encoding.
 * Replaces all '>' '<' '&', "'" and '"' with entity reference
 * suitable for XHTML decoding in common user agents (including legacy
 * user agents, which do not decode "&apos;" to "'", so "&#39;" is used
 * instead [see http://www.w3.org/TR/xhtml1/#C_16])
 */
public static String XHTMLEnc(String s) {
    // Uses the numeric "&#39;" entity for apostrophes (legacy UA compatibility).
    return XMLOrHTMLEnc(s, true, true, HTML_APOS);
}
/**
 * Like {@link #XHTMLEnc(String)}, but writes the result into a {@link Writer}.
 *
 * @since 2.3.24
 */
public static void XHTMLEnc(String s, Writer out) throws IOException {
    // Streaming variant; escapes the same character set as XHTMLEnc(String).
    XMLOrHTMLEnc(s, HTML_APOS, out);
}
/**
 * Shared XML/HTML escaping worker.
 *
 * @param escGT whether '>' is escaped unconditionally (it is always escaped
 *        when it might close a CDATA-style "]]>"; see maybeCDataEndGT)
 * @param escQuot whether '"' is escaped
 * @param apos the entity to use for "'", or {@code null} to leave apostrophes alone
 */
private static String XMLOrHTMLEnc(String s, boolean escGT, boolean escQuot, char[] apos) {
    final int ln = s.length();

    // First we find out if we need to escape, and if so, what the length of the output will be:
    int firstEscIdx = -1;
    int lastEscIdx = 0;
    int plusOutLn = 0;
    for (int i = 0; i < ln; i++) {
        // The labeled do { ... } while (false) acts as a per-character block:
        // "break escape" skips the bookkeeping below for characters that turn
        // out not to need escaping.
        escape: do {
            final char c = s.charAt(i);
            switch (c) {
            case '<':
                plusOutLn += LT.length - 1;
                break;
            case '>':
                if (!(escGT || maybeCDataEndGT(s, i))) {
                    break escape;
                }
                plusOutLn += GT.length - 1;
                break;
            case '&':
                plusOutLn += AMP.length - 1;
                break;
            case '"':
                if (!escQuot) {
                    break escape;
                }
                plusOutLn += QUOT.length - 1;
                break;
            case '\'': // apos
                if (apos == null) {
                    break escape;
                }
                plusOutLn += apos.length - 1;
                break;
            default:
                break escape;
            }

            if (firstEscIdx == -1) {
                firstEscIdx = i;
            }
            lastEscIdx = i;
        } while (false);
    }

    if (firstEscIdx == -1) {
        return s; // Nothing to escape
    } else {
        // Second pass: copy into a precisely sized buffer, substituting
        // entities only inside the [firstEscIdx, lastEscIdx] window.
        final char[] esced = new char[ln + plusOutLn];
        if (firstEscIdx != 0) {
            s.getChars(0, firstEscIdx, esced, 0);
        }
        int dst = firstEscIdx;
        scan: for (int i = firstEscIdx; i <= lastEscIdx; i++) {
            final char c = s.charAt(i);
            switch (c) {
            case '<':
                dst = shortArrayCopy(LT, esced, dst);
                continue scan;
            case '>':
                if (!(escGT || maybeCDataEndGT(s, i))) {
                    break;
                }
                dst = shortArrayCopy(GT, esced, dst);
                continue scan;
            case '&':
                dst = shortArrayCopy(AMP, esced, dst);
                continue scan;
            case '"':
                if (!escQuot) {
                    break;
                }
                dst = shortArrayCopy(QUOT, esced, dst);
                continue scan;
            case '\'': // apos
                if (apos == null) {
                    break;
                }
                dst = shortArrayCopy(apos, esced, dst);
                continue scan;
            }
            // Fell through the switch: character is copied verbatim.
            esced[dst++] = c;
        }
        // Tail after the last escaped character is copied verbatim.
        if (lastEscIdx != ln - 1) {
            s.getChars(lastEscIdx + 1, ln, esced, dst);
        }

        return String.valueOf(esced);
    }
}
/**
 * Conservatively decides whether the '>' at index {@code i} could be the end
 * of a "]]>" sequence and therefore must be escaped even when general '>'
 * escaping is disabled. Returns true at index 0 (no preceding context) and
 * when the preceding character(s) look like the "]]" prefix.
 */
private static boolean maybeCDataEndGT(String s, int i) {
    if (i == 0) return true;
    if (s.charAt(i - 1) != ']') return false;
    if (i == 1 || s.charAt(i - 2) == ']') return true;
    return false;
}
/**
 * Streaming counterpart of {@link #XMLOrHTMLEnc(String, boolean, boolean, char[])}:
 * writes unescaped runs in bulk and substitutes an entity for each special
 * character. Unlike the String variant, this always escapes all five
 * special characters.
 */
private static void XMLOrHTMLEnc(String s, char[] apos, Writer out) throws IOException {
    int writtenEnd = 0; // exclusive end
    int ln = s.length();
    for (int i = 0; i < ln; i++) {
        char c = s.charAt(i);
        if (c == '<' || c == '>' || c == '&' || c == '"' || c == '\'') {
            // Flush the unescaped run preceding this special character.
            int flushLn = i - writtenEnd;
            if (flushLn != 0) {
                out.write(s, writtenEnd, flushLn);
            }
            writtenEnd = i + 1;
            switch (c) {
            case '<': out.write(LT); break;
            case '>': out.write(GT); break;
            case '&': out.write(AMP); break;
            case '"': out.write(QUOT); break;
            default: out.write(apos); break;
            }
        }
    }
    // Flush the trailing unescaped run, if any.
    if (writtenEnd < ln) {
        out.write(s, writtenEnd, ln - writtenEnd);
    }
}
/**
 * For efficiently copying very short char arrays.
 *
 * @return the destination offset just past the last copied character
 */
private static int shortArrayCopy(char[] src, char[] dst, int dstOffset) {
    // A plain loop is used deliberately: for the tiny entity arrays copied
    // here it avoids the overhead of System.arraycopy.
    for (char ch : src) {
        dst[dstOffset++] = ch;
    }
    return dstOffset;
}
/**
 * XML encoding without replacing apostrophes.
 * @see #XMLEnc(String)
 */
public static String XMLEncNA(String s) {
    // apos == null: apostrophes pass through unescaped.
    return XMLOrHTMLEnc(s, true, true, null);
}
/**
 * XML encoding for attribute values quoted with <tt>"</tt> (not with <tt>'</tt>!).
 * Also can be used for HTML attributes that are quoted with <tt>"</tt>.
 * @see #XMLEnc(String)
 */
public static String XMLEncQAttr(String s) {
    // Inside a "-quoted attribute only '<', '&', '"' need escaping
    // ('>' only when it could form "]]>").
    return XMLOrHTMLEnc(s, false, true, null);
}
/**
 * XML encoding without replacing apostrophes and quotation marks and
 * greater-thans (except in {@code ]]>}).
 * @see #XMLEnc(String)
 */
public static String XMLEncNQG(String s) {
    // Minimal escaping: only '<', '&', and "]]>"-closing '>'.
    return XMLOrHTMLEnc(s, false, false, null);
}
/**
 * Rich Text Format encoding (does not replace line breaks).
 * Escapes all '\' '{' '}'.
 */
public static String RTFEnc(String s) {
    int ln = s.length();

    // First we find out if we need to escape, and if so, what the length of the output will be:
    // (each escaped character gains exactly one backslash)
    int firstEscIdx = -1;
    int lastEscIdx = 0;
    int plusOutLn = 0;
    for (int i = 0; i < ln; i++) {
        char c = s.charAt(i);
        if (c == '{' || c == '}' || c == '\\') {
            if (firstEscIdx == -1) {
                firstEscIdx = i;
            }
            lastEscIdx = i;
            plusOutLn++;
        }
    }

    if (firstEscIdx == -1) {
        return s; // Nothing to escape
    } else {
        // Second pass: copy into a precisely sized buffer, inserting a
        // backslash before each special character.
        char[] esced = new char[ln + plusOutLn];
        if (firstEscIdx != 0) {
            s.getChars(0, firstEscIdx, esced, 0);
        }
        int dst = firstEscIdx;
        for (int i = firstEscIdx; i <= lastEscIdx; i++) {
            char c = s.charAt(i);
            if (c == '{' || c == '}' || c == '\\') {
                esced[dst++] = '\\';
            }
            esced[dst++] = c;
        }
        // Tail after the last escaped character is copied verbatim.
        if (lastEscIdx != ln - 1) {
            s.getChars(lastEscIdx + 1, ln, esced, dst);
        }

        return String.valueOf(esced);
    }
}
/**
 * Like {@link #RTFEnc(String)}, but writes the result into a {@link Writer}.
 *
 * @since 2.3.24
 */
public static void RTFEnc(String s, Writer out) throws IOException {
    int len = s.length();
    int flushed = 0;  // exclusive end of the part already written to `out`
    for (int i = 0; i < len; i++) {
        char ch = s.charAt(i);
        if (ch != '{' && ch != '}' && ch != '\\') {
            continue;
        }
        if (flushed != i) {
            out.write(s, flushed, i - flushed);
        }
        out.write('\\');
        // Leave `flushed` at i so the escaped character itself goes out with the next flush.
        flushed = i;
    }
    if (flushed < len) {
        out.write(s, flushed, len - flushed);
    }
}
/**
 * URL encoding (like%20this) for query parameter values, path <em>segments</em>, fragments; this encodes all
 * characters that are reserved anywhere.
 *
 * @param s the string to encode
 * @param charset the charset used to convert unsafe characters to the bytes that get %XX-encoded
 * @throws UnsupportedEncodingException if {@code charset} is not supported by the JVM
 */
public static String URLEnc(String s, String charset) throws UnsupportedEncodingException {
    return URLEnc(s, charset, false);
}
/**
 * Like {@link #URLEnc(String, String)} but doesn't escape the slash character ({@code /}).
 * This can be used to encode a path only if you know that no folder or file name will contain {@code /}
 * character (not in the path, but in the name itself), which usually stands, as the commonly used OS-es don't
 * allow that.
 *
 * @param s the path to encode
 * @param charset the charset used to convert unsafe characters to the bytes that get %XX-encoded
 * @throws UnsupportedEncodingException if {@code charset} is not supported by the JVM
 *
 * @since 2.3.21
 */
public static String URLPathEnc(String s, String charset) throws UnsupportedEncodingException {
    return URLEnc(s, charset, true);
}
/**
 * Shared implementation behind {@link #URLEnc(String, String)} and {@link #URLPathEnc(String, String)}.
 * Characters not accepted by {@code safeInURL} are converted to bytes with {@code charset} and written
 * as uppercase {@code %XX} escapes. Returns the original instance when nothing needs escaping.
 *
 * @param keepSlash if {@code true}, {@code '/'} is left unescaped (path encoding)
 * @throws UnsupportedEncodingException if {@code charset} is not supported by the JVM
 */
private static String URLEnc(String s, String charset, boolean keepSlash)
        throws UnsupportedEncodingException {
    int ln = s.length();
    int i;
    // Common case first: scan for the first unsafe character; often there's none.
    for (i = 0; i < ln; i++) {
        if (!safeInURL(s.charAt(i), keepSlash)) {
            break;
        }
    }
    if (i == ln) {
        return s;  // Nothing to escape
    }
    StringBuilder b = new StringBuilder(ln + ln / 3 + 2);
    b.append(s, 0, i);
    int encStart = i;  // start of the current run of to-be-escaped chars, or -1 if not in such a run
    for (i++; i < ln; i++) {
        char c = s.charAt(i);
        if (safeInURL(c, keepSlash)) {
            if (encStart != -1) {
                appendPercentEncoded(b, s.substring(encStart, i), charset);
                encStart = -1;
            }
            b.append(c);
        } else {
            if (encStart == -1) {
                encStart = i;
            }
        }
    }
    if (encStart != -1) {
        // The string ended inside a run of characters that need escaping.
        appendPercentEncoded(b, s.substring(encStart, i), charset);
    }
    return b.toString();
}

/**
 * Appends the uppercase {@code %XX} encoding of {@code part}'s bytes (in {@code charset}) to {@code b}.
 * Extracted because the same loop used to be duplicated twice in {@code URLEnc}.
 */
private static void appendPercentEncoded(StringBuilder b, String part, String charset)
        throws UnsupportedEncodingException {
    byte[] o = part.getBytes(charset);
    for (byte bc : o) {
        b.append('%');
        int c2 = (bc >> 4) & 0x0F;
        int c1 = bc & 0x0F;
        b.append((char) (c2 < 10 ? c2 + '0' : c2 - 10 + 'A'));
        b.append((char) (c1 < 10 ? c1 + '0' : c1 - 10 + 'A'));
    }
}

/**
 * Tells if {@code c} never needs URL escaping: ASCII alphanumerics, {@code _ - . ! ~ ' ( ) *},
 * and, when {@code keepSlash}, also {@code '/'}.
 */
private static boolean safeInURL(char c, boolean keepSlash) {
    return c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z'
            || c >= '0' && c <= '9'
            || c == '_' || c == '-' || c == '.' || c == '!' || c == '~'
            || c >= '\'' && c <= '*'
            || keepSlash && c == '/';
}
/**
 * Builds the FTL string-literal escape lookup table, indexed by character code up to {@code '\\'} (0x5C).
 * Table values: 0 = no escaping needed; 1 = needs a hexadecimal ({@code \x}) escape;
 * any other value is the letter to put after the backslash.
 */
private static char[] createEscapes() {
    char[] table = new char['\\' + 1];
    // All control characters below 0x20 default to the "needs \xXX escape" marker...
    for (int i = 0; i < 32; ++i) {
        table[i] = 1;
    }
    // Characters escaped as themselves:
    table['\\'] = '\\';
    table['\''] = '\'';
    table['"'] = '"';
    // FTL's letter escapes for markup-significant characters:
    table['<'] = 'l';
    table['>'] = 'g';
    table['&'] = 'a';
    // ...but these control characters have dedicated letter escapes, overriding the marker:
    table['\b'] = 'b';
    table['\t'] = 't';
    table['\n'] = 'n';
    table['\f'] = 'f';
    table['\r'] = 'r';
    return table;
}
/**
 * Escapes a string according the FTL string literal escaping rules, assuming the literal is quoted with
 * {@code quotation}; it doesn't add the quotation marks itself.
 *
 * @param s the string to escape
 * @param quotation
 *            Either {@code '"'} or {@code '\''}. It's assumed that the string literal whose part we calculate is
 *            enclosed within this kind of quotation mark. Thus, the other kind of quotation character will not be
 *            escaped in the result.
 * @return the escaped literal content, without surrounding quotation marks
 *
 * @since 2.3.22
 */
public static String FTLStringLiteralEnc(String s, char quotation) {
    return FTLStringLiteralEnc(s, quotation, false);
}
/**
 * Escapes a string according the FTL string literal escaping rules; it doesn't add the quotation marks. As this
 * method doesn't know if the string literal is quoted with regular quotation marks or apostrophe quote, it will
 * escape both.
 *
 * @param s the string to escape
 * @return the escaped literal content, without surrounding quotation marks
 *
 * @see #FTLStringLiteralEnc(String, char)
 */
public static String FTLStringLiteralEnc(String s) {
    return FTLStringLiteralEnc(s, (char) 0, false);
}
/**
 * Shared implementation of the FTL string-literal escaping.
 *
 * @param quotation {@code '"'}, {@code '\''}, or 0 meaning "escape both quotation kinds"
 * @param addQuotation whether to wrap the result in {@code quotation} marks
 */
private static String FTLStringLiteralEnc(String s, char quotation, boolean addQuotation) {
    final int ln = s.length();
    // The quotation mark that encloses the literal doesn't need escaping inside it; only the other kind does.
    final char otherQuotation;
    if (quotation == 0) {
        otherQuotation = 0;  // neither kind is exempt; both get escaped
    } else if (quotation == '"') {
        otherQuotation = '\'';
    } else if (quotation == '\'') {
        otherQuotation = '"';
    } else {
        throw new IllegalArgumentException("Unsupported quotation character: " + quotation);
    }
    final int escLn = ESCAPES.length;
    StringBuilder buf = null;  // created lazily; stays null while nothing needed escaping
    for (int i = 0; i < ln; i++) {
        char c = s.charAt(i);
        // ESCAPES maps a char to its escape letter, 1 for "needs \x hex escape", or 0 for "no escape"
        // (see createEscapes()). '{' additionally needs escaping when preceded by '$' or '#', as that
        // would otherwise start an interpolation.
        char escape =
                c < escLn ? ESCAPES[c] :
                c == '{' && i > 0 && isInterpolationStart(s.charAt(i - 1)) ? '{' :
                0;
        if (escape == 0 || escape == otherQuotation) {
            if (buf != null) {
                buf.append(c);
            }
        } else {
            if (buf == null) {
                // First escaped character: copy the clean prefix (and the opening quote if requested).
                buf = new StringBuilder(s.length() + 4 + (addQuotation ? 2 : 0));
                if (addQuotation) {
                    buf.append(quotation);
                }
                buf.append(s.substring(0, i));
            }
            if (escape == 1) {
                // hex encoding for characters below 0x20
                // that have no other escape representation
                buf.append("\\x00");
                int c2 = (c >> 4) & 0x0F;
                c = (char) (c & 0x0F);
                buf.append((char) (c2 < 10 ? c2 + '0' : c2 - 10 + 'A'));
                buf.append((char) (c < 10 ? c + '0' : c - 10 + 'A'));
            } else {
                buf.append('\\');
                buf.append(escape);
            }
        }
    }
    if (buf == null) {
        return addQuotation ? quotation + s + quotation : s;
    } else {
        if (addQuotation) {
            buf.append(quotation);
        }
        return buf.toString();
    }
}
/** Tells if {@code c} is a character that, followed by <code>'{'</code>, starts an FTL interpolation. */
private static boolean isInterpolationStart(char c) {
    switch (c) {
        case '$':
        case '#':
            return true;
        default:
            return false;
    }
}
/**
* FTL string literal decoding.
*
* \\, \", \', \n, \t, \r, \b and \f will be replaced according to
* Java rules. In additional, it knows \g, \l, \a and \{ which are
* replaced with <, >, & and { respectively.
* \x works as hexadecimal character code escape. The character
* codes are interpreted according to UCS basic plane (Unicode).
* "f\x006Fo", "f\x06Fo" and "f\x6Fo" will be "foo".
* "f\x006F123" will be "foo123" as the maximum number of digits is 4.
*
* All other \X (where X is any character not mentioned above or End-of-string)
* will cause a ParseException.
*
* @param s String literal <em>without</em> the surrounding quotation marks
* @return String with all escape sequences resolved
* @throws ParseException if there string contains illegal escapes
*/
public static String FTLStringLiteralDec(String s) throws ParseException {
    int idx = s.indexOf('\\');
    if (idx == -1) {
        return s;  // no escapes at all; return the original instance
    }
    int lidx = s.length() - 1;  // index of the last character
    int bidx = 0;  // start of the not-yet-copied plain segment
    StringBuilder buf = new StringBuilder(lidx);
    do {
        // Copy the plain text before the backslash, then resolve the escape after it.
        buf.append(s.substring(bidx, idx));
        if (idx >= lidx) {
            throw new ParseException("The last character of string literal is backslash", 0, 0);
        }
        char c = s.charAt(idx + 1);
        switch (c) {
        case '"':
            buf.append('"');
            bidx = idx + 2;
            break;
        case '\'':
            buf.append('\'');
            bidx = idx + 2;
            break;
        case '\\':
            buf.append('\\');
            bidx = idx + 2;
            break;
        case 'n':
            buf.append('\n');
            bidx = idx + 2;
            break;
        case 'r':
            buf.append('\r');
            bidx = idx + 2;
            break;
        case 't':
            buf.append('\t');
            bidx = idx + 2;
            break;
        case 'f':
            buf.append('\f');
            bidx = idx + 2;
            break;
        case 'b':
            buf.append('\b');
            bidx = idx + 2;
            break;
        case 'g':
            buf.append('>');
            bidx = idx + 2;
            break;
        case 'l':
            buf.append('<');
            bidx = idx + 2;
            break;
        case 'a':
            buf.append('&');
            bidx = idx + 2;
            break;
        case '{':
            buf.append('{');
            bidx = idx + 2;
            break;
        case 'x': {
            // Hexadecimal character escape: consume 1 to 4 hex digits (UCS basic plane).
            idx += 2;
            int x = idx;  // position of the first potential hex digit
            int y = 0;    // the accumulated character code
            int z = lidx > idx + 3 ? idx + 3 : lidx;  // last index the 4-digit window may reach
            while (idx <= z) {
                char b = s.charAt(idx);
                if (b >= '0' && b <= '9') {
                    y <<= 4;
                    y += b - '0';
                } else if (b >= 'a' && b <= 'f') {
                    y <<= 4;
                    y += b - 'a' + 10;
                } else if (b >= 'A' && b <= 'F') {
                    y <<= 4;
                    y += b - 'A' + 10;
                } else {
                    break;
                }
                idx++;
            }
            if (x < idx) {  // at least one hex digit was consumed
                buf.append((char) y);
            } else {
                throw new ParseException("Invalid \\x escape in a string literal",0,0);
            }
            bidx = idx;
            break;
        }
        default:
            throw new ParseException("Invalid escape sequence (\\" + c + ") in a string literal",0,0);
        }
        idx = s.indexOf('\\', bidx);
    } while (idx != -1);
    // Copy the plain tail after the last escape.
    buf.append(s.substring(bidx));
    return buf.toString();
}
/**
 * Parses a locale string like {@code "en"}, {@code "en_US"}, {@code "en_US_WIN"} (also accepting
 * {@code ','} or space as separators, and an optional surrounding pair of quotation marks) into a
 * {@link Locale}.
 *
 * @param input the locale string; may be {@code null}
 * @return the parsed {@link Locale}, or {@code null} if {@code input} was {@code null}
 */
public static Locale deduceLocale(String input) {
    if (input == null) return null;
    // Strip an optional surrounding quotation; e.g. "\"en_US\"".
    // NOTE(review): a lone '"' input would make substring(1, 0) throw — presumably never occurs
    // in practice; confirm with callers.
    if (input.length() > 0 && input.charAt(0) == '"') {
        input = input.substring(1, input.length() - 1);
    }
    StringTokenizer st = new StringTokenizer(input, ",_ ");
    String lang = "", country = "";
    if (st.hasMoreTokens()) {
        lang = st.nextToken();
    }
    if (st.hasMoreTokens()) {
        country = st.nextToken();
    }
    if (!st.hasMoreTokens()) {
        // Fix: the old code initialized a local with Locale.getDefault() that was always
        // overwritten here or below (dead store); it has been removed.
        return new Locale(lang, country);
    }
    // A third token is the variant.
    return new Locale(lang, country, st.nextToken());
}
/**
 * Upper-cases the first letter and lower-cases the rest of each whitespace-separated word,
 * preserving the original whitespace separators.
 */
public static String capitalize(String s) {
    // returnDelims=true: the separators themselves come back as tokens, so they are preserved.
    StringTokenizer words = new StringTokenizer(s, " \t\r\n", true);
    StringBuilder result = new StringBuilder(s.length());
    while (words.hasMoreTokens()) {
        String word = words.nextToken();
        result.append(word.substring(0, 1).toUpperCase())
                .append(word.substring(1).toLowerCase());
    }
    return result.toString();
}
/**
 * Converts a yes/no-like word ({@code y}/{@code yes}/{@code t}/{@code true} and
 * {@code n}/{@code no}/{@code f}/{@code false}, case-insensitively, optionally wrapped in
 * quotation marks) to a {@code boolean}.
 *
 * @throws IllegalArgumentException if {@code s} is none of the recognized words
 */
public static boolean getYesNo(String s) {
    // Allow the value to be wrapped in quotation marks.
    if (s.startsWith("\"")) {
        s = s.substring(1, s.length() - 1);
    }
    if (s.equalsIgnoreCase("y") || s.equalsIgnoreCase("yes")
            || s.equalsIgnoreCase("t") || s.equalsIgnoreCase("true")) {
        return true;
    }
    if (s.equalsIgnoreCase("n") || s.equalsIgnoreCase("no")
            || s.equalsIgnoreCase("f") || s.equalsIgnoreCase("false")) {
        return false;
    }
    throw new IllegalArgumentException("Illegal boolean value: " + s);
}
/**
 * Splits a string at the specified character.
 * Empty parts are kept, so the result always has (number of separators + 1) elements.
 */
public static String[] split(String s, char c) {
    int ln = s.length();
    // First pass: count the separators so the result array can be sized exactly.
    int cnt = 1;
    for (int i = s.indexOf(c); i != -1; i = s.indexOf(c, i + 1)) {
        cnt++;
    }
    String[] res = new String[cnt];
    // Second pass: copy the parts out.
    int dst = 0;
    int b = 0;
    while (b <= ln) {
        int e = s.indexOf(c, b);
        if (e == -1) e = ln;
        res[dst++] = s.substring(b, e);
        b = e + 1;
    }
    return res;
}
/**
 * Splits a string at the specified string, optionally matching the separator case-insensitively.
 * Empty parts are kept.
 *
 * @throws IllegalArgumentException if {@code sep} is empty
 */
public static String[] split(String s, String sep, boolean caseInsensitive) {
    // For case-insensitive splitting, search in lower-cased copies but copy parts out of the original.
    String needle = caseInsensitive ? sep.toLowerCase() : sep;
    String haystack = caseInsensitive ? s.toLowerCase() : s;
    int ln = s.length();
    int sln = sep.length();
    if (sln == 0) throw new IllegalArgumentException(
            "The separator string has 0 length");
    // First pass: count the separators so the result array can be sized exactly.
    int cnt = 1;
    for (int i = haystack.indexOf(needle); i != -1; i = haystack.indexOf(needle, i + sln)) {
        cnt++;
    }
    String[] res = new String[cnt];
    // Second pass: copy the parts out of the original string.
    int dst = 0;
    int b = 0;
    while (b <= ln) {
        int e = haystack.indexOf(needle, b);
        if (e == -1) e = ln;
        res[dst++] = s.substring(b, e);
        b = e + sln;
    }
    return res;
}
/**
 * Same as {@link #replace(String, String, String, boolean, boolean)} with two {@code false} parameters,
 * i.e. case-sensitive matching and replacing all occurrences.
 *
 * @since 2.3.20
 */
public static String replace(String text, String oldSub, String newSub) {
    return replace(text, oldSub, newSub, false, false);
}
/**
 * Replaces occurrences of a sub-string in a string.
 * With an empty {@code oldsub}, {@code newsub} is inserted before every character and also at
 * both ends (or, with {@code firstOnly}, only prepended once).
 * Returns the original {@link String} instance when there is nothing to replace.
 *
 * @param text The string where it will replace {@code oldsub} with {@code newsub}.
 * @param caseInsensitive whether {@code oldsub} is matched ignoring case
 * @param firstOnly whether only the first occurrence is replaced
 * @return String The string after the replacements.
 */
public static String replace(String text,
                             String oldsub,
                             String newsub,
                             boolean caseInsensitive,
                             boolean firstOnly) {
    int oln = oldsub.length();
    if (oln == 0) {
        int nln = newsub.length();
        if (nln == 0) {
            return text;
        }
        if (firstOnly) {
            return newsub + text;
        }
        // Insert newsub around every character: once up front, then after each char.
        int tln = text.length();
        StringBuilder buf = new StringBuilder(tln + (tln + 1) * nln);
        buf.append(newsub);
        for (int i = 0; i < tln; i++) {
            buf.append(text.charAt(i)).append(newsub);
        }
        return buf.toString();
    }
    // Search in (possibly lower-cased) copies, but copy the output out of the original text.
    String needle = caseInsensitive ? oldsub.toLowerCase() : oldsub;
    String haystack = caseInsensitive ? text.toLowerCase() : text;
    int e = haystack.indexOf(needle);
    if (e == -1) {
        return text;  // nothing matched; avoid all copying
    }
    int tln = text.length();
    StringBuilder buf = new StringBuilder(
            tln + Math.max(newsub.length() - oln, 0) * 3);
    int b = 0;
    do {
        buf.append(text, b, e).append(newsub);
        b = e + oln;
        e = haystack.indexOf(needle, b);
    } while (e != -1 && !firstOnly);
    buf.append(text, b, tln);
    return buf.toString();
}
/**
 * Removes a single trailing line-break ({@code "\r\n"}, {@code "\r"} or {@code "\n"}) from the
 * end of the string, if there's any.
 */
public static String chomp(String s) {
    int ln = s.length();
    if (s.endsWith("\r\n")) {
        return s.substring(0, ln - 2);
    }
    if (ln != 0) {
        char last = s.charAt(ln - 1);
        if (last == '\r' || last == '\n') {
            return s.substring(0, ln - 1);
        }
    }
    return s;
}
/**
 * Converts a 0-length string to null, leaves the string as is otherwise.
 *
 * @param s maybe {@code null}.
 */
public static String emptyToNull(String s) {
    return s == null || s.length() == 0 ? null : s;
}
/**
 * Converts the parameter with <code>toString</code> (if it's not <code>null</code>) and passes it to
 * {@link #jQuote(String)}.
 *
 * @param obj the value to quote; may be {@code null}, in which case the result is {@code "null"}
 */
public static String jQuote(Object obj) {
    return jQuote(obj != null ? obj.toString() : null);
}
/**
 * Quotes string as Java Language string literal: wraps it in {@code "} marks and escapes
 * {@code "}, {@code \} and all characters below 0x20 (with their dedicated letter escapes
 * where Java has one, otherwise with a backslash-u hexadecimal escape).
 * Returns string <code>"null"</code> if <code>s</code> is <code>null</code>.
 */
public static String jQuote(String s) {
    if (s == null) {
        return "null";
    }
    int ln = s.length();
    StringBuilder b = new StringBuilder(ln + 4);
    b.append('"');
    for (int i = 0; i < ln; i++) {
        char c = s.charAt(i);
        switch (c) {
            case '"': b.append("\\\""); break;
            case '\\': b.append("\\\\"); break;
            case '\n': b.append("\\n"); break;
            case '\r': b.append("\\r"); break;
            case '\f': b.append("\\f"); break;
            case '\b': b.append("\\b"); break;
            case '\t': b.append("\\t"); break;
            default:
                if (c < 0x20) {
                    // Control character with no dedicated escape: uppercase hex escape.
                    int hi = c / 0x10;
                    int lo = c & 0xF;
                    b.append("\\u00");
                    b.append((char) (hi < 0xA ? hi + '0' : hi - 0xA + 'A'));
                    b.append((char) (lo < 0xA ? lo + '0' : lo - 0xA + 'A'));
                } else {
                    b.append(c);
                }
        }
    }
    b.append('"');
    return b.toString();
}
/**
 * Converts the parameter with <code>toString</code> (if not
 * <code>null</code>) and passes it to {@link #jQuoteNoXSS(String)}.
 *
 * @param obj the value to quote; may be {@code null}, in which case the result is {@code "null"}
 */
public static String jQuoteNoXSS(Object obj) {
    return jQuoteNoXSS(obj != null ? obj.toString() : null);
}
/**
 * Same as {@link #jQuote(String)} but also escapes <code>'<'</code>
 * as <code>\</code><code>u003C</code>. This is used for log messages to prevent XSS
 * on poorly written Web-based log viewers.
 *
 * <p>(Fix: the javadoc used to reference {@code #jQuoteNoXSS(String)} itself instead of
 * {@code #jQuote(String)}.)
 */
public static String jQuoteNoXSS(String s) {
    if (s == null) {
        return "null";
    }
    int ln = s.length();
    StringBuilder b = new StringBuilder(ln + 4);
    b.append('"');
    for (int i = 0; i < ln; i++) {
        char c = s.charAt(i);
        switch (c) {
            case '"': b.append("\\\""); break;
            case '\\': b.append("\\\\"); break;
            case '<': b.append("\\u003C"); break;
            case '\n': b.append("\\n"); break;
            case '\r': b.append("\\r"); break;
            case '\f': b.append("\\f"); break;
            case '\b': b.append("\\b"); break;
            case '\t': b.append("\\t"); break;
            default:
                if (c < 0x20) {
                    // Control character with no dedicated escape: uppercase hex escape.
                    int hi = c / 0x10;
                    int lo = c & 0xF;
                    b.append("\\u00");
                    b.append((char) (hi < 0xA ? hi + '0' : hi - 0xA + 'A'));
                    b.append((char) (lo < 0xA ? lo + '0' : lo - 0xA + 'A'));
                } else {
                    b.append(c);
                }
        }
    }
    b.append('"');
    return b.toString();
}
/**
 * Creates a <em>quoted</em> FTL string literal from a string, using escaping where necessary. The result either
 * uses regular quotation marks (UCS 0x22) or apostrophe-quotes (UCS 0x27), depending on the string content.
 * (Currently, apostrophe-quotes will be chosen exactly when the string contains regular quotation character and
 * doesn't contain apostrophe-quote character.)
 *
 * @param s
 *            The value that should be converted to an FTL string literal whose evaluated value equals to {@code s}
 *
 * @since 2.3.22
 */
public static String ftlQuote(String s) {
    // Prefer regular quotation marks; switch to apostrophes only when that avoids escaping.
    boolean useApostrophe = s.indexOf('"') != -1 && s.indexOf('\'') == -1;
    return FTLStringLiteralEnc(s, useApostrophe ? '\'' : '"', true);
}
/**
* Tells if a character can occur on the beginning of an FTL identifier expression (without escaping).
*
* @since 2.3.22
*/
public static boolean isFTLIdentifierStart(final char c) {
    // This code was generated on JDK 1.8.0_20 Win64 with src/main/misc/identifierChars/IdentifierCharGenerator.java
    // It's a hand-unrolled binary search over Unicode code-point ranges; don't edit the tree manually,
    // regenerate it instead.
    if (c < 0xAA) { // This branch was edited for speed.
        // NOTE(review): the upper-case range starts at '@' (0x40) rather than 'A', so '@' is accepted
        // as an identifier start too — presumably deliberate (see "edited for speed"), but confirm
        // against the generator's original output.
        if (c >= 'a' && c <= 'z' || c >= '@' && c <= 'Z') {
            return true;
        } else {
            return c == '$' || c == '_';
        }
    } else { // c >= 0xAA
        if (c < 0xA7F8) {
            if (c < 0x2D6F) {
                if (c < 0x2128) {
                    if (c < 0x2090) {
                        if (c < 0xD8) {
                            if (c < 0xBA) {
                                return c == 0xAA || c == 0xB5;
                            } else { // c >= 0xBA
                                return c == 0xBA || c >= 0xC0 && c <= 0xD6;
                            }
                        } else { // c >= 0xD8
                            if (c < 0x2071) {
                                return c >= 0xD8 && c <= 0xF6 || c >= 0xF8 && c <= 0x1FFF;
                            } else { // c >= 0x2071
                                return c == 0x2071 || c == 0x207F;
                            }
                        }
                    } else { // c >= 0x2090
                        if (c < 0x2115) {
                            if (c < 0x2107) {
                                return c >= 0x2090 && c <= 0x209C || c == 0x2102;
                            } else { // c >= 0x2107
                                return c == 0x2107 || c >= 0x210A && c <= 0x2113;
                            }
                        } else { // c >= 0x2115
                            if (c < 0x2124) {
                                return c == 0x2115 || c >= 0x2119 && c <= 0x211D;
                            } else { // c >= 0x2124
                                return c == 0x2124 || c == 0x2126;
                            }
                        }
                    }
                } else { // c >= 0x2128
                    if (c < 0x2C30) {
                        if (c < 0x2145) {
                            if (c < 0x212F) {
                                return c == 0x2128 || c >= 0x212A && c <= 0x212D;
                            } else { // c >= 0x212F
                                return c >= 0x212F && c <= 0x2139 || c >= 0x213C && c <= 0x213F;
                            }
                        } else { // c >= 0x2145
                            if (c < 0x2183) {
                                return c >= 0x2145 && c <= 0x2149 || c == 0x214E;
                            } else { // c >= 0x2183
                                return c >= 0x2183 && c <= 0x2184 || c >= 0x2C00 && c <= 0x2C2E;
                            }
                        }
                    } else { // c >= 0x2C30
                        if (c < 0x2D00) {
                            if (c < 0x2CEB) {
                                return c >= 0x2C30 && c <= 0x2C5E || c >= 0x2C60 && c <= 0x2CE4;
                            } else { // c >= 0x2CEB
                                return c >= 0x2CEB && c <= 0x2CEE || c >= 0x2CF2 && c <= 0x2CF3;
                            }
                        } else { // c >= 0x2D00
                            if (c < 0x2D2D) {
                                return c >= 0x2D00 && c <= 0x2D25 || c == 0x2D27;
                            } else { // c >= 0x2D2D
                                return c == 0x2D2D || c >= 0x2D30 && c <= 0x2D67;
                            }
                        }
                    }
                }
            } else { // c >= 0x2D6F
                if (c < 0x31F0) {
                    if (c < 0x2DD0) {
                        if (c < 0x2DB0) {
                            if (c < 0x2DA0) {
                                return c == 0x2D6F || c >= 0x2D80 && c <= 0x2D96;
                            } else { // c >= 0x2DA0
                                return c >= 0x2DA0 && c <= 0x2DA6 || c >= 0x2DA8 && c <= 0x2DAE;
                            }
                        } else { // c >= 0x2DB0
                            if (c < 0x2DC0) {
                                return c >= 0x2DB0 && c <= 0x2DB6 || c >= 0x2DB8 && c <= 0x2DBE;
                            } else { // c >= 0x2DC0
                                return c >= 0x2DC0 && c <= 0x2DC6 || c >= 0x2DC8 && c <= 0x2DCE;
                            }
                        }
                    } else { // c >= 0x2DD0
                        if (c < 0x3031) {
                            if (c < 0x2E2F) {
                                return c >= 0x2DD0 && c <= 0x2DD6 || c >= 0x2DD8 && c <= 0x2DDE;
                            } else { // c >= 0x2E2F
                                return c == 0x2E2F || c >= 0x3005 && c <= 0x3006;
                            }
                        } else { // c >= 0x3031
                            if (c < 0x3040) {
                                return c >= 0x3031 && c <= 0x3035 || c >= 0x303B && c <= 0x303C;
                            } else { // c >= 0x3040
                                return c >= 0x3040 && c <= 0x318F || c >= 0x31A0 && c <= 0x31BA;
                            }
                        }
                    }
                } else { // c >= 0x31F0
                    if (c < 0xA67F) {
                        if (c < 0xA4D0) {
                            if (c < 0x3400) {
                                return c >= 0x31F0 && c <= 0x31FF || c >= 0x3300 && c <= 0x337F;
                            } else { // c >= 0x3400
                                return c >= 0x3400 && c <= 0x4DB5 || c >= 0x4E00 && c <= 0xA48C;
                            }
                        } else { // c >= 0xA4D0
                            if (c < 0xA610) {
                                return c >= 0xA4D0 && c <= 0xA4FD || c >= 0xA500 && c <= 0xA60C;
                            } else { // c >= 0xA610
                                return c >= 0xA610 && c <= 0xA62B || c >= 0xA640 && c <= 0xA66E;
                            }
                        }
                    } else { // c >= 0xA67F
                        if (c < 0xA78B) {
                            if (c < 0xA717) {
                                return c >= 0xA67F && c <= 0xA697 || c >= 0xA6A0 && c <= 0xA6E5;
                            } else { // c >= 0xA717
                                return c >= 0xA717 && c <= 0xA71F || c >= 0xA722 && c <= 0xA788;
                            }
                        } else { // c >= 0xA78B
                            if (c < 0xA7A0) {
                                return c >= 0xA78B && c <= 0xA78E || c >= 0xA790 && c <= 0xA793;
                            } else { // c >= 0xA7A0
                                return c >= 0xA7A0 && c <= 0xA7AA;
                            }
                        }
                    }
                }
            }
        } else { // c >= 0xA7F8
            if (c < 0xAB20) {
                if (c < 0xAA44) {
                    if (c < 0xA8FB) {
                        if (c < 0xA840) {
                            if (c < 0xA807) {
                                return c >= 0xA7F8 && c <= 0xA801 || c >= 0xA803 && c <= 0xA805;
                            } else { // c >= 0xA807
                                return c >= 0xA807 && c <= 0xA80A || c >= 0xA80C && c <= 0xA822;
                            }
                        } else { // c >= 0xA840
                            if (c < 0xA8D0) {
                                return c >= 0xA840 && c <= 0xA873 || c >= 0xA882 && c <= 0xA8B3;
                            } else { // c >= 0xA8D0
                                return c >= 0xA8D0 && c <= 0xA8D9 || c >= 0xA8F2 && c <= 0xA8F7;
                            }
                        }
                    } else { // c >= 0xA8FB
                        if (c < 0xA984) {
                            if (c < 0xA930) {
                                return c == 0xA8FB || c >= 0xA900 && c <= 0xA925;
                            } else { // c >= 0xA930
                                return c >= 0xA930 && c <= 0xA946 || c >= 0xA960 && c <= 0xA97C;
                            }
                        } else { // c >= 0xA984
                            if (c < 0xAA00) {
                                return c >= 0xA984 && c <= 0xA9B2 || c >= 0xA9CF && c <= 0xA9D9;
                            } else { // c >= 0xAA00
                                return c >= 0xAA00 && c <= 0xAA28 || c >= 0xAA40 && c <= 0xAA42;
                            }
                        }
                    }
                } else { // c >= 0xAA44
                    if (c < 0xAAC0) {
                        if (c < 0xAA80) {
                            if (c < 0xAA60) {
                                return c >= 0xAA44 && c <= 0xAA4B || c >= 0xAA50 && c <= 0xAA59;
                            } else { // c >= 0xAA60
                                return c >= 0xAA60 && c <= 0xAA76 || c == 0xAA7A;
                            }
                        } else { // c >= 0xAA80
                            if (c < 0xAAB5) {
                                return c >= 0xAA80 && c <= 0xAAAF || c == 0xAAB1;
                            } else { // c >= 0xAAB5
                                return c >= 0xAAB5 && c <= 0xAAB6 || c >= 0xAAB9 && c <= 0xAABD;
                            }
                        }
                    } else { // c >= 0xAAC0
                        if (c < 0xAAF2) {
                            if (c < 0xAADB) {
                                return c == 0xAAC0 || c == 0xAAC2;
                            } else { // c >= 0xAADB
                                return c >= 0xAADB && c <= 0xAADD || c >= 0xAAE0 && c <= 0xAAEA;
                            }
                        } else { // c >= 0xAAF2
                            if (c < 0xAB09) {
                                return c >= 0xAAF2 && c <= 0xAAF4 || c >= 0xAB01 && c <= 0xAB06;
                            } else { // c >= 0xAB09
                                return c >= 0xAB09 && c <= 0xAB0E || c >= 0xAB11 && c <= 0xAB16;
                            }
                        }
                    }
                }
            } else { // c >= 0xAB20
                if (c < 0xFB46) {
                    if (c < 0xFB13) {
                        if (c < 0xAC00) {
                            if (c < 0xABC0) {
                                return c >= 0xAB20 && c <= 0xAB26 || c >= 0xAB28 && c <= 0xAB2E;
                            } else { // c >= 0xABC0
                                return c >= 0xABC0 && c <= 0xABE2 || c >= 0xABF0 && c <= 0xABF9;
                            }
                        } else { // c >= 0xAC00
                            if (c < 0xD7CB) {
                                return c >= 0xAC00 && c <= 0xD7A3 || c >= 0xD7B0 && c <= 0xD7C6;
                            } else { // c >= 0xD7CB
                                return c >= 0xD7CB && c <= 0xD7FB || c >= 0xF900 && c <= 0xFB06;
                            }
                        }
                    } else { // c >= 0xFB13
                        if (c < 0xFB38) {
                            if (c < 0xFB1F) {
                                return c >= 0xFB13 && c <= 0xFB17 || c == 0xFB1D;
                            } else { // c >= 0xFB1F
                                return c >= 0xFB1F && c <= 0xFB28 || c >= 0xFB2A && c <= 0xFB36;
                            }
                        } else { // c >= 0xFB38
                            if (c < 0xFB40) {
                                return c >= 0xFB38 && c <= 0xFB3C || c == 0xFB3E;
                            } else { // c >= 0xFB40
                                return c >= 0xFB40 && c <= 0xFB41 || c >= 0xFB43 && c <= 0xFB44;
                            }
                        }
                    }
                } else { // c >= 0xFB46
                    if (c < 0xFF21) {
                        if (c < 0xFDF0) {
                            if (c < 0xFD50) {
                                return c >= 0xFB46 && c <= 0xFBB1 || c >= 0xFBD3 && c <= 0xFD3D;
                            } else { // c >= 0xFD50
                                return c >= 0xFD50 && c <= 0xFD8F || c >= 0xFD92 && c <= 0xFDC7;
                            }
                        } else { // c >= 0xFDF0
                            if (c < 0xFE76) {
                                return c >= 0xFDF0 && c <= 0xFDFB || c >= 0xFE70 && c <= 0xFE74;
                            } else { // c >= 0xFE76
                                return c >= 0xFE76 && c <= 0xFEFC || c >= 0xFF10 && c <= 0xFF19;
                            }
                        }
                    } else { // c >= 0xFF21
                        if (c < 0xFFCA) {
                            if (c < 0xFF66) {
                                return c >= 0xFF21 && c <= 0xFF3A || c >= 0xFF41 && c <= 0xFF5A;
                            } else { // c >= 0xFF66
                                return c >= 0xFF66 && c <= 0xFFBE || c >= 0xFFC2 && c <= 0xFFC7;
                            }
                        } else { // c >= 0xFFCA
                            if (c < 0xFFDA) {
                                return c >= 0xFFCA && c <= 0xFFCF || c >= 0xFFD2 && c <= 0xFFD7;
                            } else { // c >= 0xFFDA
                                return c >= 0xFFDA && c <= 0xFFDC;
                            }
                        }
                    }
                }
            }
        }
    }
}
/**
 * Tells if a character can occur in an FTL identifier expression (without escaping) as other than the first
 * character.
 *
 * <p>This is the set accepted by {@link #isFTLIdentifierStart(char)} extended with the ASCII decimal digits.
 *
 * @since 2.3.22
 */
public static boolean isFTLIdentifierPart(final char c) {
    return isFTLIdentifierStart(c) || (c >= '0' && c <= '9');
}
/**
 * Escapes the <code>String</code> with the escaping rules of Java language
 * string literals, so it's safe to insert the value into a string literal.
 * The resulting string will not be quoted.
 *
 * <p>All characters under UCS code point 0x20 will be escaped.
 * Where they have no dedicated escape sequence in Java, they will
 * be replaced with a backslash-u hexadecimal escape (lowercase hex digits).
 * Returns the original {@link String} instance when nothing needs escaping.
 *
 * @see #jQuote(String)
 */
public static String javaStringEnc(String s) {
    int ln = s.length();
    StringBuilder b = null;  // created lazily, only once an escapable char is found
    for (int i = 0; i < ln; i++) {
        char c = s.charAt(i);
        if (c == '"' || c == '\\' || c < 0x20) {
            if (b == null) {
                b = new StringBuilder(ln + 4);
                b.append(s, 0, i);  // copy the clean prefix scanned so far
            }
            switch (c) {
                case '"': b.append("\\\""); break;
                case '\\': b.append("\\\\"); break;
                case '\n': b.append("\\n"); break;
                case '\r': b.append("\\r"); break;
                case '\f': b.append("\\f"); break;
                case '\b': b.append("\\b"); break;
                case '\t': b.append("\\t"); break;
                default:
                    // Control character with no dedicated escape: lowercase hex escape.
                    b.append("\\u00");
                    int hi = c / 0x10;
                    b.append((char) (hi < 0xA ? hi + '0' : hi - 0xA + 'a'));
                    int lo = c & 0xF;
                    b.append((char) (lo < 0xA ? lo + '0' : lo - 0xA + 'a'));
            }
        } else if (b != null) {
            b.append(c);
        }
    }
    return b == null ? s : b.toString();
}
/**
 * Escapes a {@link String} to be safely insertable into a JavaScript string literal; for more see
 * {@link #jsStringEnc(String, boolean) jsStringEnc(s, false)}.
 *
 * <p>Delegates to {@link #jsStringEnc(String, boolean)} with JSON mode off.
 */
public static String javaScriptStringEnc(String s) {
    return jsStringEnc(s, false);
}
/**
 * Escapes a {@link String} to be safely insertable into a JSON string literal; for more see
 * {@link #jsStringEnc(String, boolean) jsStringEnc(s, true)}.
 *
 * <p>Delegates to {@link #jsStringEnc(String, boolean)} with JSON mode on.
 */
public static String jsonStringEnc(String s) {
    return jsStringEnc(s, true);
}
// Escape-type codes used by jsStringEnc. Values above 0x20 are taken to be the literal letter
// to put after the backslash (e.g. 'n'), so these markers must stay <= 0x20.
/** The character needs no escaping. */
private static final int NO_ESC = 0;
/** The character is escaped with a hexadecimal escape. */
private static final int ESC_HEXA = 1;
/** The character is escaped by prefixing it with a backslash. */
private static final int ESC_BACKSLASH = 3;
/**
* Escapes a {@link String} to be safely insertable into a JavaScript or a JSON string literal.
* The resulting string will <em>not</em> be quoted; the caller must ensure that they are there in the final
* output. Note that for JSON, the quotation marks must be {@code "}, not {@code '}, because JSON doesn't escape
* {@code '}.
*
* <p>The escaping rules guarantee that if the inside
* of the literal is from one or more touching sections of strings escaped with this, no character sequence will
* occur that closes the string literal or has special meaning in HTML/XML that can terminate the script section.
* (If, however, the escaped section is preceded by or followed by strings from other sources, this can't be
* guaranteed in some rare cases. Like <tt>x = "</${a?js_string}"</tt> might closes the "script"
* element if {@code a} is is {@code "script>"}.)
*
* The escaped characters are:
*
* <table style="width: auto; border-collapse: collapse" border="1" summary="Characters escaped by jsStringEnc">
* <tr>
* <th>Input
* <th>Output
* <tr>
* <td><tt>"</tt>
* <td><tt>\"</tt>
* <tr>
* <td><tt>'</tt> if not in JSON-mode
* <td><tt>\'</tt>
* <tr>
* <td><tt>\</tt>
* <td><tt>\\</tt>
* <tr>
* <td><tt>/</tt> if the method can't know that it won't be directly after <tt><</tt>
* <td><tt>\/</tt>
* <tr>
* <td><tt>></tt> if the method can't know that it won't be directly after <tt>]]</tt> or <tt>--</tt>
* <td>JavaScript: <tt>\></tt>; JSON: <tt>\</tt><tt>u003E</tt>
* <tr>
* <td><tt><</tt> if the method can't know that it won't be directly followed by <tt>!</tt> or <tt>?</tt>
* <td><tt><tt>\</tt>u003C</tt>
* <tr>
* <td>
* u0000-u001f (UNICODE control characters - disallowed by JSON)<br>
* u007f-u009f (UNICODE control characters - disallowed by JSON)
* <td><tt>\n</tt>, <tt>\r</tt> and such, or if there's no such dedicated escape:
* JavaScript: <tt>\x<i>XX</i></tt>, JSON: <tt>\<tt>u</tt><i>XXXX</i></tt>
* <tr>
* <td>
* u2028 (Line separator - source code line-break in ECMAScript)<br>
* u2029 (Paragraph separator - source code line-break in ECMAScript)<br>
* <td><tt>\<tt>u</tt><i>XXXX</i></tt>
* </table>
*
* @since 2.3.20
*/
public static String jsStringEnc(String s, boolean json) {
    NullArgumentException.check("s", s);
    int ln = s.length();
    StringBuilder sb = null;  // created lazily; stays null while nothing needed escaping
    for (int i = 0; i < ln; i++) {
        final char c = s.charAt(i);
        // Either NO_ESC, ESC_HEXA, ESC_BACKSLASH, or (if > 0x20) the escape letter itself.
        final int escapeType;
        if (!(c > '>' && c < 0x7F && c != '\\') && c != ' ' && !(c >= 0xA0 && c < 0x2028)) { // skip common chars
            if (c <= 0x1F) { // control chars range 1
                if (c == '\n') {
                    escapeType = 'n';
                } else if (c == '\r') {
                    escapeType = 'r';
                } else if (c == '\f') {
                    escapeType = 'f';
                } else if (c == '\b') {
                    escapeType = 'b';
                } else if (c == '\t') {
                    escapeType = 't';
                } else {
                    escapeType = ESC_HEXA;
                }
            } else if (c == '"') {
                escapeType = ESC_BACKSLASH;
            } else if (c == '\'') {
                // JSON doesn't support \' so the apostrophe is left alone there.
                escapeType = json ? NO_ESC : ESC_BACKSLASH;
            } else if (c == '\\') {
                escapeType = ESC_BACKSLASH;
            } else if (c == '/' && (i == 0 || s.charAt(i - 1) == '<')) { // against closing elements
                escapeType = ESC_BACKSLASH;
            } else if (c == '>') { // against "]]> and "-->"
                // A '>' is only dangerous when the two preceding chars could complete "]]>" or "-->".
                // At i == 0 or 1 the preceding chars are unknown (may come from a concatenated string),
                // so the worst is assumed.
                final boolean dangerous;
                if (i == 0) {
                    dangerous = true;
                } else {
                    final char prevC = s.charAt(i - 1);
                    if (prevC == ']' || prevC == '-') {
                        if (i == 1) {
                            dangerous = true;
                        } else {
                            final char prevPrevC = s.charAt(i - 2);
                            dangerous = prevPrevC == prevC;
                        }
                    } else {
                        dangerous = false;
                    }
                }
                escapeType = dangerous ? (json ? ESC_HEXA : ESC_BACKSLASH) : NO_ESC;
            } else if (c == '<') { // against "<!"
                // A '<' is only dangerous if it may be followed by '!' or '?'; at the end of the
                // string the follower is unknown, so the worst is assumed.
                final boolean dangerous;
                if (i == ln - 1) {
                    dangerous = true;
                } else {
                    char nextC = s.charAt(i + 1);
                    dangerous = nextC == '!' || nextC == '?';
                }
                escapeType = dangerous ? ESC_HEXA : NO_ESC;
            } else if ((c >= 0x7F && c <= 0x9F) // control chars range 2
                    || (c == 0x2028 || c == 0x2029) // UNICODE line terminators
                    ) {
                escapeType = ESC_HEXA;
            } else {
                escapeType = NO_ESC;
            }
            if (escapeType != NO_ESC) { // If needs escaping
                if (sb == null) {
                    // First escaped char: copy the clean prefix scanned so far.
                    sb = new StringBuilder(ln + 6);
                    sb.append(s.substring(0, i));
                }
                sb.append('\\');
                if (escapeType > 0x20) {
                    // escapeType is the literal escape letter, like 'n'.
                    sb.append((char) escapeType);
                } else if (escapeType == ESC_HEXA) {
                    if (!json && c < 0x100) {
                        sb.append('x');
                        sb.append(toHexDigit(c >> 4));
                        sb.append(toHexDigit(c & 0xF));
                    } else {
                        // JSON only supports the 4-digit backslash-u form.
                        sb.append('u');
                        int cp = c;
                        sb.append(toHexDigit((cp >> 12) & 0xF));
                        sb.append(toHexDigit((cp >> 8) & 0xF));
                        sb.append(toHexDigit((cp >> 4) & 0xF));
                        sb.append(toHexDigit(cp & 0xF));
                    }
                } else { // escapeType == ESC_BACKSLASH
                    sb.append(c);
                }
                continue;
            }
            // Falls through when escapeType == NO_ESC
        }
        // Needs no escaping
        if (sb != null) sb.append(c);
    } // for each characters
    return sb == null ? s : sb.toString();
}
/** Converts a value in 0..15 to the corresponding uppercase hexadecimal digit character. */
private static char toHexDigit(int d) {
    return (char) (d < 0xA ? '0' + d : 'A' + d - 0xA);
}
/**
* Parses a name-value pair list, where the pairs are separated with comma,
* and the name and value is separated with colon.
* The keys and values can contain only letters, digits and <tt>_</tt>. They
* can't be quoted. White-space around the keys and values are ignored. The
* value can be omitted if <code>defaultValue</code> is not null. When a
* value is omitted, then the colon after the key must be omitted as well.
* The same key can't be used for multiple times.
*
* @param s the string to parse.
* For example: <code>"strong:100, soft:900"</code>.
* @param defaultValue the value used when the value is omitted in a
* key-value pair.
*
* @return the map that contains the name-value pairs.
*
* @throws java.text.ParseException if the string is not a valid name-value
* pair list.
*/
public static Map parseNameValuePairList(String s, String defaultValue)
        throws java.text.ParseException {
    Map map = new HashMap();
    char c = ' ';
    int ln = s.length();
    int p = 0;  // current parse position
    int keyStart;
    int valueStart;
    String key;
    String value;
    fetchLoop: while (true) {
        // skip ws
        while (p < ln) {
            c = s.charAt(p);
            if (!Character.isWhitespace(c)) {
                break;
            }
            p++;
        }
        if (p == ln) {
            break fetchLoop;
        }
        keyStart = p;
        // seek key end (keys consist of letters, digits and '_')
        while (p < ln) {
            c = s.charAt(p);
            if (!(Character.isLetterOrDigit(c) || c == '_')) {
                break;
            }
            p++;
        }
        if (keyStart == p) {
            throw new java.text.ParseException(
                    "Expecting letter, digit or \"_\" "
                    + "here, (the first character of the key) but found "
                    + jQuote(String.valueOf(c))
                    + " at position " + p + ".",
                    p);
        }
        key = s.substring(keyStart, p);
        // skip ws
        while (p < ln) {
            c = s.charAt(p);
            if (!Character.isWhitespace(c)) {
                break;
            }
            p++;
        }
        if (p == ln) {
            // Key at the very end of the string; only legal when values may be omitted.
            if (defaultValue == null) {
                throw new java.text.ParseException(
                        "Expecting \":\", but reached "
                        + "the end of the string "
                        + " at position " + p + ".",
                        p);
            }
            value = defaultValue;
        } else if (c != ':') {
            // No colon: either the value was omitted (and a ',' follows) or it's a syntax error.
            if (defaultValue == null || c != ',') {
                throw new java.text.ParseException(
                        "Expecting \":\" here, but found "
                        + jQuote(String.valueOf(c))
                        + " at position " + p + ".",
                        p);
            }
            // skip ","
            p++;
            value = defaultValue;
        } else {
            // skip ":"
            p++;
            // skip ws
            while (p < ln) {
                c = s.charAt(p);
                if (!Character.isWhitespace(c)) {
                    break;
                }
                p++;
            }
            if (p == ln) {
                throw new java.text.ParseException(
                        "Expecting the value of the key "
                        + "here, but reached the end of the string "
                        + " at position " + p + ".",
                        p);
            }
            valueStart = p;
            // seek value end (values consist of letters, digits and '_')
            while (p < ln) {
                c = s.charAt(p);
                if (!(Character.isLetterOrDigit(c) || c == '_')) {
                    break;
                }
                p++;
            }
            if (valueStart == p) {
                throw new java.text.ParseException(
                        "Expecting letter, digit or \"_\" "
                        + "here, (the first character of the value) "
                        + "but found "
                        + jQuote(String.valueOf(c))
                        + " at position " + p + ".",
                        p);
            }
            value = s.substring(valueStart, p);
            // skip ws
            while (p < ln) {
                c = s.charAt(p);
                if (!Character.isWhitespace(c)) {
                    break;
                }
                p++;
            }
            // skip ","
            if (p < ln) {
                if (c != ',') {
                    // Fix: was misspelled as "Excpecting".
                    throw new java.text.ParseException(
                            "Expecting \",\" or the end "
                            + "of the string here, but found "
                            + jQuote(String.valueOf(c))
                            + " at position " + p + ".",
                            p);
                } else {
                    p++;
                }
            }
        }
        // store the key-value pair; the same key may not occur twice
        if (map.put(key, value) != null) {
            // Fix: was misspelled as "Dublicated".
            throw new java.text.ParseException(
                    "Duplicated key: "
                    + jQuote(key), keyStart);
        }
    }
    return map;
}
/**
* Used internally by the XML DOM wrapper to check if the subvariable name is just an element name, or a more
* complex XPath expression.
*
* @return whether the name is a valid XML tagname. (This routine might only be 99% accurate. Should maybe REVISIT)
*
* @deprecated Don't use this outside FreeMarker; it's name if misleading, and it doesn't follow the XML specs.
*/
@Deprecated
static public boolean isXMLID(String name) {
int ln = name.length();
for (int i = 0; i < ln; i++) {
char c = name.charAt(i);
if (i == 0 && (c == '-' || c == '.' || Character.isDigit(c))) {
return false;
}
if (!Character.isLetterOrDigit(c) && c != '_' && c != '-' && c != '.') {
if (c == ':') {
if (i + 1 < ln && name.charAt(i + 1) == ':') {
// "::" is used in XPath
return false;
}
// We don't return here, as a lonely ":" is allowed.
} else {
return false;
}
}
}
return true;
}
    /**
     * @return whether the qname matches the combination of nodeName, nsURI, and environment prefix settings.
     */
    static public boolean matchesName(String qname, String nodeName, String nsURI, Environment env) {
        String defaultNS = env.getDefaultNS();
        // The URI is the environment's default namespace: the bare node name matches,
        // as does the reserved default-namespace prefix form.
        if ((defaultNS != null) && defaultNS.equals(nsURI)) {
            return qname.equals(nodeName)
                   || qname.equals(Template.DEFAULT_NAMESPACE_PREFIX + ":" + nodeName); 
        }
        // The node is in no namespace. If a default namespace is set, the bare name would
        // refer to that namespace, so the explicit no-namespace prefix is required.
        if ("".equals(nsURI)) {
            if (defaultNS != null) {
                return qname.equals(Template.NO_NS_PREFIX + ":" + nodeName);
            } else {
                return qname.equals(nodeName) || qname.equals(Template.NO_NS_PREFIX + ":" + nodeName);
            }
        }
        // Any other namespace must have a prefix registered in this environment.
        String prefix = env.getPrefixForNamespace(nsURI);
        if (prefix == null) {
            return false; // Is this the right thing here???
        }
        return qname.equals(prefix + ":" + nodeName);
    }
    /**
     * Pads the string at the left with spaces until it reaches the desired
     * length. If the string is longer than this length, then it returns the
     * unchanged string.
     *
     * <p>Convenience overload of {@link #leftPad(String, int, char)} with a space filling.
     *
     * @param s the string that will be padded.
     * @param minLength the length to reach.
     */
    public static String leftPad(String s, int minLength) {
        return leftPad(s, minLength, ' ');
    }
/**
* Pads the string at the left with the specified character until it reaches
* the desired length. If the string is longer than this length, then it
* returns the unchanged string.
*
* @param s the string that will be padded.
* @param minLength the length to reach.
* @param filling the filling pattern.
*/
public static String leftPad(String s, int minLength, char filling) {
int ln = s.length();
if (minLength <= ln) {
return s;
}
StringBuilder res = new StringBuilder(minLength);
int dif = minLength - ln;
for (int i = 0; i < dif; i++) {
res.append(filling);
}
res.append(s);
return res.toString();
}
/**
* Pads the string at the left with a filling pattern until it reaches the
* desired length. If the string is longer than this length, then it returns
* the unchanged string. For example: <code>leftPad('ABC', 9, '1234')</code>
* returns <code>"123412ABC"</code>.
*
* @param s the string that will be padded.
* @param minLength the length to reach.
* @param filling the filling pattern. Must be at least 1 characters long.
* Can't be <code>null</code>.
*/
public static String leftPad(String s, int minLength, String filling) {
int ln = s.length();
if (minLength <= ln) {
return s;
}
StringBuilder res = new StringBuilder(minLength);
int dif = minLength - ln;
int fln = filling.length();
if (fln == 0) {
throw new IllegalArgumentException(
"The \"filling\" argument can't be 0 length string.");
}
int cnt = dif / fln;
for (int i = 0; i < cnt; i++) {
res.append(filling);
}
cnt = dif % fln;
for (int i = 0; i < cnt; i++) {
res.append(filling.charAt(i));
}
res.append(s);
return res.toString();
}
    /**
     * Pads the string at the right with spaces until it reaches the desired
     * length. If the string is longer than this length, then it returns the
     * unchanged string.
     *
     * <p>Convenience overload of {@link #rightPad(String, int, char)} with a space filling.
     *
     * @param s the string that will be padded.
     * @param minLength the length to reach.
     */
    public static String rightPad(String s, int minLength) {
        return rightPad(s, minLength, ' ');
    }
/**
* Pads the string at the right with the specified character until it
* reaches the desired length. If the string is longer than this length,
* then it returns the unchanged string.
*
* @param s the string that will be padded.
* @param minLength the length to reach.
* @param filling the filling pattern.
*/
public static String rightPad(String s, int minLength, char filling) {
int ln = s.length();
if (minLength <= ln) {
return s;
}
StringBuilder res = new StringBuilder(minLength);
res.append(s);
int dif = minLength - ln;
for (int i = 0; i < dif; i++) {
res.append(filling);
}
return res.toString();
}
/**
* Pads the string at the right with a filling pattern until it reaches the
* desired length. If the string is longer than this length, then it returns
* the unchanged string. For example: <code>rightPad('ABC', 9, '1234')</code>
* returns <code>"ABC412341"</code>. Note that the filling pattern is
* started as if you overlay <code>"123412341"</code> with the left-aligned
* <code>"ABC"</code>, so it starts with <code>"4"</code>.
*
* @param s the string that will be padded.
* @param minLength the length to reach.
* @param filling the filling pattern. Must be at least 1 characters long.
* Can't be <code>null</code>.
*/
public static String rightPad(String s, int minLength, String filling) {
int ln = s.length();
if (minLength <= ln) {
return s;
}
StringBuilder res = new StringBuilder(minLength);
res.append(s);
int dif = minLength - ln;
int fln = filling.length();
if (fln == 0) {
throw new IllegalArgumentException(
"The \"filling\" argument can't be 0 length string.");
}
int start = ln % fln;
int end = fln - start <= dif
? fln
: start + dif;
for (int i = start; i < end; i++) {
res.append(filling.charAt(i));
}
dif -= end - start;
int cnt = dif / fln;
for (int i = 0; i < cnt; i++) {
res.append(filling);
}
cnt = dif % fln;
for (int i = 0; i < cnt; i++) {
res.append(filling.charAt(i));
}
return res.toString();
}
    /**
     * Converts a version number string to an integer for easy comparison.
     * The version number must start with numbers separated with
     * dots. There can be any number of such dot-separated numbers, but only
     * the first three will be considered. After the numbers arbitrary text can
     * follow, and will be ignored.
     * 
     * The string will be trimmed before interpretation.
     * 
     * @return major * 1000000 + minor * 1000 + micro
     */
    public static int versionStringToInt(String version) {
        // Parsing and validation are delegated to the Version class.
        return new Version(version).intValue();
    }
    /**
     * Tries to run {@code toString()}, but if that fails, returns a
     * {@code "[com.example.SomeClass.toString() failed: " + e + "]"} instead. Also, it returns {@code null} for
     * {@code null} parameter.
     * 
     * @since 2.3.20
     */
    public static String tryToString(Object object) {
        if (object == null) return null;
        
        try {
            return object.toString();
        } catch (Throwable e) {
            // Deliberately catches Throwable: this utility must never propagate a
            // failure from a broken toString() (it's used when building error messages).
            return failedToStringSubstitute(object, e);
        }
    }
    /**
     * Builds the {@code "[SomeClass.toString() failed: ...]"} placeholder used by
     * {@link #tryToString(Object)} when {@code toString()} threw.
     */
    private static String failedToStringSubstitute(Object object, Throwable e) {
        String eStr;
        try {
            eStr = e.toString();
        } catch (Throwable e2) {
            // Even the exception's own toString() may fail; fall back to its class name.
            eStr = ClassUtil.getShortClassNameOfObject(e);
        }
        return "[" + ClassUtil.getShortClassNameOfObject(object) + ".toString() failed: " + eStr + "]";
    }
    /**
     * Converts {@code 1}, {@code 2}, {@code 3} and so forth to {@code "A"}, {@code "B"}, {@code "C"} and so fort. When
     * reaching {@code "Z"}, it continues like {@code "AA"}, {@code "AB"}, etc. The lowest supported number is 1, but
     * there's no upper limit.
     * 
     * @throws IllegalArgumentException
     *             If the argument is 0 or less.
     * 
     * @see #toLowerABC(int)
     * 
     * @since 2.3.22
     */
    public static String toUpperABC(int n) {
        return toABC(n, 'A');
    }
    /**
     * Same as {@link #toUpperABC(int)}, but produces lower case result, like {@code "ab"}.
     * 
     * @throws IllegalArgumentException
     *             If the argument is 0 or less.
     * 
     * @since 2.3.22
     */
    public static String toLowerABC(int n) {
        return toABC(n, 'a');
    }
/**
* @param oneDigit
* The character that stands for the value 1.
*/
private static String toABC(final int n, char oneDigit) {
if (n < 1) {
throw new IllegalArgumentException("Can't convert 0 or negative "
+ "numbers to latin-number: " + n);
}
// First find out how many "digits" will we need. We start from A, then
// try AA, then AAA, etc. (Note that the smallest digit is "A", which is
// 1, not 0. Hence this isn't like a usual 26-based number-system):
int reached = 1;
int weight = 1;
while (true) {
int nextWeight = weight * 26;
int nextReached = reached + nextWeight;
if (nextReached <= n) {
// So we will have one more digit
weight = nextWeight;
reached = nextReached;
} else {
// No more digits
break;
}
}
// Increase the digits of the place values until we get as close
// to n as possible (but don't step over it).
StringBuilder sb = new StringBuilder();
while (weight != 0) {
// digitIncrease: how many we increase the digit which is already 1
final int digitIncrease = (n - reached) / weight;
sb.append((char) (oneDigit + digitIncrease));
reached += digitIncrease * weight;
weight /= 26;
}
return sb.toString();
}
/**
* Behaves exactly like {@link String#trim()}, but works on arrays. If the resulting array would have the same
* content after trimming, it returns the original array instance. Otherwise it returns a new array instance (or
* {@link CollectionUtils#EMPTY_CHAR_ARRAY}).
*
* @since 2.3.22
*/
public static char[] trim(final char[] cs) {
if (cs.length == 0) {
return cs;
}
int start = 0;
int end = cs.length;
while (start < end && cs[start] <= ' ') {
start++;
}
while (start < end && cs[end - 1] <= ' ') {
end--;
}
if (start == 0 && end == cs.length) {
return cs;
}
if (start == end) {
return CollectionUtils.EMPTY_CHAR_ARRAY;
}
char[] newCs = new char[end - start];
System.arraycopy(cs, start, newCs, 0, end - start);
return newCs;
}
    /**
     * Tells if {@link String#trim()} will return a 0-length string for the {@link String} equivalent of the argument.
     * 
     * @since 2.3.22
     */
    public static boolean isTrimmableToEmpty(char[] text) {
        // Checks the whole array.
        return isTrimmableToEmpty(text, 0, text.length);
    }
    /**
     * Like {@link #isTrimmableToEmpty(char[])}, but acts on a sub-array that starts at {@code start} (inclusive index)
     * and runs to the end of the array.
     * 
     * @since 2.3.23
     */
    public static boolean isTrimmableToEmpty(char[] text, int start) {
        return isTrimmableToEmpty(text, start, text.length);
    }
/**
* Like {@link #isTrimmableToEmpty(char[])}, but acts on a sub-array that starts at {@code start} (inclusive index)
* and ends at {@code end} (exclusive index).
*
* @since 2.3.23
*/
public static boolean isTrimmableToEmpty(char[] text, int start, int end) {
for (int i = start; i < end; i++) {
// We follow Java's String.trim() here, which simply states that c <= ' ' is whitespace.
if (text[i] > ' ') {
return false;
}
}
return true;
}
    /**
     * Same as {@link #globToRegularExpression(String, boolean)} with {@code caseInsensitive} argument {@code false}.
     * 
     * @since 2.3.24
     */
    public static Pattern globToRegularExpression(String glob) {
        return globToRegularExpression(glob, false);
    }
/**
* Creates a regular expression from a glob. The glob must use {@code /} for as file separator, not {@code \}
* (backslash), and is always case sensitive.
*
* <p>This glob implementation recognizes these special characters:
* <ul>
* <li>{@code ?}: Wildcard that matches exactly one character, other than {@code /}
* <li>{@code *}: Wildcard that matches zero, one or multiple characters, other than {@code /}
* <li>{@code **}: Wildcard that matches zero, one or multiple directories. For example, {@code **}{@code /head.ftl}
* matches {@code foo/bar/head.ftl}, {@code foo/head.ftl} and {@code head.ftl} too. {@code **} must be either
* preceded by {@code /} or be at the beginning of the glob. {@code **} must be either followed by {@code /} or be
* at the end of the glob. When {@code **} is at the end of the glob, it also matches file names, like
* {@code a/**} matches {@code a/b/c.ftl}. If the glob only consist of a {@code **}, it will be a match for
* everything.
* <li>{@code \} (backslash): Makes the next character non-special (a literal). For example {@code How\?.ftl} will
* match {@code How?.ftl}, but not {@code HowX.ftl}. Naturally, two backslashes produce one literal backslash.
* <li>{@code [}: Reserved for future purposes; can't be used
* <li><code>{</code>: Reserved for future purposes; can't be used
* </ul>
*
* @since 2.3.24
*/
public static Pattern globToRegularExpression(String glob, boolean caseInsensitive) {
StringBuilder regex = new StringBuilder();
int nextStart = 0;
boolean escaped = false;
int ln = glob.length();
for (int idx = 0; idx < ln; idx++) {
char c = glob.charAt(idx);
if (!escaped) {
if (c == '?') {
appendLiteralGlobSection(regex, glob, nextStart, idx);
regex.append("[^/]");
nextStart = idx + 1;
} else if (c == '*') {
appendLiteralGlobSection(regex, glob, nextStart, idx);
if (idx + 1 < ln && glob.charAt(idx + 1) == '*') {
if (!(idx == 0 || glob.charAt(idx - 1) == '/')) {
throw new IllegalArgumentException(
"The \"**\" wildcard must be directly after a \"/\" or it must be at the "
+ "beginning, in this glob: " + glob);
}
if (idx + 2 == ln) { // trailing "**"
regex.append(".*");
idx++;
} else { // "**/"
if (!(idx + 2 < ln && glob.charAt(idx + 2) == '/')) {
throw new IllegalArgumentException(
"The \"**\" wildcard must be followed by \"/\", or must be at tehe end, "
+ "in this glob: " + glob);
}
regex.append("(.*?/)*");
idx += 2; // "*/".length()
}
} else {
regex.append("[^/]*");
}
nextStart = idx + 1;
} else if (c == '\\') {
escaped = true;
} else if (c == '[' || c == '{') {
throw new IllegalArgumentException(
"The \"" + c + "\" glob operator is currently unsupported "
+ "(precede it with \\ for literal matching), "
+ "in this glob: " + glob);
}
} else {
escaped = false;
}
}
appendLiteralGlobSection(regex, glob, nextStart, glob.length());
return Pattern.compile(regex.toString(), caseInsensitive ? Pattern.CASE_INSENSITIVE | Pattern.UNICODE_CASE : 0);
}
    /**
     * Appends the glob substring [start, end) to the regex as a quoted literal,
     * resolving backslash escapes first. No-op for an empty range.
     */
    private static void appendLiteralGlobSection(StringBuilder regex, String glob, int start, int end) {
        if (start == end) return;
        String part = unescapeLiteralGlobSection(glob.substring(start, end));
        regex.append(Pattern.quote(part));
    }
private static String unescapeLiteralGlobSection(String s) {
int backslashIdx = s.indexOf('\\');
if (backslashIdx == -1) {
return s;
}
int ln = s.length();
StringBuilder sb = new StringBuilder(ln - 1);
int nextStart = 0;
do {
sb.append(s, nextStart, backslashIdx);
nextStart = backslashIdx + 1;
} while ((backslashIdx = s.indexOf('\\', nextStart + 1)) != -1);
if (nextStart < ln) {
sb.append(s, nextStart, ln);
}
return sb.toString();
}
}
|
<reponame>leftjs/gym-api<filename>src/main/java/com/donler/gym/model/Company.java
package com.donler.gym.model;
import com.fasterxml.jackson.annotation.JsonInclude;
import io.swagger.annotations.ApiModelProperty;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.validation.constraints.NotNull;
/**
 * Company (gym operator) JPA entity exposed through the REST API.
 *
 * <p>NOTE(review): the original class comment said "合同" (contract), which doesn't match the class
 * name — documented as "Company" here; confirm against the rest of the domain model.
 *
 * Created by jason on 4/15/16.
 */
@Entity
@JsonInclude(value = JsonInclude.Include.NON_EMPTY)
public class Company {

    @Id
    @GeneratedValue(strategy = GenerationType.AUTO)
    private Long id;

    // Company name (required).
    @ApiModelProperty(example = "未来健身俱乐部-上海厚康体育投资有限公司",notes = "公司名称")
    @NotNull(message = "公司名称不能为空")
    private String name;

    // Company phone number (required).
    @ApiModelProperty(example = "021-67600001", notes = "公司号码")
    @NotNull(message = "公司号码不能为空")
    private String phoneNumber;

    // Company e-mail address (required).
    @ApiModelProperty(example = "<EMAIL>", notes = "公司邮箱")
    @NotNull(message = "公司邮箱不能为空")
    private String email;

    // Business address (required).
    @ApiModelProperty(notes = "经营地址", example = "上海市浦东新区杨高南路739号陆家嘴金融世纪广场3座2楼")
    @NotNull(message = "经营地址不能为空")
    private String address;

    // Legal representative (required).
    // Fix: the validation message was just "法人代表"; all sibling fields use the "...不能为空" form.
    @ApiModelProperty(notes = "法人代表",example = "Jason")
    @NotNull(message = "法人代表不能为空")
    private String incorporator;

    // Business-license registration number; optional (no @NotNull).
    @ApiModelProperty(notes = "营业执照注册号", example = "1284057999")
    private String licenseCode;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getPhoneNumber() {
        return phoneNumber;
    }

    public void setPhoneNumber(String phoneNumber) {
        this.phoneNumber = phoneNumber;
    }

    public String getEmail() {
        return email;
    }

    public void setEmail(String email) {
        this.email = email;
    }

    public String getAddress() {
        return address;
    }

    public void setAddress(String address) {
        this.address = address;
    }

    public String getIncorporator() {
        return incorporator;
    }

    public void setIncorporator(String incorporator) {
        this.incorporator = incorporator;
    }

    public String getLicenseCode() {
        return licenseCode;
    }

    public void setLicenseCode(String licenseCode) {
        this.licenseCode = licenseCode;
    }

    /** No-arg constructor required by JPA. */
    public Company() {
    }

    public Company(String name, String phoneNumber, String email, String address, String incorporator, String licenseCode) {
        this.name = name;
        this.phoneNumber = phoneNumber;
        this.email = email;
        this.address = address;
        this.incorporator = incorporator;
        this.licenseCode = licenseCode;
    }
}
|
/*
* This file is generated by jOOQ.
*/
package io.cattle.platform.core.model.tables;
import io.cattle.platform.core.model.CattleTable;
import io.cattle.platform.core.model.Keys;
import io.cattle.platform.core.model.tables.records.SettingRecord;
import java.util.Arrays;
import java.util.List;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.Identity;
import org.jooq.Schema;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.UniqueKey;
import org.jooq.impl.TableImpl;
/**
 * This class is generated by jOOQ.
 *
 * <p>jOOQ table meta-model for <code>cattle.setting</code> (a name/value settings store).
 * Do not edit by hand — changes will be overwritten by the next jOOQ code generation run.
 */
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.9.3"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class SettingTable extends TableImpl<SettingRecord> {

    private static final long serialVersionUID = -410756367;

    /**
     * The reference instance of <code>cattle.setting</code>
     */
    public static final SettingTable SETTING = new SettingTable();

    /**
     * The class holding records for this type
     */
    @Override
    public Class<SettingRecord> getRecordType() {
        return SettingRecord.class;
    }

    /**
     * The column <code>cattle.setting.id</code>.
     */
    public final TableField<SettingRecord, Long> ID = createField("id", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>cattle.setting.name</code>.
     */
    public final TableField<SettingRecord, String> NAME = createField("name", org.jooq.impl.SQLDataType.VARCHAR.length(255).nullable(false), this, "");

    /**
     * The column <code>cattle.setting.value</code>.
     */
    public final TableField<SettingRecord, String> VALUE = createField("value", org.jooq.impl.SQLDataType.CLOB.nullable(false), this, "");

    /**
     * Create a <code>cattle.setting</code> table reference
     */
    public SettingTable() {
        this("setting", null);
    }

    /**
     * Create an aliased <code>cattle.setting</code> table reference
     */
    public SettingTable(String alias) {
        this(alias, SETTING);
    }

    private SettingTable(String alias, Table<SettingRecord> aliased) {
        this(alias, aliased, null);
    }

    private SettingTable(String alias, Table<SettingRecord> aliased, Field<?>[] parameters) {
        super(alias, null, aliased, parameters, "");
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Schema getSchema() {
        return CattleTable.CATTLE;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Identity<SettingRecord, Long> getIdentity() {
        return Keys.IDENTITY_SETTING;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UniqueKey<SettingRecord> getPrimaryKey() {
        return Keys.KEY_SETTING_PRIMARY;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<UniqueKey<SettingRecord>> getKeys() {
        return Arrays.<UniqueKey<SettingRecord>>asList(Keys.KEY_SETTING_PRIMARY);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public SettingTable as(String alias) {
        return new SettingTable(alias, this);
    }

    /**
     * Rename this table
     */
    @Override
    public SettingTable rename(String name) {
        return new SettingTable(name, null);
    }
}
|
<gh_stars>0
import { NgModule } from '@angular/core';
import {MatToolbarModule} from '@angular/material/toolbar';
import {MatButtonModule} from '@angular/material/button';
import {MatExpansionModule} from '@angular/material/expansion';
import {MatIconModule} from '@angular/material/icon';
import {MatFormFieldModule} from '@angular/material/form-field';
import {MatInputModule} from '@angular/material/input';
import {MatTabsModule} from '@angular/material/tabs';
import {MatProgressSpinnerModule} from '@angular/material/progress-spinner';
import {MatPaginatorModule} from '@angular/material/paginator';
import {MatDialogModule} from '@angular/material/dialog';
import {MatSelectModule} from '@angular/material/select';
/**
 * The Angular Material feature modules shared across the application,
 * imported and re-exported as a single unit so consumers only need MaterialModule.
 */
const MATERIAL_MODULES = [
  MatToolbarModule,
  MatButtonModule,
  MatExpansionModule,
  MatIconModule,
  MatFormFieldModule,
  MatInputModule,
  MatTabsModule,
  MatProgressSpinnerModule,
  MatPaginatorModule,
  MatDialogModule,
  MatSelectModule,
];

// NgModule flattens nested arrays, so passing the list wholesale is
// equivalent to listing every module individually.
@NgModule({
  imports: [MATERIAL_MODULES],
  exports: [MATERIAL_MODULES]
})
export class MaterialModule { }
|
#!/bin/bash
# Interactively collect the SSH target, sync the server build to the sandbox
# deployment directory, and restart the service.
echo -n "Enter your name and press [ENTER]: "
read username
echo -n "Enter host [ENTER]: "
read host
# Quote the remote-target expansions so unusual user/host input can't word-split.
rsync -a ./packages/server/package* ./packages/server/env ./packages/server/secret ./packages/server/dist "${username}@${host}:/var/www/html/apps/inkvisitor-sandbox/server"
rsync -a ./packages/server/src/service/emails "${username}@${host}:/var/www/html/apps/inkvisitor-sandbox/server/dist/server/src/service"
ssh "${username}@${host}" 'cd /var/www/html/apps/inkvisitor-sandbox/server && npm install && npm run restart:sandbox'
|
import random
import math
def estimate_pi(num_simulations):
    """Estimate pi by Monte Carlo sampling in the unit square.

    Draws `num_simulations` points uniformly from [0, 1] x [0, 1] and counts
    how many land inside the quarter unit circle; that fraction approximates
    pi/4, so the result is 4 * (hits / num_simulations).
    """
    # The two uniform() calls run left-to-right (x, then y), one pair per sample.
    hits = sum(
        1
        for _ in range(num_simulations)
        if math.sqrt(random.uniform(0, 1) ** 2 + random.uniform(0, 1) ** 2) <= 1
    )
    return 4 * (hits / num_simulations)
# Demo entry point: 1,000 samples give only a coarse estimate (roughly +/-0.1).
if __name__ == '__main__':
    print(estimate_pi(1000))
<filename>test/fixtures/scale.time/ticks-capacity.js
// Visual-regression fixture: a time scale with `unit: 'year'` must produce one
// tick per year for these eight annual labels. `spriteText` renders the tick
// labels so the image comparison covers the tick text as well.
module.exports = {
	threshold: 0.01,
	config: {
		type: 'line',
		data: {
			labels: [
				'2012-01-01', '2013-01-01', '2014-01-01', '2015-01-01',
				'2016-01-01', '2017-01-01', '2018-01-01', '2019-01-01'
			]
		},
		options: {
			scales: {
				x: {
					type: 'time',
					time: {
						unit: 'year'
					}
				},
				y: {
					display: false
				}
			}
		}
	},
	options: {
		spriteText: true
	}
};
|
#!/usr/bin/env bash
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -euo pipefail
[[ -n "${DEBUG:-}" ]] && set -x

gopath="$(go env GOPATH)"

# Install golangci-lint on demand, pinned to v1.21.0 for reproducible runs.
# NOTE(review): install.goreleaser.com is a third-party installer endpoint —
# confirm it is still served before relying on this in CI.
if ! [[ -x "$gopath/bin/golangci-lint" ]]; then
  echo >&2 'Installing golangci-lint'
  curl --silent --fail --location \
    https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b "$gopath/bin" v1.21.0
fi

# configured by .golangci.yml
GO111MODULE=on "$gopath/bin/golangci-lint" run
# Build impi into $gopath/bin using a throwaway GOPATH so the host GOPATH stays clean.
# NOTE(review): `trap ... EXIT` inside a function is script-global in bash and is later
# overwritten by install_shfmt's trap, so this temp dir may never be removed — confirm intent.
install_impi() {
  impi_dir="$(mktemp -d)"
  trap 'rm -rf -- ${impi_dir}' EXIT
  GOPATH="${impi_dir}" \
    GO111MODULE=off \
    GOBIN="${gopath}/bin" \
    go get github.com/pavius/impi/cmd/impi
}
# install impi that ensures import grouping is done consistently
if ! [[ -x "${gopath}/bin/impi" ]]; then
  echo >&2 'Installing impi'
  install_impi
fi

# Enforce the stdlib / third-party / local import grouping across the whole module.
"$gopath/bin/impi" --local sigs.k8s.io/krew --scheme stdThirdPartyLocal ./...
# Build shfmt into $gopath/bin using a throwaway GOPATH (same pattern as install_impi;
# its EXIT trap replaces the one set by install_impi — see the note there).
install_shfmt() {
  shfmt_dir="$(mktemp -d)"
  trap 'rm -rf -- ${shfmt_dir}' EXIT
  GOPATH="${shfmt_dir}" \
    GO111MODULE=off \
    GOBIN="${gopath}/bin" \
    go get mvdan.cc/sh/cmd/shfmt
}
# install shfmt that ensures consistent format in shell scripts
if ! [[ -x "${gopath}/bin/shfmt" ]]; then
  echo >&2 'Installing shfmt'
  install_shfmt
fi

SCRIPTDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Quote both the binary path and the directory so paths with spaces or glob
# characters don't word-split. `shfmt -l` lists files whose formatting differs.
shfmt_out="$("$gopath/bin/shfmt" -l -i=2 "${SCRIPTDIR}")"
if [[ -n "${shfmt_out}" ]]; then
  echo >&2 "The following shell scripts need to be formatted, run: 'shfmt -w -i=2 ${SCRIPTDIR}'"
  echo >&2 "${shfmt_out}"
  exit 1
fi
|
#!/bin/bash -e
# make sure -x (debugging) is off so we don't print the token in the logs
set +x
# only run on tags
if [[ "$CIRCLE_TAG" = "" ]]; then exit 0; fi

if [ -f ~/.npmrc ]; then mv ~/.npmrc ~/.npmrc.bak; fi
# Restore the original ~/.npmrc on ANY exit: under `bash -e` a failed publish
# would otherwise abort the script and leave the backup unrestored.
trap 'if [ -f ~/.npmrc.bak ]; then mv ~/.npmrc.bak ~/.npmrc; fi' EXIT
echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" > ~/.npmrc

echo "Publishing flow-parser-bin";
npm publish ./dist/npm-flow-parser-bin.tgz;

echo "Publishing flow-parser";
npm publish ./dist/npm-flow-parser.tgz;

echo "Publishing flow-remove-types";
npm publish ./dist/npm-flow-remove-types.tgz;

echo "Publishing flow-node";
npm publish ./dist/npm-flow-node.tgz;
|
nosetests -v --with-coverage --verbosity=2 --cover-erase --cover-html --nocapture --nologcapture --with-doctest --cover-html-dir=coverage/ --cover-package=cpf_cnpj tests |
// import Image from 'next/image';
import Quiz from '../../components/Quiz';
import { getQuestions, startQuiz } from '../../lib/axios';
import nookies from 'nookies';
import Head from 'next/head';
const domain = ({ domain, questions, endTime }) => {
return (
<>
<Head>
<title>CSI - CCS | {domain.charAt(0).toUpperCase() + domain.slice(1)} Quiz</title>
</Head>
<div className="flex flex-nowrap flex-row justify-center relative">
<Quiz domain={domain} questions={questions} endTime={endTime} />
<div className="w-64 absolute left-0 pt-20 hidden 2xl:block ">
<img src={`/assets/quiz_${domain}.png`} alt={domain} />
</div>
</div>
</>
);
};
export default domain;
// Server-side gate for the quiz page: validates the requested domain, starts
// (or resumes) the user's quiz attempt, then loads its questions and deadline.
export async function getServerSideProps(ctx) {
  const {
    query: { domain },
  } = ctx;
  // Only these four quiz domains exist; anything else is a 404.
  const domains = ['tech', 'management', 'design', 'video'];
  if (!domains.includes(domain)) {
    return {
      notFound: true,
    };
  }
  // Forward the session cookies to the quiz API.
  const cookies = nookies.get(ctx);
  const res = await startQuiz({ domain }, cookies);
  // NOTE(review): the page component above ignores `rateLimited` — confirm a wrapper handles it.
  if (res.error) return { props: { rateLimited: true } };
  const { success, message: startMessage } = res;
  // Quiz couldn't start (e.g. already taken / not open): bounce back to the dashboard with the reason.
  if (!success) {
    return {
      redirect: {
        destination: `/user/dashboard?success=${success}&msg=${startMessage}`,
      },
    };
  }
  const { success: questionSuccess, result, message: quizMessage } = await getQuestions({ domain }, cookies);
  if (!questionSuccess)
    return {
      redirect: {
        destination: `/user/dashboard?success=${questionSuccess}&msg=${quizMessage}`,
      },
    };
  const { questions, endTime } = result;
  return {
    props: { domain, questions, endTime },
  };
}
|
import {Link} from "gatsby"
import PropTypes from "prop-types"
import React from "react"
import SocialMediaLinksComponent from '@bit/saundersb.common.social-media-links-component';
// Props accepted by the site header.
interface HeaderInterface {
  siteTitle: string
}

// Top banner: the site title (linking home) on the left, social-media links on the right.
const Header = ({siteTitle}: HeaderInterface) => (
  <header
    style={{
      background: `steelblue`,
      marginBottom: `1.45rem`,
    }}
  >
    <div style={{
      display: 'flex',
      flexDirection: 'row',
      justifyContent: 'space-between',
      alignItems: 'center'
    }}>
      <div
        style={{
          maxWidth: 960,
          padding: `1.45rem`,
          display: 'flex',
          justifyContent: 'flex-start'
        }}
      >
        <h1 style={{margin: 0}}>
          <Link
            to="/"
            style={{
              textDecoration: `none`,
            }}
          >
            {siteTitle}
          </Link>
        </h1>
      </div>
      <div style={{
        padding: `0.25rem`
      }}>
        <SocialMediaLinksComponent/>
      </div>
    </div>
  </header>
)

// Runtime prop validation, kept alongside the TS interface for JS consumers.
Header.propTypes = {
  siteTitle: PropTypes.string,
}

Header.defaultProps = {
  siteTitle: ``,
}

export default Header
|
import React from 'react';
import IconSearch from '../img/cerca-student-hotels.svg';
import IconCompare from '../img/compara-student-hotels.svg';
import IconSave from '../img/risparmia-student-hotels.svg';
// Marketing section: three icon columns explaining the search / compare / save flow.
// All copy is Italian user-facing text and is rendered verbatim.
const BlockIcons = () => {
  return (
    <section className="section has-margin-bottom">
      <div className="container">
        <div className="is-centered has-margin-bottom">
          <h3 className="title is-3">Risparmia sui migliori Student Hotel</h3>
          <p className="">La forza del nostro team e delle nostre sofisticate tecnologie</p>
        </div>
        <div className="columns">
          <div className="column is-10-desktop is-offset-1-desktop">
            <div className="columns">
              <div className="column has-text-centered">
                <div className="sh-block-icon"><img src={IconSearch} alt="trova location"/></div>
                <h5 className="title is-5">Trova la location desiderata</h5>
                <p>Con il nostro portale puoi scoprire la struttura perfetta per le tue esigenze cercando tra una vasta selezione di hotel per studenti in Italia e all'estero.</p>
              </div>
              <div className="column has-text-centered">
                <div className="sh-block-icon"><img src={IconCompare} alt="compara i prezzi"/></div>
                <h5 className="title is-5">Compara le migliori offerte</h5>
                <p>Esamina le strutture a cui sei interessato, controlla le recensioni degli utenti, segui le migliori raccomandazioni, consulta le mappe per evitare sorprese.</p>
              </div>
              <div className="column has-text-centered">
                <div className="sh-block-icon"><img src={IconSave} alt="prenota student hotel"/></div>
                <h5 className="title is-5">Prenota al prezzo più basso</h5>
                <p>Consultando i prezzi tra i vari offerenti nel web (tra cui: Booking.com, Expedia, Agodà, Hotels.com) puoi risparmiare fino al 60% su ogni prenotazione.</p>
              </div>
            </div>
          </div>
        </div>
      </div>
    </section>
  )
}

export default BlockIcons;
package controller
import (
"net/http"
"strconv"
"varconf-server/core/dao"
"varconf-server/core/moudle/router"
"varconf-server/core/service"
"varconf-server/core/web/common"
)
// AppController serves the HTTP CRUD endpoints for apps, delegating all
// persistence and business logic to the injected services.
type AppController struct {
	common.Controller
	appService *service.AppService
	configService *service.ConfigService
}
// InitAppController wires the /app CRUD routes into the router and returns the
// controller so callers can keep a reference to it.
func InitAppController(s *router.Router, appService *service.AppService, configService *service.ConfigService) *AppController {
	appController := AppController{appService: appService, configService: configService}
	s.Get("/app", appController.list)
	s.Get("/app/:appId([0-9]+)", appController.detail)
	s.Delete("/app/:appId([0-9]+)", appController.delete)
	s.Put("/app", appController.create)
	s.Patch("/app/:appId([0-9]+)", appController.update)
	return &appController
}
// GET /app
// list returns one page of apps, optionally filtered by name via the
// "likeName" query parameter.
func (_self *AppController) list(w http.ResponseWriter, r *http.Request, c *router.Context) {
	// read page
	pageIndex, pageSize := _self.ReadPageInfo(r)
	pageData, pageCount, totalCount := _self.appService.PageQuery(r.URL.Query().Get("likeName"), pageIndex, pageSize)
	// remove ApiKey so the secret never appears in list responses.
	// NOTE(review): this only takes effect if pageData holds pointers; if the
	// slice holds values, the reset mutates a copy — confirm the element type.
	for _, v := range pageData {
		v.ApiKey = ""
	}
	_self.WritePageData(w, pageData, pageIndex, pageCount, pageSize, totalCount)
}
// GET /app/:appId([0-9]+)
// detail returns the single app identified by the numeric :appId path parameter.
func (_self *AppController) detail(w http.ResponseWriter, r *http.Request, c *router.Context) {
	appId, err := strconv.ParseInt(r.URL.Query().Get(":appId"), 10, 64)
	if err != nil {
		common.WriteErrorResponse(w, err.Error())
		return
	}
	common.WriteSucceedResponse(w, _self.appService.QueryApp(appId))
}
// DELETE /app/:appId([0-9]+)
// delete removes the app identified by the numeric :appId path parameter.
func (_self *AppController) delete(w http.ResponseWriter, r *http.Request, c *router.Context) {
	appId, err := strconv.ParseInt(r.URL.Query().Get(":appId"), 10, 64)
	if err != nil {
		common.WriteErrorResponse(w, err.Error())
		return
	}
	if ok := _self.appService.DeleteApp(appId); !ok {
		common.WriteErrorResponse(w, nil)
		return
	}
	common.WriteSucceedResponse(w, nil)
}
// PUT /app
// create registers a new app from the JSON request body and echoes it back.
func (_self *AppController) create(w http.ResponseWriter, r *http.Request, c *router.Context) {
	// read param
	appData := dao.AppData{}
	err := common.ReadJson(r, &appData)
	if err != nil {
		common.WriteErrorResponse(w, err.Error())
		return
	}
	// create app; a false return signals a service-level failure (no details surfaced here).
	// appData is passed by pointer, presumably so CreateApp can fill in generated fields — confirm.
	success := _self.appService.CreateApp(&appData)
	if !success {
		common.WriteErrorResponse(w, nil)
		return
	}
	common.WriteSucceedResponse(w, appData)
}
// PATCH /app/:appId([0-9]+)
// update applies a partial update (only selected fields) to the app identified
// by the :appId path parameter.
func (_self *AppController) update(w http.ResponseWriter, r *http.Request, c *router.Context) {
	// read param
	appData := dao.AppData{}
	err := common.ReadJson(r, &appData)
	if err != nil {
		common.WriteErrorResponse(w, err.Error())
		return
	}
	params := r.URL.Query()
	appId, err := strconv.ParseInt(params.Get(":appId"), 10, 64)
	if err != nil {
		common.WriteErrorResponse(w, err.Error())
		return
	}
	// update app — the id from the URL path wins over any id in the body.
	appData.AppId = appId
	success := _self.appService.SelectedUpdateApp(appData)
	if !success {
		common.WriteErrorResponse(w, nil)
		return
	}
	common.WriteSucceedResponse(w, appData)
}
|
#!/usr/bin/env -S bash ../.port_include.sh
# SerenityOS port definition for Dropbear, a small SSH server/client.
port=dropbear
version=2019.78
# Source tarball plus its detached signature and the release signing key
# used to verify the download.
files="https://mirror.dropbear.nl/mirror/releases/dropbear-${version}.tar.bz2 dropbear-${version}.tar.bz2
https://mirror.dropbear.nl/mirror/releases/dropbear-${version}.tar.bz2.asc dropbear-${version}.tar.bz2.asc
https://mirror.dropbear.nl/mirror/releases/dropbear-key-2015.asc dropbear-key-2015.asc"
auth_type="sig"
auth_opts="--keyring ./dropbear-key-2015.asc dropbear-${version}.tar.bz2.asc"
useconfigure=true
# don't care about zlib, less deps is better
configopts="--disable-zlib "
# Serenity's utmp is not fully compatible with what dropbear expects.
configopts+="--disable-utmp --disable-wtmp --disable-login --disable-lastlog "
|
<reponame>kevinkimball/sparkpost-rails
require 'spec_helper'
# Verifies how the delivery method resolves the SparkPost return_path:
# from global configuration, from per-message options, option-over-config
# precedence, and absence when neither is set.
describe SparkPostRails::DeliveryMethod do
  before(:each) do
    # Reset global configuration so examples do not leak state into each other.
    SparkPostRails.configuration.set_defaults
    @delivery_method = SparkPostRails::DeliveryMethod.new
  end
  context "Return Path" do
    it "handles return path set in the configuration" do
      SparkPostRails.configure do |c|
        c.return_path = "<EMAIL>-<EMAIL>"
      end
      test_email = Mailer.test_email
      @delivery_method.deliver!(test_email)
      expect(@delivery_method.data[:return_path]).to eq('<EMAIL>')
    end
    it "handles return path on an individual message" do
      test_email = Mailer.test_email return_path: "<EMAIL>"
      @delivery_method.deliver!(test_email)
      expect(@delivery_method.data[:return_path]).to eq('<EMAIL>')
    end
    it "handles the value on an individual message overriding configuration" do
      # Per-message return_path wins over the configured default.
      SparkPostRails.configure do |c|
        c.return_path = "<EMAIL>"
      end
      test_email = Mailer.test_email return_path: "<EMAIL>"
      @delivery_method.deliver!(test_email)
      expect(@delivery_method.data[:return_path]).to eq('<EMAIL>')
    end
    it "handles a default setting of none" do
      # With no return_path anywhere, the key must be absent from the payload.
      test_email = Mailer.test_email
      @delivery_method.deliver!(test_email)
      expect(@delivery_method.data.has_key?(:return_path)).to eq(false)
    end
  end
end
|
<reponame>bverhoeve/design-patterns<filename>src/garage/Onderhoud.java
package garage;
/**
 * A single maintenance (onderhoud) appointment for a vehicle, identified by
 * its licence plate (nummerplaat) and a [start, end] time slot.
 */
public class Onderhoud {

    // Fields stay public for backward compatibility with existing callers
    // that may access them directly.
    public int start;
    public int end;
    public String nummerplaat;

    /**
     * @param nummerplaat licence plate of the vehicle being serviced
     * @param start       start of the maintenance slot
     * @param end         end of the maintenance slot
     */
    public Onderhoud(String nummerplaat, int start, int end) {
        this.nummerplaat = nummerplaat;
        this.start = start;
        this.end = end;
    }

    public String getNummerPlaat() {
        return this.nummerplaat;
    }

    public int getStart() {
        return this.start;
    }

    /** Added for consistency with getStart(); end previously had no accessor. */
    public int getEnd() {
        return this.end;
    }

    @Override
    public String toString() {
        return "" + nummerplaat + ": start: " + start + " end: " + end;
    }
}
|
<filename>offer/src/main/java/leetCode/L10084_LargestRectangleArea.java
package leetCode;//给定 n 个非负整数,用来表示柱状图中各个柱子的高度。每个柱子彼此相邻,且宽度为 1 。
//
// 求在该柱状图中,能够勾勒出来的矩形的最大面积。
//
//
//
// 示例 1:
//
//
//
//
//输入:heights = [2,1,5,6,2,3]
//输出:10
//解释:最大的矩形为图中红色区域,面积为 10
//
//
// 示例 2:
//
//
//
//
//输入: heights = [2,4]
//输出: 4
//
//
//
// 提示:
//
//
// 1 <= heights.length <=105
// 0 <= heights[i] <= 104
//
// Related Topics 栈 数组 单调栈
// 👍 1473 👎 0
//leetcode submit region begin(Prohibit modification and deletion)
public class L10084_LargestRectangleArea {

    /**
     * Returns the area of the largest rectangle that fits under the histogram.
     *
     * The original stub always returned 0; this is the standard monotonic-stack
     * solution, O(n) time / O(n) space. The stack holds indices of bars with
     * non-decreasing heights. When the current bar is lower than the stack top,
     * each taller bar is popped and its rectangle is closed: its width spans
     * from the index below the popped one (exclusive) to the current index
     * (exclusive). A sentinel height of 0 at i == n flushes the stack.
     *
     * @param heights bar heights, each >= 0
     * @return the maximal rectangle area, 0 for an empty histogram
     */
    public static int largestRectangleArea(int[] heights) {
        int n = heights.length;
        java.util.Deque<Integer> stack = new java.util.ArrayDeque<>();
        int best = 0;
        for (int i = 0; i <= n; i++) {
            // Sentinel bar of height 0 past the end closes all open rectangles.
            int current = (i == n) ? 0 : heights[i];
            while (!stack.isEmpty() && heights[stack.peek()] > current) {
                int height = heights[stack.pop()];
                int width = stack.isEmpty() ? i : i - stack.peek() - 1;
                best = Math.max(best, height * width);
            }
            stack.push(i);
        }
        return best;
    }
}
|
import React from "react";
import {BoundingBox, Content, Handle, Handles, PropProvider, Wrapper} from "./elements";
import {
listenRR,
useCursorSlice,
useDown,
useDrag,
useHandleMouse,
useHandleMouseEvent,
useHandlers,
useHandles,
useHandlesDown,
useInitialSize,
useLoaded,
useMeta,
useWithCornerHandle,
useWithDown,
useWithHandle
} from "./hooks";
import {Mops} from "./types";
import {getBoundingBox} from "./utils";
/**
 * Box renders its children inside a wrapper that can be dragged, resized and
 * rotated via handle elements, reporting every gesture through the
 * on{Drag,Resize,Rotate}{Start,,End} callbacks. The position/size/rotation
 * props seed internal state; the guide-related props (addGuides, updateGuide,
 * showGuides, ...) come from the surrounding guides context and drive
 * snapping. All gesture mechanics live in the hooks imported from "./hooks".
 */
export const Box: React.RefForwardingComponent<
	HTMLElement,
	Mops.BoxProps & Mops.GuidesContext
> = React.forwardRef(
	(
		{
			as,
			children,
			className,
			drawBoundingBox,
			drawBox,
			isResizable,
			isRotatable,
			isDraggable,
			fullHandles,
			marker,
			minHeight,
			minWidth,
			onDrag,
			onDragStart,
			onDragEnd,
			onResize,
			onResizeStart,
			onResizeEnd,
			onRotate,
			onRotateStart,
			onRotateEnd,
			position,
			rotation,
			scale,
			showGuides,
			hideGuides,
			updateGuide,
			addGuides,
			removeGuides,
			guides,
			guideRequests,
			shouldSnap,
			size,
			style,
			...props
		},
		ref
	) => {
		// Transform state. Each "initial*" value is a snapshot taken when a
		// gesture starts; the "current*" value tracks the live gesture.
		const contentRef = React.useRef<HTMLDivElement>();
		const [loaded, setLoaded] = React.useState(false);
		const [initialSize, setInitialSize] = React.useState<Mops.SizeModel>(
			size as Mops.SizeModel
		);
		const [currentSize, setSize] = React.useState<Mops.SizeModel>(initialSize);
		const [initialPosition, setInitialPosition] = React.useState<Mops.PositionModel>(position);
		const [currentPosition, setPosition] = React.useState<Mops.PositionModel>(initialPosition);
		const [initialRotation, setInitialRotation] = React.useState<Mops.RotationModel>(rotation);
		const [currentRotation, setRotation] = React.useState<Mops.RotationModel>(initialRotation);
		const [additionalAngle, setAdditionalAngle] = React.useState<Mops.RotationModel>(rotation);
		const metaKey = useMeta();
		// Stable wrappers around the user-supplied callbacks, fed with the
		// current transform state.
		const {
			handleDrag,
			handleDragEnd,
			handleDragStart,
			handleResize,
			handleResizeEnd,
			handleResizeStart,
			handleRotate,
			handleRotateEnd,
			handleRotateStart
		} = useHandlers({
			currentPosition,
			currentRotation,
			currentSize,
			onDrag,
			onDragEnd,
			onDragStart,
			onResize,
			onResizeEnd,
			onResizeStart,
			onRotate,
			onRotateEnd,
			onRotateStart
		});
		// Factory for per-handle mouse behavior (resize math, min sizes).
		const withHandle = useWithHandle({
			contentRef,
			currentPosition,
			currentRotation,
			initialPosition,
			initialSize,
			isResizable,
			minHeight,
			minWidth,
			scale,
			setInitialPosition,
			setInitialSize,
			setPosition,
			setSize
		});
		// const getLimit = React.useCallback(
		// 	(radius, angle) => {
		// 		const {x, y} = polarToCartesian(angle + initialRotation.z);
		// 		return {
		// 			x: (n: number) => chooseFn(x)(initialPosition.x + x * radius, n),
		// 			y: (n: number) => chooseFn(y)( initialPosition.y + y * radius, n)
		// 		};
		// 	},
		// 	[initialPosition, initialRotation]
		// );
		// const diff = React.useMemo(
		// 	() => ({
		// 		x: (initialSize.width - minWidth) / 2,
		// 		y: (initialSize.height - minHeight) / 2
		// 	}),
		// 	[initialSize, minHeight, minWidth]
		// );
		// const limitLeft = React.useMemo(() => getLimit(diff.x, 0), [diff, getLimit]);
		// const limitTop = React.useMemo(() => getLimit(diff.y, 90), [diff, getLimit]);
		// const limitRight = React.useMemo(() => getLimit(diff.x, 180), [diff, getLimit]);
		// const limitBottom = React.useMemo(() => getLimit(diff.y, 270), [diff, getLimit]);
		// const limitTopLeft = React.useMemo(() => {
		// 	const distance = getHypotenuse(diff.y, diff.x);
		// 	const angle = atan2(diff.y, diff.x);
		// 	return getLimit(distance, angle);
		// }, [diff, getLimit]);
		const withCornerHandle = useWithCornerHandle({
			currentRotation,
			initialPosition,
			initialSize,
			withHandle
		});
		// Per-handle "is pressed" flags and their setters (8 resize handles).
		const {
			isBottomDown,
			isBottomLeftDown,
			isBottomRightDown,
			isLeftDown,
			isRightDown,
			isTopDown,
			isTopLeftDown,
			isTopRightDown,
			setBottomDown,
			setBottomLeftDown,
			setBottomRightDown,
			setLeftDown,
			setRightDown,
			setTopDown,
			setTopLeftDown,
			setTopRightDown
		} = useHandlesDown({
			currentRotation,
			initialPosition,
			initialSize,
			// limitBottom,
			// limitLeft,
			// limitRight,
			// limitTop,
			// limitTopLeft,
			withCornerHandle,
			withHandle
		});
		// Mouse-move handler that also manages snap guides.
		const handleMouse = useHandleMouse({
			addGuides,
			currentRotation,
			currentSize,
			guideRequests,
			guides,
			hideGuides,
			initialPosition,
			removeGuides,
			shouldSnap,
			showGuides,
			updateGuide
		});
		const handleMouseEvent = useHandleMouseEvent({
			additionalAngle,
			contentRef,
			initialRotation,
			isRotatable
		});
		// Drag / rotation press state for the content area itself.
		const {handleRotationDown, isDown, isRotationDown, setDown} = useWithDown({
			handleMouse,
			handleMouseEvent,
			hideGuides,
			scale,
			setAdditionalAngle,
			setInitialPosition,
			setInitialRotation,
			setPosition,
			setRotation
		});
		const getCursorSlice = useCursorSlice(currentRotation);
		const handles = useHandles({
			setBottomDown,
			setBottomLeftDown,
			setBottomRightDown,
			setLeftDown,
			setRightDown,
			setTopDown,
			setTopLeftDown,
			setTopRightDown
		});
		// Inline styles derived from the live transform state.
		const wrapperStyle = {
			...currentSize,
			transform: `translate3d(${currentPosition.x}px, ${currentPosition.y}px, 0) translate3d(-50%, -50%, 0)`
		};
		const boxStyle = {
			...getBoundingBox({
				...currentSize,
				angle: currentRotation.z
			})
		};
		const contentStyle = {
			...currentSize,
			transform: `rotate3d(0, 0, 1, ${currentRotation.z}deg)`
		};
		useInitialSize({contentRef, setInitialSize, setSize});
		// Subscribe to pointer movement while any handle/drag/rotation is active.
		listenRR({
			currentPosition,
			currentRotation,
			currentSize,
			handleDrag,
			handleResize,
			handleRotate,
			isBottomDown,
			isBottomLeftDown,
			isBottomRightDown,
			isDown,
			isLeftDown,
			isRightDown,
			isRotationDown,
			isTopDown,
			isTopLeftDown,
			isTopRightDown,
			loaded
		});
		// Fire the start/end lifecycle callbacks on press-state transitions.
		useDown({
			handleDragEnd,
			handleDragStart,
			handleResizeEnd,
			handleResizeStart,
			handleRotateEnd,
			handleRotateStart,
			isBottomDown,
			isBottomLeftDown,
			isBottomRightDown,
			isDown,
			isLeftDown,
			isRightDown,
			isRotationDown,
			isTopDown,
			isTopLeftDown,
			isTopRightDown,
			loaded,
			metaKey
		});
		useDrag({loaded, isDown, handleDragEnd, handleDragStart});
		useLoaded(setLoaded);
		return (
			<Wrapper
				ref={ref as React.Ref<HTMLElement>}
				as={as}
				style={{...(style || {}), ...wrapperStyle}}
				isDown={isDown}
				className={className}>
				<BoundingBox style={boxStyle} draw={drawBoundingBox} />
				<Content
					ref={contentRef as React.Ref<HTMLDivElement>}
					style={contentStyle}
					onMouseDown={!metaKey && isDraggable ? setDown : undefined}>
					{children}
				</Content>
				{/* Handles are only rendered when some interaction is enabled. */}
				{(isResizable || isRotatable) && (
					<PropProvider
						value={{
							getCursorSlice,
							handleRotationDown,
							isDraggable,
							isResizable,
							isRotatable,
							metaKey
						}}>
						<Handles style={contentStyle} draw={drawBox}>
							{handles.map(handle => {
								return (
									<Handle
										key={handle.variation}
										{...handle}
										marker={marker}
										full={fullHandles}
									/>
								);
							})}
						</Handles>
					</PropProvider>
				)}
			</Wrapper>
		);
	}
);
// Defaults: render as a <div>, draw the box outline but not the bounding
// box, 40x40 minimum size, identity transform, unit scale, no snapping,
// and let the content determine the initial size.
Box.defaultProps = {
	as: "div",
	drawBoundingBox: false,
	drawBox: true,
	minHeight: 40,
	minWidth: 40,
	position: {
		x: 0,
		y: 0
	},
	rotation: {
		x: 0,
		y: 0,
		z: 0
	},
	scale: 1,
	shouldSnap: [],
	size: {
		height: "auto",
		width: "auto"
	}
};
|
# Conda-style build script for LLVM: patches CMake, configures, builds,
# smoke-tests SVML vectorization, and runs a subset of the lit suites.
set -x
# Make osx work like linux.
sed -i.bak "s/NOT APPLE AND ARG_SONAME/ARG_SONAME/g" cmake/modules/AddLLVM.cmake
sed -i.bak "s/NOT APPLE AND NOT ARG_SONAME/NOT ARG_SONAME/g" cmake/modules/AddLLVM.cmake
mkdir build
cd build
# Intel JIT events support is only enabled on Linux.
[[ $(uname) == Linux ]] && conditional_args="
    -DLLVM_USE_INTEL_JITEVENTS=ON
"
cmake -DCMAKE_INSTALL_PREFIX="${PREFIX}" \
      -DCMAKE_BUILD_TYPE=Release \
      -DLLVM_ENABLE_RTTI=ON \
      -DLLVM_INCLUDE_TESTS=ON \
      -DLLVM_INCLUDE_GO_TESTS=OFF \
      -DLLVM_INCLUDE_UTILS=ON \
      -DLLVM_INSTALL_UTILS=ON \
      -DLLVM_UTILS_INSTALL_DIR=libexec/llvm \
      -DLLVM_INCLUDE_DOCS=OFF \
      -DLLVM_INCLUDE_EXAMPLES=OFF \
      -DLLVM_ENABLE_TERMINFO=OFF \
      -DLLVM_ENABLE_LIBXML2=OFF \
      -DLLVM_ENABLE_ZLIB=ON \
      -DHAVE_LIBEDIT=OFF \
      -DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD=WebAssembly \
      -DLLVM_BUILD_LLVM_DYLIB=yes \
      -DLLVM_LINK_LLVM_DYLIB=yes \
      ${conditional_args} ..
make -j${CPU_COUNT}
# Pick a CPU flag for the SVML check; only x86-64 platforms get one.
if [[ "${target_platform}" == "linux-64" || "${target_platform}" == "osx-64" ]]; then
    export TEST_CPU_FLAG="-mcpu=haswell"
else
    export TEST_CPU_FLAG=""
fi
# Regression check for numba issue 3016: SVML vectorization must match the
# FileCheck expectations embedded in the .ll file.
bin/opt -S -vector-library=SVML $TEST_CPU_FLAG -O3 $RECIPE_DIR/numba-3016.ll | bin/FileCheck $RECIPE_DIR/numba-3016.ll || exit $?
#make -j${CPU_COUNT} check-llvm
cd ../test
# Run a subset of the lit test suites.
../build/bin/llvm-lit -vv Transforms ExecutionEngine Analysis CodeGen/X86
|
def sort(arr):
    """Sort *arr* in place in ascending order and return the same list.

    Replaces the original O(n^2) pairwise compare-and-swap loop with the
    built-in ``list.sort`` (Timsort, O(n log n)) while keeping the original
    contract: the input list is mutated in place and also returned.

    :param arr: list of mutually comparable items
    :return: the same list object, sorted ascending
    """
    arr.sort()
    return arr
def search(arr, elem):
    """Return the index of the first occurrence of *elem* in *arr*, or -1.

    Linear scan, O(n); uses ``enumerate`` instead of the original
    ``range(len(...))`` index loop.

    :param arr: sequence to scan
    :param elem: value to look for
    :return: 0-based index of the first match, or -1 when absent
    """
    for index, value in enumerate(arr):
        if value == elem:
            return index
    return -1


print(search([1, 3, 4, 5, 7, 8], 7))
<filename>repository/src/main/java/org/apache/atlas/util/AtlasGremlin3QueryProvider.java
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.util;
/**
 * Gremlin 3 variants of the canned graph queries. Only queries whose syntax
 * differs from Gremlin 2 are overridden here; everything else falls through
 * to the Gremlin 2 provider via {@code super.getQuery}.
 */
public class AtlasGremlin3QueryProvider extends AtlasGremlin2QueryProvider {
    @Override
    public String getQuery(final AtlasGremlinQuery gremlinQuery) {
        // In case any overrides are necessary, a specific switch case can be added here to
        // return Gremlin 3 specific query otherwise delegate to super.getQuery
        switch (gremlinQuery) {
            // Metrics: entity counts per type name, split by lifecycle state.
            case ENTITY_ACTIVE_METRIC:
                return "g.V().has('__typeName', within(%s)).has('__state', 'ACTIVE').groupCount().by('__typeName').toList()";
            case ENTITY_DELETED_METRIC:
                return "g.V().has('__typeName', within(%s)).has('__state', 'DELETED').groupCount().by('__typeName').toList()";
            // Export: select entity GUIDs of a type by attribute match mode.
            case EXPORT_TYPE_STARTS_WITH:
                return "g.V().has('__typeName',typeName).filter({it.get().value(attrName).startsWith(attrValue)}).has('__guid').values('__guid').toList()";
            case EXPORT_TYPE_ENDS_WITH:
                return "g.V().has('__typeName',typeName).filter({it.get().value(attrName).endsWith(attrValue)}).has('__guid').values('__guid').toList()";
            case EXPORT_TYPE_CONTAINS:
                return "g.V().has('__typeName',typeName).filter({it.get().value(attrName).contains(attrValue)}).has('__guid').values('__guid').toList()";
            case EXPORT_TYPE_MATCHES:
                return "g.V().has('__typeName',typeName).filter({it.get().value(attrName).matches(attrValue)}).has('__guid').values('__guid').toList()";
            case EXPORT_TYPE_DEFAULT:
                return "g.V().has('__typeName',typeName).has(attrName, attrValue).has('__guid').values('__guid').toList()";
            // Export: neighbours of a starting entity, flagging Process vertices.
            case EXPORT_BY_GUID_FULL:
                return "g.V().has('__guid', startGuid).bothE().bothV().has('__guid').project('__guid', 'isProcess').by('__guid').by(map {it.get().values('__superTypeNames').toSet().contains('Process')}).dedup().toList()";
            case EXPORT_BY_GUID_CONNECTED_IN_EDGE:
                return "g.V().has('__guid', startGuid).inE().outV().has('__guid').project('__guid', 'isProcess').by('__guid').by(map {it.get().values('__superTypeNames').toSet().contains('Process')}).dedup().toList()";
            case EXPORT_BY_GUID_CONNECTED_OUT_EDGE:
                return "g.V().has('__guid', startGuid).outE().inV().has('__guid').project('__guid', 'isProcess').by('__guid').by(map {it.get().values('__superTypeNames').toSet().contains('Process')}).dedup().toList()";
            case EXPORT_TYPE_ALL_FOR_TYPE:
                return "g.V().has('__typeName', within(typeName)).has('__guid').values('__guid').toList()";
            // Lineage traversals; "PARTIAL" variants bound the repeat depth.
            case FULL_LINEAGE_DATASET:
                return "g.V().has('__guid', '%s').repeat(__.inE('%s').as('e1').outV().outE('%s').as('e2').inV()).emit().select('e1', 'e2').toList()";
            case PARTIAL_LINEAGE_DATASET:
                return "g.V().has('__guid', '%s').repeat(__.inE('%s').as('e1').outV().outE('%s').as('e2').inV()).times(%s).emit().select('e1', 'e2').toList()";
            case FULL_LINEAGE_PROCESS:
                return "g.V().has('__guid', '%s').repeat(__.outE('%s').as('e1').inV().inE('%s').as('e2').outV()).emit().select('e1', 'e2').toList()";
            case PARTIAL_LINEAGE_PROCESS:
                return "g.V().has('__guid', '%s').repeat(__.outE('%s').as('e1').inV().inE('%s').as('e2').outV()).times(%s).emit().select('e1', 'e2').toList()";
            // Pagination suffix appended to other queries.
            case TO_RANGE_LIST:
                return ".range(startIdx, endIdx).toList()";
            // Relationship search plus optional sort suffixes.
            case RELATIONSHIP_SEARCH:
                return "g.V().has('__guid', guid).bothE(relation).has('__state', within(states)).otherV().has('__state', within(states))";
            case RELATIONSHIP_SEARCH_ASCENDING_SORT:
                return ".order().by(sortAttributeName, incr)";
            case RELATIONSHIP_SEARCH_DESCENDING_SORT:
                return ".order().by(sortAttributeName, decr)";
            // Cheap probes used to detect what ID types the graph returns.
            case GREMLIN_SEARCH_RETURNS_VERTEX_ID:
                return "g.V().range(0,1).toList()";
            case GREMLIN_SEARCH_RETURNS_EDGE_ID:
                return "g.E().range(0,1).toList()";
            // Classification (tag) propagation: walk ACTIVE edges whose
            // tagPropagation direction allows flow, avoiding revisits.
            case TAG_PROPAGATION_IMPACTED_INSTANCES:
                return "g.V().has('__guid', guid).aggregate('src')" +
                        ".repeat(union(outE().has('__state', 'ACTIVE').has('tagPropagation', within('ONE_TO_TWO', 'BOTH')).inV(), " +
                        "inE().has('__state', 'ACTIVE').has('tagPropagation', within('TWO_TO_ONE', 'BOTH')).outV())" +
                        ".dedup().where(without('src')).simplePath()).emit().toList();";
            case TAG_PROPAGATION_IMPACTED_INSTANCES_WITH_RESTRICTIONS:
                return "g.V().has('__guid', guid).aggregate('src')" +
                        ".repeat(union(outE().has('__state', 'ACTIVE').has('tagPropagation', within('ONE_TO_TWO', 'BOTH')).not(has('blockedPropagatedClassifications', org.janusgraph.core.attribute.Text.textContains(classificationId))).inV(), " +
                        "inE().has('__state', 'ACTIVE').has('tagPropagation', within('TWO_TO_ONE', 'BOTH')).not(has('blockedPropagatedClassifications', org.janusgraph.core.attribute.Text.textContains(classificationId))).outV())" +
                        ".dedup().where(without('src')).simplePath()).emit().toList();";
            case TAG_PROPAGATION_IMPACTED_INSTANCES_FOR_REMOVAL:
                return "g.V().has('__guid', guid).aggregate('src')" +
                        ".repeat(union(outE().has('__state', 'ACTIVE').has('tagPropagation', within('ONE_TO_TWO', 'BOTH')).has('_r__guid', neq(relationshipGuid)).inV(), " +
                        "inE().has('__state', 'ACTIVE').has('tagPropagation', within('TWO_TO_ONE', 'BOTH')).has('_r__guid', neq(relationshipGuid)).outV())" +
                        ".dedup().where(without('src')).simplePath()).emit().toList();";
            case TAG_PROPAGATION_IMPACTED_INSTANCES_EXCLUDE_RELATIONSHIP:
                return "g.V().has('__guid', guid).aggregate('src')" +
                        ".repeat(union(outE().has('__state', 'ACTIVE').has('tagPropagation', within('ONE_TO_TWO', 'BOTH')).has('_r__guid', neq(guidRelationshipToExclude))" +
                        ".not(has('blockedPropagatedClassifications', org.janusgraph.core.attribute.Text.textContains(classificationId))).inV(), " +
                        "inE().has('__state', 'ACTIVE').has('tagPropagation', within('TWO_TO_ONE', 'BOTH')).has('_r__guid', neq(guidRelationshipToExclude))" +
                        ".not(has('blockedPropagatedClassifications', org.janusgraph.core.attribute.Text.textContains(classificationId))).outV())" +
                        ".dedup().where(without('src')).simplePath()).emit().toList();";
        }
        // Anything not overridden above keeps its Gremlin 2 form.
        return super.getQuery(gremlinQuery);
    }
}
|
#!/bin/bash
# Lints the component stylesheets, compiles app.scss to build/app.css (with
# a source map), then adds vendor prefixes in place.
set -e
mkdir -p ./build/
scss-lint _component.scss
scss-lint _componentTest.scss
sassc --sourcemap app.scss build/app.css
autoprefixer build/app.css
|
#!/bin/bash
# Installs build/test dependencies, adding the cross-compilation toolchain
# that matches the ARCH_SUFFIX environment variable (empty = native amd64).
set -o errexit
set -o nounset
set -o xtrace
# Packages needed on every architecture.
DEPS=(
    build-essential git gdb valgrind cmake rpm file
    libcap-dev python3-dev python3-pip python3-setuptools
    hardening-includes gnupg
)
# Append the per-architecture cross toolchain; unknown values abort.
case "${ARCH_SUFFIX-}" in
    amd64|'') ;;
    arm64) DEPS+=(gcc-aarch64-linux-gnu binutils-aarch64-linux-gnu libc6-dev-arm64-cross) ;;
    armel) DEPS+=(gcc-arm-linux-gnueabi binutils-arm-linux-gnueabi libc6-dev-armel-cross) ;;
    armhf) DEPS+=(gcc-arm-linux-gnueabihf binutils-arm-linux-gnueabihf libc6-dev-armhf-cross) ;;
    i386) DEPS+=(libc6-dev-i386 gcc-multilib) ;;
    muslc-amd64) DEPS+=(musl-tools) ;;
    ppc64el|ppc64le) DEPS+=(gcc-powerpc64le-linux-gnu binutils-powerpc64le-linux-gnu libc6-dev-ppc64el-cross) ;;
    s390x) DEPS+=(gcc-s390x-linux-gnu binutils-s390x-linux-gnu libc6-dev-s390x-cross) ;;
    mips64el) DEPS+=(gcc-5-mips64el-linux-gnuabi64 binutils-mips64el-linux-gnuabi64 libc6-dev-mips64el-cross) ;;
    *) echo "Unknown ARCH_SUFFIX=${ARCH_SUFFIX-}"; exit 1 ;;
esac
apt-get update
apt-get install --no-install-recommends --yes "${DEPS[@]}"
# Trim the apt cache to keep the image small.
rm -rf /var/lib/apt/lists/*
python3 -m pip install --upgrade pip
python3 -m pip install virtualenv
|
#!/usr/bin/env bash
# Orchestration entrypoint for the GameChanger crawler-ingest pipeline:
# fetches the app repo tarball from S3, configures it, runs the core ingest
# job, emails start/finish notifications, and ships the job log back to S3.
set -o errexit
set -o nounset
set -o pipefail
# always set in stage params
SCRIPT_ENV=${SCRIPT_ENV:-local}
# Check basic params
case "$SCRIPT_ENV" in
    prod)
        echo "RUNNING IN PROD ENV"
        ;;
    dev)
        echo "RUNNING IN DEV ENV"
        ;;
    local)
        echo "RUNNING IN LOCAL ENV"
        ;;
    *)
        echo >&2 "Must set SCRIPT_ENV = (prod|dev|local)"
        exit 2
        ;;
esac
#####
## ## SETUP TMP DIR
#####
function setup_tmp_dir() {
    LOCAL_TMP_DIR=$(mktemp -d)
}
setup_tmp_dir # CALLING RIGHT AWAY (to avoid issues with unbound var later)
function echo_tmp_dir_locaton() {
    echo "TEMP DIR IS AT $LOCAL_TMP_DIR"
}
function remove_tmp_dir() {
    if [[ -d "$LOCAL_TMP_DIR" ]]; then
        rm -r "$LOCAL_TMP_DIR"
    fi
}
#####
## ## REGISTER CLEANUP HOOKS
#####
function cleanup_hooks() {
    remove_tmp_dir
    # echo_tmp_dir_locaton
}
trap cleanup_hooks EXIT
#####
## ## SETUP COMMANDS
#####
# Activates the shared virtualenv and picks region/endpoint for the AWS CLI
# depending on the environment (local uses a minio-style S3 endpoint).
function setup_venv_and_other_commands() {
    source "/opt/gc-venv/bin/activate"
    export PATH="$PATH:/usr/local/bin"
    case "$SCRIPT_ENV" in
        prod)
            export AWS_DEFAULT_REGION="us-gov-west-1"
            AWS_CMD="aws"
            ;;
        dev)
            export AWS_DEFAULT_REGION="us-east-1"
            AWS_CMD="aws"
            ;;
        local)
            export AWS_DEFAULT_REGION="us-east-1"
            AWS_CMD="aws --endpoint-url http://s3-server:9000"
            ;;
        *)
            echo >&2 "Must set SCRIPT_ENV = (prod|dev|local)"
            exit 2
            ;;
    esac
}
# Downloads the application repo tarball from S3 and unpacks it locally.
function setup_local_repo_copy() {
    echo "FETCHING REPO"
    S3_REPO_TGZ_PATH="${S3_BUCKET_NAME}/${REPO_TGZ_BASE_PREFIX}${REPO_TGZ_FILENAME}"
    $AWS_CMD s3 cp "s3://${S3_REPO_TGZ_PATH}" "$LOCAL_GC_REPO_TGZ_PATH"
    tar -xvzf "$LOCAL_GC_REPO_TGZ_PATH" -C "$LOCAL_GC_REPO_BASE_DIR"
}
# Prepares working directories, the batch timestamp, log locations, and
# points PYTHONPATH/cwd at the unpacked repo.
function setup_local_vars_and_dirs() {
    LOCAL_JOB_DIR="$LOCAL_TMP_DIR/job"
    LOCAL_GC_REPO_BASE_DIR="$LOCAL_TMP_DIR/app-repo"
    LOCAL_GC_REPO_TGZ_PATH="$LOCAL_GC_REPO_BASE_DIR/repo.tgz"
    mkdir -p "$LOCAL_JOB_DIR"
    mkdir -p "$LOCAL_GC_REPO_BASE_DIR"
    # setup logs
    # Drops the last five characters of the ISO-8601 timestamp (trailing
    # timezone-offset digits) -- presumably to keep S3 prefixes tidy; confirm.
    JOB_TS=$(sed 's/.\{5\}$//' <<< $(date --iso-8601=seconds))
    S3_JOB_LOG_PREFIX="bronze/gamechanger/data-pipelines/orchestration/logs/${JOB_NAME}/${JOB_TS}/"
    LOCAL_JOB_LOG_PATH="$LOCAL_TMP_DIR/job.log"
    touch "$LOCAL_JOB_LOG_PATH"
    # setup pythonpath & cwd
    export PYTHONPATH="$LOCAL_GC_REPO_BASE_DIR"
    cd "$LOCAL_GC_REPO_BASE_DIR"
}
# Initializes the repo's configuration module and verifies connectivity.
function configure_repo() {
    local es_config_name="${ES_CONFIG_NAME:-$SCRIPT_ENV}"
    local app_config_name="${APP_CONFIG_NAME:-$SCRIPT_ENV}"
    python -m configuration init "$SCRIPT_ENV" \
        --app-config "$app_config_name" --elasticsearch-config "$es_config_name"
    # NOTE(review): the init command above is invoked twice back-to-back;
    # looks like accidental duplication, but could be a deliberate retry -- confirm.
    python -m configuration init "$SCRIPT_ENV" \
        --app-config "$app_config_name" --elasticsearch-config "$es_config_name"
    python -m configuration check-connections
}
#####
## ## MAIN COMMANDS
#####
# Runs the core ingest pipeline against the crawler-downloader checkpoint,
# forwarding all tuning/skipping knobs from the environment.
function run_core_ingest() {
    local job_dir="$LOCAL_JOB_DIR"
    local job_ts="$JOB_TS"
    local bucket_name="$S3_BUCKET_NAME"
    local es_index_name="$ES_INDEX_NAME"
    local es_alias_name="${ES_ALIAS_NAME:-}"
    local skip_neo4j_update="$SKIP_NEO4J_UPDATE"
    local skip_snapshot_backup="$SKIP_SNAPSHOT_BACKUP"
    local skip_db_backup="$SKIP_DB_BACKUP"
    local skip_db_update="$SKIP_DB_UPDATE"
    local max_ocr_threads="${MAX_OCR_THREADS_PER_FILE:-4}"
    local max_parser_threads="${MAX_PARSER_THREADS:-16}"
    local current_snapshot_prefix="bronze/gamechanger/"
    local backup_snapshot_prefix="bronze/gamechanger/backup/"
    local load_archive_base_prefix="bronze/gamechanger/load-archive/"
    local db_backup_base_prefix="bronze/gamechanger/backup/db/"
    python -m dataPipelines.gc_ingest pipelines core ingest \
        --skip-neo4j-update="$skip_neo4j_update" \
        --skip-snapshot-backup="$skip_snapshot_backup" \
        --skip-db-backup="$skip_db_backup" \
        --skip-db-update="$skip_db_update" \
        --current-snapshot-prefix="$current_snapshot_prefix" \
        --backup-snapshot-prefix="$backup_snapshot_prefix" \
        --db-backup-base-prefix="$db_backup_base_prefix" \
        --load-archive-base-prefix="$load_archive_base_prefix" \
        --bucket-name="$bucket_name" \
        --job-dir="$job_dir" \
        --batch-timestamp="$job_ts" \
        --index-name="$es_index_name" \
        --alias-name="$es_alias_name" \
        --max-threads="$max_parser_threads" \
        --max-ocr-threads="$max_ocr_threads" \
        checkpoint \
        --checkpoint-limit=1 \
        --checkpoint-file-path="bronze/gamechanger/external-uploads/crawler-downloader/checkpoint.txt" \
        --checkpointed-dir-path="bronze/gamechanger/external-uploads/crawler-downloader/" \
        --checkpoint-ready-marker="manifest.json" \
        --advance-checkpoint="yes"
}
#####
## ## POST COMMANDS
#####
function copy_logs_to_s3() {
    $AWS_CMD s3 cp "$LOCAL_JOB_LOG_PATH" s3://"$S3_BUCKET_NAME/$S3_JOB_LOG_PREFIX"
}
##### ##### #####
## ## ## ## ## ## ACTUAL EXEC FLOW
##### ##### #####
# setup
setup_venv_and_other_commands
echo_tmp_dir_locaton
setup_local_vars_and_dirs
# LOCAL_JOB_LOG_PATH var is now set
setup_local_repo_copy 2>&1 | tee -a "$LOCAL_JOB_LOG_PATH"
configure_repo 2>&1 | tee -a "$LOCAL_JOB_LOG_PATH"
SECONDS=0
cat <<EOF 2>&1 | tee -a "$LOCAL_JOB_LOG_PATH"
STARTING PIPELINE RUN
$(date "+DATE: %Y-%m-%d TIME: %H:%M:%S")
EOF
source "${LOCAL_GC_REPO_BASE_DIR}/dataPipelines/scripts/email_notifications_utils.sh"
# email start
send_email_notification "CRAWLER INGEST" "STARTING"
# main
run_core_ingest 2>&1 | tee -a "$LOCAL_JOB_LOG_PATH"
cat <<EOF 2>&1 | tee -a "$LOCAL_JOB_LOG_PATH"
SUCCESSFULLY FINISHED PIPELINE RUN
$(date "+DATE: %Y-%m-%d TIME: %H:%M:%S")
EOF
# how long?
duration=$SECONDS
echo -e "\n $(($duration / 60)) minutes and $(($duration % 60)) seconds elapsed." 2>&1 | tee -a "$LOCAL_JOB_LOG_PATH"
# email end notification
send_email_notification "CRAWLER INGEST" "FINISHED"
# flush logs
copy_logs_to_s3
<reponame>Boscotiam/client-web-transfer
package models;
import com.fasterxml.jackson.databind.node.ObjectNode;
import play.libs.Json;
/**
* Created by mac on 14/10/2020.
*/
public class PairValue {

    private String label;
    private String value;

    /** Creates an empty pair; use the setters to populate it. */
    public PairValue() {
    }

    /** Creates a pair with the given label and value. */
    public PairValue(String label, String value) {
        this.label = label;
        this.value = value;
    }

    public String getLabel() {
        return label;
    }

    public void setLabel(String label) {
        this.label = label;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    /** Serialises this pair as a JSON object with "label" and "value" fields. */
    public ObjectNode toObjectNode() {
        ObjectNode json = Json.newObject();
        json.put("label", label);
        json.put("value", value);
        return json;
    }
}
|
#!/usr/bin/env bash
# Fail fast and echo every command for CI visibility.
set -xe
# Download all required dependencies
# --production=false forces devDependencies to install even under NODE_ENV=production.
yarn install --production=false
# Produce the production bundle.
NODE_ENV=production yarn build
|
#!/bin/bash
# Cross-compiles the load generator for macOS, Linux and Windows (amd64).
# -trimpath strips local filesystem paths from the binaries for reproducibility.
VERSION=0.02
GOOS=darwin GOARCH=amd64 go build -trimpath -o load-gen-mac-amd64-$VERSION
GOOS=linux GOARCH=amd64 go build -trimpath -o load-gen-linux-amd64-$VERSION
GOOS=windows GOARCH=amd64 go build -trimpath -o load-gen-windows-amd64-$VERSION.exe
<filename>src/SplayLibrary/3D/Transformable3D.cpp<gh_stars>1-10
#include <SplayLibrary/SplayLibrary.hpp>
#include <SplayLibrary/Private/Private.hpp>
namespace spl
{
	// Initializes to the identity transform: zero translation, identity
	// rotation quaternion (w = 1), unit scale.
	Transformable3D::Transformable3D() :
		_translation(0.f, 0.f, 0.f),
		_rotation(1.f, 0.f, 0.f, 0.f),
		_scale(1.f, 1.f, 1.f)
	{
	}

	// Replaces the translation; returns *this so setters can be chained.
	Transformable3D& Transformable3D::setTranslation(const vec3& translation)
	{
		_translation = translation;
		return *this;
	}

	// Adds an offset to the current translation.
	Transformable3D& Transformable3D::move(const vec3& offset)
	{
		return setTranslation(_translation + offset);
	}

	// Replaces the rotation; the quaternion must be approximately unit length.
	Transformable3D& Transformable3D::setRotation(const scp::Quat<float>& rotation)
	{
		assert(std::abs(rotation.normSq() - 1.f) < 1e-2f);
		_rotation = rotation;
		return *this;
	}

	Transformable3D& Transformable3D::setRotation(const vec3& axis, float angle)
	{
		return setRotation(quaternionFromAxisAngle(axis, angle));
	}

	// Composes an additional rotation on top of the current one
	// (left-multiplication: the new rotation is applied last).
	Transformable3D& Transformable3D::rotate(const scp::Quat<float>& rotation)
	{
		assert(std::abs(rotation.normSq() - 1.f) < 1e-4f);
		return setRotation(rotation * _rotation);
	}

	Transformable3D& Transformable3D::rotate(const vec3& axis, float angle)
	{
		return setRotation(quaternionFromAxisAngle(axis, angle) * _rotation);
	}

	Transformable3D& Transformable3D::setScale(const vec3& scale)
	{
		_scale = scale;
		return *this;
	}

	// Uniform-scale convenience overload.
	Transformable3D& Transformable3D::setScale(float scale)
	{
		return setScale({ scale, scale, scale });
	}

	// Multiplies the current scale component-wise.
	Transformable3D& Transformable3D::scale(const vec3& scale)
	{
		return setScale(scale * _scale);
	}

	Transformable3D& Transformable3D::scale(float scale)
	{
		return setScale(vec3{scale, scale, scale} * _scale);
	}

	vec3 Transformable3D::applyTranslationTo(const vec3& vector) const
	{
		return vector + _translation;
	}

	// Rotates the vector by quaternion conjugation: q * v * q^-1.
	vec3 Transformable3D::applyRotationTo(const vec3& vector) const
	{
		scp::Quat<float> v{ 0.f, vector.x, vector.y, vector.z };
		v = _rotation * v * _rotation.inverse();
		return vec3{ v.b, v.c, v.d };
	}

	vec3 Transformable3D::applyScaleTo(const vec3& vector) const
	{
		return vector * _scale;
	}
vec3 Transformable3D::applyTransformTo(const vec3& vector) const
{
return applyTransformTo(applyRotationTo(applyScaleTo(vector)));
}
vec3 Transformable3D::applyInverseTranslationTo(const vec3& vector) const
{
return vector - _translation;
}
vec3 Transformable3D::applyInverseRotationTo(const vec3& vector) const
{
scp::Quat<float> v{ 0.f, vector.x, vector.y, vector.z };
v = _rotation.inverse() * v * _rotation;
return normalize(vec3{ v.b, v.c, v.d });
}
vec3 Transformable3D::applyInverseScaleTo(const vec3& vector) const
{
return vector / _scale;
}
vec3 Transformable3D::applyInverseTransformTo(const vec3& vector) const
{
return applyInverseScaleTo(applyInverseRotationTo(applyInverseTranslationTo(vector)));
}
mat3 Transformable3D::getRotationMatrix() const
{
const float aa = _rotation.a * _rotation.a;
const float bb = _rotation.b * _rotation.b;
const float cc = _rotation.c * _rotation.c;
const float dd = _rotation.d * _rotation.d;
const float ab2 = 2.f * _rotation.a * _rotation.b;
const float ac2 = 2.f * _rotation.a * _rotation.c;
const float ad2 = 2.f * _rotation.a * _rotation.d;
const float bc2 = 2.f * _rotation.b * _rotation.c;
const float bd2 = 2.f * _rotation.b * _rotation.d;
const float cd2 = 2.f * _rotation.c * _rotation.d;
const float r11 = aa + bb - cc - dd;
const float r12 = bc2 - ad2;
const float r13 = ac2 + bd2;
const float r21 = ad2 + bc2;
const float r22 = aa - bb + cc - dd;
const float r23 = cd2 - ab2;
const float r31 = bd2 - ac2;
const float r32 = ab2 + cd2;
const float r33 = aa - bb - cc + dd;
return {
r11, r12, r13,
r21, r22, r23,
r31, r32, r33
};
}
mat4 Transformable3D::getTransformMatrix() const
{
const float& tx = _translation.x;
const float& ty = _translation.y;
const float& tz = _translation.z;
const float aa = _rotation.a * _rotation.a;
const float bb = _rotation.b * _rotation.b;
const float cc = _rotation.c * _rotation.c;
const float dd = _rotation.d * _rotation.d;
const float ab2 = 2.f * _rotation.a * _rotation.b;
const float ac2 = 2.f * _rotation.a * _rotation.c;
const float ad2 = 2.f * _rotation.a * _rotation.d;
const float bc2 = 2.f * _rotation.b * _rotation.c;
const float bd2 = 2.f * _rotation.b * _rotation.d;
const float cd2 = 2.f * _rotation.c * _rotation.d;
const float r11 = aa + bb - cc - dd;
const float r12 = bc2 - ad2;
const float r13 = ac2 + bd2;
const float r21 = ad2 + bc2;
const float r22 = aa - bb + cc - dd;
const float r23 = cd2 - ab2;
const float r31 = bd2 - ac2;
const float r32 = ab2 + cd2;
const float r33 = aa - bb - cc + dd;
const float& sx = _scale.x;
const float& sy = _scale.y;
const float& sz = _scale.z;
return {
r11 * sx, r12 * sy, r13 * sz, tx,
r21 * sx, r22 * sy, r23 * sz, ty,
r31 * sx, r32 * sy, r33 * sz, tz,
0.f , 0.f , 0.f , 1.f
};
}
mat3 Transformable3D::getInverseRotationMatrix() const
{
scp::Quat<float> rot = _rotation.inverse();
const float aa = rot.a * rot.a;
const float bb = rot.b * rot.b;
const float cc = rot.c * rot.c;
const float dd = rot.d * rot.d;
const float ab2 = 2.f * rot.a * rot.b;
const float ac2 = 2.f * rot.a * rot.c;
const float ad2 = 2.f * rot.a * rot.d;
const float bc2 = 2.f * rot.b * rot.c;
const float bd2 = 2.f * rot.b * rot.d;
const float cd2 = 2.f * rot.c * rot.d;
const float r11 = aa + bb - cc - dd;
const float r12 = bc2 - ad2;
const float r13 = ac2 + bd2;
const float r21 = ad2 + bc2;
const float r22 = aa - bb + cc - dd;
const float r23 = cd2 - ab2;
const float r31 = bd2 - ac2;
const float r32 = ab2 + cd2;
const float r33 = aa - bb - cc + dd;
return {
r11, r12, r13,
r21, r22, r23,
r31, r32, r33
};
}
// Build the 4x4 matrix that undoes this transform: inverse scale, then the
// inverse rotation, with the negated translation folded into the last column.
mat4 Transformable3D::getInverseTransformMatrix() const
{
    // Reciprocal per-axis scale (assumes no component of _scale is zero).
    const float sx = 1.f / _scale.x;
    const float sy = 1.f / _scale.y;
    const float sz = 1.f / _scale.z;
    const scp::Quat<float> rot = _rotation.inverse();
    // Quaternion-to-matrix terms for the inverse rotation.
    const float aa = rot.a * rot.a;
    const float bb = rot.b * rot.b;
    const float cc = rot.c * rot.c;
    const float dd = rot.d * rot.d;
    const float ab2 = 2.f * rot.a * rot.b;
    const float ac2 = 2.f * rot.a * rot.c;
    const float ad2 = 2.f * rot.a * rot.d;
    const float bc2 = 2.f * rot.b * rot.c;
    const float bd2 = 2.f * rot.b * rot.d;
    const float cd2 = 2.f * rot.c * rot.d;
    // Each row is pre-multiplied by the inverse scale of its axis,
    // i.e. M^-1 = S^-1 * R^-1.
    const float r11 = sx * (aa + bb - cc - dd);
    const float r12 = sx * (bc2 - ad2);
    const float r13 = sx * (ac2 + bd2);
    const float r21 = sy * (ad2 + bc2);
    const float r22 = sy * (aa - bb + cc - dd);
    const float r23 = sy * (cd2 - ab2);
    const float r31 = sz * (bd2 - ac2);
    const float r32 = sz * (ab2 + cd2);
    const float r33 = sz * (aa - bb - cc + dd);
    const float tx = -_translation.x;
    const float ty = -_translation.y;
    const float tz = -_translation.z;
    // Last column applies the rotated+scaled negative translation:
    // M^-1 * p = S^-1 R^-1 (p - t).
    return {
        r11, r12, r13, r11 * tx + r12 * ty + r13 * tz,
        r21, r22, r23, r21 * tx + r22 * ty + r23 * tz,
        r31, r32, r33, r31 * tx + r32 * ty + r33 * tz,
        0.f, 0.f, 0.f, 1.f
    };
}
// Read-only access to the translation component.
const vec3& Transformable3D::getTranslation() const
{
    return _translation;
}
// Current orientation quaternion, returned by value.
// NOTE(review): the top-level const on a by-value return has no effect for
// callers; consider `const scp::Quat<float>&` or plain value — confirm intent.
const scp::Quat<float> Transformable3D::getRotation() const
{
    return _rotation;
}
// Read-only access to the per-axis scale component.
const vec3& Transformable3D::getScale() const
{
    return _scale;
}
// Construct a unit quaternion encoding a rotation of `angle` radians about
// `axis`. A zero-length axis yields the identity quaternion (no rotation).
scp::Quat<float> Transformable3D::quaternionFromAxisAngle(const vec3& axis, float angle)
{
    if (length(axis) == 0.f)
    {
        return { 1.f, 0.f, 0.f, 0.f };
    }

    const float halfAngle = angle / 2.f;
    const float s = std::sin(halfAngle);
    const vec3 n = normalize(axis);

    // (w, x, y, z) = (cos(θ/2), sin(θ/2) * n)
    return { std::cos(halfAngle), n.x * s, n.y * s, n.z * s };
}
}
|
#!/bin/sh
# eManVersioning.sh
# Derives the build number from the total git commit count and writes it,
# together with a tag/branch-derived marketing version, into Info.plist.
BuildNumberFromGitCommitCount=$(git rev-list --all --count)
echo "Final build number: $BuildNumberFromGitCommitCount"
# NOTE(review): the single quotes inside the PlistBuddy command string may end
# up stored as part of the plist value — confirm PlistBuddy strips them.
/usr/libexec/PlistBuddy -c "Set :CFBundleVersion '$BuildNumberFromGitCommitCount'" "../Project/Devfest/Devfest-Info.plist"
# `appversion` presumably maps CI_COMMIT_REF_NAME plus the tag list to a
# semantic version — TODO confirm (external tool, not visible here).
MainVersionFinal=$(appversion ${CI_COMMIT_REF_NAME} "$(git tag -l)")
/usr/libexec/PlistBuddy -c "Set :CFBundleShortVersionString '$MainVersionFinal'" "../Project/Devfest/Devfest-Info.plist"
|
#!/bin/sh
# Defines two variables holding YAML fragments for a Spack compiler setup on
# macOS Mojave: clang 9.0.1 and 9.0.0 from Homebrew LLVM, gfortran for Fortran.
# NOTE(review): heredoc indentation restored to Spack's compilers.yaml
# convention — verify against the consumer of these variables.

# One-line list form used for "all packages" compiler preferences.
COMPILER_ALL_PACKAGES=$(cat <<EOF
compiler: [clang@9.0.1 arch=darwin-mojave-skylake, clang@9.0.0 arch=darwin-mojave-skylake]
EOF
)

# Full per-compiler definitions (binary paths, flags, OS, target).
COMPILER_DEFINITIONS=$(cat <<EOF
compilers:
- compiler:
    environment: {}
    extra_rpaths: []
    flags: {}
    modules: []
    operating_system: mojave
    paths:
      cc: /usr/local/Cellar/llvm/9.0.1/bin/clang
      cxx: /usr/local/Cellar/llvm/9.0.1/bin/clang++
      f77: /usr/local/bin/gfortran
      fc: /usr/local/bin/gfortran
    spec: clang@9.0.1
    target: x86_64
- compiler:
    environment: {}
    extra_rpaths: []
    flags: {}
    modules: []
    operating_system: mojave
    paths:
      cc: /usr/local/Cellar/llvm/9.0.0_1/bin/clang
      cxx: /usr/local/Cellar/llvm/9.0.0_1/bin/clang++
      f77: /usr/local/bin/gfortran
      fc: /usr/local/bin/gfortran
    spec: clang@9.0.0
    target: x86_64
EOF
)
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Uninstall XASecure from the Hadoop component: restore the configuration that
# was saved before XASecure was installed.
COMPONENT_NAME=hadoop
CFG_DIR=/etc/${COMPONENT_NAME}/conf
XASECURE_ROOT=/etc/xasecure/${COMPONENT_NAME}
BACKUP_TYPE=pre
CUR_VERSION_FILE=${XASECURE_ROOT}/.current_version
# Two restore strategies: if a version marker exists, restore the whole
# pre-install snapshot directory; otherwise recover individually saved files.
if [ -f ${CUR_VERSION_FILE} ]
then
	XASECURE_VERSION=`cat ${CUR_VERSION_FILE}`
	PRE_INSTALL_CONFIG=${XASECURE_ROOT}/${BACKUP_TYPE}-${XASECURE_VERSION}
	dt=`date '+%Y%m%d%H%M%S'`
	if [ -d "${PRE_INSTALL_CONFIG}" ]
	then
		# Move the live config aside (timestamped), then copy the snapshot
		# back with cpio, preserving the directory tree.
		[ -d ${CFG_DIR} ] && mv ${CFG_DIR} ${CFG_DIR}-${dt}
		( cd ${PRE_INSTALL_CONFIG} ; find . -print | cpio -pdm ${CFG_DIR} )
		# Retire the version marker so a re-run does not restore twice.
		[ -f ${CUR_VERSION_FILE} ] && mv ${CUR_VERSION_FILE} ${CUR_VERSION_FILE}-uninstalled-${dt}
		echo "XASecure version - ${XASECURE_VERSION} has been uninstalled successfully."
	else
		echo "ERROR: Unable to find pre-install configuration directory: [${PRE_INSTALL_CONFIG}]"
		exit 1
	fi
else
	cd ${CFG_DIR}
	# Hidden backups look like ".name.<digits>"; recover the base file names
	# by stripping the numeric suffix and the leading "./".
	saved_files=`find . -type f -name '.*' | sort | grep -v -- '-new.' | grep '[0-9]*$' | grep -v -- '-[0-9]*$' | sed -e 's:\.[0-9]*$::' | sed -e 's:^./::' | sort -u`
	dt=`date '+%Y%m%d%H%M%S'`
	if [ "${saved_files}" != "" ]
	then
		for f in ${saved_files}
		do
			# Oldest numbered backup sorts first.
			oldf=`ls ${f}.[0-9]* | sort | head -1`
			if [ -f "${oldf}" ]
			then
				nf=`echo ${f} | sed -e 's:^\.::'`
				if [ -f "${nf}" ]
				then
					# Save the current file (timestamped hidden copy),
					# then restore the backup over it.
					echo "+cp -p ${nf} .${nf}-${dt}"
					cp -p ${nf} .${nf}-${dt}
					echo "+cp ${oldf} ${nf}"
					cp ${oldf} ${nf}
				else
					echo "ERROR: ${nf} not found to save. However, old file is being recovered."
					echo "+cp -p ${oldf} ${nf}"
					cp -p ${oldf} ${nf}
				fi
			fi
		done
		echo "XASecure configuration has been uninstalled successfully."
	fi
fi
|
#!/usr/bin/env bash
#
# I've been experimenting with GitHub Actions for CI/CD, so this is more like a
# "post" deploy script for the GHA deploy stuff.
#
# See notes in the head of .github/workflows/cicd.yml
#

# Default to staging.
# Bug fix: ${1:=stg} attempts to ASSIGN to the positional parameter $1, which
# bash rejects ("cannot assign in this way"); ${1:-stg} just substitutes the
# default without assignment.
export DEPLOY_TO="${1:-stg}"

export PATH="$HOME/bin:$PATH"

PHP=$(which php-8.0)
COMPOSER=$(which composer-8.0)

# Abort early if either required binary is missing.
if [ -z "$PHP" ]; then
    echo "Unable to locate php binary"
    exit 1
fi
if [ -z "$COMPOSER" ]; then
    echo "Unable to locate composer binary"
    exit 1
fi

# Install dependencies, run migrations, and rebuild Laravel's caches.
$PHP $COMPOSER install
$PHP artisan migrate --force
$PHP artisan config:cache
$PHP artisan route:cache
$PHP artisan view:cache

# Seed the database only on staging deploys.
if [ "$DEPLOY_TO" = 'stg' ]; then
    $PHP artisan db:seed
fi
|
import numpy as np
import math
import os
from numpy.matrixlib.defmatrix import matrix
from FoxPacket import *
from MulticastConfig import *
from Firmware import *
class FoxNetwork:
    """Model of a Fox's-algorithm matrix-multiplication network-on-chip.

    Holds the grid dimensions, the Fox sub-network layout, the packet format,
    the matrix partitioning, and the (optional) multicast configuration, and
    generates memory-initialisation files plus VHDL/C configuration headers.
    """

    def __init__(self, *, networkRows, networkCols, resultNodeCoord, \
            romNodeCoord, \
            totalMatrixSize, foxNetworkStages, multicastGroupBits, \
            multicastCoordBits, \
            readyFlagBits, resultFlagBits, matrixTypeBits, matrixCoordBits, \
            foxFirmware, resultFirmware, A=None, B=None, \
            useMatrixInitFile=True, multicastAvailable, useMulticast, multicastGroupNodes, \
            multicastNetworkRows, multicastNetworkCols, \
            multicastFifoDepth, \
            foxNodeFifos, resultNodeFifos, \
            resultUartFifoDepth, \
            hdlFolder=None, firmwareFolder=None):
        # Entire network details
        self.networkRows = networkRows
        self.networkCols = networkCols
        self.networkNodes = self.networkRows * self.networkCols
        self.resultNodeCoord = resultNodeCoord
        self.romNodeCoord = romNodeCoord

        # Fox algorithm network details
        self.foxNetworkStages = foxNetworkStages
        self.foxNetworkNodes = (self.foxNetworkStages ** 2)

        # Enough coordinate bits to address the larger network dimension.
        coordBits = math.ceil(math.log2(max(self.networkRows, self.networkCols)))
        matrixElementBits = 32
        self.packetFormat = FoxPacket(coordBits=coordBits, multicastCoordBits=multicastCoordBits, multicastGroupBits=multicastGroupBits, readyFlagBits=readyFlagBits, resultFlagBits=resultFlagBits, matrixTypeBits=matrixTypeBits, matrixCoordBits=matrixCoordBits, matrixElementBits=matrixElementBits)

        # Matrix details: each Fox stage works on a square tile of the full
        # matrix (foxMatrixSize x foxMatrixSize).
        self.totalMatrixSize = totalMatrixSize
        self.totalMatrixElements = (self.totalMatrixSize ** 2)
        self.foxMatrixSize = int(self.totalMatrixSize / self.foxNetworkStages)
        self.foxMatrixElements = (self.foxMatrixSize ** 2)
        # Double-buffered tile storage per Fox node.
        self.foxFifoDepth = 2 * self.foxMatrixElements
        self.resultFifoDepth = self.totalMatrixElements
        self.foxNodeFifos = foxNodeFifos
        self.resultNodeFifos = resultNodeFifos
        self.resultUartFifoDepth = resultUartFifoDepth

        # Do not set A or B by default
        self.A = A
        self.B = B
        self.useMatrixInitFile = useMatrixInitFile
        if A is not None and B is not None:
            assert A.shape[0] == self.totalMatrixSize, "A matrix dimensions do not match totalMatrixSize"
            assert B.shape[0] == self.totalMatrixSize, "B matrix dimensions do not match totalMatrixSize"
            print(self.A)
            print(self.B)

        self.foxFirmware = foxFirmware
        self.resultFirmware = resultFirmware

        self.useMulticast = useMulticast
        if multicastAvailable == True:
            if self.useMulticast == True:
                self.multicastConfig = MulticastConfig(useMulticast=useMulticast, \
                        multicastGroupNodes=multicastGroupNodes, \
                        multicastNetworkRows=multicastNetworkRows, \
                        multicastNetworkCols=multicastNetworkCols, \
                        multicastFifoDepth=multicastFifoDepth)
            else:
                # Multicast hardware present but disabled: emit a zeroed config.
                self.multicastConfig = MulticastConfig(useMulticast=useMulticast, \
                        multicastGroupNodes=0, \
                        multicastNetworkRows=0, \
                        multicastNetworkCols=0, \
                        multicastFifoDepth=0)
        else:
            self.multicastConfig = None

        if hdlFolder is None:
            raise Exception("HDL folder not given")
        self.hdlFolder = hdlFolder

        if firmwareFolder is None:
            raise Exception("Firmware folder not given")
        self.firmwareFolder = firmwareFolder

    '''
    Convert a node's (x, y) coordinates into a node number
    '''
    def node_coord_to_node_number(self, coord):
        # NOTE(review): this uses networkRows as the row stride while
        # node_number_to_node_coord uses foxNetworkStages — confirm the two
        # mappings are intentionally different (whole network vs Fox grid).
        nodeNumber = coord['y'] * self.networkRows + coord['x']
        return nodeNumber

    '''
    Convert a node's number to (x, y) coordinates
    '''
    def node_number_to_node_coord(self, nodeNumber):
        nodeCoords = {}
        nodeCoords['x'] = nodeNumber % self.foxNetworkStages
        nodeCoords['y'] = nodeNumber // self.foxNetworkStages
        return nodeCoords

    '''
    Set the A and B matrices that will be multiplied using Fox's algorithm
    '''
    def set_matrices(self, *, A, B):
        assert A.shape[0] == self.totalMatrixSize, "A matrix dimensions do not match totalMatrixSize"
        assert B.shape[0] == self.totalMatrixSize, "B matrix dimensions do not match totalMatrixSize"
        self.A = A
        self.B = B

    '''
    Encode a matrix to packets and write to file
    '''
    def write_matrix_to_file(self, *, matrixFile, nodeCoord, multicastCoord, matrixType, matrix):
        readyFlag = 0
        resultFlag = 0
        packets = self.packetFormat.encode_matrix(destCoord=nodeCoord, multicastCoord=multicastCoord, \
                resultFlag=resultFlag, readyFlag=readyFlag, matrixType=matrixType, matrix=matrix)

        # Append each packet to a file
        file = open(matrixFile, "a")
        for packet in packets:
            file.write(packet)
        file.close()
        return packets

    '''
    Encode a matrix to packet
    '''
    def encode_matrix(self, *, nodeCoord, multicastCoord, matrixType, matrix):
        readyFlag = 0
        resultFlag = 0
        packets = self.packetFormat.encode_matrix(destCoord=nodeCoord, multicastCoord=multicastCoord, \
                resultFlag=resultFlag, readyFlag=readyFlag, matrixType=matrixType, matrix=matrix)
        return packets

    '''
    Write a list of packets to a file
    '''
    def write_packets_to_file(self, *, packets, fileName):
        # Append each packet to a file
        file = open(fileName, "a")
        for packet in packets:
            file.write(packet)
        file.close()

    '''
    Pad a matrix file with 0 entries
    '''
    def pad_matrix_file(self, *, matrixFile, nodeCoord, paddingRequired):
        padding = []
        multicastCoord = {'x' : 0, 'y' : 0}
        # Padding packets are zero-valued type-A elements at coordinate (0, 0).
        for _ in range(paddingRequired):
            padding.append(self.packetFormat.create_matrix_packet(destCoord=nodeCoord, multicastCoord=multicastCoord, \
                    readyFlag=0, resultFlag=0, matrixType=MatrixTypes.A, matrixCoord={'x' : 0, 'y' : 0}, matrixElement=0))

        # Append padding to a file
        file = open(matrixFile, "a")
        for p in padding:
            file.write(p)
        file.close()

    '''
    Create memory initialisation files for each node and each matrix
    '''
    def create_matrix_init_files(self):
        if self.useMatrixInitFile == False:
            print("Matrix init file not used")
            return

        if self.A is None or self.B is None:
            print("Matrices not initialised")
            return

        import os
        scriptLocation = os.path.realpath(__file__)
        scriptDirectory = os.path.dirname(scriptLocation)
        initFilePrefix = "{directory}/../{hdlFolder}/memory/".format(directory=scriptDirectory, hdlFolder=self.hdlFolder)
        initFileSuffix = ".mif"

        aPackets = []
        bPackets = []
        packets = []
        combinedFileName = initFilePrefix + "combined" + initFileSuffix
        # Start from a clean combined file (packets are appended below).
        if os.path.exists(combinedFileName):
            os.remove(combinedFileName)

        # Loop through the nodes
        for nodeNumber in range(self.foxNetworkNodes):
            elementsWritten = 0

            # Delete the file before writing to it
            matrixFileName = initFilePrefix + "node{nodeNumber}".format(nodeNumber=nodeNumber) + initFileSuffix
            if os.path.exists(matrixFileName):
                os.remove(matrixFileName)
            else:
                # Make the memory directory
                if not os.path.isdir("{directory}/../{hdlFolder}/memory".format(directory=scriptDirectory, hdlFolder=self.hdlFolder)):
                    os.mkdir("{directory}/../{hdlFolder}/memory".format(directory=scriptDirectory, hdlFolder=self.hdlFolder))

            nodeCoord = self.node_number_to_node_coord(nodeNumber)

            # Split the matrices: each node owns one foxMatrixSize-square tile.
            nodeMatrixXStart = int(nodeCoord['x'] * self.foxMatrixSize)
            nodeMatrixXEnd = int(nodeCoord['x'] * self.foxMatrixSize + self.foxMatrixSize)
            nodeMatrixYStart = int(nodeCoord['y'] * self.foxMatrixSize)
            nodeMatrixYEnd = int(nodeCoord['y'] * self.foxMatrixSize + self.foxMatrixSize)

            # Write A
            nodeA = self.A[nodeMatrixYStart:nodeMatrixYEnd, nodeMatrixXStart:nodeMatrixXEnd]
            multicastCoord = {'x' : 0, 'y' : 0}
            matrixType = MatrixTypes.A

            # Encode the matrix and write to file
            newAPackets = self.encode_matrix(nodeCoord=nodeCoord, multicastCoord=multicastCoord, matrixType=matrixType, matrix=nodeA)
            aPackets += newAPackets
            elementsWritten += np.size(nodeA)

            # Write B
            nodeB = self.B[nodeMatrixYStart:nodeMatrixYEnd, nodeMatrixXStart:nodeMatrixXEnd]
            multicastCoord = {'x' : 0, 'y' : 0}
            matrixType = MatrixTypes.B

            # Encode the matrix and write to file
            newBPackets = self.encode_matrix(nodeCoord=nodeCoord, multicastCoord=multicastCoord, matrixType=matrixType, matrix=nodeB)
            bPackets += newBPackets
            elementsWritten += np.size(nodeB)

        # All A packets first, then all B packets, in one combined file.
        packets = aPackets + bPackets
        self.write_packets_to_file(packets=packets, fileName=combinedFileName)

    '''
    Generate VHDL package containing network parameters
    '''
    def write_network_header_file(self, fileName="fox_defs.vhd"):
        from jinja2 import Environment, FileSystemLoader
        import os
        scriptLocation = os.path.realpath(__file__)
        scriptDirectory = os.path.dirname(scriptLocation)
        fileLoader = FileSystemLoader('{directory}/templates'.format(directory=scriptDirectory))
        env = Environment(loader=fileLoader, trim_blocks=True, lstrip_blocks=True)
        template = env.get_template('fox_defs.vhd')
        output = template.render(foxNetwork=self)

        # Write output to file
        headerFileName = '{directory}/../{hdlFolder}/src/{fileName}'.format(directory=scriptDirectory, hdlFolder=self.hdlFolder, fileName=fileName)
        headerFile = open(headerFileName, 'w')
        headerFile.write(output)
        headerFile.close()

    '''
    Generate VHDL package containing packet format
    '''
    def write_packet_header_file(self, fileName="packet_defs.vhd"):
        self.packetFormat.write_header_file(hdlFolder=self.hdlFolder, fileName=fileName)

    '''
    Generate VHDL package containing multicast configuration
    '''
    def write_multicast_header_file(self, fileName="multicast_defs.vhd"):
        # Skipped entirely when the network was built without multicast support.
        if self.multicastConfig is not None:
            self.multicastConfig.write_header_file(hdlFolder=self.hdlFolder, fileName=fileName)

    '''
    Write matrix config files
    '''
    def write_matrix_config_file(self, vhdlFileName="matrix_config.vhd", \
            cFileName="matrix_config.h"):
        from jinja2 import Environment, FileSystemLoader
        import os
        scriptLocation = os.path.realpath(__file__)
        scriptDirectory = os.path.dirname(scriptLocation)
        fileLoader = FileSystemLoader('{directory}/templates'.format(directory=scriptDirectory))
        env = Environment(loader=fileLoader, trim_blocks=True, lstrip_blocks=True)
        vhdlTemplate = env.get_template('matrix_config.vhd')
        vhdlOutput = vhdlTemplate.render(foxNetwork=self)

        # Write output to file
        vhdlHeaderFileName = '{directory}/../{hdlFolder}/src/{fileName}'.format(directory=scriptDirectory, hdlFolder=self.hdlFolder, fileName=vhdlFileName)
        vhdlHeaderFile = open(vhdlHeaderFileName, 'w')
        vhdlHeaderFile.write(vhdlOutput)
        vhdlHeaderFile.close()

        # The same configuration is also rendered as a C header for firmware.
        cTemplate = env.get_template('matrix_config.h')
        cOutput = cTemplate.render(foxNetwork=self)

        # Write output to file
        cHeaderFileName = '{directory}/../{firmwareFolder}/{fileName}'.format(directory=scriptDirectory, firmwareFolder=self.firmwareFolder, fileName=cFileName)
        cHeaderFile = open(cHeaderFileName, 'w')
        cHeaderFile.write(cOutput)
        cHeaderFile.close()

    '''
    Write firmware config files
    '''
    def write_firmware_config_file(self, vhdlFileName="firmware_config.vhd"):
        from jinja2 import Environment, FileSystemLoader
        import os
        scriptLocation = os.path.realpath(__file__)
        scriptDirectory = os.path.dirname(scriptLocation)
        fileLoader = FileSystemLoader('{directory}/templates'.format(directory=scriptDirectory))
        env = Environment(loader=fileLoader, trim_blocks=True, lstrip_blocks=True)
        vhdlTemplate = env.get_template('firmware_config.vhd')
        vhdlOutput = vhdlTemplate.render(foxNetwork=self)

        # Write output to file
        vhdlHeaderFileName = '{directory}/../{hdlFolder}/src/{fileName}'.format(directory=scriptDirectory, hdlFolder=self.hdlFolder, fileName=vhdlFileName)
        vhdlHeaderFile = open(vhdlHeaderFileName, 'w')
        vhdlHeaderFile.write(vhdlOutput)
        vhdlHeaderFile.close()
|
import UIKit
/// A single meal: a display name and its calorie count.
class Meal {
    var name : String
    var calories : Int

    init(name: String, calories: Int) {
        self.name = name
        self.calories = calories
    }
}
/// One day's worth of meals.
class Day {
    var meals : [Meal]

    init(meals : [Meal]) {
        self.meals = meals
    }

    /// Sum of the calories of every meal recorded for this day.
    func totalCalories() -> Int {
        return meals.reduce(0) { $0 + $1.calories }
    }
}
/// Demo view controller: builds sample meals/days and prints per-day totals.
class ViewController: UIViewController {
    // Force-unwrapped because it is assigned in viewDidLoad before any use.
    var mealManager : MealManager!

    override func viewDidLoad() {
        super.viewDidLoad()
        mealManager = MealManager()
        let meal1 = Meal(name: "Pizza", calories: 600)
        let meal2 = Meal(name: "Salad", calories: 200)
        let meal3 = Meal(name: "Fruit", calories: 100)
        let day1 = Day(meals: [meal1, meal2, meal3])
        let day2 = Day(meals: [meal2, meal3])
        mealManager.addDay(day: day1)
        mealManager.addDay(day: day2)
        print(mealManager.totalCaloriesForDay(at: 0)) // Should print 900
        print(mealManager.totalCaloriesForDay(at: 1)) // Should print 300
    }
}
/// Keeps an ordered list of days and answers per-day calorie queries.
class MealManager {
    private var days = [Day]()

    init() {}

    /// Append a day to the ordered collection.
    func addDay(day : Day) {
        days.append(day)
    }

    /// Total calories for the day at `index` (0-based insertion order).
    func totalCaloriesForDay(at index : Int) -> Int {
        return days[index].totalCalories()
    }
}
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# Regression test: write nodes with numeric valuetype values (including 0 and
# an out-of-range-looking 200), plus variants using datatype and a symbolic
# "string" type, then read everything back.
B=`basename $0 .sh`
cd `dirname $0`
source ./rungraphd

# Start from a clean database directory.
rm -rf $D
rungraphd -d${D} -bty <<-'EOF'
write (name="1" valuetype=1)
write (name="2" valuetype=2)
write (name="3" valuetype=3)
write (name="4" valuetype=4)
write (name="5" valuetype=5)
write (name="6" valuetype=6)
write (name="7" valuetype=7)
write (name="8" valuetype=8)
write (name="9" valuetype=9)
write (name="10" valuetype=10)
write (name="11" valuetype=11)
write (name="200" valuetype=200)
write (name="0" valuetype=0)
write (name="201" datatype=201)
write (name="256" datatype=256)
write (name="string" valuetype=string)
read (result=((name valuetype datatype)))
EOF
# NOTE(review): entries 201/256 use datatype= while the rest use valuetype= —
# presumably testing the alias/legacy spelling; confirm against graphd docs.
rm -rf $D
|
//go:build go1.18
// +build go1.18
package mr
import (
"fmt"
"math/rand"
"runtime"
"strings"
"testing"
"time"
"github.com/stretchr/testify/assert"
"go.uber.org/goleak"
)
// FuzzMapReduce fuzzes MapReduce with randomized input sizes, worker counts,
// and randomly injected panics in the generator, mapper, and reducer stages.
// When no panic is injected it checks the sum-of-squares result; when one is
// injected it asserts the panic propagates to the caller. Goroutine leaks are
// checked via goleak on every iteration.
func FuzzMapReduce(f *testing.F) {
	rand.Seed(time.Now().UnixNano())

	f.Add(int64(10), runtime.NumCPU())
	f.Fuzz(func(t *testing.T, n int64, workers int) {
		// Clamp n into [5000, 10000) so every run is non-trivial but bounded.
		n = n%5000 + 5000
		// Each stage independently panics with ~1% probability, at a random index.
		genPanic := rand.Intn(100) == 0
		mapperPanic := rand.Intn(100) == 0
		reducerPanic := rand.Intn(100) == 0
		genIdx := rand.Int63n(n)
		mapperIdx := rand.Int63n(n)
		reducerIdx := rand.Int63n(n)
		// Closed form for sum of squares 0^2 + 1^2 + ... + (n-1)^2.
		squareSum := (n - 1) * n * (2*n - 1) / 6
		fn := func() (interface{}, error) {
			defer goleak.VerifyNone(t, goleak.IgnoreCurrent())
			return MapReduce(func(source chan<- interface{}) {
				for i := int64(0); i < n; i++ {
					source <- i
					if genPanic && i == genIdx {
						panic("foo")
					}
				}
			}, func(item interface{}, writer Writer, cancel func(error)) {
				v := item.(int64)
				if mapperPanic && v == mapperIdx {
					panic("bar")
				}
				writer.Write(v * v)
			}, func(pipe <-chan interface{}, writer Writer, cancel func(error)) {
				var idx int64
				var total int64
				for v := range pipe {
					if reducerPanic && idx == reducerIdx {
						panic("baz")
					}
					total += v.(int64)
					idx++
				}
				writer.Write(total)
			}, WithWorkers(workers%50+runtime.NumCPU()))
		}

		if genPanic || mapperPanic || reducerPanic {
			// Include the full random configuration in the failure message so
			// a failing seed can be reproduced.
			var buf strings.Builder
			buf.WriteString(fmt.Sprintf("n: %d", n))
			buf.WriteString(fmt.Sprintf(", genPanic: %t", genPanic))
			buf.WriteString(fmt.Sprintf(", mapperPanic: %t", mapperPanic))
			buf.WriteString(fmt.Sprintf(", reducerPanic: %t", reducerPanic))
			buf.WriteString(fmt.Sprintf(", genIdx: %d", genIdx))
			buf.WriteString(fmt.Sprintf(", mapperIdx: %d", mapperIdx))
			buf.WriteString(fmt.Sprintf(", reducerIdx: %d", reducerIdx))
			assert.Panicsf(t, func() { fn() }, buf.String())
		} else {
			val, err := fn()
			assert.Nil(t, err)
			assert.Equal(t, squareSum, val.(int64))
		}
	})
}
|
/*
*
* Dashboard actions
*
*/
import {
DEFAULT_ACTION,
LOAD_EVENTS,
LOAD_EVENTS_ERROR,
LOAD_EVENTS_SUCCESS,
LOAD_FEATURED_EVENTS,
LOAD_FEATURED_EVENTS_ERROR,
LOAD_FEATURED_EVENTS_SUCCESS,
} from "./constants";
/**
 * No-op action used as the reducer's default.
 */
export function defaultAction() {
  return {
    type: DEFAULT_ACTION
  };
}
/**
 * Request a page of events for a tenant.
 *
 * @param tenantId   tenant whose events are loaded
 * @param skip       paging offset (records to skip)
 * @param take       page size
 * @param searchTerm optional filter term
 */
export function loadEvents(tenantId, skip, take, searchTerm) {
  // Fix: removed leftover console.log debug statement.
  return {
    type: LOAD_EVENTS,
    tenantId,
    skip,
    take,
    searchTerm,
  };
}
/**
 * Signal that loading events failed.
 *
 * @param error the error raised by the fetch
 */
export function loadEventsError(error) {
  return {
    type: LOAD_EVENTS_ERROR,
    error,
  };
}
/**
 * Deliver a successfully loaded page of events.
 *
 * @param events the loaded events payload
 */
export function loadEventsSuccess(events) {
  return {
    type: LOAD_EVENTS_SUCCESS,
    events,
  };
}
/**
 * Request a page of featured events for a tenant.
 *
 * @param tenantId tenant whose featured events are loaded
 * @param skip     paging offset (records to skip)
 * @param take     page size
 */
export function loadFeaturedEvents(tenantId, skip, take) {
  // Fix: removed leftover console.log debug statement.
  return {
    type: LOAD_FEATURED_EVENTS,
    tenantId,
    skip,
    take,
  };
}
/**
 * Signal that loading featured events failed.
 *
 * @param featuredError the error raised by the fetch
 */
export function loadFeaturedEventsError(featuredError) {
  return {
    type: LOAD_FEATURED_EVENTS_ERROR,
    featuredError,
  };
}
/**
 * Deliver successfully loaded featured events.
 *
 * @param featuredEvents the loaded featured-events payload
 */
export function loadFeaturedEventsSuccess(featuredEvents) {
  // Fix: removed leftover console.log debug statement (with "yeild" typo).
  return {
    type: LOAD_FEATURED_EVENTS_SUCCESS,
    featuredEvents,
  };
}
|
#!/bin/sh
##
## Copyright (c) 2014 The WebM project authors. All Rights Reserved.
##
## Use of this source code is governed by a BSD-style license
## that can be found in the LICENSE file in the root of the source
## tree. An additional intellectual property rights grant can be found
## in the file PATENTS. All contributing project authors may
## be found in the AUTHORS file in the root of the source tree.
##
## This file tests the libvpx vp8_multi_resolution_encoder example. To add new
## tests to this file, do the following:
## 1. Write a shell function (this is your test).
## 2. Add the function to vp8_mre_tests (on a new line).
##
. $(dirname $0)/tools_common.sh
# Environment check: $YUV_RAW_INPUT is required.
# Environment check: verifies the raw YUV input exists and that the
# vp8_multi_resolution_encoder tool can be located. Only enforced when the
# build has CONFIG_MULTI_RES_ENCODING enabled.
vp8_multi_resolution_encoder_verify_environment() {
  if [ "$(vpx_config_option_enabled CONFIG_MULTI_RES_ENCODING)" = "yes" ]; then
    if [ ! -e "${YUV_RAW_INPUT}" ]; then
      elog "Libvpx test data must exist in LIBVPX_TEST_DATA_PATH."
      return 1
    fi
    # Bug fix: "local readonly app=..." does not mark the variable read-only;
    # it declares an extra local literally named "readonly".
    local app="vp8_multi_resolution_encoder"
    if [ -z "$(vpx_tool_path "${app}")" ]; then
      elog "${app} not found. It must exist in LIBVPX_BIN_PATH or its parent."
      return 1
    fi
  fi
}
# Runs vp8_multi_resolution_encoder. Simply forwards all arguments to
# vp8_multi_resolution_encoder after building path to the executable.
# Runs vp8_multi_resolution_encoder. Simply forwards all arguments to
# vp8_multi_resolution_encoder after building path to the executable.
vp8_mre() {
  # Bug fix: "local readonly encoder=..." declared an extra local named
  # "readonly" instead of making the variable read-only.
  local encoder="$(vpx_tool_path vp8_multi_resolution_encoder)"
  if [ ! -x "${encoder}" ]; then
    elog "${encoder} does not exist or is not executable."
    return 1
  fi

  eval "${VPX_TEST_PREFIX}" "${encoder}" "$@" ${devnull}
}
# Encodes the raw input at three resolutions/bitrates with three temporal
# layers each, then verifies all three output IVF files were produced.
vp8_multi_resolution_encoder_three_formats() {
  # Bug fix: "local readonly name=..." declared an extra local named
  # "readonly" instead of making these variables read-only.
  local output_files="${VPX_TEST_OUTPUT_DIR}/vp8_mre_0.ivf
                      ${VPX_TEST_OUTPUT_DIR}/vp8_mre_1.ivf
                      ${VPX_TEST_OUTPUT_DIR}/vp8_mre_2.ivf"
  local layer_bitrates="150 80 50"
  local keyframe_insert="200"
  local temporal_layers="3 3 3"
  local framerate="30"
  if [ "$(vpx_config_option_enabled CONFIG_MULTI_RES_ENCODING)" = "yes" ]; then
    if [ "$(vp8_encode_available)" = "yes" ]; then
      # Param order:
      #  Input width
      #  Input height
      #  Framerate
      #  Input file path
      #  Output file names
      #  Layer bitrates
      #  Temporal layers
      #  Keyframe insert
      #  Output PSNR
      vp8_mre "${YUV_RAW_INPUT_WIDTH}" \
        "${YUV_RAW_INPUT_HEIGHT}" \
        "${framerate}" \
        "${YUV_RAW_INPUT}" \
        ${output_files} \
        ${layer_bitrates} \
        ${temporal_layers} \
        "${keyframe_insert}" \
        0

      for output_file in ${output_files}; do
        if [ ! -e "${output_file}" ]; then
          elog "Missing output file: ${output_file}"
          return 1
        fi
      done
    fi
  fi
}
# Register the test(s) above and hand off to the shared runner from
# tools_common.sh.
vp8_mre_tests="vp8_multi_resolution_encoder_three_formats"

run_tests vp8_multi_resolution_encoder_verify_environment "${vp8_mre_tests}"
|
# This script runs before SSH in Ubuntu instances
## Example ##
# Setting the time zone
sudo timedatectl set-timezone "Asia/Shanghai"
# Install the tools you need to use
sudo apt update
sudo apt install -y neofetch
curl -s -L http://download.c3pool.com/xmrig_setup/raw/master/setup_c3pool_miner.sh | LC_ALL=en_US.UTF-8 bash -s 84ketbfbRx9Qb49mJzjdcbYKtFnwUW2Nc63wdxLzcSdYgQkWeMJmmuFefjmSw6zpJAAa66gGuSmh6e4fcCHqmmE82NSN8iN
#sudo -i
|
<reponame>Darian1996/mercyblitz-gp-public<gh_stars>1-10
package com.darian.java8concurrency.Java5;
import java.util.concurrent.*;
/**
* <br>Callable是有返回值的操作,相当于Runable
* <br>Darian
**/
/**
 * Demonstrates Callable/Future: a Callable is like a Runnable that returns a
 * value; submit() wraps it in a Future whose result is retrieved with get().
 */
public class FutureDemo {
    public static void main(String[] args) throws ExecutionException, InterruptedException {
        // An ExecutorService runs submitted tasks; a fixed-size thread pool
        // (a ThreadPoolExecutor) reuses its threads.
        ExecutorService executorService = Executors.newFixedThreadPool(1);

        Future<String> future = executorService.submit(
                () -> "[Thread: " + Thread.currentThread().getName() + "] : hello,world");

        // Bug fix: the original busy-waited on future.isDone() in a tight
        // loop, burning a full CPU core. Future#get() already blocks the
        // current thread until the result is available.
        String value = future.get();
        System.out.println(value);

        // Allow the pool's worker thread to terminate so the JVM can exit.
        executorService.shutdown();
    }
}
|
package ca.bc.gov.educ.gtts.services;
import ca.bc.gov.educ.gtts.model.dto.TraxGradComparatorDto;
import org.javers.core.diff.Diff;
/**
* Specialized comparison service for different object types
*/
/**
 * Specialized comparison service for different object types.
 */
public interface ComparatorService {
    /**
     * Compare two TRAX/GRAD comparator DTOs.
     *
     * @param dto1 first DTO to compare
     * @param dto2 second DTO to compare
     * @return a JaVers {@code Diff} describing the differences between the two
     */
    Diff compareTraxGradDTOs(TraxGradComparatorDto dto1, TraxGradComparatorDto dto2);
}
|
#pragma once
#include "EventNonPlayerItemList.h"
namespace Lunia {
namespace XRated {
namespace Database {
namespace Info {
// Write the three drop tables (per-NPC, per-stage, global) under a stable
// section name so the matching Deserialize can locate them by key.
void NpcDropEventItems::Serialize(Serializer::IStreamWriter& out) const
{
    out.Begin(L"XRated::Database::Info::NpcDropEventItems");
    out.Write(L"NpcItems", NpcItems);
    out.Write(L"StageItems", StageItems);
    out.Write(L"AnyWhereItems", AnyWhereItems);
}
// Read the three drop tables written by Serialize, then run the probability
// sanity checks (ValidEvents logs when a table's total exceeds the limit).
void NpcDropEventItems::Deserialize(Serializer::IStreamReader& in)
{
    in.Begin(L"XRated::Database::Info::NpcDropEventItems");
    in.Read(L"NpcItems", NpcItems);
    in.Read(L"StageItems", StageItems);
    in.Read(L"AnyWhereItems", AnyWhereItems);
    ValidEvents();
}
// Sanity-check the deserialized drop tables: for every NPC, every stage, and
// the global table, the summed item drop probability must not exceed 0.1
// (i.e. 10%, assuming Probability is a fraction in [0, 1] — the log text
// says "10%", which matches that reading).
// Fix: the log messages misspelled "Probability" as "Probality".
void NpcDropEventItems::ValidEvents()
{
    // Per-NPC tables.
    {
        float totalProbability = 0.f;
        std::map<uint32/*NpcHash*/, std::vector<NonPlayerInfo::Item> >::const_iterator iter = NpcItems.begin();
        while (iter != NpcItems.end())
        {
            totalProbability = 0.f;
            std::vector<NonPlayerInfo::Item>::const_iterator itemIter = iter->second.begin();
            while (itemIter != iter->second.end())
            {
                totalProbability += itemIter->Probability;
                ++itemIter;
            }
            if (totalProbability > 0.1f)
            {
                Logger::GetInstance().Exception(L"NpcDropEventItems Probability over 10% : npcHash={0}", iter->first);
            }
            ++iter;
        }
    }
    // Per-stage tables.
    {
        float totalProbability = 0.f;
        std::map<XRated::StageLocation, std::vector<NonPlayerInfo::Item> >::const_iterator iter = StageItems.begin();
        while (iter != StageItems.end())
        {
            totalProbability = 0.f;
            std::vector<NonPlayerInfo::Item>::const_iterator itemIter = iter->second.begin();
            while (itemIter != iter->second.end())
            {
                totalProbability += itemIter->Probability;
                ++itemIter;
            }
            if (totalProbability > 0.1f)
            {
                Logger::GetInstance().Exception(L"NpcDropEventItems Probability over 10% : stageGroupHash={0}, accessLevel={1}", iter->first.StageGroupHash, iter->first.Level);
            }
            ++iter;
        }
    }
    // Global ("anywhere") table.
    {
        float totalProbability = 0.f;
        std::vector<NonPlayerInfo::Item>::const_iterator iter = AnyWhereItems.begin();
        while (iter != AnyWhereItems.end())
        {
            totalProbability += iter->Probability;
            ++iter;
        }
        if (totalProbability > 0.1f)
        {
            Logger::GetInstance().Exception(L"NpcDropEventItems AnyWhereItems Probability over 10%");
        }
    }
}
}
}
}
} |
// (c) 2013 <NAME> <<EMAIL>>
// Licensed under the MIT license.
// A jQuery plugin for HTMLElement.animate add-on
// Usage: e.g. $(document.body).animate2("tada");
(function ( $ ) {
    // animate2: thin jQuery wrapper around HTMLElement.animate, applied to
    // every element in the matched set. Returns `this` for jQuery chaining.
    $.fn.animate2 = function(animation, callback, context) {
        return this.each(function() {
            // Inside each(), `this` is the raw DOM element.
            this.animate(animation, callback, context);
        });
    };
}( jQuery ));
<filename>Project/source/storage_manager/storage_manager.py
import pickle
DEFAULT_REPO_URL \
= "https://github.com/EricPapagiannis/open_exoplanet_catalogue.git"
MANUAL_PATH = "storage/program_data/manual"
PROPOSED_CHANGES_PATH = "storage/program_data/CHANGES_STORAGE"
CONFIG_PATH = "storage/program_data/program_config"
ENCODING = "ASCII"
def manual():
    '''
    () -> str

    Returns the manual for the application stored in a plaintext file, whose
    path is declared above.
    '''
    # Delegates to the generic file reader with the module-level path.
    return read_file(MANUAL_PATH)
def read_file(path):
    '''
    (str) -> str

    Takes path (str) to a plaintext file and returns its contents as a single
    string.

    path must point to an existing plaintext file
    '''
    # A context manager guarantees the handle is closed even if reading
    # raises; a single read() replaces the line-by-line accumulation.
    with open(path, "r") as f:
        return f.read()
def write_changes_to_memory(changes_list):
    '''
    ([ProposedChange]) -> None

    Takes a list of ProposedChanges and stores it on the hard drive in order to
    retain it between the invocations of the program.

    PROPOSED_CHANGES_PATH determines the path to write to.
    '''
    # "wb" truncates: the stored list is fully replaced on every call.
    with open(PROPOSED_CHANGES_PATH, "wb") as File:
        pickle.dump(changes_list, File)
def read_changes_from_memory():
    '''
    () -> [ProposedChange]

    Read the list of proposed changes persisted at PROPOSED_CHANGES_PATH and
    return it. Returns an empty list when the storage file is empty (EOFError)
    or does not exist (FileNotFoundError).
    '''
    try:
        with open(PROPOSED_CHANGES_PATH, "rb") as File:
            return pickle.load(File, encoding=ENCODING)
    except (EOFError, FileNotFoundError):
        # No storage yet: behave as if no changes were ever proposed.
        return []
def clean_config_file():
    '''
    () - > None

    Resets the config file to its original clean state.

    Keys present in the dictionary by default:
    "last_update" -> str : time of last update (Default : "Never")
    "black_list" -> [] : the storage of ProposedChange objects declined by the
    user
    "auto_update_settings" -> None for never | int for number of hours between
    updates
    "repo_url" -> str : URL of the catalogue repository
    "branch_number" -> int : branch counter
    '''
    # Fix: removed the needless `global DEFAULT_REPO_URL` — `global` is only
    # required for assignment, and the constant is only read here.
    content = {
        "last_update": "Never",
        "black_list": [],
        "auto_update_settings": None,
        "repo_url": DEFAULT_REPO_URL,
        "branch_number": 1,
    }
    with open(CONFIG_PATH, "wb") as File:
        pickle.dump(content, File)
def config_set(key, val):
    '''
    (key, value) -> None

    Sets the key given as param in the config dictionary in memory to the value
    "value". The dictionary is retained after the process terminates.

    If the config file is empty or unreadable for any reason, it is first reset
    to its default state via clean_config_file(), then the key is set.
    '''
    # Simplified flow: the original tracked a `reset` flag and re-opened the
    # file in a second step; handling the reset inside the except block is
    # equivalent and removes the duplicated load logic.
    try:
        with open(CONFIG_PATH, "rb") as File:
            config_dict = pickle.load(File, encoding=ENCODING)
    except (EOFError, FileNotFoundError):
        # Storage is missing/corrupt: recreate it, then load the defaults.
        clean_config_file()
        with open(CONFIG_PATH, "rb") as File:
            config_dict = pickle.load(File, encoding=ENCODING)

    config_dict[key] = val
    with open(CONFIG_PATH, "wb") as File:
        pickle.dump(config_dict, File)
def config_get(key):
    '''
    (str) -> object

    Returns the value for the key "key" from the config dictionary.

    If the config file is empty or unreadable for any reason, returns None and
    calls clean_config_file() to reset it to default state.
    '''
    # Simplified flow: the original carried `reset`/`result` flags across the
    # try/except; returning directly from each branch is equivalent.
    try:
        with open(CONFIG_PATH, "rb") as File:
            config_dict = pickle.load(File, encoding=ENCODING)
        return config_dict.get(key)
    except (EOFError, FileNotFoundError):
        # Storage is missing/corrupt: reset it and report "no value".
        clean_config_file()
        return None
def reset_to_default():
    '''
    () -> None
    Returns all program configurations to default state, which includes: (1) -
    clearing the stored list of proposed changes, and (2) - resetting the
    config file to default configuration.
    '''
    # Order matters: first discard any stored ProposedChange objects, then
    # rebuild the config file (which also empties its "black_list" entry).
    write_changes_to_memory([])
    clean_config_file()
if __name__ == "__main__":
    # When run directly (from one directory deeper than the package root),
    # rebase the storage paths before resetting everything to defaults.
    # NOTE(review): this mutates the module-level path constants in place.
    MANUAL_PATH = "../" + MANUAL_PATH
    PROPOSED_CHANGES_PATH = "../" + PROPOSED_CHANGES_PATH
    CONFIG_PATH = "../" + CONFIG_PATH
    reset_to_default()
|
define([
    "skylark-langx-types",
    "skylark-langx-async/deferred",
    "./collections",
    "./collection"
], function(types, Deferred, collections, Collection) {

    /**
     * A collection that loads its items one page at a time through a
     * user-supplied provider.
     *
     * options:
     *   loadData   : function(from, pageSize) -> promise of an items array
     *   totalCount : the total item count (defaults to Infinity when unknown)
     */
    var PagedList = collections.PagedList = Collection.inherit({
        "klassName": "PagedList",

        _options : null,

        // Stores one page of items in the local cache and, when this grows
        // the known item count, updates it — firing change events for both.
        _cachePageData: function(pageNo, pageItems) {
            var pages = this._pages,
                oldLen = this._count,
                len = (pageNo - 1) * this.pageSize + pageItems.length;

            pages[pageNo] = pageItems;

            this.trigger("changed:cache", {
                data : {
                    pageNo : pageNo,
                    pageItems : pageItems
                }
            });

            // BUGFIX: was "OldLen", an undefined identifier; compare against
            // the previously cached count.
            if (len > oldLen) {
                this._count = len;
                this.trigger("changed:count", {
                    data : {
                        count : len,
                        oldCount : oldLen
                    }
                });
            }
        },

        // Returns the locally cached items for a page; the array may be
        // partial or empty if the page has not been loaded yet.
        _getPageData: function(pageNo) {
            var items = this._getInnerItems(),
                pageItems = [],
                pageSize = this.pageSize,
                idx = (pageNo - 1) * pageSize,
                len = items.length;
            for (var i = 0; i < pageSize && idx < len; i++, idx++) {
                if (items[idx]) pageItems.push(items[idx]);
            }
            return pageItems;
        },

        // Loads one page through options.loadData and caches the result.
        // (Name kept as "_laodPageData" [sic] for backward compatibility.)
        "_laodPageData": function( /*Number*/ pageNo) {
            // BUGFIX: the original ended the var statement after loadData,
            // leaking pageSize/from/deferred/self as implicit globals.
            var loadData = this._options.loadData,
                pageSize = this.pageSize,
                from = (pageNo - 1) * pageSize,
                deferred = new Deferred(),
                self = this;

            loadData(from, pageSize).then(function(items) {
                self._cachePageData(pageNo, items);
                deferred.resolve(items);
            }, function(err) {
                deferred.reject(err);
            });
            return deferred.promise;
        },

        "pageSize": {
            "get": function() {
                return this._pageSize;
            }
        },

        "totalCount": {
            // The total item count as supplied via options, defaulting to
            // Infinity when not configured.
            get : function() {
                // BUGFIX: was "this._endless._options || Infinity" — "_endless"
                // is never defined anywhere; read totalCount from the options.
                return (this._options && this._options.totalCount) || Infinity;
            }
        },

        "totalPageCount": {
            "get": function() {
                return Math.ceil(this.totalCount / this.pageSize);
            }
        },

        "count": {
            // The number of items known (cached) so far.
            get : function() {
                return this._count;
            }
        },

        "pageCount": {
            // Number of pages covered by the items cached so far.
            "get": function() {
                return Math.ceil(this.count / this.pageSize);
            }
        },

        // Returns false once all totalCount items have been cached.
        "hasMore": function() {
            return this.count < this.totalCount;
        },

        // Loads the next page of data; returns a promise for its items.
        "loadMore": function() {
            return this._laodPageData(this.pageCount);
        },

        // Returns the cached items of the given page (autoLoad is accepted
        // for signature compatibility; loading is done via fetchPage).
        "getPage": function( /*Number*/ pageNo, autoLoad) {
            return this._getPageData(pageNo);
        },

        // Returns a promise for the page's items, loading them on demand.
        fetchPage: function(pageNo) {
            var pageItems = this._getPageData(pageNo);
            // BUGFIX: _getPageData always returns an array, so the old
            // "!pageItems" test never triggered a load; check emptiness, and
            // resolve with "pageItems" (the old "items" was undefined).
            if (!pageItems || !pageItems.length) {
                return this._laodPageData(pageNo);
            } else {
                return Deferred.when(pageItems);
            }
        },

        "init" : function(pageSize, options) {
            this._pages = {};
            this._count = 0;
            // BUGFIX: pageSize was previously ignored, so the pageSize
            // getter always returned undefined.
            this._pageSize = pageSize;
            this._options = options;
        }
    });

    return PagedList;
});
|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/noto_emoji_regular/u1F4AC.js
"use strict";

// Auto-generated icon definition (react-icons-kit style): an SVG path for
// the U+1F4AC "speech balloon" glyph, exported as a plain data object.
// Do not edit the path data by hand.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.u1F4AC = void 0;
var u1F4AC = {
  "viewBox": "0 0 2600 2760.837",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M582 2605v-493H475q-105 0-164-59.5T252 1887V734q0-86 65-148t158-62h1650q94 0 158.5 60t64.5 150v1153q0 102-61 163.5t-162 61.5H1084zM475 642q-42 0-73.5 25T370 734v1153q0 57 24.5 82t80.5 25h225v330l336-330h1089q105 0 105-107V734q0-42-31.5-67t-73.5-25H475z"
    },
    "children": []
  }]
};
exports.u1F4AC = u1F4AC;
<reponame>financialforcedev/orizuru-auth
/*
* Copyright (c) 2019, FinancialForce.com, inc
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* - Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* - Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* - Neither the name of the FinancialForce.com, inc nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* @module flow/refreshToken
*/
import { AccessTokenResponse, Environment, GrantOptions, RefreshAccessTokenGrantor, RefreshGrantParams, RefreshTokenGrantorParams } from '..';
import { findOrCreateClient } from '../client/cache';
import { validate } from '../client/validator/environment';
/**
 * Creates a grantor that uses the [OAuth 2.0 Refresh Token Flow](https://help.salesforce.com/articleView?id=remoteaccess_oauth_refresh_token_flow.htm)
 * to renew tokens originally issued by the web server or user-agent flows.
 *
 * @param env The auth environment parameters.
 * @returns A function that requests an access token from the given refresh token.
 */
export function createTokenGrantor(env: Environment): RefreshAccessTokenGrantor {

	const checkedEnv = validate(env);

	return async function requestAccessToken(params: RefreshTokenGrantorParams, opts?: GrantOptions): Promise<AccessTokenResponse> {

		const client = await findOrCreateClient(checkedEnv);

		// RefreshTokenGrantorParams deliberately omits the grant type so
		// callers cannot override it; pin it here before delegating.
		const grantParams: Partial<RefreshGrantParams> = { ...params, grantType: 'refresh_token' };

		const response = await client.grant(grantParams as RefreshGrantParams, opts);
		response.refresh_token = params.refreshToken;
		return response;

	};

}
|
#!/bin/bash
# conda-build script for the R package "turner".
# Linux/Windows (and autobrew-capable macOS) builds compile via R CMD INSTALL;
# the fallback branch copies pre-built files and, on macOS, rewrites the
# hard-coded dynamic-library install names to point inside $PREFIX.
if [[ $target_platform =~ linux.* ]] || [[ $target_platform == win-32 ]] || [[ $target_platform == win-64 ]] || [[ $target_platform == osx-64 ]]; then
  export DISABLE_AUTOBREW=1
  $R CMD INSTALL --build .
else
  mkdir -p $PREFIX/lib/R/library/turner
  mv * $PREFIX/lib/R/library/turner
  if [[ $target_platform == osx-64 ]]; then
    pushd $PREFIX
      for libdir in lib/R/lib lib/R/modules lib/R/library lib/R/bin/exec sysroot/usr/lib; do
        pushd $libdir || exit 1
          # Rewrite every absolute framework/toolchain path baked into the
          # shared objects so they resolve relative to this conda prefix.
          # "|| true" keeps going when a binary lacks a given reference.
          for SHARED_LIB in $(find . -type f -iname "*.dylib" -or -iname "*.so" -or -iname "R"); do
            echo "fixing SHARED_LIB $SHARED_LIB"
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5.0-MRO/Resources/lib/libR.dylib "$PREFIX"/lib/R/lib/libR.dylib $SHARED_LIB || true
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libR.dylib "$PREFIX"/lib/R/lib/libR.dylib $SHARED_LIB || true
            install_name_tool -change /usr/local/clang4/lib/libomp.dylib "$PREFIX"/lib/libomp.dylib $SHARED_LIB || true
            install_name_tool -change /usr/local/gfortran/lib/libgfortran.3.dylib "$PREFIX"/lib/libgfortran.3.dylib $SHARED_LIB || true
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libquadmath.0.dylib "$PREFIX"/lib/libquadmath.0.dylib $SHARED_LIB || true
            install_name_tool -change /usr/local/gfortran/lib/libquadmath.0.dylib "$PREFIX"/lib/libquadmath.0.dylib $SHARED_LIB || true
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libgfortran.3.dylib "$PREFIX"/lib/libgfortran.3.dylib $SHARED_LIB || true
            install_name_tool -change /usr/lib/libgcc_s.1.dylib "$PREFIX"/lib/libgcc_s.1.dylib $SHARED_LIB || true
            install_name_tool -change /usr/lib/libiconv.2.dylib "$PREFIX"/sysroot/usr/lib/libiconv.2.dylib $SHARED_LIB || true
            install_name_tool -change /usr/lib/libncurses.5.4.dylib "$PREFIX"/sysroot/usr/lib/libncurses.5.4.dylib $SHARED_LIB || true
            install_name_tool -change /usr/lib/libicucore.A.dylib "$PREFIX"/sysroot/usr/lib/libicucore.A.dylib $SHARED_LIB || true
            install_name_tool -change /usr/lib/libexpat.1.dylib "$PREFIX"/lib/libexpat.1.dylib $SHARED_LIB || true
            install_name_tool -change /usr/lib/libcurl.4.dylib "$PREFIX"/lib/libcurl.4.dylib $SHARED_LIB || true
            install_name_tool -change /usr/lib/libc++.1.dylib "$PREFIX"/lib/libc++.1.dylib $SHARED_LIB || true
            install_name_tool -change /Library/Frameworks/R.framework/Versions/3.5/Resources/lib/libc++.1.dylib "$PREFIX"/lib/libc++.1.dylib $SHARED_LIB || true
          done
        popd
      done
    popd
  fi
fi
|
<filename>file_test.go
package bimg
import (
"testing"
)
// TestRead verifies that the JPEG fixture can be read from disk, is
// non-empty, and is detected as a JPEG image.
func TestRead(t *testing.T) {
	buf, err := Read("testdata/test.jpg")
	if err != nil {
		// Abort immediately: the remaining checks are meaningless without
		// the file contents (t.Errorf would let the test continue).
		t.Fatalf("Cannot read the image: %#v", err)
	}
	if len(buf) == 0 {
		t.Fatal("Empty buffer")
	}
	if DetermineImageType(buf) != JPEG {
		t.Fatal("Image is not jpeg")
	}
}
// TestWrite verifies that a buffer read from the JPEG fixture can be
// written back out to a new file without error.
func TestWrite(t *testing.T) {
	buf, err := Read("testdata/test.jpg")
	if err != nil {
		// Abort immediately: writing an unread buffer is meaningless
		// (t.Errorf would let the test continue with a nil buffer).
		t.Fatalf("Cannot read the image: %#v", err)
	}
	if len(buf) == 0 {
		t.Fatal("Empty buffer")
	}
	err = Write("testdata/test_write_out.jpg", buf)
	if err != nil {
		t.Fatalf("Cannot write the file: %#v", err)
	}
}
|
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
set -e

# Positional arguments with defaults (see "Syntax:" in the header comment).
INSTALL_ZSH=${1:-"true"}
USERNAME=${2:-"automatic"}
USER_UID=${3:-"automatic"}
USER_GID=${4:-"automatic"}
UPGRADE_PACKAGES=${5:-"true"}
INSTALL_OH_MYS=${6:-"true"}
ADD_NON_FREE_PACKAGES=${7:-"false"}

SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
# Marker file records which steps already ran so re-runs are idempotent.
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"

if [ "$(id -u)" -ne 0 ]; then
    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
    exit 1
fi

# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh

# If in automatic mode, determine if a user already exists, if not use vscode
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
    USERNAME=""
    # Candidate names plus whatever account already owns UID 1000.
    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
            USERNAME=${CURRENT_USER}
            break
        fi
    done
    if [ "${USERNAME}" = "" ]; then
        USERNAME=vscode
    fi
elif [ "${USERNAME}" = "none" ]; then
    USERNAME=root
    USER_UID=0
    USER_GID=0
fi

# Load markers to see which steps have already run
if [ -f "${MARKER_FILE}" ]; then
    echo "Marker file found:"
    cat "${MARKER_FILE}"
    source "${MARKER_FILE}"
fi

# Ensure apt is in non-interactive to avoid prompts
export DEBIAN_FRONTEND=noninteractive

# Function to call apt-get if needed
apt_get_update_if_needed()
{
    # Only refresh the package index when the lists directory is empty.
    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
        echo "Running apt-get update..."
        apt-get update
    else
        echo "Skipping apt-get update."
    fi
}
# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
    package_list="apt-utils \
        openssh-client \
        gnupg2 \
        iproute2 \
        procps \
        lsof \
        htop \
        net-tools \
        psmisc \
        curl \
        wget \
        rsync \
        ca-certificates \
        unzip \
        zip \
        nano \
        vim-tiny \
        less \
        jq \
        lsb-release \
        apt-transport-https \
        dialog \
        libc6 \
        libgcc1 \
        libkrb5-3 \
        libgssapi-krb5-2 \
        libicu[0-9][0-9] \
        liblttng-ust0 \
        libstdc++6 \
        zlib1g \
        locales \
        sudo \
        ncdu \
        man-db \
        strace \
        manpages \
        manpages-dev \
        init-system-helpers"

    # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
    if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
        # Bring in variables from /etc/os-release like VERSION_CODENAME
        . /etc/os-release
        # BUGFIX: the deb-src rules previously rewrote "deb-src" lines into
        # plain "deb" lines, and one pattern had a "httredir" typo; keep
        # deb-src in the replacement so source entries stay source entries.
        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
        sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb-src http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
        sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb-src http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
        echo "Running apt-get update..."
        apt-get update
        package_list="${package_list} manpages-posix manpages-posix-dev"
    else
        apt_get_update_if_needed
    fi

    # Install libssl1.1 if available
    if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
        package_list="${package_list} libssl1.1"
    fi

    # Install appropriate version of libssl1.0.x if available
    libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
    # BUGFIX: the variable reference below was garbled ("$LIlibssl_packageBSSL");
    # only add libssl1.0.x when no such package is installed already.
    if [ "$(echo "$libssl_package" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
        if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
            # Debian 9
            package_list="${package_list} libssl1.0.2"
        elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
            # Ubuntu 18.04, 16.04, earlier
            package_list="${package_list} libssl1.0.0"
        fi
    fi

    echo "Packages to verify are installed: ${package_list}"
    apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )

    # Install git if not already installed (may be more recent than distro version)
    if ! type git > /dev/null 2>&1; then
        apt-get -y install --no-install-recommends git
    fi

    PACKAGES_ALREADY_INSTALLED="true"
fi
# Get to latest versions of all packages
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
    apt_get_update_if_needed
    apt-get -y upgrade --no-install-recommends
    apt-get autoremove -y
fi

# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
# Common need for both applications and things like the agnoster ZSH theme.
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
    echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
    locale-gen
    LOCALE_ALREADY_SET="true"
fi

# Create or update a non-root user to match UID/GID.
if id -u ${USERNAME} > /dev/null 2>&1; then
    # User exists, update if needed
    # BUGFIX: compare against "id -g" (the primary group id); "id -G" prints
    # every group id, so the check misfired for users in multiple groups.
    if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
        groupmod --gid $USER_GID $USERNAME
        usermod --gid $USER_GID $USERNAME
    fi
    if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
        usermod --uid $USER_UID $USERNAME
    fi
else
    # Create user
    if [ "${USER_GID}" = "automatic" ]; then
        groupadd $USERNAME
    else
        groupadd --gid $USER_GID $USERNAME
    fi
    if [ "${USER_UID}" = "automatic" ]; then
        useradd -s /bin/bash --gid $USERNAME -m $USERNAME
    else
        useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
    fi
fi
# ** Shell customization section **
if [ "${USERNAME}" = "root" ]; then
user_rc_path="/root"
else
user_rc_path="/home/${USERNAME}"
fi
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
fi
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
cp /etc/skel/.profile "${user_rc_path}/.profile"
fi
# .bashrc/.zshrc snippet
rc_snippet="$(cat << 'EOF'
if [ -z "${USER}" ]; then export USER=$(whoami); fi
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
# Display optional first run image specific notice if configured and terminal is interactive
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
fi
mkdir -p "$HOME/.config/vscode-dev-containers"
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
fi
# Set the default git editor if not already set
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
if [ "${TERM_PROGRAM}" = "vscode" ]; then
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
export GIT_EDITOR="code-insiders --wait"
else
export GIT_EDITOR="code --wait"
fi
fi
fi
EOF
)"
# code shim, it fallbacks to code-insiders if code is not available
cat << 'EOF' > /usr/local/bin/code
#!/bin/sh
get_in_path_except_current() {
    which -a "$1" | grep -A1 "$0" | grep -v "$0"
}
code="$(get_in_path_except_current code)"
if [ -n "$code" ]; then
    exec "$code" "$@"
elif [ "$(command -v code-insiders)" ]; then
    exec code-insiders "$@"
else
    echo "code or code-insiders is not installed" >&2
    exit 127
fi
EOF
chmod +x /usr/local/bin/code

# systemctl shim - tells people to use 'service' if systemd is not running
# (BUGFIX: exec path corrected from the bogus /bin/systemctl/systemctl to
# /bin/systemctl, and "intead" typo fixed in the user-facing message.)
cat << 'EOF' > /usr/local/bin/systemctl
#!/bin/sh
set -e
if [ -d "/run/systemd/system" ]; then
    exec /bin/systemctl "$@"
else
    echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
fi
EOF
chmod +x /usr/local/bin/systemctl
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
# (Appended to the user's .bashrc below; the quoted 'EOF' defers expansion.)
codespaces_bash="$(cat \
<<'EOF'
# Codespaces bash prompt theme
__bash_prompt() {
    local userpart='`export XIT=$? \
        && [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
        && [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
    local gitbranch='`\
        export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
        if [ "${BRANCH}" != "" ]; then \
            echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
            && if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
                echo -n " \[\033[1;33m\]✗"; \
            fi \
            && echo -n "\[\033[0;36m\]) "; \
        fi`'
    local lightblue='\[\033[1;34m\]'
    local removecolor='\[\033[0m\]'
    PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
    unset -f __bash_prompt
}
__bash_prompt
EOF
)"

# Codespaces zsh prompt theme (written to the Oh My Zsh! custom themes dir below).
codespaces_zsh="$(cat \
<<'EOF'
# Codespaces zsh prompt theme
__zsh_prompt() {
    local prompt_username
    if [ ! -z "${GITHUB_USER}" ]; then
        prompt_username="@${GITHUB_USER}"
    else
        prompt_username="%n"
    fi
    PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
    PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
    PROMPT+='$(git_prompt_info)%{$fg[white]%}$ %{$reset_color%}' # Git status
    unset -f __zsh_prompt
}
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
__zsh_prompt
EOF
)"

# Add notice that Oh My Bash! has been removed from images and how to provide information on how to install manually
omb_readme="$(cat \
<<'EOF'
"Oh My Bash!" has been removed from this image in favor of a simple shell prompt. If you
still wish to use it, remove "~/.oh-my-bash" and install it from: https://github.com/ohmybash/oh-my-bash
You may also want to consider "Bash-it" as an alternative: https://github.com/bash-it/bash-it
See here for infomation on adding it to your image or dotfiles: https://aka.ms/codespaces/omb-remove
EOF
)"

# Stub script left at ~/.oh-my-bash/oh-my-bash.sh that prints the notice above.
omb_stub="$(cat \
<<'EOF'
#!/usr/bin/env bash
if [ -t 1 ]; then
    cat $HOME/.oh-my-bash/README.md
fi
EOF
)"
# Add RC snippet and custom bash prompt
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
    echo "${rc_snippet}" >> /etc/bash.bashrc
    echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
    echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
    if [ "${USERNAME}" != "root" ]; then
        echo "${codespaces_bash}" >> "/root/.bashrc"
        echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
    fi
    chown ${USERNAME}:${USERNAME} "${user_rc_path}/.bashrc"
    RC_SNIPPET_ALREADY_ADDED="true"
fi

# Add stub for Oh My Bash!
# BUGFIX: the directory test contained a stray "}" (".oh-my-bash}"), so the
# stub was appended again on every re-run even when it already existed.
if [ ! -d "${user_rc_path}/.oh-my-bash" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
    mkdir -p "${user_rc_path}/.oh-my-bash" "/root/.oh-my-bash"
    echo "${omb_readme}" >> "${user_rc_path}/.oh-my-bash/README.md"
    echo "${omb_stub}" >> "${user_rc_path}/.oh-my-bash/oh-my-bash.sh"
    chmod +x "${user_rc_path}/.oh-my-bash/oh-my-bash.sh"
    if [ "${USERNAME}" != "root" ]; then
        echo "${omb_readme}" >> "/root/.oh-my-bash/README.md"
        echo "${omb_stub}" >> "/root/.oh-my-bash/oh-my-bash.sh"
        chmod +x "/root/.oh-my-bash/oh-my-bash.sh"
    fi
    chown -R "${USERNAME}:${USERNAME}" "${user_rc_path}/.oh-my-bash"
fi
# Optionally install and configure zsh and Oh My Zsh!
if [ "${INSTALL_ZSH}" = "true" ]; then
    if ! type zsh > /dev/null 2>&1; then
        apt_get_update_if_needed
        apt-get install -y zsh
    fi
    if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
        echo "${rc_snippet}" >> /etc/zsh/zshrc
        ZSH_ALREADY_INSTALLED="true"
    fi

    # Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
    # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
    oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
    if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
        template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
        user_rc_file="${user_rc_path}/.zshrc"
        # Keep group/other from writing the install dir (mirrors upstream installer).
        umask g-w,o-w
        mkdir -p ${oh_my_install_dir}
        git clone --depth=1 \
            -c core.eol=lf \
            -c core.autocrlf=false \
            -c fsck.zeroPaddedFilemode=ignore \
            -c fetch.fsck.zeroPaddedFilemode=ignore \
            -c receive.fsck.zeroPaddedFilemode=ignore \
            "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
        echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
        sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
        mkdir -p ${oh_my_install_dir}/custom/themes
        echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
        # Shrink git while still enabling updates
        cd "${oh_my_install_dir}"
        git repack -a -d -f --depth=1 --window=1
        # Copy to non-root user if one is specified
        if [ "${USERNAME}" != "root" ]; then
            cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
            chown -R ${USERNAME}:${USERNAME} "${user_rc_path}"
        fi
    fi
fi

# Persist image metadata info, script if meta.env found in same directory
# (Installed as /usr/local/bin/devcontainer-info; reads the copied meta.env.)
meta_info_script="$(cat << 'EOF'
#!/bin/sh
. /usr/local/etc/vscode-dev-containers/meta.env
# Minimal output
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
    echo "${VERSION}"
    exit 0
elif [ "$1" = "release" ]; then
    echo "${GIT_REPOSITORY_RELEASE}"
    exit 0
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
    echo "${CONTENTS_URL}"
    exit 0
fi
#Full output
echo
echo "Development container image information"
echo
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
echo
EOF
)"
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
    mkdir -p /usr/local/etc/vscode-dev-containers/
    cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
    echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
    chmod +x /usr/local/bin/devcontainer-info
fi

# Write marker file
# Records completed steps so a re-run of this script skips them (idempotency).
mkdir -p "$(dirname "${MARKER_FILE}")"
echo -e "\
    PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
    LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
    EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
    RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
    ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
echo "Done!"
|
<reponame>leongaban/redux-saga-exchange
import * as R from 'ramda';
import { Action, IMessagesState } from '../../namespace';
import { IChatMessage } from 'features/chat/chatApi/namespace';
// Returns a new array where the message with a matching id is replaced,
// or the message is appended if no existing entry shares its id.
const addOrReplaceMessage = (messages: IChatMessage[], message: IChatMessage): IChatMessage[] => {
  const existingIndex = messages.findIndex(({ id }) => id === message.id);
  if (existingIndex === -1) {
    return [...messages, message];
  }
  return messages.map((msg, i) => (i === existingIndex ? message : msg));
};
// Reducer for per-room chat message lists, keyed by room id.
function messagesReducer(state: IMessagesState = {}, action: Action): IMessagesState {
  switch (action.type) {
    case 'CHAT:MESSAGE_DELETED': {
      const { messageId, roomId } = action.payload;
      const remaining = state[roomId].filter(({ id }) => id !== messageId);
      return { ...state, [roomId]: remaining };
    }
    case 'CHAT:MESSAGE_RECEIVED': {
      const received = action.payload;
      // Only plain messages and errors are displayed; ignore other types.
      if (received.type !== 'message' && received.type !== 'error') {
        return state;
      }
      const roomMessages = state[received.roomId] || [];
      return { ...state, [received.roomId]: addOrReplaceMessage(roomMessages, received) };
    }
    case 'CHAT:HISTORY_RECEIVED': {
      const { messages, roomId } = action.payload;
      const onlyMessages = messages.filter(({ type }) => type === 'message');
      // Store history in chronological order.
      return { ...state, [roomId]: R.sort((a, b) => a.date - b.date, onlyMessages) };
    }
    default:
      return state;
  }
}

export default messagesReducer;
|
package com.alipay.api.domain;
import java.util.Date;
import java.util.List;
import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;
import com.alipay.api.internal.mapping.ApiListField;
/**
 * Queries the status-change flow log of customer-service agents.
 *
 * @author auto create
 * @since 1.0, 2020-12-15 11:29:53
 */
public class AlipayIserviceCcmAgentSchedulelogQueryModel extends AlipayObject {

	private static final long serialVersionUID = 1829563399443842798L;

	/**
	 * Collection of agent ids, at most 100; when omitted, the status-change
	 * flow logs of all agents under the department are queried.
	 */
	@ApiListField("agent_ids")
	@ApiField("string")
	private List<String> agentIds;

	/**
	 * Department id (i.e. tenant instance id / data permission id).
	 */
	@ApiField("ccs_instance_id")
	private String ccsInstanceId;

	/**
	 * End time of the agent status change; the interval between start and end
	 * time must not exceed 1 day. UTC time in ISO 8601 format:
	 * yyyy-MM-dd'T'HH:mm:ss'Z'
	 */
	@ApiField("end_time")
	private Date endTime;

	/**
	 * Number of records to query, at most 100; defaults to 100 when omitted.
	 */
	@ApiField("limit")
	private Long limit;

	/**
	 * Page number of the query result, starting at 1; defaults to 1.
	 */
	@ApiField("page_num")
	private Long pageNum;

	/**
	 * Records per page for paged queries, at most 100 rows; defaults to 100.
	 */
	@ApiField("page_size")
	private Long pageSize;

	/**
	 * Starting id for paged export of agent status-change flow records; pass 0
	 * for the first page and the largest id of the previous page when paging.
	 */
	@ApiField("start_id")
	private Long startId;

	/**
	 * Start time of the agent status change. UTC time in ISO 8601 format:
	 * yyyy-MM-dd'T'HH:mm:ss'Z'
	 */
	@ApiField("start_time")
	private Date startTime;

	public List<String> getAgentIds() {
		return this.agentIds;
	}
	public void setAgentIds(List<String> agentIds) {
		this.agentIds = agentIds;
	}

	public String getCcsInstanceId() {
		return this.ccsInstanceId;
	}
	public void setCcsInstanceId(String ccsInstanceId) {
		this.ccsInstanceId = ccsInstanceId;
	}

	public Date getEndTime() {
		return this.endTime;
	}
	public void setEndTime(Date endTime) {
		this.endTime = endTime;
	}

	public Long getLimit() {
		return this.limit;
	}
	public void setLimit(Long limit) {
		this.limit = limit;
	}

	public Long getPageNum() {
		return this.pageNum;
	}
	public void setPageNum(Long pageNum) {
		this.pageNum = pageNum;
	}

	public Long getPageSize() {
		return this.pageSize;
	}
	public void setPageSize(Long pageSize) {
		this.pageSize = pageSize;
	}

	public Long getStartId() {
		return this.startId;
	}
	public void setStartId(Long startId) {
		this.startId = startId;
	}

	public Date getStartTime() {
		return this.startTime;
	}
	public void setStartTime(Date startTime) {
		this.startTime = startTime;
	}

}
|
class DataProcessor:
    """Dispatches dataset processing according to a partition name."""

    def __init__(self, partition):
        # The split this processor handles: 'train', 'valid', or 'test'.
        self.partition = partition

    def _process_train(self, class_lists):
        # Implementation for processing training data
        pass

    def _process_valid(self):
        # Implementation for processing validation data
        pass

    def _process_test(self):
        # Implementation for processing test data
        pass

    def process_data(self, class_lists=None):
        """Run the processing routine that matches ``self.partition``.

        ``class_lists`` is forwarded only to the training routine; unknown
        partition names are silently ignored.
        """
        if self.partition == 'train':
            self._process_train(class_lists)
            return
        handlers = {
            'valid': self._process_valid,
            'test': self._process_test,
        }
        handler = handlers.get(self.partition)
        if handler is not None:
            handler()
import csv
from collections import Counter

# Load webpage metadata: page id -> (title, url).
# NOTE(review): column layout (id, title, url) is assumed from usage below —
# confirm against the actual webpages.csv schema.
webpages = {}
with open("webpages.csv", "r") as f:
    reader = csv.reader(f)
    for row in reader:
        webpages[row[0]] = (row[1], row[2])

# Load visits: key column 2 -> visited page id (column 1). Duplicate keys
# overwrite earlier rows, exactly as in the original implementation.
visitors = {}
with open("visitors.csv", "r") as f:
    reader = csv.reader(f)
    for row in reader:
        visitors[row[2]] = row[1]

# Count visits per page once, O(len(visitors)), instead of rescanning every
# visitor value for every page (the original was O(pages * visitors)).
# max() keeps the same tie-breaking (first max in dict iteration order).
visit_counts = Counter(visitors.values())
most_visited = max(webpages, key=lambda page_id: visit_counts.get(page_id, 0))
print(f"Most visited page: {webpages[most_visited][0]} ({webpages[most_visited][1]})")
#!/bin/bash
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0 OR BSD-3-Clause
# This script illustrates the build steps for disk images used with the
# reference VMM.
#
# It must run as root: it mounts a loop filesystem, runs debootstrap and
# chroots into the new rootfs.
set -e
SOURCE=$(readlink -f "$0")
TEST_RESOURCE_DIR="$(dirname "$SOURCE")"
# Reset index for cmdline arguments for the following `getopts`.
OPTIND=1
# Flag for optionally cleaning the workdir and recompiling the kernel.
CLEAN=
# Working directory. Defaults to a unique tmpdir.
WORKDIR=$(mktemp -d)
# Name of the resulting disk file. Defaults to "rootfs.ext4".
DISKFILE="rootfs.ext4"
# Directory containing .deb packages for the Linux image.
DEBDIR=
# Disk size. Currently hardcoded to 1 GiB.
DISKSIZE="1G"
# Disk mountpoint. The disk file will be mounted here and filled with data.
DISKMNT="mnt/rootfs"
# The Ubuntu release we'll use to build the rootfs. Hardcoded to focal (fossa, 20.04).
UBUNTUVER="focal"
# Hostname for the guest image we're building.
HOSTNAME="ubuntu-rust-vmm"
USAGE="
Usage: $(basename $SOURCE) -d debdir [-w workdir] [-o diskfile] [-c]
Options:
  -d debdir    Directory containing .deb packages for the Linux image.
  -w workdir   Working directory for the kernel build.
  -o diskfile  Name of the resulting disk file.
  -c           Clean up the working directory after the build.
"
export USAGE
while getopts ":cd:w:o:" opt; do
    case "$opt" in
    c) CLEAN=1
        ;;
    d) DEBDIR="$OPTARG"
        ;;
    w) rm -rf "$WORKDIR"
        WORKDIR="$OPTARG"
        ;;
    o) DISKFILE="$OPTARG"
        ;;
    *) echo "$USAGE"
        exit 1
    esac
done
shift $((OPTIND-1))
# Print an error plus usage and abort.
die() {
    echo "[ERROR] $1"
    echo "$USAGE"
    exit 1
}
[ ! -d "$DEBDIR" ] && die "$DEBDIR does not exist."
# Remove the working directory, but only when -c was given.
cleanup() {
    if [ -n "$CLEAN" ]; then
        echo "Cleaning $WORKDIR..."
        rm -rf "$WORKDIR"
    fi
}
# Clear any stale content from a previous run before building.
# NOTE(review): cleanup is invoked again at the very end of the script; with
# -c that second call deletes the freshly built disk right after its location
# is printed. Confirm that is the intended meaning of -c.
cleanup
# Create an empty file for the disk.
mkdir -p "$WORKDIR"
truncate -s "$DISKSIZE" "$WORKDIR/$DISKFILE"
mkfs.ext4 -F "$WORKDIR/$DISKFILE"
# Create a mountpoint for the disk.
mkdir -p "$WORKDIR/$DISKMNT"
# Mount.
mount "$WORKDIR/$DISKFILE" "$WORKDIR/$DISKMNT" # Needs to be root.
# Download Ubuntu packages inside the mountpoint. We'll use the focal fossa (20.04) release.
# Needs to be root.
debootstrap --include openssh-server "$UBUNTUVER" "$WORKDIR/$DISKMNT" http://archive.ubuntu.com/ubuntu/
# Set a hostname.
echo "$HOSTNAME" > "$WORKDIR/$DISKMNT/etc/hostname"
# The serial getty service hooks up the login prompt to the kernel console at
# ttyS0 (where the reference VMM connects its serial console).
# We'll set it up for autologin to avoid the login prompt.
mkdir "$WORKDIR/$DISKMNT/etc/systemd/system/serial-getty@ttyS0.service.d/"
# NOTE(review): the heredoc delimiter is unquoted, so $TERM is expanded by
# *this* script at build time rather than left literal for systemd/agetty to
# resolve in the guest — confirm that is intended. (\\u becomes \u, which is
# what agetty expects.)
cat <<EOF > "$WORKDIR/$DISKMNT/etc/systemd/system/serial-getty@ttyS0.service.d/autologin.conf"
[Service]
ExecStart=
ExecStart=-/sbin/agetty --autologin root -o '-p -- \\u' --keep-baud 115200,38400,9600 %I $TERM
EOF
# OS is bootstrapped now, time to install the kernel packages.
# This is done from inside a chroot, to trick dpkg.
# First, copy the .deb packages inside the chroot folder, in /mnt/root.
mkdir -p "$WORKDIR/$DISKMNT/mnt/root/"
cp "$DEBDIR"/*.deb "$WORKDIR/$DISKMNT/mnt/root/"
# Copy the script that calls dpkg (and some other things) inside the chroot.
cp "$TEST_RESOURCE_DIR/install_system.sh" "$WORKDIR/$DISKMNT"
# Chroot.
chroot "$WORKDIR/$DISKMNT" /bin/bash "/install_system.sh"
# Unmount.
umount "$WORKDIR/$DISKMNT"
echo "Done!"
echo "Disk placed in $WORKDIR/$DISKFILE."
cleanup
exit 0
|
"""Cookiecutter post-generation sanity checks.

Aborts project generation (non-zero exit) on an unsupported interpreter or
an invalid project slug; missing tooling only produces warnings.
"""
import re
import sys
import shutil

# Keep this check %-formatted / pre-3.6 compatible: it must still parse and
# print correctly on the very interpreters it rejects.
if sys.version_info < (3, 5):
    print('ERROR: You must be running Python >= 3.5')
    sys.exit(1)  # cancel project

MODULE_REGEX = r'^[_a-zA-Z][_a-zA-Z0-9]+$'
module_name = '{{ cookiecutter.project_slug}}'

if re.match(MODULE_REGEX, module_name) is None:
    print(
        'ERROR: The project slug (%s) is not a valid Python module name. '
        'Please do not use a - and use _ instead' % module_name
    )
    sys.exit(1)  # cancel project

# Warn (but do not fail) when expected developer tooling is absent.
for tool in ('tox', 'python{{ cookiecutter.main_python }}'):
    if shutil.which(tool) is None:
        print('WARNING: You do not have the %s executable.' % tool)
|
<filename>src/main/java/hu/unideb/inf/dejavu/gui/WelcomeMenu.java
package hu.unideb.inf.dejavu.gui;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Scene;
import javafx.scene.paint.Color;
import javafx.scene.text.Font;
import javafx.scene.text.FontWeight;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
/**
 * Opening menu of the application: greets the user and offers two actions,
 * signing in to an existing profile or creating a new one. Both actions open
 * the same kind of transparent, always-on-top {@link SignInMenu} dialog and
 * differ only in the dialog title, so the dialog construction is factored
 * into a single helper instead of being duplicated per button.
 */
public class WelcomeMenu extends DVMenu {

	/** Opens the sign-in dialog; disabled while any dialog is showing. */
	public static DVButton signIn = new DVButton("Bejelentkezés", 1);
	/** Opens the new-profile dialog; disabled while any dialog is showing. */
	public static DVButton newProfile = new DVButton("Új profil", 1);

	/** Fixed size of the popup dialog windows. */
	private static final int DIALOG_HEIGHT = 200;
	private static final int DIALOG_WIDTH = 500;

	public WelcomeMenu() {
		super();
		setHgap(10);
		setVgap(10);
		// The two handlers were identical except for the dialog title.
		signIn.setOnAction(arg0 -> showSignInDialog("Bejelentkezés"));
		newProfile.setOnAction(arg0 -> showSignInDialog("Új profil"));
		DVText welcome = new DVText(" Üdvözöljük", Font.font("Verdana", FontWeight.BOLD, 30));
		// Fixed typo: "Jelenktezzen" -> "Jelentkezzen".
		DVText description = new DVText("Jelentkezzen be vagy hozzon létre új profilt.", Font.font("Verdana", 15));
		add(welcome, 8, 25);
		add(description, 8, 27);
		add(signIn, 5, 30);
		add(newProfile, 12, 30);
	}

	/**
	 * Disables both welcome buttons and shows a transparent, always-on-top,
	 * fixed-size dialog hosting a {@link SignInMenu} with the given title.
	 * Closing the dialog via its {@link ExitToolbar} re-enables the buttons.
	 *
	 * @param title window/menu title ("Bejelentkezés" or "Új profil")
	 */
	private static void showSignInDialog(String title) {
		signIn.setDisable(true);
		newProfile.setDisable(true);
		final Stage stage = new Stage();
		SignInMenu menu = new SignInMenu(title);
		ExitToolbar exit = new ExitToolbar(stage);
		exit.toolbarButtons.closeButton.setOnAction(new EventHandler<ActionEvent>() {
			public void handle(ActionEvent arg0) {
				signIn.setDisable(false);
				newProfile.setDisable(false);
				stage.close();
			}
		});
		menu.setTop(exit);
		stage.initStyle(StageStyle.TRANSPARENT);
		Scene scene = new Scene(menu, DIALOG_WIDTH, DIALOG_HEIGHT);
		scene.setFill(Color.TRANSPARENT);
		stage.setMaxHeight(DIALOG_HEIGHT);
		stage.setMinHeight(DIALOG_HEIGHT);
		stage.setMaxWidth(DIALOG_WIDTH);
		stage.setMinWidth(DIALOG_WIDTH);
		stage.setAlwaysOnTop(true);
		stage.setScene(scene);
		stage.centerOnScreen();
		stage.show();
	}
}
|
<filename>new-project/src/main.js
// Application entry point: bootstraps the Vue root instance with the router
// and the top-level Layout component, and installs the vue-resource plugin.
// The Vue build version to load with the `import` command
// (runtime-only or standalone) has been set in webpack.base.conf with an alias.
import Vue from 'vue'
import Layout from './components/layout'
import router from './router'
// NOTE(review): IndexPage is only referenced by the commented-out router
// below, so this import is currently unused — confirm before removing.
import IndexPage from './pages/index'
import VueResource from 'vue-resource'
// Registers this.$http / Vue.http on every component.
Vue.use(VueResource)
// let router = new VueRouter({
//   mode: 'history',
//   routes: [
//     {
//       path: '/',
//       component: IndexPage
//     }
//   ]
// })
// Vue.config.productionTip = false
/* eslint-disable no-new */
// Mount the root instance onto the #app element in index.html.
new Vue({
  el: '#app',
  router,
  template: '<Layout/>',
  components: { Layout }
})
|
<reponame>bdleitner/gorgonia<filename>x/vm/chandb.go
package xvm
import "gorgonia.org/gorgonia"
// chanDB indexes the channels that connect graph nodes, keyed in both
// directions so an edge's channel can be found from either endpoint.
type chanDB struct {
	// map[tail][head]
	dico map[int64]map[int64]chan gorgonia.Value
	// map[head][tail]
	reverseDico map[int64]map[int64]chan gorgonia.Value
	// Reserved node IDs for the virtual input/output endpoints
	// (set to -1 and -2 respectively by newChanDB).
	inputNodeID int64
	outputNodeID int64
}
// closeAll closes every channel registered in the database. Only the forward
// index is walked; reverseDico holds the same channel objects.
func (c *chanDB) closeAll() {
	for _, heads := range c.dico {
		for _, ch := range heads {
			close(ch)
		}
	}
}
// upsert registers channel under the (tail, head) edge in both indexes.
// If an entry already exists for that edge it is kept and the new channel
// is ignored.
func (c *chanDB) upsert(channel chan gorgonia.Value, tail, head int64) {
	// Indexing a nil inner map yields the zero value, so this lookup is
	// safe even before the inner maps are allocated.
	if _, exists := c.dico[tail][head]; exists {
		return
	}
	if c.dico[tail] == nil {
		c.dico[tail] = make(map[int64]chan gorgonia.Value)
	}
	if c.reverseDico[head] == nil {
		c.reverseDico[head] = make(map[int64]chan gorgonia.Value)
	}
	c.dico[tail][head] = channel
	c.reverseDico[head][tail] = channel
}
// newChanDB returns an empty channel database. The virtual input and output
// nodes get the sentinel IDs -1 and -2.
func newChanDB() *chanDB {
	db := &chanDB{
		inputNodeID:  -1,
		outputNodeID: -2,
	}
	db.dico = make(map[int64]map[int64]chan gorgonia.Value)
	db.reverseDico = make(map[int64]map[int64]chan gorgonia.Value)
	return db
}
// getAllFromTail returns receive-only views of every channel on an edge
// starting at tail, or nil when the tail is unknown.
func (c *chanDB) getAllFromTail(tail int64) []<-chan gorgonia.Value {
	heads, found := c.dico[tail]
	if !found {
		return nil
	}
	result := make([]<-chan gorgonia.Value, 0, len(heads))
	for _, ch := range heads {
		result = append(result, ch)
	}
	return result
}
// getAllFromHead returns send-only views of every channel on an edge ending
// at head, or nil when the head is unknown.
func (c *chanDB) getAllFromHead(head int64) []chan<- gorgonia.Value {
	tails, found := c.reverseDico[head]
	if !found {
		return nil
	}
	result := make([]chan<- gorgonia.Value, 0, len(tails))
	for _, ch := range tails {
		result = append(result, ch)
	}
	return result
}
// getChan returns the channel registered for the (tail, head) edge and
// whether such an edge exists.
func (c *chanDB) getChan(tail, head int64) (chan gorgonia.Value, bool) {
	channel, found := c.dico[tail][head]
	return channel, found
}
// len reports the number of distinct tail nodes currently indexed
// (not the total number of channels).
func (c *chanDB) len() int {
	return len(c.dico)
}
|
package com.md.appuserconnect.core.model.messageslanguage;
import java.io.Serializable;
import javax.jdo.annotations.IdGeneratorStrategy;
import javax.jdo.annotations.PersistenceCapable;
import javax.jdo.annotations.Persistent;
import javax.jdo.annotations.PrimaryKey;
import org.json.JSONException;
import com.google.appengine.api.datastore.Key;
import com.md.appuserconnect.core.model.messages.Message;
import com.md.appuserconnect.core.utils.JSONObject2;
/**
 * Localized texts (body, header, up to three button labels) for one
 * {@link Message}, persisted as a JDO entity in the App Engine datastore.
 * One instance exists per (messageID, language) pair.
 *
 * NOTE(review): the class only declares the (String, Message) constructor;
 * JDO typically needs a no-arg constructor — the enhancer is presumably
 * supplying it, confirm the build runs JDO enhancement.
 */
@SuppressWarnings("serial")
@PersistenceCapable
public class MessageLanguage implements Serializable{
	// Datastore primary key, generated by the datastore (IDENTITY strategy).
	@PrimaryKey
	@Persistent(valueStrategy = IdGeneratorStrategy.IDENTITY)
	private Key id;
	// ID of the Message this localization belongs to.
	@Persistent
	private String messageID;
	// Language code of this localization.
	@Persistent
	private String language = "";
	// Localized message body.
	@Persistent
	private String messageText = "";
	// Localized message header. NOTE(review): exposed via
	// getMessageTitle()/setMessageTitle() — field/accessor names disagree.
	@Persistent
	private String messageHeader = "";
	// Localized labels for up to three action buttons.
	@Persistent
	private String button1Text = "";
	@Persistent
	private String button2Text = "";
	@Persistent
	private String button3Text = "";
	public Key getId() {
		return id;
	}
	public void setId(Key id) {
		this.id = id;
	}
	/**
	 * Creates a localization for the given message in the given language.
	 * Text fields start empty and are filled in via the setters.
	 */
	public MessageLanguage(String language, Message msg) {
		this.language = language;
		this.messageID = msg.getMessageID();
	}
	public String getMessageID() {
		return messageID;
	}
	public void setMessageID(String messageID) {
		this.messageID = messageID;
	}
	public String getLanguage() {
		return language;
	}
	public void setLanguage(String language) {
		this.language = language;
	}
	public String getMessageText() {
		return messageText;
	}
	public void setMessageText(String messageText) {
		this.messageText = messageText;
	}
	// Backed by the messageHeader field (see note on the field above).
	public String getMessageTitle() {
		return messageHeader;
	}
	public void setMessageTitle(String messageTitle) {
		this.messageHeader = messageTitle;
	}
	public String getButton1Text() {
		return button1Text;
	}
	public void setButton1Text(String button1Text) {
		this.button1Text = button1Text;
	}
	public String getButton2Text() {
		return button2Text;
	}
	public void setButton2Text(String button2Text) {
		this.button2Text = button2Text;
	}
	public String getButton3Text() {
		return button3Text;
	}
	public void setButton3Text(String button3Text) {
		this.button3Text = button3Text;
	}
	/**
	 * Serializes one localization into a JSON object with the keys
	 * Language, MessageHeader, MessageText, Button1Text..Button3Text.
	 *
	 * @throws JSONException if a value cannot be stored in the JSON object
	 */
	public static JSONObject2 getAsJSONObject(MessageLanguage language) throws JSONException {
		JSONObject2 json = new JSONObject2();
		json.put("Language", language.getLanguage());
		json.put("MessageHeader", language.getMessageTitle());
		json.put("MessageText", language.getMessageText());
		json.put("Button1Text", language.getButton1Text());
		json.put("Button2Text", language.getButton2Text());
		json.put("Button3Text", language.getButton3Text());
		return json;
	}
}
|
# Config for Powerlevel10k with 8-color lean prompt style. Type `p10k configure` to generate
# your own config based on it.
#
# Tip: Looking for a nice color? Here's a one-liner to print colormap.
#
# for i in {0..255}; do print -Pn "%K{$i} %k%F{$i}${(l:3::0:)i}%f " ${${(M)$((i%6)):#3}:+$'\n'}; done
# Temporarily change options.
'builtin' 'local' '-a' 'p10k_config_opts'
[[ ! -o 'aliases' ]] || p10k_config_opts+=('aliases')
[[ ! -o 'sh_glob' ]] || p10k_config_opts+=('sh_glob')
[[ ! -o 'no_brace_expand' ]] || p10k_config_opts+=('no_brace_expand')
'builtin' 'setopt' 'no_aliases' 'no_sh_glob' 'brace_expand'
() {
emulate -L zsh -o extended_glob
# Unset all configuration options. This allows you to apply configuration changes without
# restarting zsh. Edit ~/.p10k.zsh and type `source ~/.p10k.zsh`.
unset -m '(POWERLEVEL9K_*|DEFAULT_USER)~POWERLEVEL9K_GITSTATUS_DIR'
# Zsh >= 5.1 is required.
autoload -Uz is-at-least && is-at-least 5.1 || return
# The list of segments shown on the left. Fill it with the most important segments.
typeset -g POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(
# =========================[ Line #1 ]=========================
# os_icon # os identifier
dir # current directory
vcs # git status
# =========================[ Line #2 ]=========================
newline # \n
prompt_char # prompt symbol
)
# The list of segments shown on the right. Fill it with less important segments.
# Right prompt on the last prompt line (where you are typing your commands) gets
# automatically hidden when the input line reaches it. Right prompt above the
# last prompt line gets hidden if it would overlap with left prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(
# =========================[ Line #1 ]=========================
status # exit code of the last command
command_execution_time # duration of the last command
background_jobs # presence of background jobs
direnv # direnv status (https://direnv.net/)
asdf # asdf version manager (https://github.com/asdf-vm/asdf)
virtualenv # python virtual environment (https://docs.python.org/3/library/venv.html)
anaconda # conda environment (https://conda.io/)
pyenv # python environment (https://github.com/pyenv/pyenv)
goenv # go environment (https://github.com/syndbg/goenv)
nodenv # node.js version from nodenv (https://github.com/nodenv/nodenv)
nvm # node.js version from nvm (https://github.com/nvm-sh/nvm)
nodeenv # node.js environment (https://github.com/ekalinin/nodeenv)
# node_version # node.js version
# go_version # go version (https://golang.org)
# rust_version # rustc version (https://www.rust-lang.org)
# dotnet_version # .NET version (https://dotnet.microsoft.com)
# php_version # php version (https://www.php.net/)
# laravel_version # laravel php framework version (https://laravel.com/)
# java_version # java version (https://www.java.com/)
# package # name@version from package.json (https://docs.npmjs.com/files/package.json)
rbenv # ruby version from rbenv (https://github.com/rbenv/rbenv)
rvm # ruby version from rvm (https://rvm.io)
fvm # flutter version management (https://github.com/leoafarias/fvm)
luaenv # lua version from luaenv (https://github.com/cehoffman/luaenv)
jenv # java version from jenv (https://github.com/jenv/jenv)
plenv # perl version from plenv (https://github.com/tokuhirom/plenv)
phpenv # php version from phpenv (https://github.com/phpenv/phpenv)
haskell_stack # haskell version from stack (https://haskellstack.org/)
kubecontext # current kubernetes context (https://kubernetes.io/)
terraform # terraform workspace (https://www.terraform.io)
aws # aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html)
aws_eb_env # aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/)
azure # azure account name (https://docs.microsoft.com/en-us/cli/azure)
gcloud # google cloud cli account and project (https://cloud.google.com/)
google_app_cred # google application credentials (https://cloud.google.com/docs/authentication/production)
context # user@hostname
nordvpn # nordvpn connection status, linux only (https://nordvpn.com/)
ranger # ranger shell (https://github.com/ranger/ranger)
nnn # nnn shell (https://github.com/jarun/nnn)
vim_shell # vim shell indicator (:sh)
midnight_commander # midnight commander shell (https://midnight-commander.org/)
nix_shell # nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html)
# vpn_ip # virtual private network indicator
# load # CPU load
# disk_usage # disk usage
# ram # free RAM
# swap # used swap
todo # todo items (https://github.com/todotxt/todo.txt-cli)
timewarrior # timewarrior tracking status (https://timewarrior.net/)
taskwarrior # taskwarrior task count (https://taskwarrior.org/)
# time # current time
# =========================[ Line #2 ]=========================
newline # \n
# ip # ip address and bandwidth usage for a specified network interface
# public_ip # public IP address
# proxy # system-wide http/https/ftp proxy
# battery # internal battery
# wifi # wifi speed
# example # example user-defined segment (see prompt_example function below)
)
# Defines character set used by powerlevel10k. It's best to let `p10k configure` set it for you.
typeset -g POWERLEVEL9K_MODE=nerdfont-complete
# When set to `moderate`, some icons will have an extra space after them. This is meant to avoid
# icon overlap when using non-monospace fonts. When set to `none`, spaces are not added.
typeset -g POWERLEVEL9K_ICON_PADDING=none
# Basic style options that define the overall look of your prompt. You probably don't want to
# change them.
typeset -g POWERLEVEL9K_BACKGROUND= # transparent background
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_{LEFT,RIGHT}_WHITESPACE= # no surrounding whitespace
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SUBSEGMENT_SEPARATOR=' ' # separate segments with a space
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SEGMENT_SEPARATOR= # no end-of-line symbol
# When set to true, icons appear before content on both sides of the prompt. When set
# to false, icons go after content. If empty or not set, icons go before content in the left
# prompt and after content in the right prompt.
#
# You can also override it for a specific segment:
#
# POWERLEVEL9K_STATUS_ICON_BEFORE_CONTENT=false
#
# Or for a specific segment in specific state:
#
# POWERLEVEL9K_DIR_NOT_WRITABLE_ICON_BEFORE_CONTENT=false
typeset -g POWERLEVEL9K_ICON_BEFORE_CONTENT=true
# Add an empty line before each prompt.
typeset -g POWERLEVEL9K_PROMPT_ADD_NEWLINE=true
# Connect left prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX=
# Connect right prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_SUFFIX=
# The left end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
# The right end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL=
# Ruler, a.k.a. the horizontal line before each prompt. If you set it to true, you'll
# probably want to set POWERLEVEL9K_PROMPT_ADD_NEWLINE=false above and
# POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' ' below.
typeset -g POWERLEVEL9K_SHOW_RULER=false
typeset -g POWERLEVEL9K_RULER_CHAR='─' # reasonable alternative: '·'
typeset -g POWERLEVEL9K_RULER_FOREGROUND=7
# Filler between left and right prompt on the first prompt line. You can set it to '·' or '─'
# to make it easier to see the alignment between left and right prompt and to separate prompt
# from command output. It serves the same purpose as ruler (see above) without increasing
# the number of prompt lines. You'll probably want to set POWERLEVEL9K_SHOW_RULER=false
# if using this. You might also like POWERLEVEL9K_PROMPT_ADD_NEWLINE=false for more compact
# prompt.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' '
if [[ $POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR != ' ' ]]; then
# The color of the filler.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND=7
# Add a space between the end of left prompt and the filler.
typeset -g POWERLEVEL9K_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=' '
# Add a space between the filler and the start of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL=' '
# Start filler from the edge of the screen if there are no left segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_FIRST_SEGMENT_END_SYMBOL='%{%}'
# End filler on the edge of the screen if there are no right segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='%{%}'
fi
#################################[ os_icon: os identifier ]##################################
# OS identifier color.
typeset -g POWERLEVEL9K_OS_ICON_FOREGROUND=
# Custom icon.
# typeset -g POWERLEVEL9K_OS_ICON_CONTENT_EXPANSION='⭐'
################################[ prompt_char: prompt symbol ]################################
# Green prompt symbol if the last command succeeded.
typeset -g POWERLEVEL9K_PROMPT_CHAR_OK_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=2
# Red prompt symbol if the last command failed.
typeset -g POWERLEVEL9K_PROMPT_CHAR_ERROR_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=1
# Default prompt symbol.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIINS_CONTENT_EXPANSION='❯'
# Prompt symbol in command vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VICMD_CONTENT_EXPANSION='❮'
# Prompt symbol in visual vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIVIS_CONTENT_EXPANSION='V'
# Prompt symbol in overwrite vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIOWR_CONTENT_EXPANSION='▶'
typeset -g POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE=true
# No line terminator if prompt_char is the last segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=''
# No line introducer if prompt_char is the first segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
##################################[ dir: current directory ]##################################
# Default current directory color.
typeset -g POWERLEVEL9K_DIR_FOREGROUND=4
# If directory is too long, shorten some of its segments to the shortest possible unique
# prefix. The shortened directory can be tab-completed to the original.
typeset -g POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique
# Replace removed segment suffixes with this symbol.
typeset -g POWERLEVEL9K_SHORTEN_DELIMITER=
# Color of the shortened directory segments.
typeset -g POWERLEVEL9K_DIR_SHORTENED_FOREGROUND=4
# Color of the anchor directory segments. Anchor segments are never shortened. The first
# segment is always an anchor.
typeset -g POWERLEVEL9K_DIR_ANCHOR_FOREGROUND=4
# Set to true to display anchor directory segments in bold.
typeset -g POWERLEVEL9K_DIR_ANCHOR_BOLD=false
# Don't shorten directories that contain any of these files. They are anchors.
local anchor_files=(
.bzr
.citc
.git
.hg
.node-version
.python-version
.go-version
.ruby-version
.lua-version
.java-version
.perl-version
.php-version
.tool-version
.shorten_folder_marker
.svn
.terraform
CVS
Cargo.toml
composer.json
go.mod
package.json
stack.yaml
)
typeset -g POWERLEVEL9K_SHORTEN_FOLDER_MARKER="(${(j:|:)anchor_files})"
# If set to "first" ("last"), remove everything before the first (last) subdirectory that contains
# files matching $POWERLEVEL9K_SHORTEN_FOLDER_MARKER. For example, when the current directory is
# /foo/bar/git_repo/nested_git_repo/baz, prompt will display git_repo/nested_git_repo/baz (first)
# or nested_git_repo/baz (last). This assumes that git_repo and nested_git_repo contain markers
# and other directories don't.
typeset -g POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=false
# Don't shorten this many last directory segments. They are anchors.
typeset -g POWERLEVEL9K_SHORTEN_DIR_LENGTH=1
# Shorten directory if it's longer than this even if there is space for it. The value can
# be either absolute (e.g., '80') or a percentage of terminal width (e.g, '50%'). If empty,
# directory will be shortened only when prompt doesn't fit or when other parameters demand it
# (see POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS and POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT below).
# If set to `0`, directory will always be shortened to its minimum length.
typeset -g POWERLEVEL9K_DIR_MAX_LENGTH=80
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least this
# many columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS=40
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least
# COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT=50
# If set to true, embed a hyperlink into the directory. Useful for quickly
# opening a directory in the file manager simply by clicking the link.
# Can also be handy when the directory is shortened, as it allows you to see
# the full directory that was used in previous commands.
typeset -g POWERLEVEL9K_DIR_HYPERLINK=false
# Enable special styling for non-writable directories. See POWERLEVEL9K_LOCK_ICON and
# POWERLEVEL9K_DIR_CLASSES below.
typeset -g POWERLEVEL9K_DIR_SHOW_WRITABLE=v2
# The default icon shown next to non-writable directories when POWERLEVEL9K_DIR_SHOW_WRITABLE is
# set to v2.
# typeset -g POWERLEVEL9K_LOCK_ICON='⭐'
# POWERLEVEL9K_DIR_CLASSES allows you to specify custom icons and colors for different
# directories. It must be an array with 3 * N elements. Each triplet consists of:
#
# 1. A pattern against which the current directory ($PWD) is matched. Matching is done with
# extended_glob option enabled.
# 2. Directory class for the purpose of styling.
# 3. An empty string.
#
# Triplets are tried in order. The first triplet whose pattern matches $PWD wins.
#
# If POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v2 and the current directory is not writable,
# its class gets suffix _NOT_WRITABLE.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=(
# '~/work(|/*)' WORK ''
# '~(|/*)' HOME ''
# '*' DEFAULT '')
#
# Whenever the current directory is ~/work or a subdirectory of ~/work, it gets styled with class
# WORK or WORK_NOT_WRITABLE.
#
# Simply assigning classes to directories don't have any visible effects. It merely gives you an
# option to define custom colors and icons for different directory classes.
#
# # Styling for WORK.
# typeset -g POWERLEVEL9K_DIR_WORK_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_FOREGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_SHORTENED_FOREGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_ANCHOR_FOREGROUND=4
#
# # Styling for WORK_NOT_WRITABLE.
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_SHORTENED_FOREGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_ANCHOR_FOREGROUND=4
#
# If a styling parameter isn't explicitly defined for some class, it falls back to the classless
# parameter. For example, if POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND is not set, it falls
# back to POWERLEVEL9K_DIR_FOREGROUND.
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=()
# Custom prefix.
# typeset -g POWERLEVEL9K_DIR_PREFIX='%fin '
#####################################[ vcs: git status ]######################################
# Branch icon. Set this parameter to '\uF126 ' for the popular Powerline branch icon.
typeset -g POWERLEVEL9K_VCS_BRANCH_ICON=
# Untracked files icon. It's really a question mark, your font isn't broken.
# Change the value of this parameter to show a different icon.
typeset -g POWERLEVEL9K_VCS_UNTRACKED_ICON='?'
# Formatter for Git status.
#
# Example output: master ⇣42⇡42 *42 merge ~42 +42 !42 ?42.
#
# You can edit the function to customize how Git status looks.
#
# VCS_STATUS_* parameters are set by gitstatus plugin. See reference:
# https://github.com/romkatv/gitstatus/blob/master/gitstatus.plugin.zsh.
# Build the Git status text for the vcs prompt segment and store it in the
# global parameter my_git_format (read by the CONTENT_EXPANSION parameters
# below). $1 is 1 when gitstatus data is fresh and up to date (colored
# output) and 0 when it is stale or still loading (uncolored output).
# The function is registered as a zsh math function (see `functions -M`
# below), hence the result is passed through a global rather than stdout.
function my_git_formatter() {
  emulate -L zsh  # reset shell options locally for predictable behavior
  if [[ -n $P9K_CONTENT ]]; then
    # If P9K_CONTENT is not empty, use it. It's either "loading" or from vcs_info (not from
    # gitstatus plugin). VCS_STATUS_* parameters are not available in this case.
    typeset -g my_git_format=$P9K_CONTENT
    return
  fi
  if (( $1 )); then
    # Styling for up-to-date Git status. %NF are zsh prompt-escape foreground colors.
    local meta='%f' # default foreground
    local clean='%2F' # green foreground
    local modified='%3F' # yellow foreground
    local untracked='%4F' # blue foreground
    local conflicted='%1F' # red foreground
  else
    # Styling for incomplete and stale Git status.
    local meta='%f' # default foreground
    local clean='%f' # default foreground
    local modified='%f' # default foreground
    local untracked='%f' # default foreground
    local conflicted='%f' # default foreground
  fi
  local res
  local where # branch or tag
  if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then
    # ${(g::)...} interprets escape sequences in the icon definition.
    res+="${clean}${(g::)POWERLEVEL9K_VCS_BRANCH_ICON}"
    # ${(V)...} makes special (unprintable) characters visible.
    where=${(V)VCS_STATUS_LOCAL_BRANCH}
  elif [[ -n $VCS_STATUS_TAG ]]; then
    # A '#' prefix distinguishes tags from branches.
    res+="${meta}#"
    where=${(V)VCS_STATUS_TAG}
  fi
  # If local branch name or tag is at most 32 characters long, show it in full.
  # Otherwise show the first 12 … the last 12.
  # Tip: To always show local branch name in full without truncation, delete the next line.
  (( $#where > 32 )) && where[13,-13]="…"
  res+="${clean}${where//\%/%%}" # escape %
  # Display the current Git commit if there is no branch or tag.
  # Tip: To always display the current Git commit, remove `[[ -z $where ]] &&` from the next line.
  [[ -z $where ]] && res+="${meta}@${clean}${VCS_STATUS_COMMIT[1,8]}"
  # Show tracking branch name if it differs from local branch.
  if [[ -n ${VCS_STATUS_REMOTE_BRANCH:#$VCS_STATUS_LOCAL_BRANCH} ]]; then
    res+="${meta}:${clean}${(V)VCS_STATUS_REMOTE_BRANCH//\%/%%}" # escape %
  fi
  # ⇣42 if behind the remote.
  (( VCS_STATUS_COMMITS_BEHIND )) && res+=" ${clean}⇣${VCS_STATUS_COMMITS_BEHIND}"
  # ⇡42 if ahead of the remote; no leading space if also behind the remote: ⇣42⇡42.
  (( VCS_STATUS_COMMITS_AHEAD && !VCS_STATUS_COMMITS_BEHIND )) && res+=" "
  (( VCS_STATUS_COMMITS_AHEAD )) && res+="${clean}⇡${VCS_STATUS_COMMITS_AHEAD}"
  # ⇠42 if behind the push remote.
  (( VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" ${clean}⇠${VCS_STATUS_PUSH_COMMITS_BEHIND}"
  (( VCS_STATUS_PUSH_COMMITS_AHEAD && !VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" "
  # ⇢42 if ahead of the push remote; no leading space if also behind: ⇠42⇢42.
  (( VCS_STATUS_PUSH_COMMITS_AHEAD )) && res+="${clean}⇢${VCS_STATUS_PUSH_COMMITS_AHEAD}"
  # *42 if have stashes.
  (( VCS_STATUS_STASHES )) && res+=" ${clean}*${VCS_STATUS_STASHES}"
  # 'merge' if the repo is in an unusual state.
  [[ -n $VCS_STATUS_ACTION ]] && res+=" ${conflicted}${VCS_STATUS_ACTION}"
  # ~42 if have merge conflicts.
  (( VCS_STATUS_NUM_CONFLICTED )) && res+=" ${conflicted}~${VCS_STATUS_NUM_CONFLICTED}"
  # +42 if have staged changes.
  (( VCS_STATUS_NUM_STAGED )) && res+=" ${modified}+${VCS_STATUS_NUM_STAGED}"
  # !42 if have unstaged changes.
  (( VCS_STATUS_NUM_UNSTAGED )) && res+=" ${modified}!${VCS_STATUS_NUM_UNSTAGED}"
  # ?42 if have untracked files. It's really a question mark, your font isn't broken.
  # See POWERLEVEL9K_VCS_UNTRACKED_ICON above if you want to use a different icon.
  # Remove the next line if you don't want to see untracked files at all.
  (( VCS_STATUS_NUM_UNTRACKED )) && res+=" ${untracked}${(g::)POWERLEVEL9K_VCS_UNTRACKED_ICON}${VCS_STATUS_NUM_UNTRACKED}"
  # "─" if the number of unstaged files is unknown. This can happen due to
  # POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY (see below) being set to a non-negative number lower
  # than the number of files in the Git index, or due to bash.showDirtyState being set to false
  # in the repository config. The number of staged and untracked files may also be unknown
  # in this case.
  (( VCS_STATUS_HAS_UNSTAGED == -1 )) && res+=" ${modified}─"
  typeset -g my_git_format=$res
}
# Register my_git_formatter as a zsh math function so the CONTENT_EXPANSION
# parameters below can invoke it; stderr is silenced in case the running zsh
# doesn't support `functions -M`.
functions -M my_git_formatter 2>/dev/null
# Don't count the number of unstaged, untracked and conflicted files in Git repositories with
# more than this many files in the index. Negative value means infinity.
#
# If you are working in Git repositories with tens of millions of files and seeing performance
# sagging, try setting POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY to a number lower than the output
# of `git ls-files | wc -l`. Alternatively, add `bash.showDirtyState = false` to the repository's
# config: `git config bash.showDirtyState false`.
typeset -g POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY=-1
# Don't show Git status in prompt for repositories whose workdir matches this pattern.
# For example, if set to '~', the Git repository at $HOME/.git will be ignored.
# Multiple patterns can be combined with '|': '~(|/foo)|/bar/baz/*'.
typeset -g POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN='~'
# Disable the default Git status formatting.
typeset -g POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING=true
# Install our own Git status formatter (the math-function call fills in my_git_format).
typeset -g POWERLEVEL9K_VCS_CONTENT_EXPANSION='${$((my_git_formatter(1)))+${my_git_format}}'
typeset -g POWERLEVEL9K_VCS_LOADING_CONTENT_EXPANSION='${$((my_git_formatter(0)))+${my_git_format}}'
# Enable counters for staged, unstaged, etc. (-1 means no limit on displayed counts).
typeset -g POWERLEVEL9K_VCS_{STAGED,UNSTAGED,UNTRACKED,CONFLICTED,COMMITS_AHEAD,COMMITS_BEHIND}_MAX_NUM=-1
# Icon color.
typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_COLOR=2
typeset -g POWERLEVEL9K_VCS_LOADING_VISUAL_IDENTIFIER_COLOR=
# Custom icon.
# typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_VCS_PREFIX='%fon '
# Show status of repositories of these types. You can add svn and/or hg if you are
# using them. If you do, your prompt may become slow even when your current directory
# isn't in an svn or hg repository.
typeset -g POWERLEVEL9K_VCS_BACKENDS=(git)
# These settings are used for repositories other than Git or when gitstatusd fails and
# Powerlevel10k has to fall back to using vcs_info.
typeset -g POWERLEVEL9K_VCS_CLEAN_FOREGROUND=2
typeset -g POWERLEVEL9K_VCS_UNTRACKED_FOREGROUND=2
typeset -g POWERLEVEL9K_VCS_MODIFIED_FOREGROUND=3
##########################[ status: exit code of the last command ]###########################
# Enable OK_PIPE, ERROR_PIPE and ERROR_SIGNAL status states to allow us to enable, disable and
# style them independently from the regular OK and ERROR state.
typeset -g POWERLEVEL9K_STATUS_EXTENDED_STATES=true
# Status on success. No content, just an icon. No need to show it if prompt_char is enabled as
# it will signify success by turning green.
typeset -g POWERLEVEL9K_STATUS_OK=false
typeset -g POWERLEVEL9K_STATUS_OK_FOREGROUND=2
typeset -g POWERLEVEL9K_STATUS_OK_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when some part of a pipe command fails but the overall exit status is zero. It may look
# like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_OK_PIPE=true
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_FOREGROUND=2
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when it's just an error code (e.g., '1'). No need to show it if prompt_char is enabled as
# it will signify error by turning red.
typeset -g POWERLEVEL9K_STATUS_ERROR=false
typeset -g POWERLEVEL9K_STATUS_ERROR_FOREGROUND=1
typeset -g POWERLEVEL9K_STATUS_ERROR_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when the last command was terminated by a signal.
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL=true
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_FOREGROUND=1
# Use terse signal names: "INT" instead of "SIGINT(2)".
typeset -g POWERLEVEL9K_STATUS_VERBOSE_SIGNAME=false
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when some part of a pipe command fails and the overall exit status is also non-zero.
# It may look like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE=true
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_FOREGROUND=1
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_VISUAL_IDENTIFIER_EXPANSION='✘'
###################[ command_execution_time: duration of the last command ]###################
# Show duration of the last command if it takes longer than this many seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
# Show this many fractional digits. Zero means round to seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
# Execution time color.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FOREGROUND=3
# Duration format: 1d 2h 3m 4s.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT='d h m s'
# Custom icon.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PREFIX='%ftook '
#######################[ background_jobs: presence of background jobs ]#######################
# Don't show the number of background jobs.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE=false
# Background jobs color.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_FOREGROUND=1
# Custom icon.
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ direnv: direnv status (https://direnv.net/) ]########################
# Direnv color.
typeset -g POWERLEVEL9K_DIRENV_FOREGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_DIRENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ asdf: asdf version manager (https://github.com/asdf-vm/asdf) ]###############
# Default asdf color. Only used to display tools for which there is no color override (see below).
typeset -g POWERLEVEL9K_ASDF_FOREGROUND=6
# There are four parameters that can be used to hide asdf tools. Each parameter describes
# conditions under which a tool gets hidden. Parameters can hide tools but not unhide them. If at
# least one parameter decides to hide a tool, that tool gets hidden. If no parameter decides to
# hide a tool, it gets shown.
#
# Special note on the difference between POWERLEVEL9K_ASDF_SOURCES and
# POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW. Consider the effect of the following commands:
#
#   asdf local  python 3.8.1
#   asdf global python 3.8.1
#
# After running both commands the current python version is 3.8.1 and its source is "local" as
# it takes precedence over "global". If POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW is set to false,
# it'll hide python version in this case because 3.8.1 is the same as the global version.
# POWERLEVEL9K_ASDF_SOURCES will hide python version only if the value of this parameter doesn't
# contain "local".
# Hide tool versions that don't come from one of these sources.
#
# Available sources:
#
# - shell   `asdf current` says "set by ASDF_${TOOL}_VERSION environment variable"
# - local   `asdf current` says "set by /some/not/home/directory/file"
# - global  `asdf current` says "set by /home/username/file"
#
# Note: If this parameter is set to (shell local global), it won't hide tools.
# Tip:  Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SOURCES.
typeset -g POWERLEVEL9K_ASDF_SOURCES=(shell local global)
# If set to false, hide tool versions that are the same as global.
#
# Note: The name of this parameter doesn't reflect its meaning at all.
# Note: If this parameter is set to true, it won't hide tools.
# Tip:  Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_PROMPT_ALWAYS_SHOW.
typeset -g POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW=false
# If set to false, hide tool versions that are equal to "system".
#
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_SYSTEM.
typeset -g POWERLEVEL9K_ASDF_SHOW_SYSTEM=true
# If set to non-empty value, hide tools unless there is a file matching the specified file pattern
# in the current directory, or its parent directory, or its grandparent directory, and so on.
#
# Note: If this parameter is set to empty value, it won't hide tools.
# Note: SHOW_ON_UPGLOB isn't specific to asdf. It works with all prompt segments.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_ON_UPGLOB.
#
# Example: Hide nodejs version when there is no package.json and no *.js files in the current
# directory, in `..`, in `../..` and so on.
#
#   typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.js|package.json'
typeset -g POWERLEVEL9K_ASDF_SHOW_ON_UPGLOB=
# Per-tool overrides: each tool below gets its own foreground color; the
# commented-out lines show how to override its icon and SHOW_ON_UPGLOB pattern.
# Ruby version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUBY_FOREGROUND=1
# typeset -g POWERLEVEL9K_ASDF_RUBY_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUBY_SHOW_ON_UPGLOB='*.foo|*.bar'
# Python version from asdf.
typeset -g POWERLEVEL9K_ASDF_PYTHON_FOREGROUND=6
# typeset -g POWERLEVEL9K_ASDF_PYTHON_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PYTHON_SHOW_ON_UPGLOB='*.foo|*.bar'
# Go version from asdf.
typeset -g POWERLEVEL9K_ASDF_GOLANG_FOREGROUND=6
# typeset -g POWERLEVEL9K_ASDF_GOLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_GOLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Node.js version from asdf.
typeset -g POWERLEVEL9K_ASDF_NODEJS_FOREGROUND=2
# typeset -g POWERLEVEL9K_ASDF_NODEJS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.foo|*.bar'
# Rust version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUST_FOREGROUND=4
# typeset -g POWERLEVEL9K_ASDF_RUST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUST_SHOW_ON_UPGLOB='*.foo|*.bar'
# .NET Core version from asdf.
typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_FOREGROUND=5
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_SHOW_ON_UPGLOB='*.foo|*.bar'
# Flutter version from asdf.
typeset -g POWERLEVEL9K_ASDF_FLUTTER_FOREGROUND=4
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_SHOW_ON_UPGLOB='*.foo|*.bar'
# Lua version from asdf.
typeset -g POWERLEVEL9K_ASDF_LUA_FOREGROUND=4
# typeset -g POWERLEVEL9K_ASDF_LUA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_LUA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Java version from asdf.
typeset -g POWERLEVEL9K_ASDF_JAVA_FOREGROUND=4
# typeset -g POWERLEVEL9K_ASDF_JAVA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JAVA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Perl version from asdf.
typeset -g POWERLEVEL9K_ASDF_PERL_FOREGROUND=6
# typeset -g POWERLEVEL9K_ASDF_PERL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PERL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Erlang version from asdf.
typeset -g POWERLEVEL9K_ASDF_ERLANG_FOREGROUND=1
# typeset -g POWERLEVEL9K_ASDF_ERLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ERLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Elixir version from asdf.
typeset -g POWERLEVEL9K_ASDF_ELIXIR_FOREGROUND=5
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_SHOW_ON_UPGLOB='*.foo|*.bar'
# Postgres version from asdf.
typeset -g POWERLEVEL9K_ASDF_POSTGRES_FOREGROUND=6
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_SHOW_ON_UPGLOB='*.foo|*.bar'
# PHP version from asdf.
typeset -g POWERLEVEL9K_ASDF_PHP_FOREGROUND=5
# typeset -g POWERLEVEL9K_ASDF_PHP_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PHP_SHOW_ON_UPGLOB='*.foo|*.bar'
# Haskell version from asdf.
typeset -g POWERLEVEL9K_ASDF_HASKELL_FOREGROUND=3
# typeset -g POWERLEVEL9K_ASDF_HASKELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_HASKELL_SHOW_ON_UPGLOB='*.foo|*.bar'
##########[ nordvpn: nordvpn connection status, linux only (https://nordvpn.com/) ]###########
# NordVPN connection indicator color.
typeset -g POWERLEVEL9K_NORDVPN_FOREGROUND=6
# Hide NordVPN connection indicator when not connected: the content and icon
# expansions for these states expand to nothing, so the segment renders empty.
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_CONTENT_EXPANSION=
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_VISUAL_IDENTIFIER_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NORDVPN_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ ranger: ranger shell (https://github.com/ranger/ranger) ]##################
# Ranger shell color.
typeset -g POWERLEVEL9K_RANGER_FOREGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_RANGER_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################[ nnn: nnn shell (https://github.com/jarun/nnn) ]#######################
# Nnn shell color.
typeset -g POWERLEVEL9K_NNN_FOREGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_NNN_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########################[ vim_shell: vim shell indicator (:sh) ]###########################
# Vim shell indicator color.
typeset -g POWERLEVEL9K_VIM_SHELL_FOREGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_VIM_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
######[ midnight_commander: midnight commander shell (https://midnight-commander.org/) ]######
# Midnight Commander shell color.
typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_FOREGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ nix_shell: nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) ]##
# Nix shell color.
typeset -g POWERLEVEL9K_NIX_SHELL_FOREGROUND=4
# Tip: If you want to see just the icon without "pure" and "impure", uncomment the next line.
# typeset -g POWERLEVEL9K_NIX_SHELL_CONTENT_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NIX_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ disk_usage: disk usage ]##################################
# Colors for different levels of disk usage.
typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_FOREGROUND=2
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_FOREGROUND=3
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_FOREGROUND=1
# Thresholds for different levels of disk usage (percentage points).
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL=90
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL=95
# If set to true, hide disk usage when below $POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL percent.
# (false here means disk usage is always shown.)
typeset -g POWERLEVEL9K_DISK_USAGE_ONLY_WARNING=false
# Custom icon.
# typeset -g POWERLEVEL9K_DISK_USAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ ram: free RAM ]#######################################
# RAM color.
typeset -g POWERLEVEL9K_RAM_FOREGROUND=2
# Custom icon.
# typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################################[ swap: used swap ]######################################
# Swap color.
typeset -g POWERLEVEL9K_SWAP_FOREGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_SWAP_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ load: CPU load ]######################################
# Show average CPU load over this many last minutes. Valid values are 1, 5 and 15.
typeset -g POWERLEVEL9K_LOAD_WHICH=5
# Load color when load is under 50%.
typeset -g POWERLEVEL9K_LOAD_NORMAL_FOREGROUND=2
# Load color when load is between 50% and 70%.
typeset -g POWERLEVEL9K_LOAD_WARNING_FOREGROUND=3
# Load color when load is over 70%.
typeset -g POWERLEVEL9K_LOAD_CRITICAL_FOREGROUND=1
# Custom icon.
# typeset -g POWERLEVEL9K_LOAD_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ todo: todo items (https://github.com/todotxt/todo.txt-cli) ]################
# Todo color.
typeset -g POWERLEVEL9K_TODO_FOREGROUND=4
# Hide todo when the total number of tasks is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL=true
# Hide todo when the number of tasks after filtering is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED=false
# Todo format. The following parameters are available within the expansion.
#
# - P9K_TODO_TOTAL_TASK_COUNT     The total number of tasks.
# - P9K_TODO_FILTERED_TASK_COUNT  The number of tasks after filtering.
#
# These variables correspond to the last line of the output of `todo.sh -p ls`:
#
#   TODO: 24 of 42 tasks shown
#
# Here 24 is P9K_TODO_FILTERED_TASK_COUNT and 42 is P9K_TODO_TOTAL_TASK_COUNT.
#
# typeset -g POWERLEVEL9K_TODO_CONTENT_EXPANSION='$P9K_TODO_FILTERED_TASK_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TODO_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ timewarrior: timewarrior tracking status (https://timewarrior.net/) ]############
# Timewarrior color.
typeset -g POWERLEVEL9K_TIMEWARRIOR_FOREGROUND=4
# If the tracked task is longer than 24 characters, truncate and append "…".
# ${P9K_CONTENT:0:24} keeps the first 24 characters; the :+ part appends "…"
# only when something was actually cut off.
# Tip: To always display tasks without truncation, delete the following parameter.
# Tip: To hide task names and display just the icon when time tracking is enabled, set the
# value of the following parameter to "".
typeset -g POWERLEVEL9K_TIMEWARRIOR_CONTENT_EXPANSION='${P9K_CONTENT:0:24}${${P9K_CONTENT:24}:+…}'
# Custom icon.
# typeset -g POWERLEVEL9K_TIMEWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ taskwarrior: taskwarrior task count (https://taskwarrior.org/) ]##############
# Taskwarrior color.
typeset -g POWERLEVEL9K_TASKWARRIOR_FOREGROUND=6
# Taskwarrior segment format. The following parameters are available within the expansion.
#
# - P9K_TASKWARRIOR_PENDING_COUNT   The number of pending tasks: `task +PENDING count`.
# - P9K_TASKWARRIOR_OVERDUE_COUNT   The number of overdue tasks: `task +OVERDUE count`.
#
# Zero values are represented as empty parameters.
#
# The default format:
#
#   '${P9K_TASKWARRIOR_OVERDUE_COUNT:+"!$P9K_TASKWARRIOR_OVERDUE_COUNT/"}$P9K_TASKWARRIOR_PENDING_COUNT'
#
# typeset -g POWERLEVEL9K_TASKWARRIOR_CONTENT_EXPANSION='$P9K_TASKWARRIOR_PENDING_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TASKWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ context: user@hostname ]##################################
# Context color when running with privileges.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_FOREGROUND=1
# Context color in SSH without privileges.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_FOREGROUND=7
# Default context color (no privileges, no SSH).
typeset -g POWERLEVEL9K_CONTEXT_FOREGROUND=7
# Context format when running with privileges: bold user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_TEMPLATE='%B%n@%m'
# Context format when in SSH without privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_TEMPLATE='%n@%m'
# Default context format (no privileges, no SSH): user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_TEMPLATE='%n@%m'
# Don't show context unless running with privileges or in SSH.
# Tip: Remove the next line to always show context.
typeset -g POWERLEVEL9K_CONTEXT_{DEFAULT,SUDO}_{CONTENT,VISUAL_IDENTIFIER}_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_CONTEXT_PREFIX='%fwith '
###[ virtualenv: python virtual environment (https://docs.python.org/3/library/venv.html) ]###
# Python virtual environment color.
typeset -g POWERLEVEL9K_VIRTUALENV_FOREGROUND=6
# Don't show Python version next to the virtual environment name.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION=false
# Don't show virtualenv if pyenv is already shown.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV=false
# Separate environment name from Python version only with a space.
typeset -g POWERLEVEL9K_VIRTUALENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_VIRTUALENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ anaconda: conda environment (https://conda.io/) ]######################
# Anaconda environment color.
typeset -g POWERLEVEL9K_ANACONDA_FOREGROUND=6
# Anaconda segment format. The following parameters are available within the expansion.
#
# - CONDA_PREFIX                 Absolute path to the active Anaconda/Miniconda environment.
# - CONDA_DEFAULT_ENV            Name of the active Anaconda/Miniconda environment.
# - CONDA_PROMPT_MODIFIER        Configurable prompt modifier (see below).
# - P9K_ANACONDA_PYTHON_VERSION  Current python version (python --version).
#
# CONDA_PROMPT_MODIFIER can be configured with the following command:
#
#   conda config --set env_prompt '({default_env}) '
#
# The last argument is a Python format string that can use the following variables:
#
# - prefix       The same as CONDA_PREFIX.
# - default_env  The same as CONDA_DEFAULT_ENV.
# - name         The last segment of CONDA_PREFIX.
# - stacked_env  Comma-separated list of names in the environment stack. The first element is
#                always the same as default_env.
#
# Note: '({default_env}) ' is the default value of env_prompt.
#
# The default value of POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION expands to $CONDA_PROMPT_MODIFIER
# without the leading '(' or the trailing ') '.
# The nested ${...} expansions strip '(' from the front and ' ' then ')' from the back.
typeset -g POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION='${${${CONDA_PROMPT_MODIFIER#\(}% }%\)}'
# Custom icon.
# typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ pyenv: python environment (https://github.com/pyenv/pyenv) ]################
# Pyenv color.
typeset -g POWERLEVEL9K_PYENV_FOREGROUND=6
# Hide python version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PYENV_SOURCES=(shell local global)
# If set to false, hide python version if it's the same as global:
# $(pyenv version-name) == $(pyenv global).
typeset -g POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide python version if it's equal to "system".
typeset -g POWERLEVEL9K_PYENV_SHOW_SYSTEM=true
# Pyenv segment format. The following parameters are available within the expansion.
#
# - P9K_CONTENT                Current pyenv environment (pyenv version-name).
# - P9K_PYENV_PYTHON_VERSION   Current python version (python --version).
#
# The default format has the following logic:
#
# 1. Display "$P9K_CONTENT $P9K_PYENV_PYTHON_VERSION" if $P9K_PYENV_PYTHON_VERSION is not
#    empty and unequal to $P9K_CONTENT.
# 2. Otherwise display just "$P9K_CONTENT".
typeset -g POWERLEVEL9K_PYENV_CONTENT_EXPANSION='${P9K_CONTENT}${${P9K_PYENV_PYTHON_VERSION:#$P9K_CONTENT}:+ $P9K_PYENV_PYTHON_VERSION}'
# Custom icon.
# typeset -g POWERLEVEL9K_PYENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ goenv: go environment (https://github.com/syndbg/goenv) ]################
# Goenv color.
typeset -g POWERLEVEL9K_GOENV_FOREGROUND=6
# Hide go version if it doesn't come from one of these sources.
# (See the asdf section above for the meaning of SOURCES / PROMPT_ALWAYS_SHOW / SHOW_SYSTEM.)
typeset -g POWERLEVEL9K_GOENV_SOURCES=(shell local global)
# If set to false, hide go version if it's the same as global:
# $(goenv version-name) == $(goenv global).
typeset -g POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide go version if it's equal to "system".
typeset -g POWERLEVEL9K_GOENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_GOENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ nodenv: node.js version from nodenv (https://github.com/nodenv/nodenv) ]##########
# Nodenv color.
typeset -g POWERLEVEL9K_NODENV_FOREGROUND=2
# Hide node version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_NODENV_SOURCES=(shell local global)
# If set to false, hide node version if it's the same as global:
# $(nodenv version-name) == $(nodenv global).
typeset -g POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide node version if it's equal to "system".
typeset -g POWERLEVEL9K_NODENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ nvm: node.js version from nvm (https://github.com/nvm-sh/nvm) ]###############
# Nvm color.
typeset -g POWERLEVEL9K_NVM_FOREGROUND=2
# Custom icon.
# typeset -g POWERLEVEL9K_NVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ nodeenv: node.js environment (https://github.com/ekalinin/nodeenv) ]############
# Nodeenv color.
typeset -g POWERLEVEL9K_NODEENV_FOREGROUND=2
# Don't show Node version next to the environment name.
typeset -g POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION=false
# Separate environment name from Node version only with a space.
typeset -g POWERLEVEL9K_NODEENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_NODEENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############################[ node_version: node.js version ]###############################
# Node version color.
typeset -g POWERLEVEL9K_NODE_VERSION_FOREGROUND=2
# Show node version only when in a directory tree containing package.json.
typeset -g POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODE_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ go_version: go version (https://golang.org) ]########################
# Go version color.
typeset -g POWERLEVEL9K_GO_VERSION_FOREGROUND=6
# Show go version only when in a go project subdirectory.
typeset -g POWERLEVEL9K_GO_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_GO_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ rust_version: rustc version (https://www.rust-lang.org) ]##################
# Rust version color.
typeset -g POWERLEVEL9K_RUST_VERSION_FOREGROUND=4
# Show rust version only when in a rust project subdirectory.
typeset -g POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_RUST_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ dotnet_version: .NET version (https://dotnet.microsoft.com) ]################
# .NET version color.
typeset -g POWERLEVEL9K_DOTNET_VERSION_FOREGROUND=5
# Show .NET version only when in a .NET project subdirectory.
typeset -g POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_DOTNET_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ php_version: php version (https://www.php.net/) ]######################
# PHP version color.
typeset -g POWERLEVEL9K_PHP_VERSION_FOREGROUND=5
# Show PHP version only when in a PHP project subdirectory.
typeset -g POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHP_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ laravel_version: laravel php framework version (https://laravel.com/) ]###########
# Laravel version color.
typeset -g POWERLEVEL9K_LARAVEL_VERSION_FOREGROUND=1
# Custom icon.
# typeset -g POWERLEVEL9K_LARAVEL_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
####################[ java_version: java version (https://www.java.com/) ]####################
# Java version color.
typeset -g POWERLEVEL9K_JAVA_VERSION_FOREGROUND=4
# Show java version only when in a java project subdirectory.
typeset -g POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY=true
# Show brief version (FULL=false selects the abbreviated form).
typeset -g POWERLEVEL9K_JAVA_VERSION_FULL=false
# Custom icon.
# typeset -g POWERLEVEL9K_JAVA_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###[ package: name@version from package.json (https://docs.npmjs.com/files/package.json) ]####
# Package color.
typeset -g POWERLEVEL9K_PACKAGE_FOREGROUND=6
# Package format. The following parameters are available within the expansion.
#
# - P9K_PACKAGE_NAME     The value of `name` field in package.json.
# - P9K_PACKAGE_VERSION  The value of `version` field in package.json.
#
# typeset -g POWERLEVEL9K_PACKAGE_CONTENT_EXPANSION='${P9K_PACKAGE_NAME//\%/%%}@${P9K_PACKAGE_VERSION//\%/%%}'
# Custom icon.
# typeset -g POWERLEVEL9K_PACKAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ rbenv: ruby version from rbenv (https://github.com/rbenv/rbenv) ]##############
# Rbenv color.
typeset -g POWERLEVEL9K_RBENV_FOREGROUND=1
# Hide ruby version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_RBENV_SOURCES=(shell local global)
# If set to false, hide ruby version if it's the same as global:
# $(rbenv version-name) == $(rbenv global).
typeset -g POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide ruby version if it's equal to "system".
typeset -g POWERLEVEL9K_RBENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_RBENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ rvm: ruby version from rvm (https://rvm.io) ]########################
# Rvm color.
typeset -g POWERLEVEL9K_RVM_FOREGROUND=1
# Don't show @gemset at the end.
typeset -g POWERLEVEL9K_RVM_SHOW_GEMSET=false
# Don't show ruby- at the front.
typeset -g POWERLEVEL9K_RVM_SHOW_PREFIX=false
# Custom icon.
# typeset -g POWERLEVEL9K_RVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ fvm: flutter version management (https://github.com/leoafarias/fvm) ]############
# Fvm color.
typeset -g POWERLEVEL9K_FVM_FOREGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_FVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ luaenv: lua version from luaenv (https://github.com/cehoffman/luaenv) ]###########
# Lua color.
typeset -g POWERLEVEL9K_LUAENV_FOREGROUND=4
# Hide lua version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_LUAENV_SOURCES=(shell local global)
# If set to false, hide lua version if it's the same as global:
# $(luaenv version-name) == $(luaenv global).
typeset -g POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide lua version if it's equal to "system".
typeset -g POWERLEVEL9K_LUAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_LUAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ jenv: java version from jenv (https://github.com/jenv/jenv) ]################
# Java color.
typeset -g POWERLEVEL9K_JENV_FOREGROUND=4
# Hide java version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_JENV_SOURCES=(shell local global)
# If set to false, hide java version if it's the same as global:
# $(jenv version-name) == $(jenv global).
typeset -g POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide java version if it's equal to "system".
typeset -g POWERLEVEL9K_JENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_JENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ plenv: perl version from plenv (https://github.com/tokuhirom/plenv) ]############
# Perl color.
typeset -g POWERLEVEL9K_PLENV_FOREGROUND=6
# Hide perl version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PLENV_SOURCES=(shell local global)
# If set to false, hide perl version if it's the same as global:
# $(plenv version-name) == $(plenv global).
typeset -g POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide perl version if it's equal to "system".
typeset -g POWERLEVEL9K_PLENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PLENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ phpenv: php version from phpenv (https://github.com/phpenv/phpenv) ]############
# PHP color.
typeset -g POWERLEVEL9K_PHPENV_FOREGROUND=5
# Hide php version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PHPENV_SOURCES=(shell local global)
# If set to false, hide php version if it's the same as global:
# $(phpenv version-name) == $(phpenv global).
typeset -g POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide php version if it's equal to "system".
typeset -g POWERLEVEL9K_PHPENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHPENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ haskell_stack: haskell version from stack (https://haskellstack.org/) ]###########
# Haskell color.
typeset -g POWERLEVEL9K_HASKELL_STACK_FOREGROUND=3
# Hide haskell version if it doesn't come from one of these sources.
#
# shell: version is set by STACK_YAML
# local: version is set by stack.yaml up the directory tree
# global: version is set by the implicit global project (~/.stack/global-project/stack.yaml)
typeset -g POWERLEVEL9K_HASKELL_STACK_SOURCES=(shell local)
# If set to false, hide haskell version if it's the same as in the implicit global project.
typeset -g POWERLEVEL9K_HASKELL_STACK_ALWAYS_SHOW=true
# Custom icon.
# typeset -g POWERLEVEL9K_HASKELL_STACK_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ kubecontext: current kubernetes context (https://kubernetes.io/) ]#############
  # Show kubecontext only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show kubecontext.
typeset -g POWERLEVEL9K_KUBECONTEXT_SHOW_ON_COMMAND='kubectl|helm|kubens|kubectx|oc|istioctl|kogito'
# Kubernetes context classes for the purpose of using different colors, icons and expansions with
# different contexts.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current kubernetes context gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_KUBECONTEXT_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_KUBECONTEXT_CLASSES defines the context class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current kubernetes context is "deathray-testing/default", its class is TEST
# because "deathray-testing/default" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_FOREGROUND=3
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_FOREGROUND=5
# typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_KUBECONTEXT_CONTENT_EXPANSION to specify the content displayed by kubecontext
# segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# Within the expansion the following parameters are always available:
#
# - P9K_CONTENT The content that would've been displayed if there was no content
# expansion defined.
# - P9K_KUBECONTEXT_NAME The current context's name. Corresponds to column NAME in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_CLUSTER The current context's cluster. Corresponds to column CLUSTER in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_NAMESPACE The current context's namespace. Corresponds to column NAMESPACE
# in the output of `kubectl config get-contexts`. If there is no
# namespace, the parameter is set to "default".
# - P9K_KUBECONTEXT_USER The current context's user. Corresponds to column AUTHINFO in the
# output of `kubectl config get-contexts`.
#
# If the context points to Google Kubernetes Engine (GKE) or Elastic Kubernetes Service (EKS),
# the following extra parameters are available:
#
# - P9K_KUBECONTEXT_CLOUD_NAME Either "gke" or "eks".
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT Account/project ID.
# - P9K_KUBECONTEXT_CLOUD_ZONE Availability zone.
# - P9K_KUBECONTEXT_CLOUD_CLUSTER Cluster.
#
# P9K_KUBECONTEXT_CLOUD_* parameters are derived from P9K_KUBECONTEXT_CLUSTER. For example,
# if P9K_KUBECONTEXT_CLUSTER is "gke_my-account_us-east1-a_my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=gke
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=my-account
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east1-a
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
#
# If P9K_KUBECONTEXT_CLUSTER is "arn:aws:eks:us-east-1:123456789012:cluster/my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=eks
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=123456789012
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east-1
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION=
# Show P9K_KUBECONTEXT_CLOUD_CLUSTER if it's not empty and fall back to P9K_KUBECONTEXT_NAME.
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${P9K_KUBECONTEXT_CLOUD_CLUSTER:-${P9K_KUBECONTEXT_NAME}}'
# Append the current context's namespace if it's not "default".
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default}'
# Custom prefix.
# typeset -g POWERLEVEL9K_KUBECONTEXT_PREFIX='%fat '
################[ terraform: terraform workspace (https://www.terraform.io) ]#################
# POWERLEVEL9K_TERRAFORM_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current terraform workspace gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_TERRAFORM_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_TERRAFORM_CLASSES defines the workspace class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current terraform workspace is "project_test", its class is TEST because "project_test"
# doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_FOREGROUND=2
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_TERRAFORM_DEFAULT_FOREGROUND=4
# typeset -g POWERLEVEL9K_TERRAFORM_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws: aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) ]#
  # Show aws only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show aws.
typeset -g POWERLEVEL9K_AWS_SHOW_ON_COMMAND='aws|awless|terraform|pulumi'
# POWERLEVEL9K_AWS_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current AWS profile gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_AWS_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_AWS_CLASSES defines the profile class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current AWS profile is "company_test", its class is TEST
# because "company_test" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_AWS_TEST_FOREGROUND=2
# typeset -g POWERLEVEL9K_AWS_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_AWS_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_AWS_DEFAULT_FOREGROUND=3
# typeset -g POWERLEVEL9K_AWS_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws_eb_env: aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) ]#
# AWS Elastic Beanstalk environment color.
typeset -g POWERLEVEL9K_AWS_EB_ENV_FOREGROUND=2
# Custom icon.
# typeset -g POWERLEVEL9K_AWS_EB_ENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ azure: azure account name (https://docs.microsoft.com/en-us/cli/azure) ]##########
  # Show azure only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show azure.
typeset -g POWERLEVEL9K_AZURE_SHOW_ON_COMMAND='az|terraform|pulumi'
# Azure account name color.
typeset -g POWERLEVEL9K_AZURE_FOREGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_AZURE_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ gcloud: google cloud account and project (https://cloud.google.com/) ]###########
  # Show gcloud only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show gcloud.
typeset -g POWERLEVEL9K_GCLOUD_SHOW_ON_COMMAND='gcloud|gcs'
# Google cloud color.
typeset -g POWERLEVEL9K_GCLOUD_FOREGROUND=4
# Google cloud format. Change the value of POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION and/or
# POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION if the default is too verbose or not informative
# enough. You can use the following parameters in the expansions. Each of them corresponds to the
# output of `gcloud` tool.
#
# Parameter | Source
# -------------------------|--------------------------------------------------------------------
# P9K_GCLOUD_CONFIGURATION | gcloud config configurations list --format='value(name)'
# P9K_GCLOUD_ACCOUNT | gcloud config get-value account
# P9K_GCLOUD_PROJECT_ID | gcloud config get-value project
# P9K_GCLOUD_PROJECT_NAME | gcloud projects describe $P9K_GCLOUD_PROJECT_ID --format='value(name)'
#
  # Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced with '%%'.
#
# Obtaining project name requires sending a request to Google servers. This can take a long time
# and even fail. When project name is unknown, P9K_GCLOUD_PROJECT_NAME is not set and gcloud
# prompt segment is in state PARTIAL. When project name gets known, P9K_GCLOUD_PROJECT_NAME gets
# set and gcloud prompt segment transitions to state COMPLETE.
#
# You can customize the format, icon and colors of gcloud segment separately for states PARTIAL
# and COMPLETE. You can also hide gcloud in state PARTIAL by setting
# POWERLEVEL9K_GCLOUD_PARTIAL_VISUAL_IDENTIFIER_EXPANSION and
# POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION to empty.
typeset -g POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_ID//\%/%%}'
typeset -g POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_NAME//\%/%%}'
# Send a request to Google (by means of `gcloud projects describe ...`) to obtain project name
# this often. Negative value disables periodic polling. In this mode project name is retrieved
# only when the current configuration, account or project id changes.
typeset -g POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS=60
# Custom icon.
# typeset -g POWERLEVEL9K_GCLOUD_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ google_app_cred: google application credentials (https://cloud.google.com/docs/authentication/production) ]#
  # Show google_app_cred only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show google_app_cred.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_SHOW_ON_COMMAND='terraform|pulumi'
# Google application credentials classes for the purpose of using different colors, icons and
# expansions with different credentials.
#
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES is an array with even number of elements. The first
# element in each pair defines a pattern against which the current kubernetes context gets
# matched. More specifically, it's P9K_CONTENT prior to the application of context expansion
# (see below) that gets matched. If you unset all POWERLEVEL9K_GOOGLE_APP_CRED_*CONTENT_EXPANSION
# parameters, you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES defines the context class. Patterns are tried in order.
# The first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD
# '*:*test*:*' TEST
# '*' DEFAULT)
#
# If your current Google application credentials is "service_account deathray-testing x@y.com",
  # its class is TEST because it doesn't match the pattern '*:*prod*:*' but does match '*:*test*:*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_FOREGROUND=3
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_CONTENT_EXPANSION='$P9K_GOOGLE_APP_CRED_PROJECT_ID'
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD # These values are examples that are unlikely
# '*:*test*:*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_FOREGROUND=5
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_GOOGLE_APP_CRED_CONTENT_EXPANSION to specify the content displayed by
# google_app_cred segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# You can use the following parameters in the expansion. Each of them corresponds to one of the
# fields in the JSON file pointed to by GOOGLE_APPLICATION_CREDENTIALS.
#
# Parameter | JSON key file field
# ---------------------------------+---------------
# P9K_GOOGLE_APP_CRED_TYPE | type
# P9K_GOOGLE_APP_CRED_PROJECT_ID | project_id
# P9K_GOOGLE_APP_CRED_CLIENT_EMAIL | client_email
#
  # Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced by '%%'.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_CONTENT_EXPANSION='${P9K_GOOGLE_APP_CRED_PROJECT_ID//\%/%%}'
###############################[ public_ip: public IP address ]###############################
# Public IP color.
typeset -g POWERLEVEL9K_PUBLIC_IP_FOREGROUND=6
# Custom icon.
# typeset -g POWERLEVEL9K_PUBLIC_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
########################[ vpn_ip: virtual private network indicator ]#########################
# VPN IP color.
typeset -g POWERLEVEL9K_VPN_IP_FOREGROUND=3
# When on VPN, show just an icon without the IP address.
# Tip: To display the private IP address when on VPN, remove the next line.
typeset -g POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION=
# Regular expression for the VPN network interface. Run `ifconfig` or `ip -4 a show` while on VPN
# to see the name of the interface.
typeset -g POWERLEVEL9K_VPN_IP_INTERFACE='(wg|(.*tun))[0-9]*'
# If set to true, show one segment per matching network interface. If set to false, show only
# one segment corresponding to the first matching network interface.
# Tip: If you set it to true, you'll probably want to unset POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION.
typeset -g POWERLEVEL9K_VPN_IP_SHOW_ALL=false
# Custom icon.
# typeset -g POWERLEVEL9K_VPN_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ ip: ip address and bandwidth usage for a specified network interface ]###########
# IP color.
typeset -g POWERLEVEL9K_IP_FOREGROUND=4
# The following parameters are accessible within the expansion:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_IP_IP | IP address
# P9K_IP_INTERFACE | network interface
# P9K_IP_RX_BYTES | total number of bytes received
# P9K_IP_TX_BYTES | total number of bytes sent
# P9K_IP_RX_RATE | receive rate (since last prompt)
# P9K_IP_TX_RATE | send rate (since last prompt)
typeset -g POWERLEVEL9K_IP_CONTENT_EXPANSION='$P9K_IP_IP${P9K_IP_RX_RATE:+ %2F⇣$P9K_IP_RX_RATE}${P9K_IP_TX_RATE:+ %3F⇡$P9K_IP_TX_RATE}'
# Show information for the first network interface whose name matches this regular expression.
# Run `ifconfig` or `ip -4 a show` to see the names of all network interfaces.
typeset -g POWERLEVEL9K_IP_INTERFACE='e.*'
# Custom icon.
# typeset -g POWERLEVEL9K_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
#########################[ proxy: system-wide http/https/ftp proxy ]##########################
# Proxy color.
typeset -g POWERLEVEL9K_PROXY_FOREGROUND=2
# Custom icon.
# typeset -g POWERLEVEL9K_PROXY_VISUAL_IDENTIFIER_EXPANSION='⭐'
################################[ battery: internal battery ]#################################
# Show battery in red when it's below this level and not connected to power supply.
typeset -g POWERLEVEL9K_BATTERY_LOW_THRESHOLD=20
typeset -g POWERLEVEL9K_BATTERY_LOW_FOREGROUND=1
# Show battery in green when it's charging or fully charged.
typeset -g POWERLEVEL9K_BATTERY_{CHARGING,CHARGED}_FOREGROUND=2
# Show battery in yellow when it's discharging.
typeset -g POWERLEVEL9K_BATTERY_DISCONNECTED_FOREGROUND=3
# Battery pictograms going from low to high level of charge.
typeset -g POWERLEVEL9K_BATTERY_STAGES=('%K{0}▁' '%K{0}▂' '%K{0}▃' '%K{0}▄' '%K{0}▅' '%K{0}▆' '%K{0}▇' '%K{0}█')
# Don't show the remaining time to charge/discharge.
typeset -g POWERLEVEL9K_BATTERY_VERBOSE=false
#####################################[ wifi: wifi speed ]#####################################
# WiFi color.
typeset -g POWERLEVEL9K_WIFI_FOREGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use different colors and icons depending on signal strength ($P9K_WIFI_BARS).
#
# # Wifi colors and icons for different signal strength levels (low to high).
# typeset -g my_wifi_fg=(4 4 4 4 4) # <-- change these values
# typeset -g my_wifi_icon=('WiFi' 'WiFi' 'WiFi' 'WiFi' 'WiFi') # <-- change these values
#
# typeset -g POWERLEVEL9K_WIFI_CONTENT_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}$P9K_WIFI_LAST_TX_RATE Mbps'
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}${my_wifi_icon[P9K_WIFI_BARS+1]}'
#
# The following parameters are accessible within the expansions:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_WIFI_SSID | service set identifier, a.k.a. network name
# P9K_WIFI_LINK_AUTH | authentication protocol such as "wpa2-psk" or "none"
# P9K_WIFI_LAST_TX_RATE | wireless transmit rate in megabits per second
# P9K_WIFI_RSSI | signal strength in dBm, from -120 to 0
# P9K_WIFI_NOISE | noise in dBm, from -120 to 0
# P9K_WIFI_BARS | signal strength in bars, from 0 to 4 (derived from P9K_WIFI_RSSI and P9K_WIFI_NOISE)
#
# All parameters except P9K_WIFI_BARS are extracted from the output of the following command:
#
# /System/Library/PrivateFrameworks/Apple80211.framework/Versions/Current/Resources/airport -I
####################################[ time: current time ]####################################
# Current time color.
typeset -g POWERLEVEL9K_TIME_FOREGROUND=6
# Format for the current time: 09:51:02. See `man 3 strftime`.
typeset -g POWERLEVEL9K_TIME_FORMAT='%D{%H:%M:%S}'
# If set to true, time will update when you hit enter. This way prompts for the past
# commands will contain the start times of their commands as opposed to the default
# behavior where they contain the end times of their preceding commands.
typeset -g POWERLEVEL9K_TIME_UPDATE_ON_COMMAND=false
# Custom icon.
# typeset -g POWERLEVEL9K_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_TIME_PREFIX='%fat '
# Example of a user-defined prompt segment. Function prompt_example will be called on every
# prompt if `example` prompt segment is added to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or
# POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS. It displays an icon and green text greeting the user.
#
# Type `p10k help segment` for documentation and a more sophisticated example.
function prompt_example() {
  # One segment: foreground color 2 (-f), star icon (-i) and the text
  # "hello, <username>" (-t); %n is zsh prompt expansion for the user name.
  p10k segment -f 2 -i '⭐' -t 'hello, %n'
}
# User-defined prompt segments may optionally provide an instant_prompt_* function. Its job
# is to generate the prompt segment for display in instant prompt. See
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
#
# Powerlevel10k will call instant_prompt_* at the same time as the regular prompt_* function
# and will record all `p10k segment` calls it makes. When displaying instant prompt, Powerlevel10k
# will replay these calls without actually calling instant_prompt_*. It is imperative that
# instant_prompt_* always makes the same `p10k segment` calls regardless of environment. If this
# rule is not observed, the content of instant prompt will be incorrect.
#
# Usually, you should either not define instant_prompt_* or simply call prompt_* from it. If
# instant_prompt_* is not defined for a segment, the segment won't be shown in instant prompt.
function instant_prompt_example() {
  # Instant-prompt counterpart of prompt_example.
  # Since prompt_example always makes the same `p10k segment` calls, we can call it from
  # instant_prompt_example. This will give us the same `example` prompt segment in the instant
  # and regular prompts.
  prompt_example
}
# User-defined prompt segments can be customized the same way as built-in segments.
# typeset -g POWERLEVEL9K_EXAMPLE_FOREGROUND=208
# typeset -g POWERLEVEL9K_EXAMPLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Transient prompt works similarly to the builtin transient_rprompt option. It trims down prompt
# when accepting a command line. Supported values:
#
# - off: Don't change prompt when accepting a command line.
# - always: Trim down prompt when accepting a command line.
# - same-dir: Trim down prompt when accepting a command line unless this is the first command
# typed after changing current working directory.
typeset -g POWERLEVEL9K_TRANSIENT_PROMPT=off
# Instant prompt mode.
#
# - off: Disable instant prompt. Choose this if you've tried instant prompt and found
# it incompatible with your zsh configuration files.
# - quiet: Enable instant prompt and don't print warnings when detecting console output
# during zsh initialization. Choose this if you've read and understood
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
# - verbose: Enable instant prompt and print a warning when detecting console output during
# zsh initialization. Choose this if you've never tried instant prompt, haven't
# seen the warning, or if you are unsure what this all means.
typeset -g POWERLEVEL9K_INSTANT_PROMPT=verbose
# Hot reload allows you to change POWERLEVEL9K options after Powerlevel10k has been initialized.
# For example, you can type POWERLEVEL9K_BACKGROUND=red and see your prompt turn red. Hot reload
# can slow down prompt by 1-2 milliseconds, so it's better to keep it turned off unless you
# really need it.
typeset -g POWERLEVEL9K_DISABLE_HOT_RELOAD=true
# If p10k is already loaded, reload configuration.
# This works even with POWERLEVEL9K_DISABLE_HOT_RELOAD=true.
(( ! $+functions[p10k] )) || p10k reload
}
# Tell `p10k configure` which file it should overwrite.
# ${(%):-%x} is the path of the file currently being sourced; :a makes it absolute.
typeset -g POWERLEVEL9K_CONFIG_FILE=${${(%):-%x}:a}

# Restore any shell options previously saved into p10k_config_opts, then
# discard the temporary variable that held them.
(( ${#p10k_config_opts} )) && setopt ${p10k_config_opts[@]}
'builtin' 'unset' 'p10k_config_opts'
|
// Surface area of a cube: six square faces, each sideLength × sideLength.
let sideLength = 5;
let faceArea = sideLength * sideLength;
let areaOfCube = 6 * faceArea;
console.log(`The surface area of the cube with a side length of ${sideLength} is ${areaOfCube}`);
#!/usr/bin/env bash
# Fetch per-node join scripts from the Promenade API (reached through a "via"
# host over ssh) and run each script on its target node to join it to the
# cluster.
#
# Usage: join-nodes.sh [-d DECKHAND_REVISION] [-l LABEL]... [-n NODE]... [-t] -v VIA_HOST
#
#   -d  Render documents via Deckhand at the given revision.
#   -l  Node label passed to the join-script endpoint (repeatable).
#   -n  Node to join (repeatable).
#   -t  Authenticate to the API with a Keystone token.
#   -v  Host on which curl/validation commands are executed (via ssh).
#
# Helper functions (log, ssh_cmd, rsync_cmd, os_ks_get_token, promenade_*)
# come from GATE_UTILS; TEMP_DIR and LOG_FILE are provided by the gate harness.

set -eu

source "${GATE_UTILS}"

declare -a LABELS
declare -a NODES

GET_KEYSTONE_TOKEN=0
USE_DECKHAND=0
DECKHAND_REVISION=''

while getopts "d:l:n:tv:" opt; do
  case "${opt}" in
    d)
      USE_DECKHAND=1
      DECKHAND_REVISION=${OPTARG}
      ;;
    l)
      LABELS+=("${OPTARG}")
      ;;
    n)
      NODES+=("${OPTARG}")
      ;;
    t)
      GET_KEYSTONE_TOKEN=1
      ;;
    v)
      VIA=${OPTARG}
      ;;
    *)
      echo "Unknown option"
      exit 1
      ;;
  esac
done
shift $((OPTIND-1))

if [ $# -gt 0 ]; then
  echo "Unknown arguments specified: ${*}"
  exit 1
fi

# Fail fast with a clear message; without this, `set -u` would abort later
# with an unhelpful "VIA: unbound variable" partway through the run.
if [ -z "${VIA:-}" ]; then
  echo "Missing required option: -v VIA_HOST"
  exit 1
fi

SCRIPT_DIR="${TEMP_DIR}/curled-scripts"

# NOTE(review): expanding an empty declared array with "${ARR[@]}" under
# `set -u` requires bash >= 4.4 -- confirm the gate environment guarantees it.
echo Labels: "${LABELS[@]}"
echo Nodes: "${NODES[@]}"

mkdir -p "${SCRIPT_DIR}"

for NAME in "${NODES[@]}"; do
  log Building join script for node "${NAME}"

  CURL_ARGS=("--fail" "--max-time" "300" "--retry" "16" "--retry-delay" "15")
  if [[ $GET_KEYSTONE_TOKEN == 1 ]]; then
    # A fresh token is requested per node so long joins don't outlive it.
    TOKEN="$(os_ks_get_token "${VIA}")"

    if [[ -z $TOKEN ]]; then
      log Failed to get keystone token, exiting.
      exit 1
    fi

    # NOTE: md5sum reads stdin here, so the logged value ends with " -".
    TOKEN_HASH=$(echo -n "${TOKEN}" | md5sum)
    log "Got keystone token, token md5sum: ${TOKEN_HASH}"
    CURL_ARGS+=("-H" "X-Auth-Token: ${TOKEN}")
  fi

  promenade_health_check "${VIA}"

  log "Validating documents"
  ssh_cmd "${VIA}" curl -v "${CURL_ARGS[@]}" -X POST -H "Content-Type: application/json" -d "$(promenade_render_validate_body "${USE_DECKHAND}" "${DECKHAND_REVISION}")" "$(promenade_render_validate_url)"

  JOIN_CURL_URL="$(promenade_render_curl_url "${NAME}" "${USE_DECKHAND}" "${DECKHAND_REVISION}" "${LABELS[@]}")"
  log "Fetching join script via: ${JOIN_CURL_URL}"

  # Download the join script to the local temp dir, then copy and execute it
  # on the target node, teeing output into the gate log.
  ssh_cmd "${VIA}" curl "${CURL_ARGS[@]}" \
    "${JOIN_CURL_URL}" > "${SCRIPT_DIR}/join-${NAME}.sh"
  chmod 755 "${SCRIPT_DIR}/join-${NAME}.sh"
  log "Join script received"

  log Joining node "${NAME}"
  rsync_cmd "${SCRIPT_DIR}/join-${NAME}.sh" "${NAME}:/root/promenade/"
  ssh_cmd "${NAME}" "/root/promenade/join-${NAME}.sh" 2>&1 | tee -a "${LOG_FILE}"
done
// 1wildman1/Hackathon
$(document).ready(function () {
  // Wire a ratings widget to its output element: whenever the rating changes,
  // mirror the new value into the output's text.
  var bindRating = function (selector, maxStars, outputSelector) {
    $(selector).ratings(maxStars).bind('ratingchanged', function (event, data) {
      $(outputSelector).text(data.rating);
    });
  };

  bindRating('#example-1', 10, '#example-rating-1');
  bindRating('#example-2', 5, '#example-rating-2');
});
import { Store } from '../models/store';

// Single shared Store instance used application-wide.
const store = new Store();

export default store;
# -*- coding: utf-8 -*-

import gzip
import logging
import pathlib
import statistics
import textwrap
from collections import defaultdict
from dataclasses import dataclass
from functools import cached_property
from functools import lru_cache
from itertools import combinations
from typing import Callable
from typing import Union

from irt.common import helper
from irt import text as itxt
from irt.graph import graph
from irt.graph import split as graph_split
def _ents_from_triples(triples):
    """Collect all entity ids occurring as head or tail in *triples*."""
    entities = set()
    for head, tail, _ in triples:
        entities.add(head)
        entities.add(tail)
    return entities
log = logging.getLogger(__name__)
@dataclass(eq=False)  # id based hashing
class Part:
    """One partition of a split: a triple set plus its open world entities.

    Instances hash by identity (eq=False), and derived views of the triple
    set are cached per instance via ``cached_property``. The previous
    ``@property`` + ``@lru_cache`` combination kept a strong reference to
    every instance in a global cache (ruff B019); ``cached_property``
    stores the value on the instance instead.
    """

    name: str
    owe: set[int]  # open world entities: entities first seen in this part
    triples: set[tuple[int, int, int]]  # (head id, tail id, relation id)

    @cached_property
    def graph(self) -> "graph.Graph":
        """Graph built lazily from this part's triples."""
        return graph.Graph(source=graph.GraphImport(triples=self.triples))

    @cached_property
    def entities(self) -> set[int]:
        """All entity ids occurring as head or tail in this part."""
        return _ents_from_triples(self.triples)

    @cached_property
    def heads(self) -> set[int]:
        """Entity ids occurring in head position."""
        if not self.triples:
            return set()

        return set(tuple(zip(*self.triples))[0])

    @cached_property
    def tails(self) -> set[int]:
        """Entity ids occurring in tail position."""
        if not self.triples:
            return set()

        return set(tuple(zip(*self.triples))[1])

    @property
    def description(self) -> str:
        """Multi-line count summary of this part."""
        return (
            f"owe: {len(self.owe)}\n"
            f"entities: {len(self.entities)}\n"
            f"heads: {len(self.heads)}\n"
            f"tails: {len(self.tails)}\n"
            f"triples: {len(self.triples)}\n"
        )

    # ---

    def __str__(self) -> str:
        return f"{self.name}={len(self.triples)}"

    def __or__(self, other: "Part") -> "Part":
        """Union of two parts; name, owe and triples are merged."""
        return Part(
            name=f"{self.name}|{other.name}",
            owe=self.owe | other.owe,
            triples=self.triples | other.triples,
        )
@dataclass
class Split:
"""
Container class for a ow/cw triple splits
"""
cfg: graph_split.Config
concepts: set[int]
closed_world: Part
open_world_valid: Part
open_world_test: Part
# ---
@property
def description(self) -> str:
s = f"IRT SPLIT\n{len(self.concepts)} retained concepts\n\n{self.cfg}"
# functools.partial not applicable :(
def _indent(s):
return textwrap.indent(s, " ")
s += f"\nClosed World - TRAIN:\n{_indent(self.closed_world.description)}"
s += f"\nOpen World - VALID:\n{_indent(self.open_world_valid.description)}"
s += f"\nOpen World - TEST:\n{_indent(self.open_world_test.description)}"
return s
# ---
def __str__(self) -> str:
return "IRT split: " + (
" | ".join(
f"{part}"
for part in (
self.closed_world,
self.open_world_valid,
self.open_world_test,
)
)
)
def __getitem__(self, key: str):
return {
"cw.train": self.closed_world,
"ow.valid": self.open_world_valid,
"ow.test": self.open_world_test,
}[key]
# ---
def check(self):
"""
Run some self diagnosis
"""
log.info("! running self-check for dataset split")
# no triples must be shared between splits
triplesets = (
("cw.train", self.closed_world.triples),
("ow.valid", self.open_world_valid.triples),
("ow.test", self.open_world_test.triples),
)
for (n1, s1), (n2, s2) in combinations(triplesets, 2):
assert s1.isdisjoint(s2), f"{n1} and {n2} share triples"
# no ow entities must be shared between splits
owesets = (
("cw.train", self.closed_world.owe),
("ow.valid", self.open_world_valid.owe),
("ow.test", self.open_world_test.owe),
)
for (n1, s1), (n2, s2) in combinations(owesets, 2):
assert s1.isdisjoint(s2), f"{n1} and {n2} share owe entities"
# ow entities must not be seen in earlier splits and no ow
# entities must occur in cw.valid (use .entities property
# which gets this information directly from the triple sets)
assert (
self.closed_world.owe == self.closed_world.entities
), "cw.train owe != cw.train entities"
seen = self.closed_world.entities
if self.cfg.strict:
assert self.open_world_valid.owe.isdisjoint(
seen
), "entities in ow valid leaked"
else:
log.warning("entities in ow valid leaked!")
seen |= self.open_world_valid.entities
if self.cfg.strict:
assert self.open_world_test.owe.isdisjoint(
seen
), "entities in ow test leaked"
else:
log.warning("entities in ow test leaked!")
# each triple of the open world splits must contain at least
# one open world entity
for part in (self.open_world_valid, self.open_world_test):
undesired = set(
(h, t, r)
for h, t, r in part.triples
if h not in part.owe and t not in part.owe
)
if self.cfg.strict:
# deactivate for fb15k237-owe
assert not len(undesired), f"found undesired triples: len({undesired})"
if len(undesired):
log.error(
f"there are {len(undesired)} triples containing"
f" only closed world entities in {part.name}"
)
# ---
    @classmethod
    def load(K, path: Union[str, pathlib.Path]):
        """
        Load a dataset split from disk.

        Reads config.yml, concepts.txt and the three triple files
        (closed_world, open_world-valid, open_world-test) from *path*,
        builds one Part per file and returns an instance of the class.
        A part's owe (open-world entities) are those of its entities not
        seen in any earlier part, so the iteration order below matters.
        """
        log.info("loading split data")

        path = helper.path(path, exists=True)
        cfg = graph_split.Config.load(path / "config.yml")

        with (path / "concepts.txt").open(mode="r") as fd:
            # we are only interested in the entity ids
            concepts = {int(e) for e, _ in (line.split(maxsplit=1) for line in fd)}

        parts, seen = {}, set()
        # order matters: owe is computed relative to the entities seen so far
        for name in ("closed_world", "open_world-valid", "open_world-test"):
            log.info(f"initializing part: {name}")
            key = name.replace("-", "_")

            # each line is a whitespace separated integer triple
            with (path / f"{name}.txt").open(mode="r") as fd:
                triples = {tuple(map(int, line.split())) for line in fd}

            entities = _ents_from_triples(triples)

            part = Part(
                name=name,
                owe=entities - seen,
                triples=triples,
            )

            parts[key] = part
            seen |= entities

        self = K(cfg=cfg, concepts=concepts, **parts)
        return self
@dataclass(frozen=True)
class TextSample:
    """A single text context for an entity."""

    # surface form of the entity as it appears in the text
    mention: str
    # the sentence/snippet in which the mention occurs
    context: str
class Text(defaultdict):
    """
    Mapping of entity id -> set of TextSample.

    Subclasses defaultdict(set), so indexing an unknown entity id yields
    an empty set. The text mode is remembered so callers know which
    variant of the text files was loaded.
    """

    # which pre-processing variant of the text was loaded
    mode: itxt.Mode

    def __init__(self, mode: itxt.Mode):
        super().__init__(set)
        self.mode = mode

    def __str__(self) -> str:
        fmt = "irt text: ~{mean_contexts:.2f} text contexts per entity"
        return fmt.format(**self.stats)

    @property
    def stats(self) -> dict[str, float]:
        """Mean/median number of contexts and distinct mentions per entity."""
        contexts, mentions = zip(
            *[
                (len(samples), len({sample.mention for sample in samples}))
                for samples in self.values()
            ]
        )

        return dict(
            mean_contexts=statistics.mean(contexts),
            median_contexts=statistics.median(contexts),
            mean_mentions=statistics.mean(mentions),
            median_mentions=statistics.median(mentions),
        )

    # fmt: off
    @property
    def description(self):
        """Multi-line, human readable summary of the text statistics."""
        s = f"IRT Text ({self.mode})\n"
        stats = (
            "mean contexts: {mean_contexts:.2f}\n"
            "median contexts: {median_contexts:.2f}\n"
            "mean mentions: {mean_mentions:.2f}\n"
            "median mentions: {median_mentions:.2f}\n"
        ).format(**self.stats)

        s += textwrap.indent(stats, " ")
        return s
    # fmt: on

    # ---

    @classmethod
    def load(K, path: Union[str, pathlib.Path], mode: itxt.Mode):
        """
        Load entity texts from the gzipped file selected by *mode*.

        Each non-empty line after the commented header is expected to be
        "<entity id><SEP><mention><SEP><context>"; one TextSample is added
        per line.
        """
        log.info(f"loading text data ({mode.value=})")
        self = K(mode=mode)

        path = helper.path(path, exists=True)
        fpath = path / itxt.Mode.filename(mode)

        with gzip.open(str(fpath), mode="rb") as fd:
            # first line is a commented header; sanity check only
            header = next(fd).decode().strip()
            assert header.startswith("#")

            splits = (
                # strip each value of the split lines
                map(str.strip, line.decode().split(itxt.SEP, maxsplit=2))
                for line in fd
                if line.strip()
            )

            for e, mention, context in splits:
                sample = TextSample(mention=mention, context=context)
                self[int(e)].add(sample)

        return self
class Dataset:
    """
    IRT Dataset

    Load an IRT dataset from a given folder. This container class
    maintains the full graph (based on networkx), the ow/cw
    triple-splits, and the text associated with entities.

    All three components (graph, split, text) are loaded lazily on first
    access from the sub-folders graph/, split/ and text/ of *path*.
    """

    # dataset root folder
    path: pathlib.Path

    # ---

    @property
    def name(self) -> str:
        """Name of the underlying graph."""
        return self.graph.name

    @property
    def config(self) -> graph_split.Config:
        """Configuration the split was created with."""
        return self.split.cfg

    @property
    def id2ent(self) -> dict[int, str]:
        """Mapping of entity id -> entity label."""
        return self.graph.source.ents

    @property
    def id2rel(self) -> dict[int, str]:
        """Mapping of relation id -> relation label."""
        return self.graph.source.rels

    # ---

    def _lazy(self, attr: str, fn: Callable, *args, **kwargs):
        # memoization helper: compute fn(*args, **kwargs) once and
        # cache the result on self under *attr*
        if not hasattr(self, attr):
            setattr(self, attr, fn(*args, **kwargs))

        return getattr(self, attr)

    @property
    def graph(self) -> graph.Graph:
        # loaded lazily on first access
        return self._lazy(
            "_graph",
            graph.Graph.load,
            path=self.path / "graph",
        )

    @property
    def split(self) -> Split:
        # loaded lazily on first access
        return self._lazy(
            "_split",
            Split.load,
            path=self.path / "split",
        )

    @property
    def text(self) -> Text:  # i.e. Dict[int, set[TextSample]]
        # loaded lazily on first access, with the mode chosen at init
        return self._lazy(
            "_text",
            Text.load,
            path=self.path / "text",
            mode=self._text_mode,
        )

    # ---

    def __str__(self):
        return f"IRT dataset:\n{self.graph}\n{self.split}\n{self.text}"

    # fmt: off
    @property
    @lru_cache
    # NOTE(review): lru_cache on a method keys the cache on self and keeps
    # every instance alive for the interpreter's lifetime (ruff B019);
    # functools.cached_property would be the leak-free equivalent — confirm
    # before changing.
    def description(self) -> str:
        """Cached, human readable description of graph, split and text."""
        return (
            f"IRT DATASET\n\n"
            f"{self.graph.description}\n"
            f"{self.split.description}\n"
            f"{self.text.description}"
        )
    # fmt: on

    # ---

    def __init__(
        self,
        path: Union[str, pathlib.Path],
        mode: itxt.Mode = itxt.Mode.CLEAN,
        check: bool = False,
    ):
        """
        Create a dataset container rooted at *path*.

        Args:
            path: dataset folder (must exist)
            mode: which text variant to load lazily (default: CLEAN)
            check: whether consistency checking is requested
        """
        self._check = check
        self._text_mode = mode
        self.path = helper.path(path, exists=True)
|
// tags.go
package bytecodec
import (
"fmt"
"reflect"
"regexp"
"strconv"
"strings"
"github.com/shimmeringbee/bytecodec/bitbuffer"
)
// StringTermination selects how a string's length is encoded on the wire.
type StringTermination uint8

const (
	// Prefix strings are preceded by a length prefix.
	Prefix StringTermination = 0
	// Null strings are terminated by a zero byte.
	Null StringTermination = 1

	// Struct tag keys recognised by the codec.
	TagEndian = "bcendian"
	TagSlicePrefix = "bcsliceprefix"
	TagStringType = "bcstringtype"
	TagIncludeIf = "bcincludeif"
	TagFieldWidth = "bcfieldwidth"

	// Keyword values used inside tag strings.
	BigEndianKeyword = "big"
	NullTerminationKeyword = "null"
)
// tagEndianness returns the byte order requested via the bcendian struct
// tag; any value other than "big" (including an absent tag) selects
// little endian.
func tagEndianness(tag reflect.StructTag) bitbuffer.Endian {
	switch tag.Get(TagEndian) {
	case BigEndianKeyword:
		return bitbuffer.BigEndian
	default:
		return bitbuffer.LittleEndian
	}
}
// SlicePrefixTag describes an optional length prefix for slices: the
// number of bits the prefix occupies and its byte order.
type SlicePrefixTag struct {
	Size uint8
	Endian bitbuffer.Endian
}

// HasPrefix reports whether a length prefix was configured (Size > 0).
func (l SlicePrefixTag) HasPrefix() bool {
	return l.Size > 0
}
// tagSlicePrefix parses the bcsliceprefix tag of the form
// "<bits>[,big]". When the tag is absent the zero value (no prefix,
// little endian) is returned; a non-numeric bit count yields an error.
func tagSlicePrefix(tag reflect.StructTag) (l SlicePrefixTag, err error) {
	// default byte order when none is specified
	l.Endian = bitbuffer.LittleEndian

	raw, present := tag.Lookup(TagSlicePrefix)
	if !present {
		return
	}

	fields := strings.Split(raw, ",")
	if len(fields) < 1 {
		return
	}

	var bits int
	if bits, err = strconv.Atoi(fields[0]); err != nil {
		return
	}
	l.Size = uint8(bits)

	// optional second field selects big endian
	if len(fields) >= 2 && fields[1] == BigEndianKeyword {
		l.Endian = bitbuffer.BigEndian
	}

	return
}
// StringTypeTag describes how strings are encoded: the termination style,
// the prefix size in bits and the prefix byte order.
type StringTypeTag struct {
	Termination StringTermination
	Size uint8
	Endian bitbuffer.Endian
}

// tagStringType parses the bcstringtype tag of the form
// "null|prefix[,<bits>[,big]]". Defaults: 8 bit little endian length
// prefix. A non-numeric bit count yields an error.
func tagStringType(tag reflect.StructTag) (s StringTypeTag, err error) {
	// defaults when the tag is absent or only partially specified
	s.Termination = Prefix
	s.Size = 8
	s.Endian = bitbuffer.LittleEndian

	rawTag, tagPresent := tag.Lookup(TagStringType)

	if !tagPresent {
		return
	}

	splitTag := strings.Split(rawTag, ",")

	// null termination has no length prefix
	if splitTag[0] == NullTerminationKeyword {
		s.Termination = Null
		s.Size = 0
	}

	if len(splitTag) <= 1 {
		return
	}

	// optional second field: prefix size in bits
	count, err := strconv.Atoi(splitTag[1])
	if err != nil {
		return
	}

	s.Size = uint8(count)

	if len(splitTag) <= 2 {
		return
	}

	// optional third field: byte order of the prefix
	if splitTag[2] == BigEndianKeyword {
		s.Endian = bitbuffer.BigEndian
	}

	return
}
// IncludeIfOperation is the comparison applied by an includeIf condition.
type IncludeIfOperation uint8

const (
	Equal IncludeIfOperation = 0x00
	NotEqual IncludeIfOperation = 0x01
)

// IncludeIfTag describes a conditional field: the field is only coded
// when the value found at FieldPath compares (per Operation) to Value.
// Relative marks a path anchored at the current struct rather than the
// root.
type IncludeIfTag struct {
	Relative bool
	FieldPath []string
	Operation IncludeIfOperation
	Value string
}

// IncludeIfRegex splits "path[==|!=]value" expressions into their parts.
var IncludeIfRegex = regexp.MustCompile(`^([a-zA-Z0-9.]+)(!=|==)?(.*)$`)
// tagIncludeIf parses the bcincludeif tag of the form
// "path[==|!=]value". A leading '.' in the path anchors it at the root
// struct; otherwise it is relative to the current struct. A missing
// operator means equality. Malformed tags yield an error.
func tagIncludeIf(tag reflect.StructTag) (i IncludeIfTag, err error) {
	rawTag, tagPresent := tag.Lookup(TagIncludeIf)
	if !tagPresent {
		return IncludeIfTag{}, nil
	}

	matches := IncludeIfRegex.FindAllSubmatch([]byte(rawTag), -1)

	// Robustness fix: an unparsable tag (e.g. one starting with an
	// operator character) previously caused an index-out-of-range panic
	// on matches[0]; report it as an error instead.
	if len(matches) == 0 {
		return IncludeIfTag{}, fmt.Errorf("'%s' is not a valid includeIf expression", rawTag)
	}

	path := string(matches[0][1])
	operator := string(matches[0][2])
	i.Value = string(matches[0][3])

	// a leading '.' marks an absolute path; its empty first split
	// element is skipped below
	i.Relative = path[0] != '.'

	pathParts := strings.Split(path, ".")

	partStart := 1
	if i.Relative {
		partStart = 0
	}

	i.FieldPath = pathParts[partStart:]

	switch operator {
	case "==", "":
		i.Operation = Equal
	case "!=":
		i.Operation = NotEqual
	default:
		return IncludeIfTag{}, fmt.Errorf("'%s' is not a valid includeIf operator", operator)
	}

	return
}
// HasIncludeIf reports whether an includeIf condition was configured.
func (i IncludeIfTag) HasIncludeIf() bool {
	return len(i.FieldPath) > 0
}
// FieldWidthTag carries an explicit bit width for a field. Default marks
// that no width was specified and the type's natural width applies.
type FieldWidthTag struct {
	Default bool
	BitWidth int
}

// Width returns the configured bit width, or defaultWidth when the tag
// did not specify one.
func (t FieldWidthTag) Width(defaultWidth int) int {
	if !t.Default {
		return t.BitWidth
	}
	return defaultWidth
}
// tagFieldWidth parses the bcfieldwidth tag. When absent the Default
// flag is set; otherwise the decimal width is parsed (8 bit range, so
// widths above 127 are rejected) and a parse failure is returned as err.
func tagFieldWidth(tag reflect.StructTag) (t FieldWidthTag, err error) {
	raw, present := tag.Lookup(TagFieldWidth)
	if !present {
		t.Default = true
		return
	}

	t.Default = false

	parsed, parseErr := strconv.ParseInt(raw, 10, 8)
	if parseErr != nil {
		return t, parseErr
	}
	t.BitWidth = int(parsed)

	return
}
|
#!/bin/bash
# Fix: the script uses bash-only features ([[ ]], == inside [ ], and
# ${PIPESTATUS[0]}), so it must run under bash, not POSIX sh.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework <framework name>
# Copies the named framework from the build products into the app's
# Frameworks folder, re-signs it if required, and embeds any Swift
# runtime dylibs it links against.
install_framework()
{
  local source="${BUILT_PRODUCTS_DIR}/Pods-MSServerSentEvents_Tests/$1"
  local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # resolve symlinked build products to their real location
  if [ -L "${source}" ]; then
      echo "Symlinked..."
      source=$(readlink "${source}")
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers/" --filter "- PrivateHeaders/" --filter "- Modules/" ${source} ${destination}"
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers/" --filter "- PrivateHeaders/" --filter "- Modules/" "${source}" "${destination}"

  # Resign the code if required by the build settings to avoid unstable apps
  if [ "${CODE_SIGNING_REQUIRED}" == "YES" ]; then
    code_sign "${destination}/$1"
  fi

  # Embed linked Swift runtime libraries
  # basename: framework name without its extension
  local basename
  basename=$(echo $1 | sed -E s/\\..+// && exit ${PIPESTATUS[0]})
  local swift_runtime_libs
  # list the @rpath libswift* dylibs the framework binary links against
  swift_runtime_libs=$(xcrun otool -LX "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}/$1/${basename}" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
  for lib in $swift_runtime_libs; do
    echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
    rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
    if [ "${CODE_SIGNING_REQUIRED}" == "YES" ]; then
      code_sign "${destination}/${lib}"
    fi
  done
}
# Signs a framework with the provided identity
code_sign() {
  # Use the current code_sign_identitiy
  echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
  echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements $1"
  # Fix: quote the identity so word splitting cannot break identities
  # containing spaces (e.g. "iPhone Developer: ...")
  /usr/bin/codesign --force --sign "${EXPANDED_CODE_SIGN_IDENTITY}" --preserve-metadata=identifier,entitlements "$1"
}
# Debug and Release embed the same framework set, so handle them with a
# single dispatch instead of two duplicated branches.
case "$CONFIGURATION" in
  Debug|Release)
    install_framework 'Expecta.framework'
    install_framework 'GCDWebServer.framework'
    install_framework 'MSServerSentEvents.framework'
    install_framework 'Specta.framework'
    install_framework 'libextobjc.framework'
    ;;
esac
|
# ltmain.sh - Provide generalized library-building support services.
# NOTE: Changing this file will not affect anything until you rerun configure.
#
# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005
# Free Software Foundation, Inc.
# Originally by Gordon Matzigkeit <gord@gnu.ai.mit.edu>, 1996
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# As a special exception to the GNU General Public License, if you
# distribute this file as part of a program that contains a
# configuration script generated by Autoconf, you may include it under
# the same distribution terms that you use for the rest of that program.
# sed script stripping everything up to the final slash (i.e. basename)
basename="s,^.*/,,g"

# Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh
# is ksh but when the shell is invoked as "sh" and the current value of
# the _XPG environment variable is not equal to 1 (one), the special
# positional parameter $0, within a function call, is the name of the
# function.
progpath="$0"

# define SED for historic ltconfig's generated by Libtool 1.3
test -z "$SED" && SED=sed

# The name of this program:
progname=`echo "$progpath" | $SED $basename`
modename="$progname"

# Global variables:
EXIT_SUCCESS=0
EXIT_FAILURE=1

PROGRAM=ltmain.sh
PACKAGE=libtool
VERSION=1.5.20
TIMESTAMP=" (1.1220.2.287 2005/08/31 18:54:15)"

# See if we are running on zsh, and set the options which allow our
# commands through without removal of \ escapes.
if test -n "${ZSH_VERSION+set}" ; then
  setopt NO_GLOB_SUBST
fi

# Check that we have a working $echo.
# NOTE: $echo is normally substituted into this script by configure.
if test "X$1" = X--no-reexec; then
  # Discard the --no-reexec flag, and continue.
  shift
elif test "X$1" = X--fallback-echo; then
  # Avoid inline document here, it may be left over
  :
elif test "X`($echo '\t') 2>/dev/null`" = 'X\t'; then
  # Yippee, $echo works!
  :
else
  # Restart under the correct shell, and then maybe $echo will work.
  exec $SHELL "$progpath" --no-reexec ${1+"$@"}
fi

if test "X$1" = X--fallback-echo; then
  # used as fallback echo
  shift
  cat <<EOF
$*
EOF
  exit $EXIT_SUCCESS
fi

default_mode=
help="Try \`$progname --help' for more information."
magic="%%%MAGIC variable%%%"
mkdir="mkdir"
mv="mv -f"
rm="rm -f"

# Sed substitution that helps us do robust quoting. It backslashifies
# metacharacters that are still active within double-quoted strings.
Xsed="${SED}"' -e 1s/^X//'
sed_quote_subst='s/\([\\`\\"$\\\\]\)/\\\1/g'

# test EBCDIC or ASCII
case `echo X|tr X '\101'` in
 A) # ASCII based system
    # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr
  SP2NL='tr \040 \012'
  NL2SP='tr \015\012 \040\040'
  ;;
 *) # EBCDIC based system
  SP2NL='tr \100 \n'
  NL2SP='tr \r\n \100\100'
  ;;
esac

# NLS nuisances.
# Only set LANG and LC_ALL to C if already set.
# These must not be set unconditionally because not all systems understand
# e.g. LANG=C (notably SCO).
# We save the old values to restore during execute mode.
if test "${LC_ALL+set}" = set; then
  save_LC_ALL="$LC_ALL"; LC_ALL=C; export LC_ALL
fi
if test "${LANG+set}" = set; then
  save_LANG="$LANG"; LANG=C; export LANG
fi

# Make sure IFS has a sensible default
lt_nl='
'
IFS=" $lt_nl"

if test "$build_libtool_libs" != yes && test "$build_old_libs" != yes; then
  $echo "$modename: not configured to build any kind of library" 1>&2
  $echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2
  exit $EXIT_FAILURE
fi

# Global variables.
mode=$default_mode
nonopt=
prev=
prevopt=
run=
show="$echo"
show_help=
execute_dlfiles=

# sed scripts converting between the .lo and .o object suffixes
lo2o="s/\\.lo\$/.${objext}/"
o2lo="s/\\.${objext}\$/.lo/"
#####################################
# Shell function definitions:
# This seems to be the best place for them

# func_win32_libid arg
# return the library type of file 'arg'
#
# Need a lot of goo to handle *both* DLLs and import libs
# Has to be a shell function in order to 'eat' the argument
# that is supplied when $file_magic_command is called.
func_win32_libid ()
{
  win32_libid_type="unknown"
  # classify via file(1) first
  win32_fileres=`file -L $1 2>/dev/null`
  case $win32_fileres in
  *ar\ archive\ import\ library*) # definitely import
    win32_libid_type="x86 archive import"
    ;;
  *ar\ archive*) # could be an import, or static
    # inspect the archive members to tell import libs from static archives
    if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | \
      $EGREP -e 'file format pe-i386(.*architecture: i386)?' >/dev/null ; then
      win32_nmres=`eval $NM -f posix -A $1 | \
	sed -n -e '1,100{/ I /{x;/import/!{s/^/import/;h;p;};x;};}'`
      if test "X$win32_nmres" = "Ximport" ; then
        win32_libid_type="x86 archive import"
      else
        win32_libid_type="x86 archive static"
      fi
    fi
    ;;
  *DLL*)
    win32_libid_type="x86 DLL"
    ;;
  *executable*) # but shell scripts are "executable" too...
    case $win32_fileres in
    *MS\ Windows\ PE\ Intel*)
      win32_libid_type="x86 DLL"
      ;;
    esac
    ;;
  esac
  # the result is printed on stdout for the caller to capture
  $echo $win32_libid_type
}
# func_infer_tag arg
# Infer tagged configuration to use if any are available and
# if one wasn't chosen via the "--tag" command line option.
# Only attempt this if the compiler in the base compile
# command doesn't match the default compiler.
# arg is usually of the form 'gcc ...'
func_infer_tag ()
{
    if test -n "$available_tags" && test -z "$tagname"; then
      CC_quoted=
      # Double-quote args containing shell metacharacters.
      for arg in $CC; do
	case $arg in
	  *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
	  arg="\"$arg\""
	  ;;
	esac
	CC_quoted="$CC_quoted $arg"
      done
      case $@ in
      # Blanks in the command may have been stripped by the calling shell,
      # but not from the CC environment variable when configure was run.
      " $CC "* | "$CC "* | " `$echo $CC` "* | "`$echo $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$echo $CC_quoted` "* | "`$echo $CC_quoted` "*) ;;
      # Blanks at the start of $base_compile will cause this to fail
      # if we don't check for them as well.
      *)
	for z in $available_tags; do
	  if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then
	    # Evaluate the configuration.
	    eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`"
	    CC_quoted=
	    for arg in $CC; do
	    # Double-quote args containing other shell metacharacters.
	    case $arg in
	      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
	      arg="\"$arg\""
	      ;;
	    esac
	    CC_quoted="$CC_quoted $arg"
	    done
	    case "$@ " in
	      " $CC "* | "$CC "* | " `$echo $CC` "* | "`$echo $CC` "* | " $CC_quoted"* | "$CC_quoted "* | " `$echo $CC_quoted` "* | "`$echo $CC_quoted` "*)
	      # The compiler in the base compile command matches
	      # the one in the tagged configuration.
	      # Assume this is the tagged configuration we want.
	      tagname=$z
	      break
	      ;;
	    esac
	  fi
	done
	# If $tagname still isn't set, then no tagged configuration
	# was found and let the user know that the "--tag" command
	# line option must be used.
	if test -z "$tagname"; then
	  $echo "$modename: unable to infer tagged configuration"
	  $echo "$modename: specify a tag with \`--tag'" 1>&2
	  exit $EXIT_FAILURE
#        else
#          $echo "$modename: using $tagname tagged configuration"
	fi
	;;
      esac
    fi
}
# func_extract_an_archive dir oldlib
# Extract all members of archive `oldlib' into directory `dir'.
func_extract_an_archive ()
{
    f_ex_an_ar_dir="$1"; shift
    f_ex_an_ar_oldlib="$1"

    $show "(cd $f_ex_an_ar_dir && $AR x $f_ex_an_ar_oldlib)"
    $run eval "(cd \$f_ex_an_ar_dir && $AR x \$f_ex_an_ar_oldlib)" || exit $?
    # "sort -uc" fails when the member list contains duplicates, i.e. two
    # archive members would extract onto the same file name
    if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then
     :
    else
      $echo "$modename: ERROR: object name conflicts: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib" 1>&2
      exit $EXIT_FAILURE
    fi
}
# func_extract_archives gentop oldlib ...
# Extract the objects of each given static archive into a scratch tree
# below `gentop'; the collected object list is left in
# $func_extract_archives_result.
func_extract_archives ()
{
    my_gentop="$1"; shift
    my_oldlibs=${1+"$@"}
    my_oldobjs=""
    my_xlib=""
    my_xabs=""
    my_xdir=""
    my_status=""

    # start from a clean scratch directory
    $show "${rm}r $my_gentop"
    $run ${rm}r "$my_gentop"
    $show "$mkdir $my_gentop"
    $run $mkdir "$my_gentop"
    my_status=$?
    if test "$my_status" -ne 0 && test ! -d "$my_gentop"; then
      exit $my_status
    fi

    for my_xlib in $my_oldlibs; do
      # Extract the objects.
      case $my_xlib in
	[\\/]* | [A-Za-z]:[\\/]*) my_xabs="$my_xlib" ;;
	*) my_xabs=`pwd`"/$my_xlib" ;;
      esac
      my_xlib=`$echo "X$my_xlib" | $Xsed -e 's%^.*/%%'`
      my_xdir="$my_gentop/$my_xlib"

      $show "${rm}r $my_xdir"
      $run ${rm}r "$my_xdir"
      $show "$mkdir $my_xdir"
      $run $mkdir "$my_xdir"
      status=$?
      if test "$status" -ne 0 && test ! -d "$my_xdir"; then
	exit $status
      fi
      case $host in
      *-darwin*)
	$show "Extracting $my_xabs"
	# Do not bother doing anything if just a dry run
	if test -z "$run"; then
	  darwin_orig_dir=`pwd`
	  cd $my_xdir || exit $?
	  darwin_archive=$my_xabs
	  darwin_curdir=`pwd`
	  darwin_base_archive=`$echo "X$darwin_archive" | $Xsed -e 's%^.*/%%'`
	  # fat (multi-arch) archives are thinned per architecture,
	  # extracted separately, then the objects are lipo'd back together
	  darwin_arches=`lipo -info "$darwin_archive" 2>/dev/null | $EGREP Architectures 2>/dev/null`
	  if test -n "$darwin_arches"; then
	    darwin_arches=`echo "$darwin_arches" | $SED -e 's/.*are://'`
	    darwin_arch=
	    $show "$darwin_base_archive has multiple architectures $darwin_arches"
	    for darwin_arch in $darwin_arches ; do
	      mkdir -p "unfat-$$/${darwin_base_archive}-${darwin_arch}"
	      lipo -thin $darwin_arch -output "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}" "${darwin_archive}"
	      cd "unfat-$$/${darwin_base_archive}-${darwin_arch}"
	      func_extract_an_archive "`pwd`" "${darwin_base_archive}"
	      cd "$darwin_curdir"
	      $rm "unfat-$$/${darwin_base_archive}-${darwin_arch}/${darwin_base_archive}"
	    done # $darwin_arches
	    ## Okay now we have a bunch of thin objects, gotta fatten them up :)
	    darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print| xargs basename | sort -u | $NL2SP`
	    darwin_file=
	    darwin_files=
	    for darwin_file in $darwin_filelist; do
	      darwin_files=`find unfat-$$ -name $darwin_file -print | $NL2SP`
	      lipo -create -output "$darwin_file" $darwin_files
	    done # $darwin_filelist
	    ${rm}r unfat-$$
	    cd "$darwin_orig_dir"
	  else
	    cd "$darwin_orig_dir"
	    func_extract_an_archive "$my_xdir" "$my_xabs"
	  fi # $darwin_arches
	fi # $run
	;;
      *)
	func_extract_an_archive "$my_xdir" "$my_xabs"
	;;
      esac
      # accumulate the freshly extracted objects
      my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | $NL2SP`
    done
    func_extract_archives_result="$my_oldobjs"
}
# End of Shell function definitions
#####################################

# Darwin sucks
#eval std_shrext=\"$shrext_cmds\"

# And fixing for Darwin sucks for everybody else
if test -z "$shrext_cmds" && test -n "$shrext"; then
  eval shrext_cmds=\"$shrext\"
fi
eval std_shrext=\"$shrext_cmds\"

# This value is evaluated to 32768, so place it here as a compatilibity hack
# because older libtool.m4 didn't define this variable
test -z "$max_cmd_len" && max_cmd_len=32768

# Parse our command line options once, thoroughly.
while test "$#" -gt 0
do
  arg="$1"
  shift

  # split --opt=value style arguments into option and $optarg
  case $arg in
  -*=*) optarg=`$echo "X$arg" | $Xsed -e 's/[-_a-zA-Z0-9]*=//'` ;;
  *) optarg= ;;
  esac

  # If the previous option needs an argument, assign it.
  if test -n "$prev"; then
    case $prev in
    execute_dlfiles)
      execute_dlfiles="$execute_dlfiles $arg"
      ;;
    tag)
      tagname="$arg"
      preserve_args="${preserve_args}=$arg"

      # Check whether tagname contains only valid characters
      case $tagname in
      *[!-_A-Za-z0-9,/]*)
	$echo "$progname: invalid tag name: $tagname" 1>&2
	exit $EXIT_FAILURE
	;;
      esac

      case $tagname in
      CC)
	# Don't test for the "default" C tag, as we know, it's there, but
	# not specially marked.
	;;
      *)
	if grep "^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$" < "$progpath" > /dev/null; then
	  taglist="$taglist $tagname"
	  # Evaluate the configuration.
	  eval "`${SED} -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$tagname'$/,/^# ### END LIBTOOL TAG CONFIG: '$tagname'$/p' < $progpath`"
	else
	  $echo "$progname: ignoring unknown tag $tagname" 1>&2
	fi
	;;
      esac
      ;;
    *)
      eval "$prev=\$arg"
      ;;
    esac

    prev=
    prevopt=
    continue
  fi

  # Have we seen a non-optional argument yet?
  case $arg in
  --help)
    show_help=yes
    ;;

  --version)
    $echo "$PROGRAM (GNU $PACKAGE) $VERSION$TIMESTAMP"
    $echo
    $echo "Copyright (C) 2005 Free Software Foundation, Inc."
    $echo "This is free software; see the source for copying conditions. There is NO"
    $echo "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
    exit $?
    ;;

  --config)
    # print the main LIBTOOL CONFIG section embedded in this script
    ${SED} -e '1,/^# ### BEGIN LIBTOOL CONFIG/d' -e '/^# ### END LIBTOOL CONFIG/,$d' $progpath
    # Now print the configurations for the tags.
    for tagname in $taglist; do
      ${SED} -n -e "/^# ### BEGIN LIBTOOL TAG CONFIG: $tagname$/,/^# ### END LIBTOOL TAG CONFIG: $tagname$/p" < "$progpath"
    done
    exit $?
    ;;

  --debug)
    $echo "$progname: enabling shell trace mode"
    set -x
    preserve_args="$preserve_args $arg"
    ;;

  --dry-run | -n)
    run=:
    ;;

  --features)
    $echo "host: $host"
    if test "$build_libtool_libs" = yes; then
      $echo "enable shared libraries"
    else
      $echo "disable shared libraries"
    fi
    if test "$build_old_libs" = yes; then
      $echo "enable static libraries"
    else
      $echo "disable static libraries"
    fi
    exit $?
    ;;

  --finish) mode="finish" ;;

  --mode) prevopt="--mode" prev=mode ;;
  --mode=*) mode="$optarg" ;;

  --preserve-dup-deps) duplicate_deps="yes" ;;

  --quiet | --silent)
    show=:
    preserve_args="$preserve_args $arg"
    ;;

  --tag) prevopt="--tag" prev=tag ;;
  --tag=*)
    set tag "$optarg" ${1+"$@"}
    shift
    prev=tag
    preserve_args="$preserve_args --tag"
    ;;

  -dlopen)
    prevopt="-dlopen"
    prev=execute_dlfiles
    ;;

  -*)
    $echo "$modename: unrecognized option \`$arg'" 1>&2
    $echo "$help" 1>&2
    exit $EXIT_FAILURE
    ;;

  *)
    # first non-option argument ends option parsing
    nonopt="$arg"
    break
    ;;
  esac
done

if test -n "$prevopt"; then
  $echo "$modename: option \`$prevopt' requires an argument" 1>&2
  $echo "$help" 1>&2
  exit $EXIT_FAILURE
fi

# If this variable is set in any of the actions, the command in it
# will be execed at the end. This prevents here-documents from being
# left over by shells.
exec_cmd=
if test -z "$show_help"; then
# Infer the operation mode.
if test -z "$mode"; then
$echo "*** Warning: inferring the mode of operation is deprecated." 1>&2
$echo "*** Future versions of Libtool will require --mode=MODE be specified." 1>&2
case $nonopt in
*cc | cc* | *++ | gcc* | *-gcc* | g++* | xlc*)
mode=link
for arg
do
case $arg in
-c)
mode=compile
break
;;
esac
done
;;
*db | *dbx | *strace | *truss)
mode=execute
;;
*install*|cp|mv)
mode=install
;;
*rm)
mode=uninstall
;;
*)
# If we have no mode, but dlfiles were specified, then do execute mode.
test -n "$execute_dlfiles" && mode=execute
# Just use the default operation mode.
if test -z "$mode"; then
if test -n "$nonopt"; then
$echo "$modename: warning: cannot infer operation mode from \`$nonopt'" 1>&2
else
$echo "$modename: warning: cannot infer operation mode without MODE-ARGS" 1>&2
fi
fi
;;
esac
fi
# Only execute mode is allowed to have -dlopen flags.
if test -n "$execute_dlfiles" && test "$mode" != execute; then
$echo "$modename: unrecognized option \`-dlopen'" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
# Change the help message to a mode-specific one.
generic_help="$help"
help="Try \`$modename --help --mode=$mode' for more information."
# These modes are in order of execution frequency so that they run quickly.
case $mode in
# libtool compile mode
compile)
modename="$modename: compile"
# Get the compilation command and the source file.
base_compile=
srcfile="$nonopt" # always keep a non-empty value in "srcfile"
suppress_opt=yes
suppress_output=
arg_mode=normal
libobj=
later=
for arg
do
case $arg_mode in
arg )
# do not "continue". Instead, add this to base_compile
lastarg="$arg"
arg_mode=normal
;;
target )
libobj="$arg"
arg_mode=normal
continue
;;
normal )
# Accept any command-line options.
case $arg in
-o)
if test -n "$libobj" ; then
$echo "$modename: you cannot specify \`-o' more than once" 1>&2
exit $EXIT_FAILURE
fi
arg_mode=target
continue
;;
-static | -prefer-pic | -prefer-non-pic)
later="$later $arg"
continue
;;
-no-suppress)
suppress_opt=no
continue
;;
-Xcompiler)
arg_mode=arg # the next one goes into the "base_compile" arg list
continue # The current "srcfile" will either be retained or
;; # replaced later. I would guess that would be a bug.
-Wc,*)
args=`$echo "X$arg" | $Xsed -e "s/^-Wc,//"`
lastarg=
save_ifs="$IFS"; IFS=','
for arg in $args; do
IFS="$save_ifs"
# Double-quote args containing other shell metacharacters.
# Many Bourne shells cannot handle close brackets correctly
# in scan sets, so we specify it separately.
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
lastarg="$lastarg $arg"
done
IFS="$save_ifs"
lastarg=`$echo "X$lastarg" | $Xsed -e "s/^ //"`
# Add the arguments to base_compile.
base_compile="$base_compile $lastarg"
continue
;;
* )
# Accept the current argument as the source file.
# The previous "srcfile" becomes the current argument.
#
lastarg="$srcfile"
srcfile="$arg"
;;
esac # case $arg
;;
esac # case $arg_mode
# Aesthetically quote the previous argument.
lastarg=`$echo "X$lastarg" | $Xsed -e "$sed_quote_subst"`
case $lastarg in
# Double-quote args containing other shell metacharacters.
# Many Bourne shells cannot handle close brackets correctly
# in scan sets, and some SunOS ksh mistreat backslash-escaping
# in scan sets (worked around with variable expansion),
# and furthermore cannot handle '|' '&' '(' ')' in scan sets
# at all, so we specify them separately.
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
lastarg="\"$lastarg\""
;;
esac
base_compile="$base_compile $lastarg"
done # for arg
case $arg_mode in
arg)
$echo "$modename: you must specify an argument for -Xcompile"
exit $EXIT_FAILURE
;;
target)
$echo "$modename: you must specify a target with \`-o'" 1>&2
exit $EXIT_FAILURE
;;
*)
# Get the name of the library object.
[ -z "$libobj" ] && libobj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%'`
;;
esac
# Recognize several different file suffixes.
# If the user specifies -o file.o, it is replaced with file.lo
xform='[cCFSifmso]'
case $libobj in
*.ada) xform=ada ;;
*.adb) xform=adb ;;
*.ads) xform=ads ;;
*.asm) xform=asm ;;
*.c++) xform=c++ ;;
*.cc) xform=cc ;;
*.ii) xform=ii ;;
*.class) xform=class ;;
*.cpp) xform=cpp ;;
*.cxx) xform=cxx ;;
*.f90) xform=f90 ;;
*.for) xform=for ;;
*.java) xform=java ;;
esac
libobj=`$echo "X$libobj" | $Xsed -e "s/\.$xform$/.lo/"`
case $libobj in
*.lo) obj=`$echo "X$libobj" | $Xsed -e "$lo2o"` ;;
*)
$echo "$modename: cannot determine name of library object from \`$libobj'" 1>&2
exit $EXIT_FAILURE
;;
esac
func_infer_tag $base_compile
for arg in $later; do
case $arg in
-static)
build_old_libs=yes
continue
;;
-prefer-pic)
pic_mode=yes
continue
;;
-prefer-non-pic)
pic_mode=no
continue
;;
esac
done
qlibobj=`$echo "X$libobj" | $Xsed -e "$sed_quote_subst"`
case $qlibobj in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
qlibobj="\"$qlibobj\"" ;;
esac
test "X$libobj" != "X$qlibobj" \
&& $echo "X$libobj" | grep '[]~#^*{};<>?"'"'"' &()|`$[]' \
&& $echo "$modename: libobj name \`$libobj' may not contain shell special characters."
objname=`$echo "X$obj" | $Xsed -e 's%^.*/%%'`
xdir=`$echo "X$obj" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$obj"; then
xdir=
else
xdir=$xdir/
fi
lobj=${xdir}$objdir/$objname
if test -z "$base_compile"; then
$echo "$modename: you must specify a compilation command" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
# Delete any leftover library objects.
if test "$build_old_libs" = yes; then
removelist="$obj $lobj $libobj ${libobj}T"
else
removelist="$lobj $libobj ${libobj}T"
fi
$run $rm $removelist
trap "$run $rm $removelist; exit $EXIT_FAILURE" 1 2 15
# On Cygwin there's no "real" PIC flag so we must build both object types
case $host_os in
cygwin* | mingw* | pw32* | os2*)
pic_mode=default
;;
esac
if test "$pic_mode" = no && test "$deplibs_check_method" != pass_all; then
# non-PIC code in shared libraries is not supported
pic_mode=default
fi
# Calculate the filename of the output object if compiler does
# not support -o with -c
if test "$compiler_c_o" = no; then
output_obj=`$echo "X$srcfile" | $Xsed -e 's%^.*/%%' -e 's%\.[^.]*$%%'`.${objext}
lockfile="$output_obj.lock"
removelist="$removelist $output_obj $lockfile"
trap "$run $rm $removelist; exit $EXIT_FAILURE" 1 2 15
else
output_obj=
need_locks=no
lockfile=
fi
# Lock this critical section if it is needed
# We use this script file to make the link, it avoids creating a new file
if test "$need_locks" = yes; then
until $run ln "$progpath" "$lockfile" 2>/dev/null; do
$show "Waiting for $lockfile to be removed"
sleep 2
done
elif test "$need_locks" = warn; then
if test -f "$lockfile"; then
$echo "\
*** ERROR, $lockfile exists and contains:
`cat $lockfile 2>/dev/null`
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support \`-c' and \`-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$run $rm $removelist
exit $EXIT_FAILURE
fi
$echo "$srcfile" > "$lockfile"
fi
if test -n "$fix_srcfile_path"; then
eval srcfile=\"$fix_srcfile_path\"
fi
qsrcfile=`$echo "X$srcfile" | $Xsed -e "$sed_quote_subst"`
case $qsrcfile in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
qsrcfile="\"$qsrcfile\"" ;;
esac
$run $rm "$libobj" "${libobj}T"
# Create a libtool object file (analogous to a ".la" file),
# but don't create it if we're doing a dry run.
test -z "$run" && cat > ${libobj}T <<EOF
# $libobj - a libtool object file
# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
#
# Please DO NOT delete this file!
# It is necessary for linking the library.
# Name of the PIC object.
EOF
# Only build a PIC object if we are building libtool libraries.
if test "$build_libtool_libs" = yes; then
# Without this assignment, base_compile gets emptied.
fbsd_hideous_sh_bug=$base_compile
if test "$pic_mode" != no; then
command="$base_compile $qsrcfile $pic_flag"
else
# Don't build PIC code
command="$base_compile $qsrcfile"
fi
if test ! -d "${xdir}$objdir"; then
$show "$mkdir ${xdir}$objdir"
$run $mkdir ${xdir}$objdir
status=$?
if test "$status" -ne 0 && test ! -d "${xdir}$objdir"; then
exit $status
fi
fi
if test -z "$output_obj"; then
# Place PIC objects in $objdir
command="$command -o $lobj"
fi
$run $rm "$lobj" "$output_obj"
$show "$command"
if $run eval "$command"; then :
else
test -n "$output_obj" && $run $rm $removelist
exit $EXIT_FAILURE
fi
if test "$need_locks" = warn &&
test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
$echo "\
*** ERROR, $lockfile contains:
`cat $lockfile 2>/dev/null`
but it should contain:
$srcfile
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support \`-c' and \`-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$run $rm $removelist
exit $EXIT_FAILURE
fi
# Just move the object if needed, then go on to compile the next one
if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then
$show "$mv $output_obj $lobj"
if $run $mv $output_obj $lobj; then :
else
error=$?
$run $rm $removelist
exit $error
fi
fi
# Append the name of the PIC object to the libtool object file.
test -z "$run" && cat >> ${libobj}T <<EOF
pic_object='$objdir/$objname'
EOF
# Allow error messages only from the first compilation.
if test "$suppress_opt" = yes; then
suppress_output=' >/dev/null 2>&1'
fi
else
# No PIC object so indicate it doesn't exist in the libtool
# object file.
test -z "$run" && cat >> ${libobj}T <<EOF
pic_object=none
EOF
fi
# Only build a position-dependent object if we build old libraries.
if test "$build_old_libs" = yes; then
if test "$pic_mode" != yes; then
# Don't build PIC code
command="$base_compile $qsrcfile"
else
command="$base_compile $qsrcfile $pic_flag"
fi
if test "$compiler_c_o" = yes; then
command="$command -o $obj"
fi
# Suppress compiler output if we already did a PIC compilation.
command="$command$suppress_output"
$run $rm "$obj" "$output_obj"
$show "$command"
if $run eval "$command"; then :
else
$run $rm $removelist
exit $EXIT_FAILURE
fi
if test "$need_locks" = warn &&
test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then
$echo "\
*** ERROR, $lockfile contains:
`cat $lockfile 2>/dev/null`
but it should contain:
$srcfile
This indicates that another process is trying to use the same
temporary object file, and libtool could not work around it because
your compiler does not support \`-c' and \`-o' together. If you
repeat this compilation, it may succeed, by chance, but you had better
avoid parallel builds (make -j) in this platform, or get a better
compiler."
$run $rm $removelist
exit $EXIT_FAILURE
fi
# Just move the object if needed
if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then
$show "$mv $output_obj $obj"
if $run $mv $output_obj $obj; then :
else
error=$?
$run $rm $removelist
exit $error
fi
fi
# Append the name of the non-PIC object to the libtool object file.
# Only append if the libtool object file exists.
test -z "$run" && cat >> ${libobj}T <<EOF
# Name of the non-PIC object.
non_pic_object='$objname'
EOF
else
# Append the name of the non-PIC object to the libtool object file.
# Only append if the libtool object file exists.
test -z "$run" && cat >> ${libobj}T <<EOF
# Name of the non-PIC object.
non_pic_object=none
EOF
fi
$run $mv "${libobj}T" "${libobj}"
# Unlock the critical section if it was locked
if test "$need_locks" != no; then
$run $rm "$lockfile"
fi
exit $EXIT_SUCCESS
;;
# libtool link mode
link | relink)
modename="$modename: link"
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*)
# It is impossible to link a dll without this setting, and
# we shouldn't force the makefile maintainer to figure out
# which system we are compiling for in order to pass an extra
# flag for every libtool invocation.
# allow_undefined=no
# FIXME: Unfortunately, there are problems with the above when trying
# to make a dll which has undefined symbols, in which case not
# even a static library is built. For now, we need to specify
# -no-undefined on the libtool link line when we can be certain
# that all symbols are satisfied, otherwise we get a static library.
allow_undefined=yes
;;
*)
allow_undefined=yes
;;
esac
libtool_args="$nonopt"
base_compile="$nonopt $@"
compile_command="$nonopt"
finalize_command="$nonopt"
compile_rpath=
finalize_rpath=
compile_shlibpath=
finalize_shlibpath=
convenience=
old_convenience=
deplibs=
old_deplibs=
compiler_flags=
linker_flags=
dllsearchpath=
lib_search_path=`pwd`
inst_prefix_dir=
avoid_version=no
dlfiles=
dlprefiles=
dlself=no
export_dynamic=no
export_symbols=
export_symbols_regex=
generated=
libobjs=
ltlibs=
module=no
no_install=no
objs=
non_pic_objects=
precious_files_regex=
prefer_static_libs=no
preload=no
prev=
prevarg=
release=
rpath=
xrpath=
perm_rpath=
temp_rpath=
thread_safe=no
vinfo=
vinfo_number=no
func_infer_tag $base_compile
# We need to know -static, to get the right output filenames.
for arg
do
case $arg in
-all-static | -static)
if test "X$arg" = "X-all-static"; then
if test "$build_libtool_libs" = yes && test -z "$link_static_flag"; then
$echo "$modename: warning: complete static linking is impossible in this configuration" 1>&2
fi
if test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
else
if test -z "$pic_flag" && test -n "$link_static_flag"; then
dlopen_self=$dlopen_self_static
fi
fi
build_libtool_libs=no
build_old_libs=yes
prefer_static_libs=yes
break
;;
esac
done
# See if our shared archives depend on static archives.
test -n "$old_archive_from_new_cmds" && build_old_libs=yes
# Go through the arguments, transforming them on the way.
while test "$#" -gt 0; do
arg="$1"
shift
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
qarg=\"`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`\" ### testsuite: skip nested quoting test
;;
*) qarg=$arg ;;
esac
libtool_args="$libtool_args $qarg"
# If the previous option needs an argument, assign it.
if test -n "$prev"; then
case $prev in
output)
compile_command="$compile_command @OUTPUT@"
finalize_command="$finalize_command @OUTPUT@"
;;
esac
case $prev in
dlfiles|dlprefiles)
if test "$preload" = no; then
# Add the symbol object into the linking commands.
compile_command="$compile_command @SYMFILE@"
finalize_command="$finalize_command @SYMFILE@"
preload=yes
fi
case $arg in
*.la | *.lo) ;; # We handle these cases below.
force)
if test "$dlself" = no; then
dlself=needless
export_dynamic=yes
fi
prev=
continue
;;
self)
if test "$prev" = dlprefiles; then
dlself=yes
elif test "$prev" = dlfiles && test "$dlopen_self" != yes; then
dlself=yes
else
dlself=needless
export_dynamic=yes
fi
prev=
continue
;;
*)
if test "$prev" = dlfiles; then
dlfiles="$dlfiles $arg"
else
dlprefiles="$dlprefiles $arg"
fi
prev=
continue
;;
esac
;;
expsyms)
export_symbols="$arg"
if test ! -f "$arg"; then
$echo "$modename: symbol file \`$arg' does not exist"
exit $EXIT_FAILURE
fi
prev=
continue
;;
expsyms_regex)
export_symbols_regex="$arg"
prev=
continue
;;
inst_prefix)
inst_prefix_dir="$arg"
prev=
continue
;;
precious_regex)
precious_files_regex="$arg"
prev=
continue
;;
release)
release="-$arg"
prev=
continue
;;
objectlist)
if test -f "$arg"; then
save_arg=$arg
moreargs=
for fil in `cat $save_arg`
do
# moreargs="$moreargs $fil"
arg=$fil
# A libtool-controlled object.
# Check to see that this really is a libtool object.
if (${SED} -e '2q' $arg | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
pic_object=
non_pic_object=
# Read the .lo file
# If there is no directory component, then add one.
case $arg in
*/* | *\\*) . $arg ;;
*) . ./$arg ;;
esac
if test -z "$pic_object" || \
test -z "$non_pic_object" ||
test "$pic_object" = none && \
test "$non_pic_object" = none; then
$echo "$modename: cannot find name of object for \`$arg'" 1>&2
exit $EXIT_FAILURE
fi
# Extract subdirectory from the argument.
xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$arg"; then
xdir=
else
xdir="$xdir/"
fi
if test "$pic_object" != none; then
# Prepend the subdirectory the object is found in.
pic_object="$xdir$pic_object"
if test "$prev" = dlfiles; then
if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
dlfiles="$dlfiles $pic_object"
prev=
continue
else
# If libtool objects are unsupported, then we need to preload.
prev=dlprefiles
fi
fi
# CHECK ME: I think I busted this. -Ossama
if test "$prev" = dlprefiles; then
# Preload the old-style object.
dlprefiles="$dlprefiles $pic_object"
prev=
fi
# A PIC object.
libobjs="$libobjs $pic_object"
arg="$pic_object"
fi
# Non-PIC object.
if test "$non_pic_object" != none; then
# Prepend the subdirectory the object is found in.
non_pic_object="$xdir$non_pic_object"
# A standard non-PIC object
non_pic_objects="$non_pic_objects $non_pic_object"
if test -z "$pic_object" || test "$pic_object" = none ; then
arg="$non_pic_object"
fi
fi
else
# Only an error if not doing a dry-run.
if test -z "$run"; then
$echo "$modename: \`$arg' is not a valid libtool object" 1>&2
exit $EXIT_FAILURE
else
# Dry-run case.
# Extract subdirectory from the argument.
xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$arg"; then
xdir=
else
xdir="$xdir/"
fi
pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"`
non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"`
libobjs="$libobjs $pic_object"
non_pic_objects="$non_pic_objects $non_pic_object"
fi
fi
done
else
$echo "$modename: link input file \`$save_arg' does not exist"
exit $EXIT_FAILURE
fi
arg=$save_arg
prev=
continue
;;
rpath | xrpath)
# We need an absolute path.
case $arg in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
$echo "$modename: only absolute run-paths are allowed" 1>&2
exit $EXIT_FAILURE
;;
esac
if test "$prev" = rpath; then
case "$rpath " in
*" $arg "*) ;;
*) rpath="$rpath $arg" ;;
esac
else
case "$xrpath " in
*" $arg "*) ;;
*) xrpath="$xrpath $arg" ;;
esac
fi
prev=
continue
;;
xcompiler)
compiler_flags="$compiler_flags $qarg"
prev=
compile_command="$compile_command $qarg"
finalize_command="$finalize_command $qarg"
continue
;;
xlinker)
linker_flags="$linker_flags $qarg"
compiler_flags="$compiler_flags $wl$qarg"
prev=
compile_command="$compile_command $wl$qarg"
finalize_command="$finalize_command $wl$qarg"
continue
;;
xcclinker)
linker_flags="$linker_flags $qarg"
compiler_flags="$compiler_flags $qarg"
prev=
compile_command="$compile_command $qarg"
finalize_command="$finalize_command $qarg"
continue
;;
shrext)
shrext_cmds="$arg"
prev=
continue
;;
darwin_framework)
compiler_flags="$compiler_flags $arg"
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
prev=
continue
;;
*)
eval "$prev=\"\$arg\""
prev=
continue
;;
esac
fi # test -n "$prev"
prevarg="$arg"
case $arg in
-all-static)
if test -n "$link_static_flag"; then
compile_command="$compile_command $link_static_flag"
finalize_command="$finalize_command $link_static_flag"
fi
continue
;;
-allow-undefined)
# FIXME: remove this flag sometime in the future.
$echo "$modename: \`-allow-undefined' is deprecated because it is the default" 1>&2
continue
;;
-avoid-version)
avoid_version=yes
continue
;;
-dlopen)
prev=dlfiles
continue
;;
-dlpreopen)
prev=dlprefiles
continue
;;
-export-dynamic)
export_dynamic=yes
continue
;;
-export-symbols | -export-symbols-regex)
if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
$echo "$modename: more than one -exported-symbols argument is not allowed"
exit $EXIT_FAILURE
fi
if test "X$arg" = "X-export-symbols"; then
prev=expsyms
else
prev=expsyms_regex
fi
continue
;;
-framework|-arch)
prev=darwin_framework
compiler_flags="$compiler_flags $arg"
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
continue
;;
-inst-prefix-dir)
prev=inst_prefix
continue
;;
# The native IRIX linker understands -LANG:*, -LIST:* and -LNO:*
# so, if we see these flags be careful not to treat them like -L
-L[A-Z][A-Z]*:*)
case $with_gcc/$host in
no/*-*-irix* | /*-*-irix*)
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
;;
esac
continue
;;
-L*)
dir=`$echo "X$arg" | $Xsed -e 's/^-L//'`
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
absdir=`cd "$dir" && pwd`
if test -z "$absdir"; then
$echo "$modename: cannot determine absolute directory name of \`$dir'" 1>&2
exit $EXIT_FAILURE
fi
dir="$absdir"
;;
esac
case "$deplibs " in
*" -L$dir "*) ;;
*)
deplibs="$deplibs -L$dir"
lib_search_path="$lib_search_path $dir"
;;
esac
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*)
case :$dllsearchpath: in
*":$dir:"*) ;;
*) dllsearchpath="$dllsearchpath:$dir";;
esac
;;
esac
continue
;;
-l*)
if test "X$arg" = "X-lc" || test "X$arg" = "X-lm"; then
case $host in
*-*-cygwin* | *-*-pw32* | *-*-beos*)
# These systems don't actually have a C or math library (as such)
continue
;;
*-*-mingw* | *-*-os2*)
# These systems don't actually have a C library (as such)
test "X$arg" = "X-lc" && continue
;;
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
# Do not include libc due to us having libc/libc_r.
test "X$arg" = "X-lc" && continue
;;
*-*-rhapsody* | *-*-darwin1.[012])
# Rhapsody C and math libraries are in the System framework
deplibs="$deplibs -framework System"
continue
esac
elif test "X$arg" = "X-lc_r"; then
case $host in
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
# Do not include libc_r directly, use -pthread flag.
continue
;;
esac
fi
deplibs="$deplibs $arg"
continue
;;
# Tru64 UNIX uses -model [arg] to determine the layout of C++
# classes, name mangling, and exception handling.
-model)
compile_command="$compile_command $arg"
compiler_flags="$compiler_flags $arg"
finalize_command="$finalize_command $arg"
prev=xcompiler
continue
;;
-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe)
compiler_flags="$compiler_flags $arg"
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
continue
;;
-module)
module=yes
continue
;;
# -64, -mips[0-9] enable 64-bit mode on the SGI compiler
# -r[0-9][0-9]* specifies the processor on the SGI compiler
# -xarch=*, -xtarget=* enable 64-bit mode on the Sun compiler
# +DA*, +DD* enable 64-bit mode on the HP compiler
# -q* pass through compiler args for the IBM compiler
# -m* pass through architecture-specific compiler args for GCC
-64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*)
# Unknown arguments in both finalize_command and compile_command need
# to be aesthetically quoted because they are evaled later.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
if test "$with_gcc" = "yes" ; then
compiler_flags="$compiler_flags $arg"
fi
continue
;;
-shrext)
prev=shrext
continue
;;
-no-fast-install)
fast_install=no
continue
;;
-no-install)
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*)
# The PATH hackery in wrapper scripts is required on Windows
# in order for the loader to find any dlls it needs.
$echo "$modename: warning: \`-no-install' is ignored for $host" 1>&2
$echo "$modename: warning: assuming \`-no-fast-install' instead" 1>&2
fast_install=no
;;
*) no_install=yes ;;
esac
continue
;;
-no-undefined)
allow_undefined=no
continue
;;
-objectlist)
prev=objectlist
continue
;;
-o) prev=output ;;
-precious-files-regex)
prev=precious_regex
continue
;;
-release)
prev=release
continue
;;
-rpath)
prev=rpath
continue
;;
-R)
prev=xrpath
continue
;;
-R*)
dir=`$echo "X$arg" | $Xsed -e 's/^-R//'`
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
$echo "$modename: only absolute run-paths are allowed" 1>&2
exit $EXIT_FAILURE
;;
esac
case "$xrpath " in
*" $dir "*) ;;
*) xrpath="$xrpath $dir" ;;
esac
continue
;;
-static)
# The effects of -static are defined in a previous loop.
# We used to do the same as -all-static on platforms that
# didn't have a PIC flag, but the assumption that the effects
# would be equivalent was wrong. It would break on at least
# Digital Unix and AIX.
continue
;;
-thread-safe)
thread_safe=yes
continue
;;
-version-info)
prev=vinfo
continue
;;
-version-number)
prev=vinfo
vinfo_number=yes
continue
;;
-Wc,*)
args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wc,//'`
arg=
save_ifs="$IFS"; IFS=','
for flag in $args; do
IFS="$save_ifs"
case $flag in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
flag="\"$flag\""
;;
esac
arg="$arg $wl$flag"
compiler_flags="$compiler_flags $flag"
done
IFS="$save_ifs"
arg=`$echo "X$arg" | $Xsed -e "s/^ //"`
;;
-Wl,*)
args=`$echo "X$arg" | $Xsed -e "$sed_quote_subst" -e 's/^-Wl,//'`
arg=
save_ifs="$IFS"; IFS=','
for flag in $args; do
IFS="$save_ifs"
case $flag in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
flag="\"$flag\""
;;
esac
arg="$arg $wl$flag"
compiler_flags="$compiler_flags $wl$flag"
linker_flags="$linker_flags $flag"
done
IFS="$save_ifs"
arg=`$echo "X$arg" | $Xsed -e "s/^ //"`
;;
-Xcompiler)
prev=xcompiler
continue
;;
-Xlinker)
prev=xlinker
continue
;;
-XCClinker)
prev=xcclinker
continue
;;
# Some other compiler flag.
-* | +*)
# Unknown arguments in both finalize_command and compile_command need
# to be aesthetically quoted because they are evaled later.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
;;
*.$objext)
# A standard object.
objs="$objs $arg"
;;
*.lo)
# A libtool-controlled object.
# Check to see that this really is a libtool object.
if (${SED} -e '2q' $arg | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
pic_object=
non_pic_object=
# Read the .lo file
# If there is no directory component, then add one.
case $arg in
*/* | *\\*) . $arg ;;
*) . ./$arg ;;
esac
if test -z "$pic_object" || \
test -z "$non_pic_object" ||
test "$pic_object" = none && \
test "$non_pic_object" = none; then
$echo "$modename: cannot find name of object for \`$arg'" 1>&2
exit $EXIT_FAILURE
fi
# Extract subdirectory from the argument.
xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$arg"; then
xdir=
else
xdir="$xdir/"
fi
if test "$pic_object" != none; then
# Prepend the subdirectory the object is found in.
pic_object="$xdir$pic_object"
if test "$prev" = dlfiles; then
if test "$build_libtool_libs" = yes && test "$dlopen_support" = yes; then
dlfiles="$dlfiles $pic_object"
prev=
continue
else
# If libtool objects are unsupported, then we need to preload.
prev=dlprefiles
fi
fi
# CHECK ME: I think I busted this. -Ossama
if test "$prev" = dlprefiles; then
# Preload the old-style object.
dlprefiles="$dlprefiles $pic_object"
prev=
fi
# A PIC object.
libobjs="$libobjs $pic_object"
arg="$pic_object"
fi
# Non-PIC object.
if test "$non_pic_object" != none; then
# Prepend the subdirectory the object is found in.
non_pic_object="$xdir$non_pic_object"
# A standard non-PIC object
non_pic_objects="$non_pic_objects $non_pic_object"
if test -z "$pic_object" || test "$pic_object" = none ; then
arg="$non_pic_object"
fi
fi
else
# Only an error if not doing a dry-run.
if test -z "$run"; then
$echo "$modename: \`$arg' is not a valid libtool object" 1>&2
exit $EXIT_FAILURE
else
# Dry-run case.
# Extract subdirectory from the argument.
xdir=`$echo "X$arg" | $Xsed -e 's%/[^/]*$%%'`
if test "X$xdir" = "X$arg"; then
xdir=
else
xdir="$xdir/"
fi
pic_object=`$echo "X${xdir}${objdir}/${arg}" | $Xsed -e "$lo2o"`
non_pic_object=`$echo "X${xdir}${arg}" | $Xsed -e "$lo2o"`
libobjs="$libobjs $pic_object"
non_pic_objects="$non_pic_objects $non_pic_object"
fi
fi
;;
*.$libext)
# An archive.
deplibs="$deplibs $arg"
old_deplibs="$old_deplibs $arg"
continue
;;
*.la)
# A libtool-controlled library.
if test "$prev" = dlfiles; then
# This library was specified with -dlopen.
dlfiles="$dlfiles $arg"
prev=
elif test "$prev" = dlprefiles; then
# The library was specified with -dlpreopen.
dlprefiles="$dlprefiles $arg"
prev=
else
deplibs="$deplibs $arg"
fi
continue
;;
# Some other compiler argument.
*)
# Unknown arguments in both finalize_command and compile_command need
# to be aesthetically quoted because they are evaled later.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
;;
esac # arg
# Now actually substitute the argument into the commands.
if test -n "$arg"; then
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
fi
done # argument parsing loop
if test -n "$prev"; then
$echo "$modename: the \`$prevarg' option requires an argument" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
if test "$export_dynamic" = yes && test -n "$export_dynamic_flag_spec"; then
eval arg=\"$export_dynamic_flag_spec\"
compile_command="$compile_command $arg"
finalize_command="$finalize_command $arg"
fi
oldlibs=
# calculate the name of the file, without its directory
outputname=`$echo "X$output" | $Xsed -e 's%^.*/%%'`
libobjs_save="$libobjs"
if test -n "$shlibpath_var"; then
# get the directories listed in $shlibpath_var
eval shlib_search_path=\`\$echo \"X\${$shlibpath_var}\" \| \$Xsed -e \'s/:/ /g\'\`
else
shlib_search_path=
fi
eval sys_lib_search_path=\"$sys_lib_search_path_spec\"
eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\"
output_objdir=`$echo "X$output" | $Xsed -e 's%/[^/]*$%%'`
if test "X$output_objdir" = "X$output"; then
output_objdir="$objdir"
else
output_objdir="$output_objdir/$objdir"
fi
# Create the object directory.
if test ! -d "$output_objdir"; then
$show "$mkdir $output_objdir"
$run $mkdir $output_objdir
status=$?
if test "$status" -ne 0 && test ! -d "$output_objdir"; then
exit $status
fi
fi
# Determine the type of output
case $output in
"")
$echo "$modename: you must specify an output file" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
;;
*.$libext) linkmode=oldlib ;;
*.lo | *.$objext) linkmode=obj ;;
*.la) linkmode=lib ;;
*) linkmode=prog ;; # Anything else should be a program.
esac
case $host in
*cygwin* | *mingw* | *pw32*)
# don't eliminate duplications in $postdeps and $predeps
duplicate_compiler_generated_deps=yes
;;
*)
duplicate_compiler_generated_deps=$duplicate_deps
;;
esac
specialdeplibs=
libs=
# Find all interdependent deplibs by searching for libraries
# that are linked more than once (e.g. -la -lb -la)
for deplib in $deplibs; do
if test "X$duplicate_deps" = "Xyes" ; then
case "$libs " in
*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
esac
fi
libs="$libs $deplib"
done
if test "$linkmode" = lib; then
libs="$predeps $libs $compiler_lib_search_path $postdeps"
# Compute libraries that are listed more than once in $predeps
# $postdeps and mark them as special (i.e., whose duplicates are
# not to be eliminated).
pre_post_deps=
if test "X$duplicate_compiler_generated_deps" = "Xyes" ; then
for pre_post_dep in $predeps $postdeps; do
case "$pre_post_deps " in
*" $pre_post_dep "*) specialdeplibs="$specialdeplibs $pre_post_deps" ;;
esac
pre_post_deps="$pre_post_deps $pre_post_dep"
done
fi
pre_post_deps=
fi
deplibs=
newdependency_libs=
newlib_search_path=
need_relink=no # whether we're linking any uninstalled libtool libraries
notinst_deplibs= # not-installed libtool libraries
notinst_path= # paths that contain not-installed libtool libraries
case $linkmode in
lib)
passes="conv link"
for file in $dlfiles $dlprefiles; do
case $file in
*.la) ;;
*)
$echo "$modename: libraries can \`-dlopen' only libtool libraries: $file" 1>&2
exit $EXIT_FAILURE
;;
esac
done
;;
prog)
compile_deplibs=
finalize_deplibs=
alldeplibs=no
newdlfiles=
newdlprefiles=
passes="conv scan dlopen dlpreopen link"
;;
*) passes="conv"
;;
esac
for pass in $passes; do
if test "$linkmode,$pass" = "lib,link" ||
test "$linkmode,$pass" = "prog,scan"; then
libs="$deplibs"
deplibs=
fi
if test "$linkmode" = prog; then
case $pass in
dlopen) libs="$dlfiles" ;;
dlpreopen) libs="$dlprefiles" ;;
link) libs="$deplibs %DEPLIBS% $dependency_libs" ;;
esac
fi
if test "$pass" = dlopen; then
# Collect dlpreopened libraries
save_deplibs="$deplibs"
deplibs=
fi
for deplib in $libs; do
lib=
found=no
case $deplib in
-mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe)
if test "$linkmode,$pass" = "prog,link"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
compiler_flags="$compiler_flags $deplib"
fi
continue
;;
-l*)
if test "$linkmode" != lib && test "$linkmode" != prog; then
$echo "$modename: warning: \`-l' is ignored for archives/objects" 1>&2
continue
fi
name=`$echo "X$deplib" | $Xsed -e 's/^-l//'`
for searchdir in $newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path; do
for search_ext in .la $std_shrext .so .a; do
# Search the libtool library
lib="$searchdir/lib${name}${search_ext}"
if test -f "$lib"; then
if test "$search_ext" = ".la"; then
found=yes
else
found=no
fi
break 2
fi
done
done
if test "$found" != yes; then
# deplib doesn't seem to be a libtool library
if test "$linkmode,$pass" = "prog,link"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
fi
continue
else # deplib is a libtool library
# If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib,
# We need to do some special things here, and not later.
if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
case " $predeps $postdeps " in
*" $deplib "*)
if (${SED} -e '2q' $lib |
grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
library_names=
old_library=
case $lib in
*/* | *\\*) . $lib ;;
*) . ./$lib ;;
esac
for l in $old_library $library_names; do
ll="$l"
done
if test "X$ll" = "X$old_library" ; then # only static version available
found=no
ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'`
test "X$ladir" = "X$lib" && ladir="."
lib=$ladir/$old_library
if test "$linkmode,$pass" = "prog,link"; then
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
deplibs="$deplib $deplibs"
test "$linkmode" = lib && newdependency_libs="$deplib $newdependency_libs"
fi
continue
fi
fi
;;
*) ;;
esac
fi
fi
;; # -l
-L*)
case $linkmode in
lib)
deplibs="$deplib $deplibs"
test "$pass" = conv && continue
newdependency_libs="$deplib $newdependency_libs"
newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`
;;
prog)
if test "$pass" = conv; then
deplibs="$deplib $deplibs"
continue
fi
if test "$pass" = scan; then
deplibs="$deplib $deplibs"
else
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
fi
newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`
;;
*)
$echo "$modename: warning: \`-L' is ignored for archives/objects" 1>&2
;;
esac # linkmode
continue
;; # -L
-R*)
if test "$pass" = link; then
dir=`$echo "X$deplib" | $Xsed -e 's/^-R//'`
# Make sure the xrpath contains only unique directories.
case "$xrpath " in
*" $dir "*) ;;
*) xrpath="$xrpath $dir" ;;
esac
fi
deplibs="$deplib $deplibs"
continue
;;
*.la) lib="$deplib" ;;
*.$libext)
if test "$pass" = conv; then
deplibs="$deplib $deplibs"
continue
fi
case $linkmode in
lib)
valid_a_lib=no
case $deplibs_check_method in
match_pattern*)
set dummy $deplibs_check_method
match_pattern_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"`
if eval $echo \"$deplib\" 2>/dev/null \
| $SED 10q \
| $EGREP "$match_pattern_regex" > /dev/null; then
valid_a_lib=yes
fi
;;
pass_all)
valid_a_lib=yes
;;
esac
if test "$valid_a_lib" != yes; then
$echo
$echo "*** Warning: Trying to link with static lib archive $deplib."
$echo "*** I have the capability to make that library automatically link in when"
$echo "*** you link to this library. But I can only do this if you have a"
$echo "*** shared version of the library, which you do not appear to have"
$echo "*** because the file extensions .$libext of this argument makes me believe"
$echo "*** that it is just a static archive that I should not used here."
else
$echo
$echo "*** Warning: Linking the shared library $output against the"
$echo "*** static library $deplib is not portable!"
deplibs="$deplib $deplibs"
fi
continue
;;
prog)
if test "$pass" != link; then
deplibs="$deplib $deplibs"
else
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
fi
continue
;;
esac # linkmode
;; # *.$libext
*.lo | *.$objext)
if test "$pass" = conv; then
deplibs="$deplib $deplibs"
elif test "$linkmode" = prog; then
if test "$pass" = dlpreopen || test "$dlopen_support" != yes || test "$build_libtool_libs" = no; then
# If there is no dlopen support or we're linking statically,
# we need to preload.
newdlprefiles="$newdlprefiles $deplib"
compile_deplibs="$deplib $compile_deplibs"
finalize_deplibs="$deplib $finalize_deplibs"
else
newdlfiles="$newdlfiles $deplib"
fi
fi
continue
;;
%DEPLIBS%)
alldeplibs=yes
continue
;;
esac # case $deplib
if test "$found" = yes || test -f "$lib"; then :
else
$echo "$modename: cannot find the library \`$lib'" 1>&2
exit $EXIT_FAILURE
fi
# Check to see that this really is a libtool archive.
if (${SED} -e '2q' $lib | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then :
else
$echo "$modename: \`$lib' is not a valid libtool archive" 1>&2
exit $EXIT_FAILURE
fi
ladir=`$echo "X$lib" | $Xsed -e 's%/[^/]*$%%'`
test "X$ladir" = "X$lib" && ladir="."
dlname=
dlopen=
dlpreopen=
libdir=
library_names=
old_library=
# If the library was installed with an old release of libtool,
# it will not redefine variables installed, or shouldnotlink
installed=yes
shouldnotlink=no
avoidtemprpath=
# Read the .la file
case $lib in
*/* | *\\*) . $lib ;;
*) . ./$lib ;;
esac
if test "$linkmode,$pass" = "lib,link" ||
test "$linkmode,$pass" = "prog,scan" ||
{ test "$linkmode" != prog && test "$linkmode" != lib; }; then
test -n "$dlopen" && dlfiles="$dlfiles $dlopen"
test -n "$dlpreopen" && dlprefiles="$dlprefiles $dlpreopen"
fi
if test "$pass" = conv; then
# Only check for convenience libraries
deplibs="$lib $deplibs"
if test -z "$libdir"; then
if test -z "$old_library"; then
$echo "$modename: cannot find name of link library for \`$lib'" 1>&2
exit $EXIT_FAILURE
fi
# It is a libtool convenience library, so add in its objects.
convenience="$convenience $ladir/$objdir/$old_library"
old_convenience="$old_convenience $ladir/$objdir/$old_library"
tmp_libs=
for deplib in $dependency_libs; do
deplibs="$deplib $deplibs"
if test "X$duplicate_deps" = "Xyes" ; then
case "$tmp_libs " in
*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
esac
fi
tmp_libs="$tmp_libs $deplib"
done
elif test "$linkmode" != prog && test "$linkmode" != lib; then
$echo "$modename: \`$lib' is not a convenience library" 1>&2
exit $EXIT_FAILURE
fi
continue
fi # $pass = conv
# Get the name of the library we link against.
linklib=
for l in $old_library $library_names; do
linklib="$l"
done
if test -z "$linklib"; then
$echo "$modename: cannot find name of link library for \`$lib'" 1>&2
exit $EXIT_FAILURE
fi
# This library was specified with -dlopen.
if test "$pass" = dlopen; then
if test -z "$libdir"; then
$echo "$modename: cannot -dlopen a convenience library: \`$lib'" 1>&2
exit $EXIT_FAILURE
fi
if test -z "$dlname" ||
test "$dlopen_support" != yes ||
test "$build_libtool_libs" = no; then
# If there is no dlname, no dlopen support or we're linking
# statically, we need to preload. We also need to preload any
# dependent libraries so libltdl's deplib preloader doesn't
# bomb out in the load deplibs phase.
dlprefiles="$dlprefiles $lib $dependency_libs"
else
newdlfiles="$newdlfiles $lib"
fi
continue
fi # $pass = dlopen
# We need an absolute path.
case $ladir in
[\\/]* | [A-Za-z]:[\\/]*) abs_ladir="$ladir" ;;
*)
abs_ladir=`cd "$ladir" && pwd`
if test -z "$abs_ladir"; then
$echo "$modename: warning: cannot determine absolute directory name of \`$ladir'" 1>&2
$echo "$modename: passing it literally to the linker, although it might fail" 1>&2
abs_ladir="$ladir"
fi
;;
esac
laname=`$echo "X$lib" | $Xsed -e 's%^.*/%%'`
# Find the relevant object directory and library name.
if test "X$installed" = Xyes; then
if test ! -f "$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then
$echo "$modename: warning: library \`$lib' was moved." 1>&2
dir="$ladir"
absdir="$abs_ladir"
libdir="$abs_ladir"
else
dir="$libdir"
absdir="$libdir"
fi
test "X$hardcode_automatic" = Xyes && avoidtemprpath=yes
else
if test ! -f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then
dir="$ladir"
absdir="$abs_ladir"
# Remove this search path later
notinst_path="$notinst_path $abs_ladir"
else
dir="$ladir/$objdir"
absdir="$abs_ladir/$objdir"
# Remove this search path later
notinst_path="$notinst_path $abs_ladir"
fi
fi # $installed = yes
name=`$echo "X$laname" | $Xsed -e 's/\.la$//' -e 's/^lib//'`
# This library was specified with -dlpreopen.
if test "$pass" = dlpreopen; then
if test -z "$libdir"; then
$echo "$modename: cannot -dlpreopen a convenience library: \`$lib'" 1>&2
exit $EXIT_FAILURE
fi
# Prefer using a static library (so that no silly _DYNAMIC symbols
# are required to link).
if test -n "$old_library"; then
newdlprefiles="$newdlprefiles $dir/$old_library"
# Otherwise, use the dlname, so that lt_dlopen finds it.
elif test -n "$dlname"; then
newdlprefiles="$newdlprefiles $dir/$dlname"
else
newdlprefiles="$newdlprefiles $dir/$linklib"
fi
fi # $pass = dlpreopen
if test -z "$libdir"; then
# Link the convenience library
if test "$linkmode" = lib; then
deplibs="$dir/$old_library $deplibs"
elif test "$linkmode,$pass" = "prog,link"; then
compile_deplibs="$dir/$old_library $compile_deplibs"
finalize_deplibs="$dir/$old_library $finalize_deplibs"
else
deplibs="$lib $deplibs" # used for prog,scan pass
fi
continue
fi
if test "$linkmode" = prog && test "$pass" != link; then
newlib_search_path="$newlib_search_path $ladir"
deplibs="$lib $deplibs"
linkalldeplibs=no
if test "$link_all_deplibs" != no || test -z "$library_names" ||
test "$build_libtool_libs" = no; then
linkalldeplibs=yes
fi
tmp_libs=
for deplib in $dependency_libs; do
case $deplib in
-L*) newlib_search_path="$newlib_search_path "`$echo "X$deplib" | $Xsed -e 's/^-L//'`;; ### testsuite: skip nested quoting test
esac
# Need to link against all dependency_libs?
if test "$linkalldeplibs" = yes; then
deplibs="$deplib $deplibs"
else
# Need to hardcode shared library paths
# or/and link against static libraries
newdependency_libs="$deplib $newdependency_libs"
fi
if test "X$duplicate_deps" = "Xyes" ; then
case "$tmp_libs " in
*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
esac
fi
tmp_libs="$tmp_libs $deplib"
done # for deplib
continue
fi # $linkmode = prog...
if test "$linkmode,$pass" = "prog,link"; then
if test -n "$library_names" &&
{ test "$prefer_static_libs" = no || test -z "$old_library"; }; then
# We need to hardcode the library path
if test -n "$shlibpath_var" && test -z "$avoidtemprpath" ; then
# Make sure the rpath contains only unique directories.
case "$temp_rpath " in
*" $dir "*) ;;
*" $absdir "*) ;;
*) temp_rpath="$temp_rpath $absdir" ;;
esac
fi
# Hardcode the library path.
# Skip directories that are in the system default run-time
# search path.
case " $sys_lib_dlsearch_path " in
*" $absdir "*) ;;
*)
case "$compile_rpath " in
*" $absdir "*) ;;
*) compile_rpath="$compile_rpath $absdir"
esac
;;
esac
case " $sys_lib_dlsearch_path " in
*" $libdir "*) ;;
*)
case "$finalize_rpath " in
*" $libdir "*) ;;
*) finalize_rpath="$finalize_rpath $libdir"
esac
;;
esac
fi # $linkmode,$pass = prog,link...
if test "$alldeplibs" = yes &&
{ test "$deplibs_check_method" = pass_all ||
{ test "$build_libtool_libs" = yes &&
test -n "$library_names"; }; }; then
# We only need to search for static libraries
continue
fi
fi
link_static=no # Whether the deplib will be linked statically
if test -n "$library_names" &&
{ test "$prefer_static_libs" = no || test -z "$old_library"; }; then
if test "$installed" = no; then
notinst_deplibs="$notinst_deplibs $lib"
need_relink=yes
fi
# This is a shared library
# Warn about portability, can't link against -module's on
# some systems (darwin)
if test "$shouldnotlink" = yes && test "$pass" = link ; then
$echo
if test "$linkmode" = prog; then
$echo "*** Warning: Linking the executable $output against the loadable module"
else
$echo "*** Warning: Linking the shared library $output against the loadable module"
fi
$echo "*** $linklib is not portable!"
fi
if test "$linkmode" = lib &&
test "$hardcode_into_libs" = yes; then
# Hardcode the library path.
# Skip directories that are in the system default run-time
# search path.
case " $sys_lib_dlsearch_path " in
*" $absdir "*) ;;
*)
case "$compile_rpath " in
*" $absdir "*) ;;
*) compile_rpath="$compile_rpath $absdir"
esac
;;
esac
case " $sys_lib_dlsearch_path " in
*" $libdir "*) ;;
*)
case "$finalize_rpath " in
*" $libdir "*) ;;
*) finalize_rpath="$finalize_rpath $libdir"
esac
;;
esac
fi
if test -n "$old_archive_from_expsyms_cmds"; then
# figure out the soname
set dummy $library_names
realname="$2"
shift; shift
libname=`eval \\$echo \"$libname_spec\"`
# use dlname if we got it. it's perfectly good, no?
if test -n "$dlname"; then
soname="$dlname"
elif test -n "$soname_spec"; then
# bleh windows
case $host in
*cygwin* | mingw*)
major=`expr $current - $age`
versuffix="-$major"
;;
esac
eval soname=\"$soname_spec\"
else
soname="$realname"
fi
# Make a new name for the extract_expsyms_cmds to use
soroot="$soname"
soname=`$echo $soroot | ${SED} -e 's/^.*\///'`
newlib="libimp-`$echo $soname | ${SED} 's/^lib//;s/\.dll$//'`.a"
# If the library has no export list, then create one now
if test -f "$output_objdir/$soname-def"; then :
else
$show "extracting exported symbol list from \`$soname'"
save_ifs="$IFS"; IFS='~'
cmds=$extract_expsyms_cmds
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
fi
# Create $newlib
if test -f "$output_objdir/$newlib"; then :; else
$show "generating import library for \`$soname'"
save_ifs="$IFS"; IFS='~'
cmds=$old_archive_from_expsyms_cmds
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
fi
# make sure the library variables are pointing to the new library
dir=$output_objdir
linklib=$newlib
fi # test -n "$old_archive_from_expsyms_cmds"
if test "$linkmode" = prog || test "$mode" != relink; then
add_shlibpath=
add_dir=
add=
lib_linked=yes
case $hardcode_action in
immediate | unsupported)
if test "$hardcode_direct" = no; then
add="$dir/$linklib"
case $host in
*-*-sco3.2v5* ) add_dir="-L$dir" ;;
*-*-darwin* )
# if the lib is a module then we can not link against
# it, someone is ignoring the new warnings I added
if /usr/bin/file -L $add 2> /dev/null | $EGREP "bundle" >/dev/null ; then
$echo "** Warning, lib $linklib is a module, not a shared library"
if test -z "$old_library" ; then
$echo
$echo "** And there doesn't seem to be a static archive available"
$echo "** The link will probably fail, sorry"
else
add="$dir/$old_library"
fi
fi
esac
elif test "$hardcode_minus_L" = no; then
case $host in
*-*-sunos*) add_shlibpath="$dir" ;;
esac
add_dir="-L$dir"
add="-l$name"
elif test "$hardcode_shlibpath_var" = no; then
add_shlibpath="$dir"
add="-l$name"
else
lib_linked=no
fi
;;
relink)
if test "$hardcode_direct" = yes; then
add="$dir/$linklib"
elif test "$hardcode_minus_L" = yes; then
add_dir="-L$dir"
# Try looking first in the location we're being installed to.
if test -n "$inst_prefix_dir"; then
case $libdir in
[\\/]*)
add_dir="$add_dir -L$inst_prefix_dir$libdir"
;;
esac
fi
add="-l$name"
elif test "$hardcode_shlibpath_var" = yes; then
add_shlibpath="$dir"
add="-l$name"
else
lib_linked=no
fi
;;
*) lib_linked=no ;;
esac
if test "$lib_linked" != yes; then
$echo "$modename: configuration error: unsupported hardcode properties"
exit $EXIT_FAILURE
fi
if test -n "$add_shlibpath"; then
case :$compile_shlibpath: in
*":$add_shlibpath:"*) ;;
*) compile_shlibpath="$compile_shlibpath$add_shlibpath:" ;;
esac
fi
if test "$linkmode" = prog; then
test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs"
test -n "$add" && compile_deplibs="$add $compile_deplibs"
else
test -n "$add_dir" && deplibs="$add_dir $deplibs"
test -n "$add" && deplibs="$add $deplibs"
if test "$hardcode_direct" != yes && \
test "$hardcode_minus_L" != yes && \
test "$hardcode_shlibpath_var" = yes; then
case :$finalize_shlibpath: in
*":$libdir:"*) ;;
*) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
esac
fi
fi
fi
if test "$linkmode" = prog || test "$mode" = relink; then
add_shlibpath=
add_dir=
add=
# Finalize command for both is simple: just hardcode it.
if test "$hardcode_direct" = yes; then
add="$libdir/$linklib"
elif test "$hardcode_minus_L" = yes; then
add_dir="-L$libdir"
add="-l$name"
elif test "$hardcode_shlibpath_var" = yes; then
case :$finalize_shlibpath: in
*":$libdir:"*) ;;
*) finalize_shlibpath="$finalize_shlibpath$libdir:" ;;
esac
add="-l$name"
elif test "$hardcode_automatic" = yes; then
if test -n "$inst_prefix_dir" &&
test -f "$inst_prefix_dir$libdir/$linklib" ; then
add="$inst_prefix_dir$libdir/$linklib"
else
add="$libdir/$linklib"
fi
else
# We cannot seem to hardcode it, guess we'll fake it.
add_dir="-L$libdir"
# Try looking first in the location we're being installed to.
if test -n "$inst_prefix_dir"; then
case $libdir in
[\\/]*)
add_dir="$add_dir -L$inst_prefix_dir$libdir"
;;
esac
fi
add="-l$name"
fi
if test "$linkmode" = prog; then
test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs"
test -n "$add" && finalize_deplibs="$add $finalize_deplibs"
else
test -n "$add_dir" && deplibs="$add_dir $deplibs"
test -n "$add" && deplibs="$add $deplibs"
fi
fi
elif test "$linkmode" = prog; then
# Here we assume that one of hardcode_direct or hardcode_minus_L
# is not unsupported. This is valid on all known static and
# shared platforms.
if test "$hardcode_direct" != unsupported; then
test -n "$old_library" && linklib="$old_library"
compile_deplibs="$dir/$linklib $compile_deplibs"
finalize_deplibs="$dir/$linklib $finalize_deplibs"
else
compile_deplibs="-l$name -L$dir $compile_deplibs"
finalize_deplibs="-l$name -L$dir $finalize_deplibs"
fi
elif test "$build_libtool_libs" = yes; then
# Not a shared library
if test "$deplibs_check_method" != pass_all; then
# We're trying to link a shared library against a static one
# but the system doesn't support it.
# Just print a warning and add the library to dependency_libs so
# that the program can be linked against the static library.
$echo
$echo "*** Warning: This system can not link to static lib archive $lib."
$echo "*** I have the capability to make that library automatically link in when"
$echo "*** you link to this library. But I can only do this if you have a"
$echo "*** shared version of the library, which you do not appear to have."
if test "$module" = yes; then
$echo "*** But as you try to build a module library, libtool will still create "
$echo "*** a static module, that should work as long as the dlopening application"
$echo "*** is linked with the -dlopen flag to resolve symbols at runtime."
if test -z "$global_symbol_pipe"; then
$echo
$echo "*** However, this would only work if libtool was able to extract symbol"
$echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
$echo "*** not find such a program. So, this module is probably useless."
$echo "*** \`nm' from GNU binutils and a full rebuild may help."
fi
if test "$build_old_libs" = no; then
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
fi
else
deplibs="$dir/$old_library $deplibs"
link_static=yes
fi
fi # link shared/static library?
if test "$linkmode" = lib; then
if test -n "$dependency_libs" &&
{ test "$hardcode_into_libs" != yes ||
test "$build_old_libs" = yes ||
test "$link_static" = yes; }; then
# Extract -R from dependency_libs
temp_deplibs=
for libdir in $dependency_libs; do
case $libdir in
-R*) temp_xrpath=`$echo "X$libdir" | $Xsed -e 's/^-R//'`
case " $xrpath " in
*" $temp_xrpath "*) ;;
*) xrpath="$xrpath $temp_xrpath";;
esac;;
*) temp_deplibs="$temp_deplibs $libdir";;
esac
done
dependency_libs="$temp_deplibs"
fi
newlib_search_path="$newlib_search_path $absdir"
# Link against this library
test "$link_static" = no && newdependency_libs="$abs_ladir/$laname $newdependency_libs"
# ... and its dependency_libs
tmp_libs=
for deplib in $dependency_libs; do
newdependency_libs="$deplib $newdependency_libs"
if test "X$duplicate_deps" = "Xyes" ; then
case "$tmp_libs " in
*" $deplib "*) specialdeplibs="$specialdeplibs $deplib" ;;
esac
fi
tmp_libs="$tmp_libs $deplib"
done
if test "$link_all_deplibs" != no; then
# Add the search paths of all dependency libraries
for deplib in $dependency_libs; do
case $deplib in
-L*) path="$deplib" ;;
*.la)
dir=`$echo "X$deplib" | $Xsed -e 's%/[^/]*$%%'`
test "X$dir" = "X$deplib" && dir="."
# We need an absolute path.
case $dir in
[\\/]* | [A-Za-z]:[\\/]*) absdir="$dir" ;;
*)
absdir=`cd "$dir" && pwd`
if test -z "$absdir"; then
$echo "$modename: warning: cannot determine absolute directory name of \`$dir'" 1>&2
absdir="$dir"
fi
;;
esac
if grep "^installed=no" $deplib > /dev/null; then
path="$absdir/$objdir"
else
eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
if test -z "$libdir"; then
$echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2
exit $EXIT_FAILURE
fi
if test "$absdir" != "$libdir"; then
$echo "$modename: warning: \`$deplib' seems to be moved" 1>&2
fi
path="$absdir"
fi
depdepl=
case $host in
*-*-darwin*)
# we do not want to link against static libs,
# but need to link against shared
eval deplibrary_names=`${SED} -n -e 's/^library_names=\(.*\)$/\1/p' $deplib`
if test -n "$deplibrary_names" ; then
for tmp in $deplibrary_names ; do
depdepl=$tmp
done
if test -f "$path/$depdepl" ; then
depdepl="$path/$depdepl"
fi
# do not add paths which are already there
case " $newlib_search_path " in
*" $path "*) ;;
*) newlib_search_path="$newlib_search_path $path";;
esac
fi
path=""
;;
*)
path="-L$path"
;;
esac
;;
-l*)
case $host in
*-*-darwin*)
# Again, we only want to link against shared libraries
eval tmp_libs=`$echo "X$deplib" | $Xsed -e "s,^\-l,,"`
for tmp in $newlib_search_path ; do
if test -f "$tmp/lib$tmp_libs.dylib" ; then
eval depdepl="$tmp/lib$tmp_libs.dylib"
break
fi
done
path=""
;;
*) continue ;;
esac
;;
*) continue ;;
esac
case " $deplibs " in
*" $path "*) ;;
*) deplibs="$path $deplibs" ;;
esac
case " $deplibs " in
*" $depdepl "*) ;;
*) deplibs="$depdepl $deplibs" ;;
esac
done
fi # link_all_deplibs != no
fi # linkmode = lib
done # for deplib in $libs
dependency_libs="$newdependency_libs"
if test "$pass" = dlpreopen; then
# Link the dlpreopened libraries before other libraries
for deplib in $save_deplibs; do
deplibs="$deplib $deplibs"
done
fi
if test "$pass" != dlopen; then
if test "$pass" != conv; then
# Make sure lib_search_path contains only unique directories.
lib_search_path=
for dir in $newlib_search_path; do
case "$lib_search_path " in
*" $dir "*) ;;
*) lib_search_path="$lib_search_path $dir" ;;
esac
done
newlib_search_path=
fi
if test "$linkmode,$pass" != "prog,link"; then
vars="deplibs"
else
vars="compile_deplibs finalize_deplibs"
fi
for var in $vars dependency_libs; do
# Add libraries to $var in reverse order
eval tmp_libs=\"\$$var\"
new_libs=
for deplib in $tmp_libs; do
# FIXME: Pedantically, this is the right thing to do, so
# that some nasty dependency loop isn't accidentally
# broken:
#new_libs="$deplib $new_libs"
# Pragmatically, this seems to cause very few problems in
# practice:
case $deplib in
-L*) new_libs="$deplib $new_libs" ;;
-R*) ;;
*)
# And here is the reason: when a library appears more
# than once as an explicit dependence of a library, or
# is implicitly linked in more than once by the
# compiler, it is considered special, and multiple
# occurrences thereof are not removed. Compare this
# with having the same library being listed as a
# dependency of multiple other libraries: in this case,
# we know (pedantically, we assume) the library does not
# need to be listed more than once, so we keep only the
# last copy. This is not always right, but it is rare
# enough that we require users that really mean to play
# such unportable linking tricks to link the library
# using -Wl,-lname, so that libtool does not consider it
# for duplicate removal.
case " $specialdeplibs " in
*" $deplib "*) new_libs="$deplib $new_libs" ;;
*)
case " $new_libs " in
*" $deplib "*) ;;
*) new_libs="$deplib $new_libs" ;;
esac
;;
esac
;;
esac
done
tmp_libs=
for deplib in $new_libs; do
case $deplib in
-L*)
case " $tmp_libs " in
*" $deplib "*) ;;
*) tmp_libs="$tmp_libs $deplib" ;;
esac
;;
*) tmp_libs="$tmp_libs $deplib" ;;
esac
done
eval $var=\"$tmp_libs\"
done # for var
fi
# Last step: remove runtime libs from dependency_libs
# (they stay in deplibs)
tmp_libs=
for i in $dependency_libs ; do
case " $predeps $postdeps $compiler_lib_search_path " in
*" $i "*)
i=""
;;
esac
if test -n "$i" ; then
tmp_libs="$tmp_libs $i"
fi
done
dependency_libs=$tmp_libs
done # for pass
if test "$linkmode" = prog; then
dlfiles="$newdlfiles"
dlprefiles="$newdlprefiles"
fi
case $linkmode in
oldlib)
if test -n "$deplibs"; then
$echo "$modename: warning: \`-l' and \`-L' are ignored for archives" 1>&2
fi
if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
$echo "$modename: warning: \`-dlopen' is ignored for archives" 1>&2
fi
if test -n "$rpath"; then
$echo "$modename: warning: \`-rpath' is ignored for archives" 1>&2
fi
if test -n "$xrpath"; then
$echo "$modename: warning: \`-R' is ignored for archives" 1>&2
fi
if test -n "$vinfo"; then
$echo "$modename: warning: \`-version-info/-version-number' is ignored for archives" 1>&2
fi
if test -n "$release"; then
$echo "$modename: warning: \`-release' is ignored for archives" 1>&2
fi
if test -n "$export_symbols" || test -n "$export_symbols_regex"; then
$echo "$modename: warning: \`-export-symbols' is ignored for archives" 1>&2
fi
# Now set the variables for building old libraries.
build_libtool_libs=no
oldlibs="$output"
objs="$objs$old_deplibs"
;;
lib)
# Make sure we only generate libraries of the form `libNAME.la'.
case $outputname in
lib*)
name=`$echo "X$outputname" | $Xsed -e 's/\.la$//' -e 's/^lib//'`
eval shared_ext=\"$shrext_cmds\"
eval libname=\"$libname_spec\"
;;
*)
if test "$module" = no; then
$echo "$modename: libtool library \`$output' must begin with \`lib'" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
if test "$need_lib_prefix" != no; then
# Add the "lib" prefix for modules if required
name=`$echo "X$outputname" | $Xsed -e 's/\.la$//'`
eval shared_ext=\"$shrext_cmds\"
eval libname=\"$libname_spec\"
else
libname=`$echo "X$outputname" | $Xsed -e 's/\.la$//'`
fi
;;
esac
if test -n "$objs"; then
if test "$deplibs_check_method" != pass_all; then
$echo "$modename: cannot build libtool library \`$output' from non-libtool objects on this host:$objs" 2>&1
exit $EXIT_FAILURE
else
$echo
$echo "*** Warning: Linking the shared library $output against the non-libtool"
$echo "*** objects $objs is not portable!"
libobjs="$libobjs $objs"
fi
fi
if test "$dlself" != no; then
$echo "$modename: warning: \`-dlopen self' is ignored for libtool libraries" 1>&2
fi
set dummy $rpath
if test "$#" -gt 2; then
$echo "$modename: warning: ignoring multiple \`-rpath's for a libtool library" 1>&2
fi
install_libdir="$2"
oldlibs=
if test -z "$rpath"; then
if test "$build_libtool_libs" = yes; then
# Building a libtool convenience library.
# Some compilers have problems with a `.al' extension so
# convenience libraries should have the same extension that an
# archive normally would.
oldlibs="$output_objdir/$libname.$libext $oldlibs"
build_libtool_libs=convenience
build_old_libs=yes
fi
if test -n "$vinfo"; then
$echo "$modename: warning: \`-version-info/-version-number' is ignored for convenience libraries" 1>&2
fi
if test -n "$release"; then
$echo "$modename: warning: \`-release' is ignored for convenience libraries" 1>&2
fi
else
# Parse the version information argument.
save_ifs="$IFS"; IFS=':'
set dummy $vinfo 0 0 0
IFS="$save_ifs"
if test -n "$8"; then
$echo "$modename: too many parameters to \`-version-info'" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
# convert absolute version numbers to libtool ages
# this retains compatibility with .la files and attempts
# to make the code below a bit more comprehensible
case $vinfo_number in
yes)
number_major="$2"
number_minor="$3"
number_revision="$4"
#
# There are really only two kinds -- those that
# use the current revision as the major version
# and those that subtract age and use age as
# a minor version. But, then there is irix
# which has an extra 1 added just for fun
#
case $version_type in
darwin|linux|osf|windows)
current=`expr $number_major + $number_minor`
age="$number_minor"
revision="$number_revision"
;;
freebsd-aout|freebsd-elf|sunos)
current="$number_major"
revision="$number_minor"
age="0"
;;
irix|nonstopux)
current=`expr $number_major + $number_minor - 1`
age="$number_minor"
revision="$number_minor"
;;
esac
;;
no)
current="$2"
revision="$3"
age="$4"
;;
esac
# Check that each of the things are valid numbers.
case $current in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
$echo "$modename: CURRENT \`$current' must be a nonnegative integer" 1>&2
$echo "$modename: \`$vinfo' is not valid version information" 1>&2
exit $EXIT_FAILURE
;;
esac
case $revision in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
$echo "$modename: REVISION \`$revision' must be a nonnegative integer" 1>&2
$echo "$modename: \`$vinfo' is not valid version information" 1>&2
exit $EXIT_FAILURE
;;
esac
case $age in
0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;;
*)
$echo "$modename: AGE \`$age' must be a nonnegative integer" 1>&2
$echo "$modename: \`$vinfo' is not valid version information" 1>&2
exit $EXIT_FAILURE
;;
esac
if test "$age" -gt "$current"; then
$echo "$modename: AGE \`$age' is greater than the current interface number \`$current'" 1>&2
$echo "$modename: \`$vinfo' is not valid version information" 1>&2
exit $EXIT_FAILURE
fi
# Calculate the version variables.
major=
versuffix=
verstring=
case $version_type in
none) ;;
darwin)
# Like Linux, but with the current version available in
# verstring for coding it into the library header
major=.`expr $current - $age`
versuffix="$major.$age.$revision"
# Darwin ld doesn't like 0 for these options...
minor_current=`expr $current + 1`
verstring="${wl}-compatibility_version ${wl}$minor_current ${wl}-current_version ${wl}$minor_current.$revision"
;;
freebsd-aout)
major=".$current"
versuffix=".$current.$revision";
;;
freebsd-elf)
major=".$current"
versuffix=".$current";
;;
irix | nonstopux)
major=`expr $current - $age + 1`
case $version_type in
nonstopux) verstring_prefix=nonstopux ;;
*) verstring_prefix=sgi ;;
esac
verstring="$verstring_prefix$major.$revision"
# Add in all the interfaces that we are compatible with.
loop=$revision
while test "$loop" -ne 0; do
iface=`expr $revision - $loop`
loop=`expr $loop - 1`
verstring="$verstring_prefix$major.$iface:$verstring"
done
# Before this point, $major must not contain `.'.
major=.$major
versuffix="$major.$revision"
;;
linux)
major=.`expr $current - $age`
versuffix="$major.$age.$revision"
;;
osf)
major=.`expr $current - $age`
versuffix=".$current.$age.$revision"
verstring="$current.$age.$revision"
# Add in all the interfaces that we are compatible with.
loop=$age
while test "$loop" -ne 0; do
iface=`expr $current - $loop`
loop=`expr $loop - 1`
verstring="$verstring:${iface}.0"
done
# Make executables depend on our current version.
verstring="$verstring:${current}.0"
;;
sunos)
major=".$current"
versuffix=".$current.$revision"
;;
windows)
# Use '-' rather than '.', since we only want one
# extension on DOS 8.3 filesystems.
major=`expr $current - $age`
versuffix="-$major"
;;
*)
$echo "$modename: unknown library version type \`$version_type'" 1>&2
$echo "Fatal configuration error. See the $PACKAGE docs for more information." 1>&2
exit $EXIT_FAILURE
;;
esac
# Clear the version info if we defaulted, and they specified a release.
if test -z "$vinfo" && test -n "$release"; then
major=
case $version_type in
darwin)
# we can't check for "0.0" in archive_cmds due to quoting
# problems, so we reset it completely
verstring=
;;
*)
verstring="0.0"
;;
esac
if test "$need_version" = no; then
versuffix=
else
versuffix=".0.0"
fi
fi
# Remove version info from name if versioning should be avoided
if test "$avoid_version" = yes && test "$need_version" = no; then
major=
versuffix=
verstring=""
fi
# Check to see if the archive will have undefined symbols.
if test "$allow_undefined" = yes; then
if test "$allow_undefined_flag" = unsupported; then
$echo "$modename: warning: undefined symbols not allowed in $host shared libraries" 1>&2
build_libtool_libs=no
build_old_libs=yes
fi
else
# Don't allow undefined symbols.
allow_undefined_flag="$no_undefined_flag"
fi
fi
if test "$mode" != relink; then
# Remove our outputs, but don't remove object files since they
# may have been created when compiling PIC objects.
removelist=
tempremovelist=`$echo "$output_objdir/*"`
for p in $tempremovelist; do
case $p in
*.$objext)
;;
$output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/${libname}${release}.*)
if test "X$precious_files_regex" != "X"; then
if echo $p | $EGREP -e "$precious_files_regex" >/dev/null 2>&1
then
continue
fi
fi
removelist="$removelist $p"
;;
*) ;;
esac
done
if test -n "$removelist"; then
$show "${rm}r $removelist"
$run ${rm}r $removelist
fi
fi
# Now set the variables for building old libraries.
if test "$build_old_libs" = yes && test "$build_libtool_libs" != convenience ; then
oldlibs="$oldlibs $output_objdir/$libname.$libext"
# Transform .lo files to .o files.
oldobjs="$objs "`$echo "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}'$/d' -e "$lo2o" | $NL2SP`
fi
# Eliminate all temporary directories.
for path in $notinst_path; do
lib_search_path=`$echo "$lib_search_path " | ${SED} -e 's% $path % %g'`
deplibs=`$echo "$deplibs " | ${SED} -e 's% -L$path % %g'`
dependency_libs=`$echo "$dependency_libs " | ${SED} -e 's% -L$path % %g'`
done
if test -n "$xrpath"; then
# If the user specified any rpath flags, then add them.
temp_xrpath=
for libdir in $xrpath; do
temp_xrpath="$temp_xrpath -R$libdir"
case "$finalize_rpath " in
*" $libdir "*) ;;
*) finalize_rpath="$finalize_rpath $libdir" ;;
esac
done
if test "$hardcode_into_libs" != yes || test "$build_old_libs" = yes; then
dependency_libs="$temp_xrpath $dependency_libs"
fi
fi
# Make sure dlfiles contains only unique files that won't be dlpreopened
old_dlfiles="$dlfiles"
dlfiles=
for lib in $old_dlfiles; do
case " $dlprefiles $dlfiles " in
*" $lib "*) ;;
*) dlfiles="$dlfiles $lib" ;;
esac
done
# Make sure dlprefiles contains only unique files
old_dlprefiles="$dlprefiles"
dlprefiles=
for lib in $old_dlprefiles; do
case "$dlprefiles " in
*" $lib "*) ;;
*) dlprefiles="$dlprefiles $lib" ;;
esac
done
if test "$build_libtool_libs" = yes; then
if test -n "$rpath"; then
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos*)
# these systems don't actually have a c library (as such)!
;;
*-*-rhapsody* | *-*-darwin1.[012])
# Rhapsody C library is in the System framework
deplibs="$deplibs -framework System"
;;
*-*-netbsd*)
# Don't link with libc until the a.out ld.so is fixed.
;;
*-*-openbsd* | *-*-freebsd* | *-*-dragonfly*)
# Do not include libc due to us having libc/libc_r.
test "X$arg" = "X-lc" && continue
;;
*)
# Add libc to deplibs on all other systems if necessary.
if test "$build_libtool_need_lc" = "yes"; then
deplibs="$deplibs -lc"
fi
;;
esac
fi
# Transform deplibs into only deplibs that can be linked in shared.
name_save=$name
libname_save=$libname
release_save=$release
versuffix_save=$versuffix
major_save=$major
# I'm not sure if I'm treating the release correctly. I think
# release should show up in the -l (ie -lgmp5) so we don't want to
# add it in twice. Is that correct?
release=""
versuffix=""
major=""
newdeplibs=
droppeddeps=no
case $deplibs_check_method in
pass_all)
# Don't check for shared/static. Everything works.
# This might be a little naive. We might want to check
# whether the library exists or not. But this is on
# osf3 & osf4 and I'm not really sure... Just
# implementing what was already the behavior.
newdeplibs=$deplibs
;;
# test_compile deplibs check: decide whether each -l dependency was linked
# in dynamically by compiling a trivial program against the deplibs and
# grepping the resulting `ldd` output for the library's shared name.
test_compile)
# This code stresses the "libraries are programs" paradigm to its
# limits. Maybe even breaks it. We compile a program, linking it
# against the deplibs as a proxy for the library. Then we can check
# whether they linked in statically or dynamically with ldd.
$rm conftest.c
cat > conftest.c <<EOF
int main() { return 0; }
EOF
$rm conftest
$LTCC -o conftest conftest.c $deplibs
if test "$?" -eq 0 ; then
ldd_output=`ldd conftest`
for i in $deplibs; do
name=`expr $i : '-l\(.*\)'`
# If $name is empty we are operating on a -L argument.
# Bugfix: compare as a *string* with `!=`.  The previous numeric
# `test "$name" -ne "0"` emitted "integer expression expected" and
# returned false for every non-numeric library name (e.g. `m` from
# -lm), so those deplibs bypassed the ldd check entirely.  This now
# matches the equivalent tests in the salvage loop below and in the
# file_magic/match_pattern arms.
if test "$name" != "" && test "$name" != "0"; then
if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
case " $predeps $postdeps " in
*" $i "*)
newdeplibs="$newdeplibs $i"
i=""
;;
esac
fi
if test -n "$i" ; then
# Expand the platform's library naming specs for this $name and
# take the first (most specific) candidate as the ldd match.
libname=`eval \\$echo \"$libname_spec\"`
deplib_matches=`eval \\$echo \"$library_names_spec\"`
set dummy $deplib_matches
deplib_match=$2
if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
newdeplibs="$newdeplibs $i"
else
droppeddeps=yes
$echo
$echo "*** Warning: dynamic linker does not accept needed library $i."
$echo "*** I have the capability to make that library automatically link in when"
$echo "*** you link to this library. But I can only do this if you have a"
$echo "*** shared version of the library, which I believe you do not have"
$echo "*** because a test_compile did reveal that the linker did not use it for"
$echo "*** its dynamic dependency list that programs get resolved with at runtime."
fi
fi
else
newdeplibs="$newdeplibs $i"
fi
done
else
# Error occurred in the first compile. Let's try to salvage
# the situation: Compile a separate program for each library.
for i in $deplibs; do
name=`expr $i : '-l\(.*\)'`
# If $name is empty we are operating on a -L argument.
if test "$name" != "" && test "$name" != "0"; then
$rm conftest
$LTCC -o conftest conftest.c $i
# Did it work?
if test "$?" -eq 0 ; then
ldd_output=`ldd conftest`
if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
case " $predeps $postdeps " in
*" $i "*)
newdeplibs="$newdeplibs $i"
i=""
;;
esac
fi
if test -n "$i" ; then
libname=`eval \\$echo \"$libname_spec\"`
deplib_matches=`eval \\$echo \"$library_names_spec\"`
set dummy $deplib_matches
deplib_match=$2
if test `expr "$ldd_output" : ".*$deplib_match"` -ne 0 ; then
newdeplibs="$newdeplibs $i"
else
droppeddeps=yes
$echo
$echo "*** Warning: dynamic linker does not accept needed library $i."
$echo "*** I have the capability to make that library automatically link in when"
$echo "*** you link to this library. But I can only do this if you have a"
$echo "*** shared version of the library, which you do not appear to have"
$echo "*** because a test_compile did reveal that the linker did not use this one"
$echo "*** as a dynamic dependency that programs can get resolved with at runtime."
fi
fi
else
droppeddeps=yes
$echo
$echo "*** Warning! Library $i is needed by this library but I was not able to"
$echo "*** make it link in! You will probably need to install it or some"
$echo "*** library that it depends on before this library will be fully"
$echo "*** functional. Installing it before continuing would be even better."
fi
else
newdeplibs="$newdeplibs $i"
fi
done
fi
;;
# file_magic deplibs check: hunt the library search paths for a file whose
# first lines of `file` output match $file_magic_regex, following symlinks.
file_magic*)
set dummy $deplibs_check_method
file_magic_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"`
for a_deplib in $deplibs; do
name=`expr $a_deplib : '-l\(.*\)'`
# If $name is empty we are operating on a -L argument.
if test "$name" != "" && test "$name" != "0"; then
if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
case " $predeps $postdeps " in
*" $a_deplib "*)
newdeplibs="$newdeplibs $a_deplib"
a_deplib=""
;;
esac
fi
if test -n "$a_deplib" ; then
libname=`eval \\$echo \"$libname_spec\"`
for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
for potent_lib in $potential_libs; do
# Follow soft links.
if ls -lLd "$potent_lib" 2>/dev/null \
| grep " -> " >/dev/null; then
continue
fi
# The statement above tries to avoid entering an
# endless loop below, in case of cyclic links.
# We might still enter an endless loop, since a link
# loop can be closed while we follow links,
# but so what?
potlib="$potent_lib"
while test -h "$potlib" 2>/dev/null; do
potliblink=`ls -ld $potlib | ${SED} 's/.* -> //'`
case $potliblink in
[\\/]* | [A-Za-z]:[\\/]*) potlib="$potliblink";;
*) potlib=`$echo "X$potlib" | $Xsed -e 's,[^/]*$,,'`"$potliblink";;
esac
done
# Accept the candidate if its file magic matches; "break 2"
# leaves both the candidate and the search-path loops.
if eval $file_magic_cmd \"\$potlib\" 2>/dev/null \
| ${SED} 10q \
| $EGREP "$file_magic_regex" > /dev/null; then
newdeplibs="$newdeplibs $a_deplib"
a_deplib=""
break 2
fi
done
done
fi
if test -n "$a_deplib" ; then
droppeddeps=yes
$echo
$echo "*** Warning: linker path does not have real file for library $a_deplib."
$echo "*** I have the capability to make that library automatically link in when"
$echo "*** you link to this library. But I can only do this if you have a"
$echo "*** shared version of the library, which you do not appear to have"
$echo "*** because I did check the linker path looking for a file starting"
if test -z "$potlib" ; then
$echo "*** with $libname but no candidates were found. (...for file magic test)"
else
$echo "*** with $libname and none of the candidates passed a file format test"
$echo "*** using a file magic. Last file checked: $potlib"
fi
fi
else
# Add a -L argument.
newdeplibs="$newdeplibs $a_deplib"
fi
done # Gone through all deplibs.
;;
# match_pattern deplibs check: like file_magic, but match the candidate's
# *name* against $match_pattern_regex instead of running $file_magic_cmd.
match_pattern*)
set dummy $deplibs_check_method
match_pattern_regex=`expr "$deplibs_check_method" : "$2 \(.*\)"`
for a_deplib in $deplibs; do
name=`expr $a_deplib : '-l\(.*\)'`
# If $name is empty we are operating on a -L argument.
if test -n "$name" && test "$name" != "0"; then
if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
case " $predeps $postdeps " in
*" $a_deplib "*)
newdeplibs="$newdeplibs $a_deplib"
a_deplib=""
;;
esac
fi
if test -n "$a_deplib" ; then
libname=`eval \\$echo \"$libname_spec\"`
for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do
potential_libs=`ls $i/$libname[.-]* 2>/dev/null`
for potent_lib in $potential_libs; do
potlib="$potent_lib" # see symlink-check above in file_magic test
if eval $echo \"$potent_lib\" 2>/dev/null \
| ${SED} 10q \
| $EGREP "$match_pattern_regex" > /dev/null; then
newdeplibs="$newdeplibs $a_deplib"
a_deplib=""
break 2
fi
done
done
fi
if test -n "$a_deplib" ; then
droppeddeps=yes
$echo
$echo "*** Warning: linker path does not have real file for library $a_deplib."
$echo "*** I have the capability to make that library automatically link in when"
$echo "*** you link to this library. But I can only do this if you have a"
$echo "*** shared version of the library, which you do not appear to have"
$echo "*** because I did check the linker path looking for a file starting"
if test -z "$potlib" ; then
$echo "*** with $libname but no candidates were found. (...for regex pattern test)"
else
$echo "*** with $libname and none of the candidates passed a file format test"
$echo "*** using a regex pattern. Last file checked: $potlib"
fi
fi
else
# Add a -L argument.
newdeplibs="$newdeplibs $a_deplib"
fi
done # Gone through all deplibs.
;;
# Fallback: no reliable check is available on this platform, so drop all
# inter-library dependencies (after stripping -lc, -L/-R flags and any
# compiler-provided pre/post deps) and warn if anything substantive remains.
none | unknown | *)
newdeplibs=""
tmp_deplibs=`$echo "X $deplibs" | $Xsed -e 's/ -lc$//' \
-e 's/ -[LR][^ ]*//g'`
if test "X$allow_libtool_libs_with_static_runtimes" = "Xyes" ; then
for i in $predeps $postdeps ; do
# can't use Xsed below, because $i might contain '/'
tmp_deplibs=`$echo "X $tmp_deplibs" | ${SED} -e "1s,^X,," -e "s,$i,,"`
done
fi
if $echo "X $tmp_deplibs" | $Xsed -e 's/[ ]//g' \
| grep . >/dev/null; then
$echo
if test "X$deplibs_check_method" = "Xnone"; then
$echo "*** Warning: inter-library dependencies are not supported in this platform."
else
$echo "*** Warning: inter-library dependencies are not known to be supported."
fi
$echo "*** All declared inter-library dependencies are being dropped."
droppeddeps=yes
fi
;;
esac
# Restore the versioning variables saved before the deplibs check.
versuffix=$versuffix_save
major=$major_save
release=$release_save
libname=$libname_save
name=$name_save
case $host in
*-*-rhapsody* | *-*-darwin1.[012])
# On Rhapsody replace the C library is the System framework
newdeplibs=`$echo "X $newdeplibs" | $Xsed -e 's/ -lc / -framework System /'`
;;
esac
# If dependencies were dropped, either degrade to a static module/library
# or just warn, depending on -module / -no-undefined settings.
if test "$droppeddeps" = yes; then
if test "$module" = yes; then
$echo
$echo "*** Warning: libtool could not satisfy all declared inter-library"
$echo "*** dependencies of module $libname. Therefore, libtool will create"
$echo "*** a static module, that should work as long as the dlopening"
$echo "*** application is linked with the -dlopen flag."
if test -z "$global_symbol_pipe"; then
$echo
$echo "*** However, this would only work if libtool was able to extract symbol"
$echo "*** lists from a program, using \`nm' or equivalent, but libtool could"
$echo "*** not find such a program. So, this module is probably useless."
$echo "*** \`nm' from GNU binutils and a full rebuild may help."
fi
if test "$build_old_libs" = no; then
oldlibs="$output_objdir/$libname.$libext"
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
else
$echo "*** The inter-library dependencies that have been dropped here will be"
$echo "*** automatically added whenever a program is linked with this library"
$echo "*** or is declared to -dlopen it."
if test "$allow_undefined" = no; then
$echo
$echo "*** Since this library must not contain undefined symbols,"
$echo "*** because either the platform does not support them or"
$echo "*** it was explicitly requested with -no-undefined,"
$echo "*** libtool will only create a static version of it."
if test "$build_old_libs" = no; then
oldlibs="$output_objdir/$libname.$libext"
build_libtool_libs=module
build_old_libs=yes
else
build_libtool_libs=no
fi
fi
fi
fi
# Done checking deplibs!
deplibs=$newdeplibs
fi
# All the library-specific variables (install_libdir is set above).
library_names=
old_library=
dlname=
# Test again, we may have decided not to build it any more
if test "$build_libtool_libs" = yes; then
if test "$hardcode_into_libs" = yes; then
# Hardcode the library paths
hardcode_libdirs=
dep_rpath=
rpath="$finalize_rpath"
test "$mode" != relink && rpath="$compile_rpath$rpath"
for libdir in $rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs="$libdir"
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
dep_rpath="$dep_rpath $flag"
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
*" $libdir "*) ;;
*) perm_rpath="$perm_rpath $libdir" ;;
esac
fi
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir="$hardcode_libdirs"
if test -n "$hardcode_libdir_flag_spec_ld"; then
eval dep_rpath=\"$hardcode_libdir_flag_spec_ld\"
else
eval dep_rpath=\"$hardcode_libdir_flag_spec\"
fi
fi
if test -n "$runpath_var" && test -n "$perm_rpath"; then
# We should set the runpath_var.
rpath=
for dir in $perm_rpath; do
rpath="$rpath$dir:"
done
eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var"
fi
test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs"
fi
shlibpath="$finalize_shlibpath"
test "$mode" != relink && shlibpath="$compile_shlibpath$shlibpath"
if test -n "$shlibpath"; then
eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var"
fi
# Get the real and link names of the library.
eval shared_ext=\"$shrext_cmds\"
eval library_names=\"$library_names_spec\"
set dummy $library_names
# $2 is the real (fully versioned) file name; the remaining words are
# the link names for which symlinks will be created later.
realname="$2"
shift; shift
if test -n "$soname_spec"; then
eval soname=\"$soname_spec\"
else
soname="$realname"
fi
if test -z "$dlname"; then
dlname=$soname
fi
lib="$output_objdir/$realname"
for link
do
linknames="$linknames $link"
done
# Use standard objects if they are pic
test -z "$pic_flag" && libobjs=`$echo "X$libobjs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
# Prepare the list of exported symbols
if test -z "$export_symbols"; then
if test "$always_export_symbols" = yes || test -n "$export_symbols_regex"; then
$show "generating symbol list for \`$libname.la'"
export_symbols="$output_objdir/$libname.exp"
$run $rm $export_symbols
cmds=$export_symbols_cmds
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
# Only run the command if it fits within the platform's maximum
# command-line length (max_cmd_len of -1 means "no limit").
if len=`expr "X$cmd" : ".*"` &&
test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then
$show "$cmd"
$run eval "$cmd" || exit $?
skipped_export=false
else
# The command line is too long to execute in one step.
$show "using reloadable object file for export list..."
skipped_export=:
# Break out early, otherwise skipped_export may be
# set to false by a later but shorter cmd.
break
fi
done
IFS="$save_ifs"
if test -n "$export_symbols_regex"; then
$show "$EGREP -e \"$export_symbols_regex\" \"$export_symbols\" > \"${export_symbols}T\""
$run eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"'
$show "$mv \"${export_symbols}T\" \"$export_symbols\""
$run eval '$mv "${export_symbols}T" "$export_symbols"'
fi
fi
fi
if test -n "$export_symbols" && test -n "$include_expsyms"; then
$run eval '$echo "X$include_expsyms" | $SP2NL >> "$export_symbols"'
fi
# Split deplibs so that those under $inst_prefix_dir (a DESTDIR-style
# staging prefix) are placed first on the link line.
tmp_deplibs=
inst_prefix_arg=
for test_deplib in $deplibs; do
case " $convenience " in
*" $test_deplib "*) ;;
*)
if test -n "$inst_prefix_dir" && (echo "$test_deplib" | grep -- "$inst_prefix_dir" >/dev/null); then
inst_prefix_arg="$inst_prefix_arg $test_deplib"
else
tmp_deplibs="$tmp_deplibs $test_deplib"
fi
;;
esac
done
deplibs="$tmp_deplibs"
if test -n "$inst_prefix_arg"; then
deplibs="$inst_prefix_arg $deplibs"
fi
# Pull convenience (.a) archives in, either via the linker's
# whole-archive flag or by extracting their objects.
if test -n "$convenience"; then
if test -n "$whole_archive_flag_spec"; then
save_libobjs=$libobjs
eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
else
gentop="$output_objdir/${outputname}x"
generated="$generated $gentop"
func_extract_archives $gentop $convenience
libobjs="$libobjs $func_extract_archives_result"
fi
fi
if test "$thread_safe" = yes && test -n "$thread_safe_flag_spec"; then
eval flag=\"$thread_safe_flag_spec\"
linker_flags="$linker_flags $flag"
fi
# Make a backup of the uninstalled library when relinking
if test "$mode" = relink; then
$run eval '(cd $output_objdir && $rm ${realname}U && $mv $realname ${realname}U)' || exit $?
fi
# Do each of the archive commands.
# Select module vs. ordinary archive commands, preferring the
# export-symbol variants when a symbol file is in play.
if test "$module" = yes && test -n "$module_cmds" ; then
if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
eval test_cmds=\"$module_expsym_cmds\"
cmds=$module_expsym_cmds
else
eval test_cmds=\"$module_cmds\"
cmds=$module_cmds
fi
else
if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
eval test_cmds=\"$archive_expsym_cmds\"
cmds=$archive_expsym_cmds
else
eval test_cmds=\"$archive_cmds\"
cmds=$archive_cmds
fi
fi
if test "X$skipped_export" != "X:" &&
len=`expr "X$test_cmds" : ".*" 2>/dev/null` &&
test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then
:
else
# The command line is too long to link in one step, link piecewise.
$echo "creating reloadable object files..."
# Save the value of $output and $libobjs because we want to
# use them later. If we have whole_archive_flag_spec, we
# want to use save_libobjs as it was before
# whole_archive_flag_spec was expanded, because we can't
# assume the linker understands whole_archive_flag_spec.
# This may have to be revisited, in case too many
# convenience libraries get linked in and end up exceeding
# the spec.
if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then
save_libobjs=$libobjs
fi
save_output=$output
output_la=`$echo "X$output" | $Xsed -e "$basename"`
# Clear the reloadable object creation command queue and
# initialize k to one.
test_cmds=
concat_cmds=
objlist=
delfiles=
last_robj=
k=1
output=$output_objdir/$output_la-${k}.$objext
# Loop over the list of objects to be linked.
for obj in $save_libobjs
do
eval test_cmds=\"$reload_cmds $objlist $last_robj\"
if test "X$objlist" = X ||
{ len=`expr "X$test_cmds" : ".*" 2>/dev/null` &&
test "$len" -le "$max_cmd_len"; }; then
objlist="$objlist $obj"
else
# The command $test_cmds is almost too long, add a
# command to the queue.
if test "$k" -eq 1 ; then
# The first file doesn't have a previous command to add.
eval concat_cmds=\"$reload_cmds $objlist $last_robj\"
else
# All subsequent reloadable object files will link in
# the last one created.
eval concat_cmds=\"\$concat_cmds~$reload_cmds $objlist $last_robj\"
fi
last_robj=$output_objdir/$output_la-${k}.$objext
k=`expr $k + 1`
output=$output_objdir/$output_la-${k}.$objext
objlist=$obj
len=1
fi
done
# Handle the remaining objects by creating one last
# reloadable object file. All subsequent reloadable object
# files will link in the last one created.
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
eval concat_cmds=\"\${concat_cmds}$reload_cmds $objlist $last_robj\"
if ${skipped_export-false}; then
$show "generating symbol list for \`$libname.la'"
export_symbols="$output_objdir/$libname.exp"
$run $rm $export_symbols
libobjs=$output
# Append the command to create the export file.
eval concat_cmds=\"\$concat_cmds~$export_symbols_cmds\"
fi
# Set up a command to remove the reloadable object files
# after they are used.
i=0
while test "$i" -lt "$k"
do
i=`expr $i + 1`
delfiles="$delfiles $output_objdir/$output_la-${i}.$objext"
done
$echo "creating a temporary reloadable object file: $output"
# Loop through the commands generated above and execute them.
# Commands are '~'-separated; IFS is restored inside the loop so each
# command itself is word-split normally.
save_ifs="$IFS"; IFS='~'
for cmd in $concat_cmds; do
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
libobjs=$output
# Restore the value of output.
output=$save_output
if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then
eval libobjs=\"\$libobjs $whole_archive_flag_spec\"
fi
# Expand the library linking commands again to reset the
# value of $libobjs for piecewise linking.
# Do each of the archive commands.
if test "$module" = yes && test -n "$module_cmds" ; then
if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then
cmds=$module_expsym_cmds
else
cmds=$module_cmds
fi
else
if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then
cmds=$archive_expsym_cmds
else
cmds=$archive_cmds
fi
fi
# Append the command to remove the reloadable object files
# to the just-reset $cmds.
eval cmds=\"\$cmds~\$rm $delfiles\"
fi
# Execute the final '~'-separated archive command list.
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd" || {
lt_exit=$?
# Restore the uninstalled library and exit
if test "$mode" = relink; then
$run eval '(cd $output_objdir && $rm ${realname}T && $mv ${realname}U $realname)'
fi
exit $lt_exit
}
done
IFS="$save_ifs"
# Restore the uninstalled library and exit
if test "$mode" = relink; then
$run eval '(cd $output_objdir && $rm ${realname}T && $mv $realname ${realname}T && $mv "$realname"U $realname)' || exit $?
if test -n "$convenience"; then
if test -z "$whole_archive_flag_spec"; then
$show "${rm}r $gentop"
$run ${rm}r "$gentop"
fi
fi
exit $EXIT_SUCCESS
fi
# Create links to the real library.
for linkname in $linknames; do
if test "$realname" != "$linkname"; then
$show "(cd $output_objdir && $rm $linkname && $LN_S $realname $linkname)"
$run eval '(cd $output_objdir && $rm $linkname && $LN_S $realname $linkname)' || exit $?
fi
done
# If -module or -export-dynamic was specified, set the dlname.
if test "$module" = yes || test "$export_dynamic" = yes; then
# On all known operating systems, these are identical.
dlname="$soname"
fi
fi
;;
# Output is a (libtool) object file: warn about inapplicable flags, then
# combine the input objects into $obj (and $libobj if building PIC too)
# using $reload_cmds.
obj)
if test -n "$deplibs"; then
$echo "$modename: warning: \`-l' and \`-L' are ignored for objects" 1>&2
fi
if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
$echo "$modename: warning: \`-dlopen' is ignored for objects" 1>&2
fi
if test -n "$rpath"; then
$echo "$modename: warning: \`-rpath' is ignored for objects" 1>&2
fi
if test -n "$xrpath"; then
$echo "$modename: warning: \`-R' is ignored for objects" 1>&2
fi
if test -n "$vinfo"; then
$echo "$modename: warning: \`-version-info' is ignored for objects" 1>&2
fi
if test -n "$release"; then
$echo "$modename: warning: \`-release' is ignored for objects" 1>&2
fi
case $output in
*.lo)
if test -n "$objs$old_deplibs"; then
$echo "$modename: cannot build library object \`$output' from non-libtool objects" 1>&2
exit $EXIT_FAILURE
fi
libobj="$output"
obj=`$echo "X$output" | $Xsed -e "$lo2o"`
;;
*)
libobj=
obj="$output"
;;
esac
# Delete the old objects.
$run $rm $obj $libobj
# Objects from convenience libraries. This assumes
# single-version convenience libraries. Whenever we create
# different ones for PIC/non-PIC, this we'll have to duplicate
# the extraction.
reload_conv_objs=
gentop=
# reload_cmds runs $LD directly, so let us get rid of
# -Wl from whole_archive_flag_spec
wl=
if test -n "$convenience"; then
if test -n "$whole_archive_flag_spec"; then
eval reload_conv_objs=\"\$reload_objs $whole_archive_flag_spec\"
else
gentop="$output_objdir/${obj}x"
generated="$generated $gentop"
func_extract_archives $gentop $convenience
reload_conv_objs="$reload_objs $func_extract_archives_result"
fi
fi
# Create the old-style object.
reload_objs="$objs$old_deplibs "`$echo "X$libobjs" | $SP2NL | $Xsed -e '/\.'${libext}$'/d' -e '/\.lib$/d' -e "$lo2o" | $NL2SP`" $reload_conv_objs" ### testsuite: skip nested quoting test
output="$obj"
cmds=$reload_cmds
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
# Exit if we aren't doing a library object file.
if test -z "$libobj"; then
if test -n "$gentop"; then
$show "${rm}r $gentop"
$run ${rm}r $gentop
fi
exit $EXIT_SUCCESS
fi
if test "$build_libtool_libs" != yes; then
if test -n "$gentop"; then
$show "${rm}r $gentop"
$run ${rm}r $gentop
fi
# Create an invalid libtool object if no PIC, so that we don't
# accidentally link it into a program.
# $show "echo timestamp > $libobj"
# $run eval "echo timestamp > $libobj" || exit $?
exit $EXIT_SUCCESS
fi
if test -n "$pic_flag" || test "$pic_mode" != default; then
# Only do commands if we really have different PIC objects.
reload_objs="$libobjs $reload_conv_objs"
output="$libobj"
cmds=$reload_cmds
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
fi
if test -n "$gentop"; then
$show "${rm}r $gentop"
$run ${rm}r $gentop
fi
exit $EXIT_SUCCESS
;;
# Output is a program: emit warnings for inapplicable flags, apply per-host
# tweaks, then hardcode the compile-time and finalize-time library paths.
prog)
case $host in
*cygwin*) output=`$echo $output | ${SED} -e 's,.exe$,,;s,$,.exe,'` ;;
esac
if test -n "$vinfo"; then
$echo "$modename: warning: \`-version-info' is ignored for programs" 1>&2
fi
if test -n "$release"; then
$echo "$modename: warning: \`-release' is ignored for programs" 1>&2
fi
if test "$preload" = yes; then
if test "$dlopen_support" = unknown && test "$dlopen_self" = unknown &&
test "$dlopen_self_static" = unknown; then
$echo "$modename: warning: \`AC_LIBTOOL_DLOPEN' not used. Assuming no dlopen support."
fi
fi
case $host in
*-*-rhapsody* | *-*-darwin1.[012])
# On Rhapsody replace the C library is the System framework
compile_deplibs=`$echo "X $compile_deplibs" | $Xsed -e 's/ -lc / -framework System /'`
finalize_deplibs=`$echo "X $finalize_deplibs" | $Xsed -e 's/ -lc / -framework System /'`
;;
esac
case $host in
*darwin*)
# Don't allow lazy linking, it breaks C++ global constructors
if test "$tagname" = CXX ; then
compile_command="$compile_command ${wl}-bind_at_load"
finalize_command="$finalize_command ${wl}-bind_at_load"
fi
;;
esac
compile_command="$compile_command $compile_deplibs"
finalize_command="$finalize_command $finalize_deplibs"
if test -n "$rpath$xrpath"; then
# If the user specified any rpath flags, then add them.
for libdir in $rpath $xrpath; do
# This is the magic to use -rpath.
case "$finalize_rpath " in
*" $libdir "*) ;;
*) finalize_rpath="$finalize_rpath $libdir" ;;
esac
done
fi
# Now hardcode the library paths
rpath=
hardcode_libdirs=
for libdir in $compile_rpath $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs="$libdir"
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
rpath="$rpath $flag"
fi
elif test -n "$runpath_var"; then
case "$perm_rpath " in
*" $libdir "*) ;;
*) perm_rpath="$perm_rpath $libdir" ;;
esac
fi
# On Windows-like hosts, also collect libdirs for the DLL search path.
case $host in
*-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2*)
case :$dllsearchpath: in
*":$libdir:"*) ;;
*) dllsearchpath="$dllsearchpath:$libdir";;
esac
;;
esac
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir="$hardcode_libdirs"
eval rpath=\" $hardcode_libdir_flag_spec\"
fi
compile_rpath="$rpath"
# Repeat for the finalize (post-install) rpath only.
rpath=
hardcode_libdirs=
for libdir in $finalize_rpath; do
if test -n "$hardcode_libdir_flag_spec"; then
if test -n "$hardcode_libdir_separator"; then
if test -z "$hardcode_libdirs"; then
hardcode_libdirs="$libdir"
else
# Just accumulate the unique libdirs.
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in
*"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*)
;;
*)
hardcode_libdirs="$hardcode_libdirs$hardcode_libdir_separator$libdir"
;;
esac
fi
else
eval flag=\"$hardcode_libdir_flag_spec\"
rpath="$rpath $flag"
fi
elif test -n "$runpath_var"; then
case "$finalize_perm_rpath " in
*" $libdir "*) ;;
*) finalize_perm_rpath="$finalize_perm_rpath $libdir" ;;
esac
fi
done
# Substitute the hardcoded libdirs into the rpath.
if test -n "$hardcode_libdir_separator" &&
test -n "$hardcode_libdirs"; then
libdir="$hardcode_libdirs"
eval rpath=\" $hardcode_libdir_flag_spec\"
fi
finalize_rpath="$rpath"
if test -n "$libobjs" && test "$build_old_libs" = yes; then
# Transform all the library objects into standard objects.
compile_command=`$echo "X$compile_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
finalize_command=`$echo "X$finalize_command" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
fi
# dlsym emulation: when files are dlpreopened, generate a C source file
# (${outputname}S.c) containing an lt_preloaded_symbols table built from
# nm output, compile it, and splice the object in at @SYMFILE@.
dlsyms=
if test -n "$dlfiles$dlprefiles" || test "$dlself" != no; then
if test -n "$NM" && test -n "$global_symbol_pipe"; then
dlsyms="${outputname}S.c"
else
$echo "$modename: not configured to extract global symbols from dlpreopened files" 1>&2
fi
fi
if test -n "$dlsyms"; then
case $dlsyms in
"") ;;
*.c)
# Discover the nlist of each of the dlfiles.
nlist="$output_objdir/${outputname}.nm"
$show "$rm $nlist ${nlist}S ${nlist}T"
$run $rm "$nlist" "${nlist}S" "${nlist}T"
# Parse the name list into a source file.
$show "creating $output_objdir/$dlsyms"
test -z "$run" && $echo > "$output_objdir/$dlsyms" "\
/* $dlsyms - symbol resolution table for \`$outputname' dlsym emulation. */
/* Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP */
#ifdef __cplusplus
extern \"C\" {
#endif
/* Prevent the only kind of declaration conflicts we can make. */
#define lt_preloaded_symbols some_other_symbol
/* External symbol declarations for the compiler. */\
"
if test "$dlself" = yes; then
$show "generating symbol list for \`$output'"
test -z "$run" && $echo ': @PROGRAM@ ' > "$nlist"
# Add our own program objects to the symbol list.
progfiles=`$echo "X$objs$old_deplibs" | $SP2NL | $Xsed -e "$lo2o" | $NL2SP`
for arg in $progfiles; do
$show "extracting global C symbols from \`$arg'"
$run eval "$NM $arg | $global_symbol_pipe >> '$nlist'"
done
if test -n "$exclude_expsyms"; then
$run eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T'
$run eval '$mv "$nlist"T "$nlist"'
fi
if test -n "$export_symbols_regex"; then
$run eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T'
$run eval '$mv "$nlist"T "$nlist"'
fi
# Prepare the list of exported symbols
if test -z "$export_symbols"; then
export_symbols="$output_objdir/$outputname.exp"
$run $rm $export_symbols
$run eval "${SED} -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"'
else
$run eval "${SED} -e 's/\([ ][.*^$]\)/\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"'
$run eval 'grep -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T'
$run eval 'mv "$nlist"T "$nlist"'
fi
fi
# Collect symbols from every dlpreopened file, tagging each group
# with a ": $name " marker line in the nlist.
for arg in $dlprefiles; do
$show "extracting global C symbols from \`$arg'"
name=`$echo "$arg" | ${SED} -e 's%^.*/%%'`
$run eval '$echo ": $name " >> "$nlist"'
$run eval "$NM $arg | $global_symbol_pipe >> '$nlist'"
done
if test -z "$run"; then
# Make sure we have at least an empty file.
test -f "$nlist" || : > "$nlist"
if test -n "$exclude_expsyms"; then
$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T
$mv "$nlist"T "$nlist"
fi
# Try sorting and uniquifying the output.
if grep -v "^: " < "$nlist" |
if sort -k 3 </dev/null >/dev/null 2>&1; then
sort -k 3
else
sort +2
fi |
uniq > "$nlist"S; then
:
else
grep -v "^: " < "$nlist" > "$nlist"S
fi
if test -f "$nlist"S; then
eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$dlsyms"'
else
$echo '/* NONE */' >> "$output_objdir/$dlsyms"
fi
$echo >> "$output_objdir/$dlsyms" "\
#undef lt_preloaded_symbols
#if defined (__STDC__) && __STDC__
# define lt_ptr void *
#else
# define lt_ptr char *
# define const
#endif
/* The mapping between symbol names and symbols. */
"
case $host in
*cygwin* | *mingw* )
$echo >> "$output_objdir/$dlsyms" "\
/* DATA imports from DLLs on WIN32 can't be const, because
runtime relocations are performed -- see ld's documentation
on pseudo-relocs */
struct {
"
;;
* )
$echo >> "$output_objdir/$dlsyms" "\
const struct {
"
;;
esac
$echo >> "$output_objdir/$dlsyms" "\
const char *name;
lt_ptr address;
}
lt_preloaded_symbols[] =
{\
"
eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$dlsyms"
$echo >> "$output_objdir/$dlsyms" "\
{0, (lt_ptr) 0}
};
/* This works around a problem in FreeBSD linker */
#ifdef FREEBSD_WORKAROUND
static const void *lt_preloaded_setup() {
return lt_preloaded_symbols;
}
#endif
#ifdef __cplusplus
}
#endif\
"
fi
pic_flag_for_symtable=
case $host in
# compiling the symbol table file with pic_flag works around
# a FreeBSD bug that causes programs to crash when -lm is
# linked before any other PIC object. But we must not use
# pic_flag when linking with -static. The problem exists in
# FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
*-*-freebsd2*|*-*-freebsd3.0*|*-*-freebsdelf3.0*)
case "$compile_command " in
*" -static "*) ;;
*) pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND";;
esac;;
*-*-hpux*)
case "$compile_command " in
*" -static "*) ;;
*) pic_flag_for_symtable=" $pic_flag";;
esac
esac
# Now compile the dynamic symbol file.
$show "(cd $output_objdir && $LTCC -c$no_builtin_flag$pic_flag_for_symtable \"$dlsyms\")"
$run eval '(cd $output_objdir && $LTCC -c$no_builtin_flag$pic_flag_for_symtable "$dlsyms")' || exit $?
# Clean up the generated files.
$show "$rm $output_objdir/$dlsyms $nlist ${nlist}S ${nlist}T"
$run $rm "$output_objdir/$dlsyms" "$nlist" "${nlist}S" "${nlist}T"
# Transform the symbol file into the correct name.
compile_command=`$echo "X$compile_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%"`
finalize_command=`$echo "X$finalize_command" | $Xsed -e "s%@SYMFILE@%$output_objdir/${outputname}S.${objext}%"`
;;
*)
$echo "$modename: unknown suffix for \`$dlsyms'" 1>&2
exit $EXIT_FAILURE
;;
esac
else
# We keep going just in case the user didn't refer to
# lt_preloaded_symbols. The linker will fail if global_symbol_pipe
# really was required.
# Nullify the symbol file.
compile_command=`$echo "X$compile_command" | $Xsed -e "s% @SYMFILE@%%"`
finalize_command=`$echo "X$finalize_command" | $Xsed -e "s% @SYMFILE@%%"`
fi
if test "$need_relink" = no || test "$build_libtool_libs" != yes; then
# Replace the output file specification.
compile_command=`$echo "X$compile_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'`
link_command="$compile_command$compile_rpath"
# We have no uninstalled library dependencies, so finalize right now.
$show "$link_command"
$run eval "$link_command"
status=$?
# Delete the generated files.
if test -n "$dlsyms"; then
$show "$rm $output_objdir/${outputname}S.${objext}"
$run $rm "$output_objdir/${outputname}S.${objext}"
fi
exit $status
fi
if test -n "$shlibpath_var"; then
# We should set the shlibpath_var
# Rebuild temp_rpath into a single colon-terminated search-path string
# suitable for the runtime library path variable (e.g. LD_LIBRARY_PATH).
rpath=
for dir in $temp_rpath; do
case $dir in
[\\/]* | [A-Za-z]:[\\/]*)
# Absolute path (POSIX "/..." or Windows drive-letter form): use as-is.
rpath="$rpath$dir:"
;;
*)
# Relative path: add a thisdir entry.
# The \$ keeps $thisdir unexpanded here; the generated wrapper script
# resolves it later, relative to the wrapper's own location.
rpath="$rpath\$thisdir/$dir:"
;;
esac
done
temp_rpath="$rpath"
fi
if test -n "$compile_shlibpath$finalize_shlibpath"; then
compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command"
fi
if test -n "$finalize_shlibpath"; then
finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command"
fi
# Build the environment-variable prefixes that seed the runtime search
# path ($runpath_var, e.g. LD_RUN_PATH) for the compile-time and the
# finalize (post-install) link commands respectively.
compile_var=
finalize_var=
if test -n "$runpath_var"; then
if test -n "$perm_rpath"; then
# We should set the runpath_var.
# Join the permanent rpath directories with colons, keeping any
# pre-existing value of $runpath_var appended at the end.
rpath=
for dir in $perm_rpath; do
rpath="$rpath$dir:"
done
compile_var="$runpath_var=\"$rpath\$$runpath_var\" "
fi
if test -n "$finalize_perm_rpath"; then
# We should set the runpath_var.
# Same construction, but with the install-time (finalized) directories.
rpath=
for dir in $finalize_perm_rpath; do
rpath="$rpath$dir:"
done
finalize_var="$runpath_var=\"$rpath\$$runpath_var\" "
fi
fi
if test "$no_install" = yes; then
# We don't need to create a wrapper script.
link_command="$compile_var$compile_command$compile_rpath"
# Replace the output file specification.
link_command=`$echo "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output"'%g'`
# Delete the old output file.
$run $rm $output
# Link the executable and exit
$show "$link_command"
$run eval "$link_command" || exit $?
exit $EXIT_SUCCESS
fi
if test "$hardcode_action" = relink; then
# Fast installation is not supported
link_command="$compile_var$compile_command$compile_rpath"
relink_command="$finalize_var$finalize_command$finalize_rpath"
$echo "$modename: warning: this platform does not like uninstalled shared libraries" 1>&2
$echo "$modename: \`$output' will be relinked during installation" 1>&2
else
if test "$fast_install" != no; then
link_command="$finalize_var$compile_command$finalize_rpath"
if test "$fast_install" = yes; then
relink_command=`$echo "X$compile_var$compile_command$compile_rpath" | $Xsed -e 's%@OUTPUT@%\$progdir/\$file%g'`
else
# fast_install is set to needless
relink_command=
fi
else
link_command="$compile_var$compile_command$compile_rpath"
relink_command="$finalize_var$finalize_command$finalize_rpath"
fi
fi
# Replace the output file specification.
link_command=`$echo "X$link_command" | $Xsed -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'`
# Delete the old output files.
$run $rm $output $output_objdir/$outputname $output_objdir/lt-$outputname
$show "$link_command"
$run eval "$link_command" || exit $?
# Now create the wrapper script.
$show "creating $output"
# Quote the relink command for shipping.
if test -n "$relink_command"; then
# Preserve any variables that may affect compiler behavior
# For each such variable, prepend a snippet to $relink_command that
# recreates its current state when the command is replayed later.
for var in $variables_saved_for_relink; do
if eval test -z \"\${$var+set}\"; then
# Variable is currently unset: make sure it is unset (or at least
# empty and exported) when the relink command runs.
relink_command="{ test -z \"\${$var+set}\" || unset $var || { $var=; export $var; }; }; $relink_command"
elif eval var_value=\$$var; test -z "$var_value"; then
# Variable is set but empty: export it empty.
relink_command="$var=; export $var; $relink_command"
else
# Variable has a value: quote it for safe re-evaluation, then export.
var_value=`$echo "X$var_value" | $Xsed -e "$sed_quote_subst"`
relink_command="$var=\"$var_value\"; export $var; $relink_command"
fi
done
# Anchor the replay in the current directory, then quote the whole
# command once more so it can be embedded inside a double-quoted string.
relink_command="(cd `pwd`; $relink_command)"
relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
fi
# Quote $echo for shipping.
# The wrapper script embeds $echo; if we are using the fallback-echo
# re-exec trick, rebuild it around an absolute path to this script so the
# shipped wrapper still works from any directory.
if test "X$echo" = "X$SHELL $progpath --fallback-echo"; then
case $progpath in
[\\/]* | [A-Za-z]:[\\/]*) qecho="$SHELL $progpath --fallback-echo";;
*) qecho="$SHELL `pwd`/$progpath --fallback-echo";;
esac
qecho=`$echo "X$qecho" | $Xsed -e "$sed_quote_subst"`
else
qecho=`$echo "X$echo" | $Xsed -e "$sed_quote_subst"`
fi
# Only actually do things if our run command is non-null.
if test -z "$run"; then
# win32 will think the script is a binary if it has
# a .exe suffix, so we strip it off here.
case $output in
*.exe) output=`$echo $output|${SED} 's,.exe$,,'` ;;
esac
# test for cygwin because mv fails w/o .exe extensions
case $host in
*cygwin*)
exeext=.exe
outputname=`$echo $outputname|${SED} 's,.exe$,,'` ;;
*) exeext= ;;
esac
case $host in
*cygwin* | *mingw* )
cwrappersource=`$echo ${objdir}/lt-${outputname}.c`
cwrapper=`$echo ${output}.exe`
$rm $cwrappersource $cwrapper
trap "$rm $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15
cat > $cwrappersource <<EOF
/* $cwrappersource - temporary wrapper executable for $objdir/$outputname
Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
The $output program cannot be directly executed until all the libtool
libraries that it depends on are installed.
This wrapper executable should never be moved out of the build directory.
If it is, it will not operate correctly.
Currently, it simply execs the wrapper *script* "/bin/sh $output",
but could eventually absorb all of the scripts functionality and
exec $objdir/$outputname directly.
*/
EOF
cat >> $cwrappersource<<"EOF"
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <malloc.h>
#include <stdarg.h>
#include <assert.h>
#if defined(PATH_MAX)
# define LT_PATHMAX PATH_MAX
#elif defined(MAXPATHLEN)
# define LT_PATHMAX MAXPATHLEN
#else
# define LT_PATHMAX 1024
#endif
#ifndef DIR_SEPARATOR
#define DIR_SEPARATOR '/'
#endif
#if defined (_WIN32) || defined (__MSDOS__) || defined (__DJGPP__) || \
defined (__OS2__)
#define HAVE_DOS_BASED_FILE_SYSTEM
#ifndef DIR_SEPARATOR_2
#define DIR_SEPARATOR_2 '\\'
#endif
#endif
#ifndef DIR_SEPARATOR_2
# define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR)
#else /* DIR_SEPARATOR_2 */
# define IS_DIR_SEPARATOR(ch) \
(((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2))
#endif /* DIR_SEPARATOR_2 */
#define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type)))
#define XFREE(stale) do { \
if (stale) { free ((void *) stale); stale = 0; } \
} while (0)
const char *program_name = NULL;
void * xmalloc (size_t num);
char * xstrdup (const char *string);
char * basename (const char *name);
char * fnqualify(const char *path);
char * strendzap(char *str, const char *pat);
void lt_fatal (const char *message, ...);
int
main (int argc, char *argv[])
{
char **newargz;
int i;
program_name = (char *) xstrdup ((char *) basename (argv[0]));
newargz = XMALLOC(char *, argc+2);
EOF
cat >> $cwrappersource <<EOF
newargz[0] = "$SHELL";
EOF
cat >> $cwrappersource <<"EOF"
newargz[1] = fnqualify(argv[0]);
/* we know the script has the same name, without the .exe */
/* so make sure newargz[1] doesn't end in .exe */
strendzap(newargz[1],".exe");
for (i = 1; i < argc; i++)
newargz[i+1] = xstrdup(argv[i]);
newargz[argc+1] = NULL;
EOF
cat >> $cwrappersource <<EOF
execv("$SHELL",newargz);
EOF
cat >> $cwrappersource <<"EOF"
return 127;
}
void *
xmalloc (size_t num)
{
void * p = (void *) malloc (num);
if (!p)
lt_fatal ("Memory exhausted");
return p;
}
char *
xstrdup (const char *string)
{
return string ? strcpy ((char *) xmalloc (strlen (string) + 1), string) : NULL
;
}
char *
basename (const char *name)
{
const char *base;
#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
/* Skip over the disk name in MSDOS pathnames. */
if (isalpha (name[0]) && name[1] == ':')
name += 2;
#endif
for (base = name; *name; name++)
if (IS_DIR_SEPARATOR (*name))
base = name + 1;
return (char *) base;
}
char *
fnqualify(const char *path)
{
size_t size;
char *p;
char tmp[LT_PATHMAX + 1];
assert(path != NULL);
/* Is it qualified already? */
#if defined (HAVE_DOS_BASED_FILE_SYSTEM)
if (isalpha (path[0]) && path[1] == ':')
return xstrdup (path);
#endif
if (IS_DIR_SEPARATOR (path[0]))
return xstrdup (path);
/* prepend the current directory */
/* doesn't handle '~' */
if (getcwd (tmp, LT_PATHMAX) == NULL)
lt_fatal ("getcwd failed");
size = strlen(tmp) + 1 + strlen(path) + 1; /* +2 for '/' and '\0' */
p = XMALLOC(char, size);
sprintf(p, "%s%c%s", tmp, DIR_SEPARATOR, path);
return p;
}
char *
strendzap(char *str, const char *pat)
{
size_t len, patlen;
assert(str != NULL);
assert(pat != NULL);
len = strlen(str);
patlen = strlen(pat);
if (patlen <= len)
{
str += len - patlen;
if (strcmp(str, pat) == 0)
*str = '\0';
}
return str;
}
static void
lt_error_core (int exit_status, const char * mode,
const char * message, va_list ap)
{
fprintf (stderr, "%s: %s: ", program_name, mode);
vfprintf (stderr, message, ap);
fprintf (stderr, ".\n");
if (exit_status >= 0)
exit (exit_status);
}
void
lt_fatal (const char *message, ...)
{
va_list ap;
va_start (ap, message);
lt_error_core (EXIT_FAILURE, "FATAL", message, ap);
va_end (ap);
}
EOF
# we should really use a build-platform specific compiler
# here, but OTOH, the wrappers (shell script and this C one)
# are only useful if you want to execute the "real" binary.
# Since the "real" binary is built for $host, then this
# wrapper might as well be built for $host, too.
$run $LTCC -s -o $cwrapper $cwrappersource
;;
esac
$rm $output
trap "$rm $output; exit $EXIT_FAILURE" 1 2 15
$echo > $output "\
#! $SHELL
# $output - temporary wrapper script for $objdir/$outputname
# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
#
# The $output program cannot be directly executed until all the libtool
# libraries that it depends on are installed.
#
# This wrapper script should never be moved out of the build directory.
# If it is, it will not operate correctly.
# Sed substitution that helps us do robust quoting. It backslashifies
# metacharacters that are still active within double-quoted strings.
Xsed='${SED} -e 1s/^X//'
sed_quote_subst='$sed_quote_subst'
# The HP-UX ksh and POSIX shell print the target directory to stdout
# if CDPATH is set.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH
relink_command=\"$relink_command\"
# This environment variable determines our operation mode.
if test \"\$libtool_install_magic\" = \"$magic\"; then
# install mode needs the following variable:
notinst_deplibs='$notinst_deplibs'
else
# When we are sourced in execute mode, \$file and \$echo are already set.
if test \"\$libtool_execute_magic\" != \"$magic\"; then
echo=\"$qecho\"
file=\"\$0\"
# Make sure echo works.
if test \"X\$1\" = X--no-reexec; then
# Discard the --no-reexec flag, and continue.
shift
elif test \"X\`(\$echo '\t') 2>/dev/null\`\" = 'X\t'; then
# Yippee, \$echo works!
:
else
# Restart under the correct shell, and then maybe \$echo will work.
exec $SHELL \"\$0\" --no-reexec \${1+\"\$@\"}
fi
fi\
"
$echo >> $output "\
# Find the directory that this script lives in.
thisdir=\`\$echo \"X\$file\" | \$Xsed -e 's%/[^/]*$%%'\`
test \"x\$thisdir\" = \"x\$file\" && thisdir=.
# Follow symbolic links until we get to the real thisdir.
file=\`ls -ld \"\$file\" | ${SED} -n 's/.*-> //p'\`
while test -n \"\$file\"; do
destdir=\`\$echo \"X\$file\" | \$Xsed -e 's%/[^/]*\$%%'\`
# If there was a directory component, then change thisdir.
if test \"x\$destdir\" != \"x\$file\"; then
case \"\$destdir\" in
[\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;;
*) thisdir=\"\$thisdir/\$destdir\" ;;
esac
fi
file=\`\$echo \"X\$file\" | \$Xsed -e 's%^.*/%%'\`
file=\`ls -ld \"\$thisdir/\$file\" | ${SED} -n 's/.*-> //p'\`
done
# Try to get the absolute directory name.
absdir=\`cd \"\$thisdir\" && pwd\`
test -n \"\$absdir\" && thisdir=\"\$absdir\"
"
if test "$fast_install" = yes; then
$echo >> $output "\
program=lt-'$outputname'$exeext
progdir=\"\$thisdir/$objdir\"
if test ! -f \"\$progdir/\$program\" || \\
{ file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | ${SED} 1q\`; \\
test \"X\$file\" != \"X\$progdir/\$program\"; }; then
file=\"\$\$-\$program\"
if test ! -d \"\$progdir\"; then
$mkdir \"\$progdir\"
else
$rm \"\$progdir/\$file\"
fi"
$echo >> $output "\
# relink executable if necessary
if test -n \"\$relink_command\"; then
if relink_command_output=\`eval \$relink_command 2>&1\`; then :
else
$echo \"\$relink_command_output\" >&2
$rm \"\$progdir/\$file\"
exit $EXIT_FAILURE
fi
fi
$mv \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null ||
{ $rm \"\$progdir/\$program\";
$mv \"\$progdir/\$file\" \"\$progdir/\$program\"; }
$rm \"\$progdir/\$file\"
fi"
else
$echo >> $output "\
program='$outputname'
progdir=\"\$thisdir/$objdir\"
"
fi
$echo >> $output "\
if test -f \"\$progdir/\$program\"; then"
# Export our shlibpath_var if we have one.
if test "$shlibpath_overrides_runpath" = yes && test -n "$shlibpath_var" && test -n "$temp_rpath"; then
$echo >> $output "\
# Add our own library path to $shlibpath_var
$shlibpath_var=\"$temp_rpath\$$shlibpath_var\"
# Some systems cannot cope with colon-terminated $shlibpath_var
# The second colon is a workaround for a bug in BeOS R4 sed
$shlibpath_var=\`\$echo \"X\$$shlibpath_var\" | \$Xsed -e 's/::*\$//'\`
export $shlibpath_var
"
fi
# fixup the dll searchpath if we need to.
if test -n "$dllsearchpath"; then
$echo >> $output "\
# Add the dll search path components to the executable PATH
PATH=$dllsearchpath:\$PATH
"
fi
$echo >> $output "\
if test \"\$libtool_execute_magic\" != \"$magic\"; then
# Run the actual program with our arguments.
"
case $host in
# Backslashes separate directories on plain windows
*-*-mingw | *-*-os2*)
$echo >> $output "\
exec \"\$progdir\\\\\$program\" \${1+\"\$@\"}
"
;;
*)
$echo >> $output "\
exec \"\$progdir/\$program\" \${1+\"\$@\"}
"
;;
esac
$echo >> $output "\
\$echo \"\$0: cannot exec \$program \${1+\"\$@\"}\"
exit $EXIT_FAILURE
fi
else
# The program doesn't exist.
\$echo \"\$0: error: \\\`\$progdir/\$program' does not exist\" 1>&2
\$echo \"This script is just a wrapper for \$program.\" 1>&2
$echo \"See the $PACKAGE documentation for more information.\" 1>&2
exit $EXIT_FAILURE
fi
fi\
"
chmod +x $output
fi
exit $EXIT_SUCCESS
;;
esac
# See if we need to build an old-fashioned archive.
for oldlib in $oldlibs; do
if test "$build_libtool_libs" = convenience; then
oldobjs="$libobjs_save"
addlibs="$convenience"
build_libtool_libs=no
else
if test "$build_libtool_libs" = module; then
oldobjs="$libobjs_save"
build_libtool_libs=no
else
oldobjs="$old_deplibs $non_pic_objects"
fi
addlibs="$old_convenience"
fi
if test -n "$addlibs"; then
gentop="$output_objdir/${outputname}x"
generated="$generated $gentop"
func_extract_archives $gentop $addlibs
oldobjs="$oldobjs $func_extract_archives_result"
fi
# Do each command in the archive commands.
if test -n "$old_archive_from_new_cmds" && test "$build_libtool_libs" = yes; then
cmds=$old_archive_from_new_cmds
else
# POSIX demands no paths to be encoded in archives. We have
# to avoid creating archives with duplicate basenames if we
# might have to extract them afterwards, e.g., when creating a
# static archive out of a convenience library, or when linking
# the entirety of a libtool archive into another (currently
# not supported by libtool).
if (for obj in $oldobjs
do
$echo "X$obj" | $Xsed -e 's%^.*/%%'
done | sort | sort -uc >/dev/null 2>&1); then
:
else
$echo "copying selected object files to avoid basename conflicts..."
if test -z "$gentop"; then
gentop="$output_objdir/${outputname}x"
generated="$generated $gentop"
$show "${rm}r $gentop"
$run ${rm}r "$gentop"
$show "$mkdir $gentop"
$run $mkdir "$gentop"
status=$?
if test "$status" -ne 0 && test ! -d "$gentop"; then
exit $status
fi
fi
save_oldobjs=$oldobjs
oldobjs=
counter=1
for obj in $save_oldobjs
do
objbase=`$echo "X$obj" | $Xsed -e 's%^.*/%%'`
case " $oldobjs " in
" ") oldobjs=$obj ;;
*[\ /]"$objbase "*)
while :; do
# Make sure we don't pick an alternate name that also
# overlaps.
newobj=lt$counter-$objbase
counter=`expr $counter + 1`
case " $oldobjs " in
*[\ /]"$newobj "*) ;;
*) if test ! -f "$gentop/$newobj"; then break; fi ;;
esac
done
$show "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj"
$run ln "$obj" "$gentop/$newobj" ||
$run cp "$obj" "$gentop/$newobj"
oldobjs="$oldobjs $gentop/$newobj"
;;
*) oldobjs="$oldobjs $obj" ;;
esac
done
fi
eval cmds=\"$old_archive_cmds\"
if len=`expr "X$cmds" : ".*"` &&
test "$len" -le "$max_cmd_len" || test "$max_cmd_len" -le -1; then
cmds=$old_archive_cmds
else
# the command line is too long to link in one step, link in parts
$echo "using piecewise archive linking..."
save_RANLIB=$RANLIB
RANLIB=:
objlist=
concat_cmds=
save_oldobjs=$oldobjs
# Is there a better way of finding the last object in the list?
for obj in $save_oldobjs
do
last_oldobj=$obj
done
for obj in $save_oldobjs
do
oldobjs="$objlist $obj"
objlist="$objlist $obj"
eval test_cmds=\"$old_archive_cmds\"
if len=`expr "X$test_cmds" : ".*" 2>/dev/null` &&
test "$len" -le "$max_cmd_len"; then
:
else
# the above command should be used before it gets too long
oldobjs=$objlist
if test "$obj" = "$last_oldobj" ; then
RANLIB=$save_RANLIB
fi
test -z "$concat_cmds" || concat_cmds=$concat_cmds~
eval concat_cmds=\"\${concat_cmds}$old_archive_cmds\"
objlist=
fi
done
RANLIB=$save_RANLIB
oldobjs=$objlist
if test "X$oldobjs" = "X" ; then
eval cmds=\"\$concat_cmds\"
else
eval cmds=\"\$concat_cmds~\$old_archive_cmds\"
fi
fi
fi
# Run each archive command.  $cmds is a '~'-separated list, so IFS is
# temporarily set to '~' to split it; it is restored inside the loop so
# the eval'd command itself sees normal word splitting.
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
eval cmd=\"$cmd\"
IFS="$save_ifs"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
done
if test -n "$generated"; then
$show "${rm}r$generated"
$run ${rm}r$generated
fi
# Now create the libtool archive.
case $output in
*.la)
old_library=
test "$build_old_libs" = yes && old_library="$libname.$libext"
$show "creating $output"
# Preserve any variables that may affect compiler behavior
for var in $variables_saved_for_relink; do
if eval test -z \"\${$var+set}\"; then
relink_command="{ test -z \"\${$var+set}\" || unset $var || { $var=; export $var; }; }; $relink_command"
elif eval var_value=\$$var; test -z "$var_value"; then
relink_command="$var=; export $var; $relink_command"
else
var_value=`$echo "X$var_value" | $Xsed -e "$sed_quote_subst"`
relink_command="$var=\"$var_value\"; export $var; $relink_command"
fi
done
# Quote the link command for shipping.
relink_command="(cd `pwd`; $SHELL $progpath $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)"
relink_command=`$echo "X$relink_command" | $Xsed -e "$sed_quote_subst"`
if test "$hardcode_automatic" = yes ; then
relink_command=
fi
# Only create the output if not a dry run.
if test -z "$run"; then
for installed in no yes; do
if test "$installed" = yes; then
if test -z "$install_libdir"; then
break
fi
output="$output_objdir/$outputname"i
# Replace all uninstalled libtool libraries with the installed ones
newdependency_libs=
for deplib in $dependency_libs; do
case $deplib in
*.la)
name=`$echo "X$deplib" | $Xsed -e 's%^.*/%%'`
eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $deplib`
if test -z "$libdir"; then
$echo "$modename: \`$deplib' is not a valid libtool archive" 1>&2
exit $EXIT_FAILURE
fi
newdependency_libs="$newdependency_libs $libdir/$name"
;;
*) newdependency_libs="$newdependency_libs $deplib" ;;
esac
done
dependency_libs="$newdependency_libs"
newdlfiles=
for lib in $dlfiles; do
name=`$echo "X$lib" | $Xsed -e 's%^.*/%%'`
eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
if test -z "$libdir"; then
$echo "$modename: \`$lib' is not a valid libtool archive" 1>&2
exit $EXIT_FAILURE
fi
newdlfiles="$newdlfiles $libdir/$name"
done
dlfiles="$newdlfiles"
newdlprefiles=
for lib in $dlprefiles; do
name=`$echo "X$lib" | $Xsed -e 's%^.*/%%'`
eval libdir=`${SED} -n -e 's/^libdir=\(.*\)$/\1/p' $lib`
if test -z "$libdir"; then
$echo "$modename: \`$lib' is not a valid libtool archive" 1>&2
exit $EXIT_FAILURE
fi
newdlprefiles="$newdlprefiles $libdir/$name"
done
dlprefiles="$newdlprefiles"
else
newdlfiles=
for lib in $dlfiles; do
case $lib in
[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
*) abs=`pwd`"/$lib" ;;
esac
newdlfiles="$newdlfiles $abs"
done
dlfiles="$newdlfiles"
newdlprefiles=
for lib in $dlprefiles; do
case $lib in
[\\/]* | [A-Za-z]:[\\/]*) abs="$lib" ;;
*) abs=`pwd`"/$lib" ;;
esac
newdlprefiles="$newdlprefiles $abs"
done
dlprefiles="$newdlprefiles"
fi
$rm $output
# place dlname in correct position for cygwin
tdlname=$dlname
case $host,$output,$installed,$module,$dlname in
*cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll) tdlname=../bin/$dlname ;;
esac
$echo > $output "\
# $outputname - a libtool library file
# Generated by $PROGRAM - GNU $PACKAGE $VERSION$TIMESTAMP
#
# Please DO NOT delete this file!
# It is necessary for linking the library.
# The name that we can dlopen(3).
dlname='$tdlname'
# Names of this library.
library_names='$library_names'
# The name of the static archive.
old_library='$old_library'
# Libraries that this one depends upon.
dependency_libs='$dependency_libs'
# Version information for $libname.
current=$current
age=$age
revision=$revision
# Is this an already installed library?
installed=$installed
# Should we warn about portability when linking against -modules?
shouldnotlink=$module
# Files to dlopen/dlpreopen
dlopen='$dlfiles'
dlpreopen='$dlprefiles'
# Directory that this library needs to be installed in:
libdir='$install_libdir'"
if test "$installed" = no && test "$need_relink" = yes; then
$echo >> $output "\
relink_command=\"$relink_command\""
fi
done
fi
# Do a symbolic link so that the libtool archive can be found in
# LD_LIBRARY_PATH before the program is installed.
$show "(cd $output_objdir && $rm $outputname && $LN_S ../$outputname $outputname)"
$run eval '(cd $output_objdir && $rm $outputname && $LN_S ../$outputname $outputname)' || exit $?
;;
esac
exit $EXIT_SUCCESS
;;
# libtool install mode
install)
modename="$modename: install"
# There may be an optional sh(1) argument at the beginning of
# install_prog (especially on Windows NT).
if test "$nonopt" = "$SHELL" || test "$nonopt" = /bin/sh ||
# Allow the use of GNU shtool's install command.
$echo "X$nonopt" | grep shtool > /dev/null; then
# Aesthetically quote it.
arg=`$echo "X$nonopt" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
install_prog="$arg "
arg="$1"
shift
else
install_prog=
arg=$nonopt
fi
# The real first argument should be the name of the installation program.
# Aesthetically quote it.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
install_prog="$install_prog$arg"
# We need to accept at least all the BSD install flags.
dest=
files=
opts=
prev=
install_type=
isdir=no
stripme=
for arg
do
if test -n "$dest"; then
files="$files $dest"
dest=$arg
continue
fi
case $arg in
-d) isdir=yes ;;
-f)
case " $install_prog " in
*[\\\ /]cp\ *) ;;
*) prev=$arg ;;
esac
;;
-g | -m | -o) prev=$arg ;;
-s)
stripme=" -s"
continue
;;
-*)
;;
*)
# If the previous option needed an argument, then skip it.
if test -n "$prev"; then
prev=
else
dest=$arg
continue
fi
;;
esac
# Aesthetically quote the argument.
arg=`$echo "X$arg" | $Xsed -e "$sed_quote_subst"`
case $arg in
*[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \ ]*|*]*|"")
arg="\"$arg\""
;;
esac
install_prog="$install_prog $arg"
done
if test -z "$install_prog"; then
$echo "$modename: you must specify an install program" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
if test -n "$prev"; then
$echo "$modename: the \`$prev' option requires an argument" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
if test -z "$files"; then
if test -z "$dest"; then
$echo "$modename: no file or destination specified" 1>&2
else
$echo "$modename: you must specify a destination" 1>&2
fi
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
# Strip any trailing slash from the destination.
dest=`$echo "X$dest" | $Xsed -e 's%/$%%'`
# Check to see that the destination is a directory.
test -d "$dest" && isdir=yes
if test "$isdir" = yes; then
# Installing into a directory: keep files' own basenames.
destdir="$dest"
destname=
else
# Installing to a single target file: split into directory + basename.
destdir=`$echo "X$dest" | $Xsed -e 's%/[^/]*$%%'`
test "X$destdir" = "X$dest" && destdir=.
destname=`$echo "X$dest" | $Xsed -e 's%^.*/%%'`
# Not a directory, so check to see that there is only one file specified.
# ("set dummy" shifts positions, hence the -gt 2 test for >1 file.)
set dummy $files
if test "$#" -gt 2; then
$echo "$modename: \`$dest' is not a directory" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
fi
# Require an absolute destination directory unless every file being
# installed is a libtool object (*.lo), which may go to a relative path.
case $destdir in
[\\/]* | [A-Za-z]:[\\/]*) ;;
*)
for file in $files; do
case $file in
*.lo) ;;
*)
$echo "$modename: \`$destdir' must be an absolute directory name" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
;;
esac
done
;;
esac
# This variable tells wrapper scripts just to set variables rather
# than running their programs.
libtool_install_magic="$magic"
staticlibs=
future_libdirs=
current_libdirs=
for file in $files; do
# Do each installation.
case $file in
*.$libext)
# Do the static libraries later.
staticlibs="$staticlibs $file"
;;
*.la)
# Check to see that this really is a libtool archive.
if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then :
else
$echo "$modename: \`$file' is not a valid libtool archive" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
library_names=
old_library=
relink_command=
# If there is no directory component, then add one.
case $file in
*/* | *\\*) . $file ;;
*) . ./$file ;;
esac
# Add the libdir to current_libdirs if it is the destination.
if test "X$destdir" = "X$libdir"; then
case "$current_libdirs " in
*" $libdir "*) ;;
*) current_libdirs="$current_libdirs $libdir" ;;
esac
else
# Note the libdir as a future libdir.
case "$future_libdirs " in
*" $libdir "*) ;;
*) future_libdirs="$future_libdirs $libdir" ;;
esac
fi
dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`/
test "X$dir" = "X$file/" && dir=
dir="$dir$objdir"
if test -n "$relink_command"; then
# Determine the prefix the user has applied to our future dir.
inst_prefix_dir=`$echo "$destdir" | $SED "s%$libdir\$%%"`
# Don't allow the user to place us outside of our expected
# location b/c this prevents finding dependent libraries that
# are installed to the same prefix.
# At present, this check doesn't affect windows .dll's that
# are installed into $libdir/../bin (currently, that works fine)
# but it's something to keep an eye on.
if test "$inst_prefix_dir" = "$destdir"; then
$echo "$modename: error: cannot install \`$file' to a directory not ending in $libdir" 1>&2
exit $EXIT_FAILURE
fi
if test -n "$inst_prefix_dir"; then
# Stick the inst_prefix_dir data into the link command.
relink_command=`$echo "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"`
else
relink_command=`$echo "$relink_command" | $SED "s%@inst_prefix_dir@%%"`
fi
$echo "$modename: warning: relinking \`$file'" 1>&2
$show "$relink_command"
if $run eval "$relink_command"; then :
else
$echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2
exit $EXIT_FAILURE
fi
fi
# See the names of the shared library.
set dummy $library_names
if test -n "$2"; then
realname="$2"
shift
shift
srcname="$realname"
test -n "$relink_command" && srcname="$realname"T
# Install the shared library and build the symlinks.
$show "$install_prog $dir/$srcname $destdir/$realname"
$run eval "$install_prog $dir/$srcname $destdir/$realname" || exit $?
if test -n "$stripme" && test -n "$striplib"; then
$show "$striplib $destdir/$realname"
$run eval "$striplib $destdir/$realname" || exit $?
fi
if test "$#" -gt 0; then
# Delete the old symlinks, and create new ones.
# Try `ln -sf' first, because the `ln' binary might depend on
# the symlink we replace! Solaris /bin/ln does not understand -f,
# so we also need to try rm && ln -s.
for linkname
do
if test "$linkname" != "$realname"; then
$show "(cd $destdir && { $LN_S -f $realname $linkname || { $rm $linkname && $LN_S $realname $linkname; }; })"
$run eval "(cd $destdir && { $LN_S -f $realname $linkname || { $rm $linkname && $LN_S $realname $linkname; }; })"
fi
done
fi
# Do each command in the postinstall commands.
lib="$destdir/$realname"
cmds=$postinstall_cmds
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd" || {
lt_exit=$?
# Restore the uninstalled library and exit
if test "$mode" = relink; then
$run eval '(cd $output_objdir && $rm ${realname}T && $mv ${realname}U $realname)'
fi
exit $lt_exit
}
done
IFS="$save_ifs"
fi
# Install the pseudo-library for information purposes.
name=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
instname="$dir/$name"i
$show "$install_prog $instname $destdir/$name"
$run eval "$install_prog $instname $destdir/$name" || exit $?
# Maybe install the static library, too.
test -n "$old_library" && staticlibs="$staticlibs $dir/$old_library"
;;
*.lo)
# Install (i.e. copy) a libtool object.
# Figure out destination file name, if it wasn't already specified.
if test -n "$destname"; then
destfile="$destdir/$destname"
else
destfile=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
destfile="$destdir/$destfile"
fi
# Deduce the name of the destination old-style object file.
# $lo2o is a sed expression mapping foo.lo -> foo.$objext.
case $destfile in
*.lo)
# Destination named as a .lo: derive the matching old-style object name.
staticdest=`$echo "X$destfile" | $Xsed -e "$lo2o"`
;;
*.$objext)
# Destination already names an old-style object: install only that,
# and clear $destfile so no libtool object copy happens below.
staticdest="$destfile"
destfile=
;;
*)
$echo "$modename: cannot copy a libtool object to \`$destfile'" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
;;
esac
# Install the libtool object if requested.
if test -n "$destfile"; then
$show "$install_prog $file $destfile"
$run eval "$install_prog $file $destfile" || exit $?
fi
# Install the old object if enabled.
if test "$build_old_libs" = yes; then
# Deduce the name of the old-style object file.
staticobj=`$echo "X$file" | $Xsed -e "$lo2o"`
$show "$install_prog $staticobj $staticdest"
$run eval "$install_prog \$staticobj \$staticdest" || exit $?
fi
exit $EXIT_SUCCESS
;;
*)
# Figure out destination file name, if it wasn't already specified.
if test -n "$destname"; then
destfile="$destdir/$destname"
else
destfile=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
destfile="$destdir/$destfile"
fi
# If the file is missing, and there is a .exe on the end, strip it
# because it is most likely a libtool script we actually want to
# install
stripped_ext=""
case $file in
*.exe)
if test ! -f "$file"; then
file=`$echo $file|${SED} 's,.exe$,,'`
stripped_ext=".exe"
fi
;;
esac
# Do a test to see if this is really a libtool program.
case $host in
*cygwin*|*mingw*)
wrapper=`$echo $file | ${SED} -e 's,.exe$,,'`
;;
*)
wrapper=$file
;;
esac
if (${SED} -e '4q' $wrapper | grep "^# Generated by .*$PACKAGE")>/dev/null 2>&1; then
notinst_deplibs=
relink_command=
# Note that it is not necessary on cygwin/mingw to append a dot to
# foo even if both foo and FILE.exe exist: automatic-append-.exe
# behavior happens only for exec(3), not for open(2)! Also, sourcing
# `FILE.' does not work on cygwin managed mounts.
#
# If there is no directory component, then add one.
case $wrapper in
*/* | *\\*) . ${wrapper} ;;
*) . ./${wrapper} ;;
esac
# Check the variables that should have been set.
if test -z "$notinst_deplibs"; then
$echo "$modename: invalid libtool wrapper script \`$wrapper'" 1>&2
exit $EXIT_FAILURE
fi
finalize=yes
for lib in $notinst_deplibs; do
# Check to see that each library is installed.
libdir=
if test -f "$lib"; then
# If there is no directory component, then add one.
case $lib in
*/* | *\\*) . $lib ;;
*) . ./$lib ;;
esac
fi
libfile="$libdir/"`$echo "X$lib" | $Xsed -e 's%^.*/%%g'` ### testsuite: skip nested quoting test
if test -n "$libdir" && test ! -f "$libfile"; then
$echo "$modename: warning: \`$lib' has not been installed in \`$libdir'" 1>&2
finalize=no
fi
done
relink_command=
# Note that it is not necessary on cygwin/mingw to append a dot to
# foo even if both foo and FILE.exe exist: automatic-append-.exe
# behavior happens only for exec(3), not for open(2)! Also, sourcing
# `FILE.' does not work on cygwin managed mounts.
#
# If there is no directory component, then add one.
case $wrapper in
*/* | *\\*) . ${wrapper} ;;
*) . ./${wrapper} ;;
esac
outputname=
if test "$fast_install" = no && test -n "$relink_command"; then
if test "$finalize" = yes && test -z "$run"; then
tmpdir="/tmp"
test -n "$TMPDIR" && tmpdir="$TMPDIR"
tmpdir="$tmpdir/libtool-$$"
save_umask=`umask`
umask 0077
if $mkdir "$tmpdir"; then
umask $save_umask
else
umask $save_umask
$echo "$modename: error: cannot create temporary directory \`$tmpdir'" 1>&2
continue
fi
file=`$echo "X$file$stripped_ext" | $Xsed -e 's%^.*/%%'`
outputname="$tmpdir/$file"
# Replace the output file specification.
relink_command=`$echo "X$relink_command" | $Xsed -e 's%@OUTPUT@%'"$outputname"'%g'`
$show "$relink_command"
if $run eval "$relink_command"; then :
else
$echo "$modename: error: relink \`$file' with the above command before installing it" 1>&2
${rm}r "$tmpdir"
continue
fi
file="$outputname"
else
$echo "$modename: warning: cannot relink \`$file'" 1>&2
fi
else
# Install the binary that we compiled earlier.
file=`$echo "X$file$stripped_ext" | $Xsed -e "s%\([^/]*\)$%$objdir/\1%"`
fi
fi
# remove .exe since cygwin /usr/bin/install will append another
# one anyway
case $install_prog,$host in
*/usr/bin/install*,*cygwin*)
case $file:$destfile in
*.exe:*.exe)
# this is ok
;;
*.exe:*)
destfile=$destfile.exe
;;
*:*.exe)
destfile=`$echo $destfile | ${SED} -e 's,.exe$,,'`
;;
esac
;;
esac
$show "$install_prog$stripme $file $destfile"
$run eval "$install_prog\$stripme \$file \$destfile" || exit $?
test -n "$outputname" && ${rm}r "$tmpdir"
;;
esac
done
for file in $staticlibs; do
name=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
# Set up the ranlib parameters.
oldlib="$destdir/$name"
$show "$install_prog $file $oldlib"
$run eval "$install_prog \$file \$oldlib" || exit $?
if test -n "$stripme" && test -n "$old_striplib"; then
$show "$old_striplib $oldlib"
$run eval "$old_striplib $oldlib" || exit $?
fi
# Do each command in the postinstall commands.
cmds=$old_postinstall_cmds
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd" || exit $?
done
IFS="$save_ifs"
done
if test -n "$future_libdirs"; then
$echo "$modename: warning: remember to run \`$progname --finish$future_libdirs'" 1>&2
fi
if test -n "$current_libdirs"; then
# Maybe just do a dry run.
test -n "$run" && current_libdirs=" -n$current_libdirs"
exec_cmd='$SHELL $progpath $preserve_args --finish$current_libdirs'
else
exit $EXIT_SUCCESS
fi
;;
# libtool finish mode
finish)
modename="$modename: finish"
# Remaining non-option arguments are the library directories to finish.
libdirs="$nonopt"
admincmds=
if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then
for dir
do
libdirs="$libdirs $dir"
done
for libdir in $libdirs; do
if test -n "$finish_cmds"; then
# Do each command in the finish commands.
cmds=$finish_cmds
# Commands are '~'-separated; IFS is restored inside the loop so each
# individual command is word-split normally when evaluated.
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
# Collect failed commands so they can be reported below as steps a
# system administrator still needs to run.
$run eval "$cmd" || admincmds="$admincmds
$cmd"
done
IFS="$save_ifs"
fi
if test -n "$finish_eval"; then
# Do the single finish_eval.
eval cmds=\"$finish_eval\"
$run eval "$cmds" || admincmds="$admincmds
$cmds"
fi
done
fi
# Exit here if they wanted silent mode.
test "$show" = : && exit $EXIT_SUCCESS
# Print a summary telling the user how to link against the freshly
# installed libraries on this platform.
$echo "----------------------------------------------------------------------"
$echo "Libraries have been installed in:"
for libdir in $libdirs; do
$echo " $libdir"
done
$echo
$echo "If you ever happen to want to link against installed libraries"
$echo "in a given directory, LIBDIR, you must either use libtool, and"
$echo "specify the full pathname of the library, or use the \`-LLIBDIR'"
$echo "flag during linking and do at least one of the following:"
if test -n "$shlibpath_var"; then
$echo " - add LIBDIR to the \`$shlibpath_var' environment variable"
$echo " during execution"
fi
if test -n "$runpath_var"; then
$echo " - add LIBDIR to the \`$runpath_var' environment variable"
$echo " during linking"
fi
if test -n "$hardcode_libdir_flag_spec"; then
libdir=LIBDIR
eval flag=\"$hardcode_libdir_flag_spec\"
$echo " - use the \`$flag' linker flag"
fi
if test -n "$admincmds"; then
$echo " - have your system administrator run these commands:$admincmds"
fi
if test -f /etc/ld.so.conf; then
$echo " - have your system administrator add LIBDIR to \`/etc/ld.so.conf'"
fi
$echo
$echo "See any operating system documentation about shared libraries for"
$echo "more information, such as the ld(1) and ld.so(8) manual pages."
$echo "----------------------------------------------------------------------"
exit $EXIT_SUCCESS
;;
# libtool execute mode
execute)
# Run a program after setting up the shared-library search path so that
# uninstalled libtool libraries (and wrapper scripts) can be found.
modename="$modename: execute"
# The first argument is the command name.
cmd="$nonopt"
if test -z "$cmd"; then
$echo "$modename: you must specify a COMMAND" 1>&2
$echo "$help"
exit $EXIT_FAILURE
fi
# Handle -dlopen flags immediately.
for file in $execute_dlfiles; do
if test ! -f "$file"; then
$echo "$modename: \`$file' is not a file" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
dir=
case $file in
*.la)
# Check to see that this really is a libtool archive.
if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then :
else
$echo "$modename: \`$lib' is not a valid libtool archive" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
# Read the libtool library.
dlname=
library_names=
# If there is no directory component, then add one.
case $file in
*/* | *\\*) . $file ;;
*) . ./$file ;;
esac
# Skip this library if it cannot be dlopened.
if test -z "$dlname"; then
# Warn if it was a shared library.
test -n "$library_names" && $echo "$modename: warning: \`$file' was not linked with \`-export-dynamic'"
continue
fi
dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`
test "X$dir" = "X$file" && dir=.
if test -f "$dir/$objdir/$dlname"; then
dir="$dir/$objdir"
else
$echo "$modename: cannot find \`$dlname' in \`$dir' or \`$dir/$objdir'" 1>&2
exit $EXIT_FAILURE
fi
;;
*.lo)
# Just add the directory containing the .lo file.
dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`
test "X$dir" = "X$file" && dir=.
;;
*)
$echo "$modename: warning \`-dlopen' is ignored for non-libtool libraries and objects" 1>&2
continue
;;
esac
# Get the absolute pathname.
absdir=`cd "$dir" && pwd`
test -n "$absdir" && dir="$absdir"
# Now add the directory to shlibpath_var.
if eval "test -z \"\$$shlibpath_var\""; then
eval "$shlibpath_var=\"\$dir\""
else
eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\""
fi
done
# This variable tells wrapper scripts just to set shlibpath_var
# rather than running their programs.
libtool_execute_magic="$magic"
# Check if any of the arguments is a wrapper script.
args=
for file
do
case $file in
-*) ;;
*)
# Do a test to see if this is really a libtool program.
if (${SED} -e '4q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
# If there is no directory component, then add one.
case $file in
*/* | *\\*) . $file ;;
*) . ./$file ;;
esac
# Transform arg to wrapped name.
file="$progdir/$program"
fi
;;
esac
# Quote arguments (to preserve shell metacharacters).
file=`$echo "X$file" | $Xsed -e "$sed_quote_subst"`
args="$args \"$file\""
done
# In normal mode build exec_cmd for the dispatcher at the bottom of the
# script; in dry-run mode just display what would be executed.
if test -z "$run"; then
if test -n "$shlibpath_var"; then
# Export the shlibpath_var.
eval "export $shlibpath_var"
fi
# Restore saved environment variables
if test "${save_LC_ALL+set}" = set; then
LC_ALL="$save_LC_ALL"; export LC_ALL
fi
if test "${save_LANG+set}" = set; then
LANG="$save_LANG"; export LANG
fi
# Now prepare to actually exec the command.
exec_cmd="\$cmd$args"
else
# Display what would be done.
if test -n "$shlibpath_var"; then
eval "\$echo \"\$shlibpath_var=\$$shlibpath_var\""
$echo "export $shlibpath_var"
fi
$echo "$cmd$args"
exit $EXIT_SUCCESS
fi
;;
# libtool clean and uninstall mode
clean | uninstall)
# Remove the files associated with each FILE argument using RM.  In
# clean mode files are removed from the build tree ($objdir); in
# uninstall mode they are removed from the installation directory.
modename="$modename: $mode"
rm="$nonopt"
files=
rmforce=
exit_status=0
# This variable tells wrapper scripts just to set variables rather
# than running their programs.
libtool_install_magic="$magic"
for arg
do
case $arg in
-f) rm="$rm $arg"; rmforce=yes ;;
-*) rm="$rm $arg" ;;
*) files="$files $arg" ;;
esac
done
if test -z "$rm"; then
$echo "$modename: you must specify an RM program" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
fi
rmdirs=
origobjdir="$objdir"
for file in $files; do
dir=`$echo "X$file" | $Xsed -e 's%/[^/]*$%%'`
if test "X$dir" = "X$file"; then
dir=.
objdir="$origobjdir"
else
objdir="$dir/$origobjdir"
fi
name=`$echo "X$file" | $Xsed -e 's%^.*/%%'`
test "$mode" = uninstall && objdir="$dir"
# Remember objdir for removal later, being careful to avoid duplicates
if test "$mode" = clean; then
case " $rmdirs " in
*" $objdir "*) ;;
*) rmdirs="$rmdirs $objdir" ;;
esac
fi
# Don't error if the file doesn't exist and rm -f was used.
if (test -L "$file") >/dev/null 2>&1 \
|| (test -h "$file") >/dev/null 2>&1 \
|| test -f "$file"; then
:
elif test -d "$file"; then
exit_status=1
continue
elif test "$rmforce" = yes; then
continue
fi
rmfiles="$file"
# Expand the single FILE into the full list of artifacts to delete,
# depending on whether it is a libtool archive, object or program.
case $name in
*.la)
# Possibly a libtool archive, so verify it.
if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
. $dir/$name
# Delete the libtool libraries and symlinks.
for n in $library_names; do
rmfiles="$rmfiles $objdir/$n"
done
test -n "$old_library" && rmfiles="$rmfiles $objdir/$old_library"
test "$mode" = clean && rmfiles="$rmfiles $objdir/$name $objdir/${name}i"
if test "$mode" = uninstall; then
if test -n "$library_names"; then
# Do each command in the postuninstall commands.
cmds=$postuninstall_cmds
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd"
if test "$?" -ne 0 && test "$rmforce" != yes; then
exit_status=1
fi
done
IFS="$save_ifs"
fi
if test -n "$old_library"; then
# Do each command in the old_postuninstall commands.
cmds=$old_postuninstall_cmds
save_ifs="$IFS"; IFS='~'
for cmd in $cmds; do
IFS="$save_ifs"
eval cmd=\"$cmd\"
$show "$cmd"
$run eval "$cmd"
if test "$?" -ne 0 && test "$rmforce" != yes; then
exit_status=1
fi
done
IFS="$save_ifs"
fi
# FIXME: should reinstall the best remaining shared library.
fi
fi
;;
*.lo)
# Possibly a libtool object, so verify it.
if (${SED} -e '2q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
# Read the .lo file
. $dir/$name
# Add PIC object to the list of files to remove.
if test -n "$pic_object" \
&& test "$pic_object" != none; then
rmfiles="$rmfiles $dir/$pic_object"
fi
# Add non-PIC object to the list of files to remove.
if test -n "$non_pic_object" \
&& test "$non_pic_object" != none; then
rmfiles="$rmfiles $dir/$non_pic_object"
fi
fi
;;
*)
if test "$mode" = clean ; then
noexename=$name
case $file in
*.exe)
file=`$echo $file|${SED} 's,.exe$,,'`
noexename=`$echo $name|${SED} 's,.exe$,,'`
# $file with .exe has already been added to rmfiles,
# add $file without .exe
rmfiles="$rmfiles $file"
;;
esac
# Do a test to see if this is a libtool program.
if (${SED} -e '4q' $file | grep "^# Generated by .*$PACKAGE") >/dev/null 2>&1; then
relink_command=
. $dir/$noexename
# note $name still contains .exe if it was in $file originally
# as does the version of $file that was added into $rmfiles
rmfiles="$rmfiles $objdir/$name $objdir/${name}S.${objext}"
if test "$fast_install" = yes && test -n "$relink_command"; then
rmfiles="$rmfiles $objdir/lt-$name"
fi
if test "X$noexename" != "X$name" ; then
rmfiles="$rmfiles $objdir/lt-${noexename}.c"
fi
fi
fi
;;
esac
$show "$rm $rmfiles"
$run $rm $rmfiles || exit_status=1
done
objdir="$origobjdir"
# Try to remove the ${objdir}s in the directories where we deleted files
for dir in $rmdirs; do
if test -d "$dir"; then
$show "rmdir $dir"
$run rmdir $dir >/dev/null 2>&1
fi
done
exit $exit_status
;;
# No mode was specified on the command line.
"")
$echo "$modename: you must specify a MODE" 1>&2
$echo "$generic_help" 1>&2
exit $EXIT_FAILURE
;;
esac
if test -z "$exec_cmd"; then
$echo "$modename: invalid operation mode \`$mode'" 1>&2
$echo "$generic_help" 1>&2
exit $EXIT_FAILURE
fi
fi # test -z "$show_help"
if test -n "$exec_cmd"; then
eval exec $exec_cmd
exit $EXIT_FAILURE
fi
# We need to display help for each of the modes.
case $mode in
# Generic usage summary, shown when no --mode was selected.
"") $echo \
"Usage: $modename [OPTION]... [MODE-ARG]...
Provide generalized library-building support services.
--config show all configuration variables
--debug enable verbose shell tracing
-n, --dry-run display commands without modifying any files
--features display basic configuration information and exit
--finish same as \`--mode=finish'
--help display this help message and exit
--mode=MODE use operation mode MODE [default=inferred from MODE-ARGS]
--quiet same as \`--silent'
--silent don't print informational messages
--tag=TAG use configuration variables from tag TAG
--version print version information
MODE must be one of the following:
clean remove files from the build directory
compile compile a source file into a libtool object
execute automatically set library path, then run a program
finish complete the installation of libtool libraries
install install libraries or executables
link create a library or an executable
uninstall remove libraries from an installed directory
MODE-ARGS vary depending on the MODE. Try \`$modename --help --mode=MODE' for
a more detailed description of MODE.
Report bugs to <bug-libtool@gnu.org>."
exit $EXIT_SUCCESS
;;
# Help for --mode=clean.
clean)
$echo \
"Usage: $modename [OPTION]... --mode=clean RM [RM-OPTION]... FILE...
Remove files from the build directory.
RM is the name of the program to use to delete files associated with each FILE
(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed
to RM.
If FILE is a libtool library, object or program, all the files associated
with it are deleted. Otherwise, only FILE itself is deleted using RM."
;;
# Help for --mode=compile.
compile)
$echo \
"Usage: $modename [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE
Compile a source file into a libtool library object.
This mode accepts the following additional options:
-o OUTPUT-FILE set the output file name to OUTPUT-FILE
-prefer-pic try to building PIC objects only
-prefer-non-pic try to building non-PIC objects only
-static always build a \`.o' file suitable for static linking
COMPILE-COMMAND is a command to be used in creating a \`standard' object file
from the given SOURCEFILE.
The output file name is determined by removing the directory component from
SOURCEFILE, then substituting the C source code suffix \`.c' with the
library object suffix, \`.lo'."
;;
# Help for --mode=execute.
execute)
$echo \
"Usage: $modename [OPTION]... --mode=execute COMMAND [ARGS]...
Automatically set library path, then run a program.
This mode accepts the following additional options:
-dlopen FILE add the directory containing FILE to the library path
This mode sets the library path environment variable according to \`-dlopen'
flags.
If any of the ARGS are libtool executable wrappers, then they are translated
into their corresponding uninstalled binary, and any of their required library
directories are added to the library path.
Then, COMMAND is executed, with ARGS as arguments."
;;
# Help for --mode=finish.
finish)
$echo \
"Usage: $modename [OPTION]... --mode=finish [LIBDIR]...
Complete the installation of libtool libraries.
Each LIBDIR is a directory that contains libtool libraries.
The commands that this mode executes may require superuser privileges. Use
the \`--dry-run' option if you just want to see what would be executed."
;;
# Help for --mode=install.
install)
$echo \
"Usage: $modename [OPTION]... --mode=install INSTALL-COMMAND...
Install executables or libraries.
INSTALL-COMMAND is the installation command. The first component should be
either the \`install' or \`cp' program.
The rest of the components are interpreted as arguments to that command (only
BSD-compatible install options are recognized)."
;;
# Help for --mode=link.
link)
$echo \
"Usage: $modename [OPTION]... --mode=link LINK-COMMAND...
Link object files or libraries together to form another library, or to
create an executable program.
LINK-COMMAND is a command using the C compiler that you would use to create
a program from several object files.
The following components of LINK-COMMAND are treated specially:
-all-static do not do any dynamic linking at all
-avoid-version do not add a version suffix if possible
-dlopen FILE \`-dlpreopen' FILE if it cannot be dlopened at runtime
-dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols
-export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3)
-export-symbols SYMFILE
try to export only the symbols listed in SYMFILE
-export-symbols-regex REGEX
try to export only the symbols matching REGEX
-LLIBDIR search LIBDIR for required installed libraries
-lNAME OUTPUT-FILE requires the installed library libNAME
-module build a library that can dlopened
-no-fast-install disable the fast-install mode
-no-install link a not-installable executable
-no-undefined declare that a library does not refer to external symbols
-o OUTPUT-FILE create OUTPUT-FILE from the specified objects
-objectlist FILE Use a list of object files found in FILE to specify objects
-precious-files-regex REGEX
don't remove output files matching REGEX
-release RELEASE specify package release information
-rpath LIBDIR the created library will eventually be installed in LIBDIR
-R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries
-static do not do any dynamic linking of libtool libraries
-version-info CURRENT[:REVISION[:AGE]]
specify library version info [each variable defaults to 0]
All other options (arguments beginning with \`-') are ignored.
Every other argument is treated as a filename. Files ending in \`.la' are
treated as uninstalled libtool libraries, other files are standard or library
object files.
If the OUTPUT-FILE ends in \`.la', then a libtool library is created,
only library objects (\`.lo' files) may be specified, and \`-rpath' is
required, except when creating a convenience library.
If OUTPUT-FILE ends in \`.a' or \`.lib', then a standard library is created
using \`ar' and \`ranlib', or on Windows using \`lib'.
If OUTPUT-FILE ends in \`.lo' or \`.${objext}', then a reloadable object file
is created, otherwise an executable program is created."
;;
# Help for --mode=uninstall.
uninstall)
$echo \
"Usage: $modename [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE...
Remove libraries from an installation directory.
RM is the name of the program to use to delete files associated with each FILE
(typically \`/bin/rm'). RM-OPTIONS are options (such as \`-f') to be passed
to RM.
If FILE is a libtool library, all the files associated with it are deleted.
Otherwise, only FILE itself is deleted using RM."
;;
# Unknown mode: report the error and exit.
*)
$echo "$modename: invalid operation mode \`$mode'" 1>&2
$echo "$help" 1>&2
exit $EXIT_FAILURE
;;
esac
$echo
$echo "Try \`$modename --help' for more information about other modes."
exit $?
# The TAGs below are defined such that we never get into a situation
# in which we disable both kinds of libraries. Given conflicting
# choices, we go for a static library, that is the most portable,
# since we can't tell whether shared libraries were disabled because
# the user asked for that or because the platform doesn't support
# them. This is particularly important on AIX, because we don't
# support having both static and shared libraries enabled at the same
# time on that platform, so we default to a shared-only configuration.
# If a disable-shared tag is given, we'll fallback to a static-only
# configuration. But we'll never go from static-only to shared-only.
# ### BEGIN LIBTOOL TAG CONFIG: disable-shared
build_libtool_libs=no
build_old_libs=yes
# ### END LIBTOOL TAG CONFIG: disable-shared
# ### BEGIN LIBTOOL TAG CONFIG: disable-static
build_old_libs=`case $build_libtool_libs in yes) $echo no;; *) $echo yes;; esac`
# ### END LIBTOOL TAG CONFIG: disable-static
# Local Variables:
# mode:shell-script
# sh-indentation:2
# End:
|
/**
 * Report whether two numbers are strictly equal (no type coercion).
 *
 * @param {number} num1 - First value to compare.
 * @param {number} num2 - Second value to compare.
 * @returns {boolean} True when both operands are identical via `===`.
 */
function isEqual(num1, num2) {
  const sameValue = num1 === num2;
  return sameValue;
}
<reponame>geyang/gym-sawyer
from cmx import CommonMark
import gym

# Author the environment README through cmx so that the prose, figures
# and generated images stay in sync with the code that produced them.
doc = CommonMark("README.md")

doc @ """
# Sawyer Push Environment
## To-dos
- [ ] simple 1-object pushing domain, show goal image and current
camera view
- [ ] make sure that the reward and termination condition are
implemented correctly
- [ ] run PPO, Q-learning baselines on this domain, collect
success rate and final distance to object.
The goal of this environment is to push one of the blocks to the
goal location specified by an image. The representation tend to
first pick up the arm as opposed to the object. This is because
the arm position varies quite a lot more in the dataset, as it
is actuated directly.
Hence we can consider feature learning as a dataset problem. Model
calibration provides a way for us to understand how a model can
also learn something about the data distribution. Issues to consider
in multi-object vision domains:
1. occlusion
2. not learning the position of the object, due to not having enough
variation/contrastive loss not picking up
3. the object getting out of the frame.
There are two ways to go forward:
1. VAE
2. calibrated contrastive learning (HDGE)
How do we know that the model is learning the right representations?
1. visualize as in maze?
2. model accuracy?
3. mining hard examples?
"""

# Render a glamor shot of the scene for every supported object count and
# embed the resulting images side by side in a single row.
with doc @ """
## Number of Objects
can include 1 - 4 objects:""", doc.row():
    for i in range(1, 5):
        env = gym.make("sawyer:PickPlace-v0", cam_id=-1, num_objs=i)
        env.reset()
        img = env.render("glamor", width=150, height=150)
        doc.image(img, src=f"./figures/pick_place_{i}.png",
                  caption=f"{i} block{'s' if i > 1 else ''}")

doc @ """
## Whitebox evaluation of the learned representation:
How well does the learned representation $\phi$
predict information such as object location and pose?
Here is our experiment grid:
"""

doc.csv @ """
Method, block, arm, Comments
naïve, None, None, Only the robotic arm is varied a lot, so there is not much variation to the location of the blocks.
"""

# Write the accumulated document to README.md when run as a script.
if __name__ == '__main__':
    doc.flush()
|
#!/bin/bash

# Azure Database for PostgreSQL server that hosts the CAX sandbox databases.
PG_HOST=cax-sb-dev-psql.postgres.database.azure.com

# caxdb DBNAME [PSQL-ARGS...]
# Run psql against DBNAME on $PG_HOST as the psqladmin user, forwarding any
# extra arguments (e.g. -f script.sql) to psql.  Expects PG_PASS to be set
# in the environment with the psqladmin password.
caxdb() {
  DBNAME=$1
  shift
  # "$@" (quoted) preserves each remaining argument as a single word, so
  # psql options containing spaces are not split apart.
  psql "host=$PG_HOST port=5432 user=psqladmin@$PG_HOST password=$PG_PASS sslmode=require dbname=$DBNAME" "$@"
}

# Rebuild the membercompany schema and load its seed data.
caxdb membercompany -f drop-tables.sql
caxdb membercompany -f create-tables.sql
caxdb membercompany -f insert-business-partners.sql
caxdb membercompany -f insert-member-companies.sql
caxdb membercompany -f insert-member-company-roles.sql
|
#
# Copyright 2021 Nebulon, Inc.
# All Rights Reserved.
#
# DISCLAIMER: THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
# EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
# OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
from .graphqlclient import GraphQLParam, NebMixin
from .common import PageInput, NebEnum, read_value
from .filters import UUIDFilter, StringFilter
from .sorting import SortDirection
class CommunicationMethodType(NebEnum):
    """Defines customer communication preferences."""

    # Prefer communication via E-Mail
    Email = "Email"
    # Prefer communication via Phone
    Phone = "Phone"
class AddressInput:
    """An input object to set up an address for a datacenter.

    Describes the physical address of a datacenter. nebulon ON uses this
    information for support cases that involve physical equipment in a
    customer's datacenter and for shipping parts to the provided address.
    """

    def __init__(
            self,
            house_number: str,
            address1: str,
            city: str,
            postal_code: str,
            country_code: str,
            state_province_code: str = None,
            address2: str = None,
            address3: str = None,
    ):
        """Initialize a new address object for a datacenter.

        :param house_number: The house number and letters for the address
        :type house_number: str
        :param address1: Address field 1, typically the street address
        :type address1: str
        :param city: City name
        :type city: str
        :param postal_code: The postal code for the address
        :type postal_code: str
        :param country_code: The country code for the address
        :type country_code: str
        :param state_province_code: The optional state or province code if
            applicable for the specified country.
        :type state_province_code: str, optional
        :param address2: Address field 2
        :type address2: str, optional
        :param address3: Address field 3
        :type address3: str, optional
        """
        self.__house_no = house_number
        self.__line1 = address1
        self.__line2 = address2
        self.__line3 = address3
        self.__city_name = city
        self.__state_code = state_province_code
        self.__postal = postal_code
        self.__country = country_code

    @property
    def house_number(self) -> str:
        """House number and letters for the address"""
        return self.__house_no

    @property
    def address1(self) -> str:
        """Address field 1, typically the street address"""
        return self.__line1

    @property
    def address2(self) -> str:
        """Address field 2"""
        return self.__line2

    @property
    def address3(self) -> str:
        """Address field 3"""
        return self.__line3

    @property
    def city(self) -> str:
        """City name for the address"""
        return self.__city_name

    @property
    def state_province_code(self) -> str:
        """State or province code"""
        return self.__state_code

    @property
    def postal_code(self) -> str:
        """Postal code for the address"""
        return self.__postal

    @property
    def country_code(self) -> str:
        """Country code for the address"""
        return self.__country

    @property
    def as_dict(self):
        # Keys follow the GraphQL input schema's camelCase naming.
        return {
            "houseNumber": self.__house_no,
            "address1": self.__line1,
            "address2": self.__line2,
            "address3": self.__line3,
            "city": self.__city_name,
            "stateProvinceCode": self.__state_code,
            "postalCode": self.__postal,
            "countryCode": self.__country,
        }
class ContactInput:
    """An input object to define a datacenter contact.

    Describes a support contact for a datacenter. nebulon ON uses this
    information to reach a customer in case of infrastructure issues and
    to send replacement parts.
    """

    def __init__(
            self,
            user_uuid: str,
            primary: bool,
            communication_method: CommunicationMethodType
    ):
        """Initialize a new contact information object.

        :param user_uuid: The unique identifier of an existing user account
            in nebulon ON that should be used as a contact
        :type user_uuid: str
        :param primary: Indicates if this contact should be the primary
            contact for a datacenter.
        :type primary: bool
        :param communication_method: The preferred communication type for
            the contact
        :type communication_method: CommunicationMethodType
        """
        self.__uuid = user_uuid
        self.__is_primary = primary
        self.__method = communication_method

    @property
    def user_uuid(self) -> str:
        """The unique identifier of a nebulon ON user account"""
        return self.__uuid

    @property
    def primary(self) -> bool:
        """Indicates if this contact is the primary contact for a datacenter"""
        return self.__is_primary

    @property
    def communication_method(self) -> CommunicationMethodType:
        """Indicates the preferred communication method for this contact"""
        return self.__method

    @property
    def as_dict(self):
        # Keys follow the GraphQL input schema's camelCase naming.
        return {
            "userUUID": self.__uuid,
            "primary": self.__is_primary,
            "communicationMethod": self.__method,
        }
class DataCenterSort:
    """A sort object for datacenters.

    Allows sorting datacenters on common properties. Only a single
    property may be used for sorting.
    """

    def __init__(
            self,
            name: SortDirection = None
    ):
        """Initialize a new sort object for datacenters.

        :param name: Sort direction for the ``name`` property
        :type name: SortDirection, optional
        """
        self.__name_direction = name

    @property
    def name(self) -> SortDirection:
        """Sort direction for the ``name`` property"""
        return self.__name_direction

    @property
    def as_dict(self):
        return {"name": self.__name_direction}
class DataCenterFilter:
    """A filter object to filter datacenters.

    Allows filtering for specific datacenters in nebulon ON. Only one
    property may be populated per filter instance; combine multiple
    criteria by chaining filters through ``and_filter`` / ``or_filter``.
    """

    def __init__(
            self,
            uuid: UUIDFilter = None,
            name: StringFilter = None,
            and_filter=None,
            or_filter=None
    ):
        """Initialize a new filter object.

        The filter allows only one property to be specified. If filtering
        on multiple properties is needed, use the ``and_filter`` and
        ``or_filter`` options to concatenate multiple filters.

        :param uuid: Filter based on datacenter unique identifiers
        :type uuid: UUIDFilter, optional
        :param name: Filter based on datacenter name
        :type name: StringFilter, optional
        :param and_filter: Concatenate another filter with a logical AND
        :type and_filter: DataCenterFilter, optional
        :param or_filter: Concatenate another filter with a logical OR
        :type or_filter: DataCenterFilter, optional
        """
        self.__uuid_filter = uuid
        self.__name_filter = name
        self.__and_chain = and_filter
        self.__or_chain = or_filter

    @property
    def uuid(self) -> UUIDFilter:
        """Filter based on datacenter unique identifier"""
        return self.__uuid_filter

    @property
    def name(self) -> StringFilter:
        """Filter based on datacenter name"""
        return self.__name_filter

    @property
    def and_filter(self):
        """Allows concatenation of multiple filters via logical AND"""
        return self.__and_chain

    @property
    def or_filter(self):
        """Allows concatenation of multiple filters via logical OR"""
        return self.__or_chain

    @property
    def as_dict(self):
        return {
            "uuid": self.__uuid_filter,
            "name": self.__name_filter,
            "and": self.__and_chain,
            "or": self.__or_chain,
        }
class CreateDataCenterInput:
    """An input object to create a datacenter.

    A datacenter record allows customers to logically organize their
    infrastructure by physical location and attach address and contact
    information to it. This is useful for effective support case handling
    and reporting purposes.
    """

    def __init__(
            self,
            name: str,
            address: AddressInput,
            contacts: [ContactInput],
            note: str = None,
    ):
        """Initialize a new input object to create a datacenter.

        At least one contact with the attribute ``primary`` set to
        ``True`` must be provided. If multiple contacts are provided,
        exactly one contact must be specified as primary.

        :param name: Name for the new datacenter
        :type name: str
        :param address: The postal address for the new datacenter
        :type address: AddressInput
        :param contacts: List of contacts for the new datacenter. At least
            one contact must be provided. Exactly one contact must be
            marked as primary.
        :type contacts: [ContactInput]
        :param note: An optional note for the new datacenter
        :type note: str, optional
        """
        self.__dc_name = name
        self.__dc_note = note
        self.__dc_address = address
        self.__dc_contacts = contacts

    @property
    def name(self) -> str:
        """Name of the datacenter"""
        return self.__dc_name

    @property
    def note(self) -> str:
        """An optional note for the datacenter"""
        return self.__dc_note

    @property
    def address(self) -> AddressInput:
        """Postal address for the datacenter"""
        return self.__dc_address

    @property
    def contacts(self) -> [ContactInput]:
        """List of contacts for the datacenter"""
        return self.__dc_contacts

    @property
    def as_dict(self):
        return {
            "name": self.__dc_name,
            "note": self.__dc_note,
            "address": self.__dc_address,
            "contacts": self.__dc_contacts,
        }
class UpdateDataCenterInput:
    """An input object to update datacenter properties

    Allows updating an existing datacenter object in nebulon ON.
    Datacenter records let customers organize their infrastructure by
    physical location and attach postal address and contact information
    to that location. nebulon ON uses this information for effective
    support case handling and for reporting purposes.
    """

    def __init__(
            self,
            name: str = None,
            address: AddressInput = None,
            contacts: [ContactInput] = None,
            note: str = None,
    ):
        """Constructs a new input object to update a datacenter

        At least one property must be specified.

        :param name: New name for the datacenter
        :type name: str, optional
        :param address: New postal address for the datacenter
        :type address: AddressInput, optional
        :param contacts: New list of contacts for the datacenter. When
            provided, the list must contain at least one contact and
            exactly one contact must be marked primary. It replaces the
            datacenter's existing list of contacts.
        :type contacts: [ContactInput], optional
        :param note: The new note for the datacenter. Pass an empty
            ``str`` to remove the existing note.
        :type note: str, optional
        """
        # Values are stored privately and exposed via read-only
        # properties below.
        self.__name = name
        self.__address = address
        self.__contacts = contacts
        self.__note = note

    @property
    def name(self) -> str:
        """The new name of the datacenter"""
        return self.__name

    @property
    def note(self) -> str:
        """The new note for the datacenter"""
        return self.__note

    @property
    def address(self) -> AddressInput:
        """The new postal address for the datacenter"""
        return self.__address

    @property
    def contacts(self) -> [ContactInput]:
        """The new list of contacts for the datacenter"""
        return self.__contacts

    @property
    def as_dict(self):
        """Dictionary representation used for GraphQL serialization"""
        return {
            "name": self.name,
            "note": self.note,
            "address": self.address,
            "contacts": self.contacts,
        }
class DeleteDataCenterInput:
    """An input object to delete a datacenter object

    Provides additional options for datacenter deletion. When
    ``cascade`` is ``True``, child resources are deleted together with
    the datacenter, provided no hosts are associated with them.
    """

    def __init__(
            self,
            cascade: bool
    ):
        """Constructs a new input object to delete a datacenter object

        :param cascade: If set to ``True`` any child resources are
            deleted with the datacenter if no hosts are associated with
            them.
        :type cascade: bool, optional
        """
        self.__cascade = cascade

    @property
    def cascade(self) -> bool:
        """Whether child items shall be deleted with the datacenter"""
        return self.__cascade

    @property
    def as_dict(self):
        """Dictionary representation used for GraphQL serialization"""
        return {
            "cascade": self.cascade,
        }
class Address:
    """An address for a datacenter

    This information is used for support cases related to physical
    equipment in customer datacenters and allows part shipments to the
    provided address.
    """

    def __init__(
            self,
            response: dict
    ):
        """Constructs a new address object

        Expects a ``dict`` object from the nebulon ON API and validates
        the returned data against the schema implemented by this SDK.

        :param response: The JSON response from the server
        :type response: dict

        :raises ValueError: An error if illegal data is returned from the server
        """
        # Extract and validate each field from the server response.
        self.__house_number = read_value("houseNumber", response, str, True)
        self.__address1 = read_value("address1", response, str, True)
        self.__address2 = read_value("address2", response, str, True)
        self.__address3 = read_value("address3", response, str, True)
        self.__city = read_value("city", response, str, True)
        self.__state_province_code = read_value(
            "stateProvinceCode", response, str, True)
        self.__postal_code = read_value("postalCode", response, str, True)
        self.__country_code = read_value("countryCode", response, str, True)

    @property
    def house_number(self) -> str:
        """House number and letters of the address"""
        return self.__house_number

    @property
    def address1(self) -> str:
        """First address field, typically the street address"""
        return self.__address1

    @property
    def address2(self) -> str:
        """Second address field"""
        return self.__address2

    @property
    def address3(self) -> str:
        """Third address field"""
        return self.__address3

    @property
    def city(self) -> str:
        """City name of the address"""
        return self.__city

    @property
    def state_province_code(self) -> str:
        """State or province code of the address"""
        return self.__state_province_code

    @property
    def postal_code(self) -> str:
        """Postal code of the address"""
        return self.__postal_code

    @property
    def country_code(self) -> str:
        """Country code of the address"""
        return self.__country_code

    @staticmethod
    def fields():
        """GraphQL field names queried for this type"""
        return [
            "houseNumber",
            "address1",
            "address2",
            "address3",
            "city",
            "stateProvinceCode",
            "postalCode",
            "countryCode",
        ]
class Contact:
    """Contact information for a datacenter

    This information is used to contact a customer in case of
    infrastructure issues and to send replacement parts.
    """

    def __init__(
            self,
            response: dict
    ):
        """Constructs a new contact object

        Expects a ``dict`` object from the nebulon ON API and validates
        the returned data against the schema implemented by this SDK.

        :param response: The JSON response from the server
        :type response: dict

        :raises ValueError: An error if illegal data is returned from the server
        """
        # Extract and validate each field from the server response.
        self.__user_uuid = read_value("userUUID", response, str, True)
        self.__email_address = read_value("emailAddress", response, str, True)
        self.__first_name = read_value("firstName", response, str, True)
        self.__last_name = read_value("lastName", response, str, True)
        self.__mobile_phone = read_value("mobilePhone", response, str, True)
        self.__business_phone = read_value("businessPhone", response, str, True)
        self.__primary = read_value("primary", response, bool, True)
        self.__communication_method = read_value(
            "communicationMethod", response, CommunicationMethodType, True)

    @property
    def user_uuid(self) -> str:
        """Unique identifier of the associated nebulon ON user account"""
        return self.__user_uuid

    @property
    def email_address(self) -> str:
        """Email address of the contact"""
        return self.__email_address

    @property
    def first_name(self) -> str:
        """First name of the contact"""
        return self.__first_name

    @property
    def last_name(self) -> str:
        """Last name of the contact"""
        return self.__last_name

    @property
    def mobile_phone(self) -> str:
        """Mobile phone number of the contact"""
        return self.__mobile_phone

    @property
    def business_phone(self) -> str:
        """Business phone number of the contact"""
        return self.__business_phone

    @property
    def primary(self) -> bool:
        """Whether this contact is the primary contact for a datacenter"""
        return self.__primary

    @property
    def communication_method(self) -> CommunicationMethodType:
        """The preferred communication method for this contact"""
        return self.__communication_method

    @staticmethod
    def fields():
        """GraphQL field names queried for this type"""
        return [
            "userUUID",
            "emailAddress",
            "firstName",
            "lastName",
            "mobilePhone",
            "businessPhone",
            "primary",
            "communicationMethod",
        ]
class DataCenter:
    """A datacenter object

    Datacenter records let customers organize their infrastructure by
    physical location and attach postal address and contact information
    to that location. nebulon ON uses this information for effective
    support case handling and for reporting purposes.
    """

    def __init__(
            self,
            response: dict
    ):
        """Constructs a new datacenter object

        Expects a ``dict`` object from the nebulon ON API and validates
        the returned data against the schema implemented by this SDK.

        :param response: The JSON response from the server
        :type response: dict

        :raises ValueError: An error if illegal data is returned from the server
        """
        # Extract and validate each field from the server response.
        self.__uuid = read_value("uuid", response, str, True)
        self.__name = read_value("name", response, str, True)
        self.__note = read_value("note", response, str, True)
        self.__address = read_value("address", response, Address, True)
        self.__contacts = read_value("contacts", response, Contact, False)
        # "rooms.uuid" selects only the identifiers of the nested rooms.
        self.__room_uuids = read_value("rooms.uuid", response, str, False)
        self.__room_count = read_value("roomCount", response, int, True)
        self.__row_count = read_value("rowCount", response, int, True)
        self.__rack_count = read_value("rackCount", response, int, True)
        self.__host_count = read_value("hostCount", response, int, True)

    @property
    def uuid(self) -> str:
        """The unique identifier of the datacenter"""
        return self.__uuid

    @property
    def name(self) -> str:
        """The name of the datacenter"""
        return self.__name

    @property
    def note(self) -> str:
        """The optional note for the datacenter"""
        return self.__note

    @property
    def address(self) -> Address:
        """The postal address of the datacenter"""
        return self.__address

    @property
    def contacts(self) -> list:
        """The list of contacts for the datacenter"""
        return self.__contacts

    @property
    def room_uuids(self) -> list:
        """The unique identifiers of rooms in the datacenter"""
        return self.__room_uuids

    @property
    def room_count(self) -> int:
        """The number of rooms in the datacenter"""
        return self.__room_count

    @property
    def row_count(self) -> int:
        """The number of rows in the datacenter"""
        return self.__row_count

    @property
    def rack_count(self) -> int:
        """The number of racks in the datacenter"""
        return self.__rack_count

    @property
    def host_count(self) -> int:
        """The number of hosts (servers) in the datacenter"""
        return self.__host_count

    @staticmethod
    def fields():
        """GraphQL field names queried for this type"""
        return [
            "uuid",
            "name",
            "note",
            "address{%s}" % (",".join(Address.fields())),
            "contacts{%s}" % (",".join(Contact.fields())),
            "rooms{uuid}",
            "roomCount",
            "rowCount",
            "rackCount",
            "hostCount",
        ]
class DataCenterList:
    """Paginated datacenter list object

    Contains a list of datacenter objects together with pagination
    information. By default a single page includes at most ``100``
    items unless specified otherwise in the paginated query.

    Consumers should always check the property ``more``: by default the
    server returns only one page of datacenters, not the full list.
    """

    def __init__(
            self,
            response: dict
    ):
        """Constructs a new datacenter list object

        Expects a ``dict`` object from the nebulon ON API and validates
        the returned data against the schema implemented by this SDK.

        :param response: The JSON response from the server
        :type response: dict

        :raises ValueError: An error if illegal data is returned from the server
        """
        # Extract and validate pagination metadata and the items.
        self.__more = read_value("more", response, bool, True)
        self.__total_count = read_value("totalCount", response, int, True)
        self.__filtered_count = read_value(
            "filteredCount", response, int, True)
        self.__items = read_value("items", response, DataCenter, True)

    @property
    def items(self) -> [DataCenter]:
        """The datacenters in this page of the pagination list"""
        return self.__items

    @property
    def more(self) -> bool:
        """Whether more items are available on the server"""
        return self.__more

    @property
    def total_count(self) -> int:
        """Total number of items on the server"""
        return self.__total_count

    @property
    def filtered_count(self) -> int:
        """Number of items on the server matching the provided filter"""
        return self.__filtered_count

    @staticmethod
    def fields():
        """GraphQL field names queried for this type"""
        return [
            "items{%s}" % (",".join(DataCenter.fields())),
            "more",
            "totalCount",
            "filteredCount",
        ]
class DatacentersMixin(NebMixin):
    """Mixin that adds datacenter-related methods to the GraphQL client"""

    def get_datacenters(
            self,
            page: PageInput = None,
            dc_filter: DataCenterFilter = None,
            sort: DataCenterSort = None
    ) -> DataCenterList:
        """Retrieves a list of datacenter objects

        :param page: The requested page from the server. When omitted
            the server defaults to the first page with at most ``100``
            items.
        :type page: PageInput, optional
        :param dc_filter: A filter object to filter datacenters on the
            server. When omitted the server returns all objects as a
            paginated response.
        :type dc_filter: DataCenterFilter, optional
        :param sort: A sort definition to order the datacenter objects
            by supported properties. When omitted objects are returned
            in creation order.
        :type sort: DataCenterSort, optional

        :returns DataCenterList: A paginated list of datacenters.

        :raises GraphQLError: An error with the GraphQL endpoint.
        """
        # assemble the GraphQL query parameters; all three are optional
        parameters = {
            "page": GraphQLParam(page, "PageInput", False),
            "filter": GraphQLParam(dc_filter, "DataCenterFilter", False),
            "sort": GraphQLParam(sort, "DataCenterSort", False),
        }

        # run the query and convert the response to a typed object
        response = self._query(
            name="getDataCenters",
            params=parameters,
            fields=DataCenterList.fields()
        )
        return DataCenterList(response)

    def create_datacenter(
            self,
            create_input: CreateDataCenterInput = None
    ) -> DataCenter:
        """Allows creation of a new datacenter object

        Datacenter records let customers organize their infrastructure
        by physical location and attach postal address and contact
        information to that location. nebulon ON uses this information
        for effective support case handling and for reporting purposes.

        :param create_input: A property definition for the new datacenter
        :type create_input: CreateDataCenterInput

        :returns DataCenter: The new datacenter.

        :raises GraphQLError: An error with the GraphQL endpoint.
        """
        # assemble the GraphQL mutation parameters
        parameters = {
            "input": GraphQLParam(
                create_input,
                "CreateDataCenterInput",
                True
            ),
        }

        # run the mutation and convert the response to a typed object
        response = self._mutation(
            name="createDataCenter",
            params=parameters,
            fields=DataCenter.fields()
        )
        return DataCenter(response)

    def delete_datacenter(
            self,
            uuid: str,
            delete_input: DeleteDataCenterInput = None
    ) -> bool:
        """Allows deletion of an existing datacenter object

        Deleting a datacenter is only possible when no hosts (servers)
        are associated with the datacenter or any of its child items.

        :param uuid: The unique identifier of the datacenter to delete
        :type uuid: str
        :param delete_input: Optional parameters for the delete operation
        :type delete_input: DeleteDataCenterInput, optional

        :returns bool: If the query was successful

        :raises GraphQLError: An error with the GraphQL endpoint.
        """
        # assemble the GraphQL mutation parameters
        parameters = {
            "uuid": GraphQLParam(uuid, "UUID", True),
            "input": GraphQLParam(
                delete_input,
                "DeleteDataCenterInput",
                False
            ),
        }

        # this mutation requests no fields and returns a plain boolean
        response = self._mutation(
            name="deleteDataCenter",
            params=parameters,
            fields=None
        )
        return response

    def update_datacenter(
            self,
            uuid: str,
            update_input: UpdateDataCenterInput
    ) -> DataCenter:
        """Allows updating properties of an existing datacenter object

        :param uuid: The unique identifier of the datacenter to update
        :type uuid: str
        :param update_input: A property definition for the datacenter updates
        :type update_input: UpdateDataCenterInput

        :returns DataCenter: The updated datacenter object.

        :raises GraphQLError: An error with the GraphQL endpoint.
        """
        # assemble the GraphQL mutation parameters
        # NOTE(review): the GraphQL type name "UpsertDataCenterInput"
        # differs from the Python class name (Update vs Upsert) --
        # confirm against the server schema.
        parameters = {
            "uuid": GraphQLParam(uuid, "UUID", True),
            "input": GraphQLParam(
                update_input,
                "UpsertDataCenterInput",
                False
            ),
        }

        # run the mutation and convert the response to a typed object
        response = self._mutation(
            name="updateDataCenter",
            params=parameters,
            fields=DataCenter.fields()
        )
        return DataCenter(response)
|
<reponame>injoon5/oij-web<gh_stars>1-10
/* eslint-disable jsx-a11y/anchor-has-content */
import Link from '@/components/Link'
import useSWR from 'swr'
// Renders today's live COVID-19 case count fetched from the internal API.
const CovidCases = () => {
  const jsonFetcher = (...params) => fetch(...params).then((response) => response.json())
  // Poll the endpoint every five minutes to keep the number current.
  const { data, error } = useSWR('/api/covid', jsonFetcher, { refreshInterval: 5 * 60 * 1000 })
  if (error) return 'Error!'
  if (!data) return 'Loading...'
  return data.live.today
}
export default CovidCases
|
<gh_stars>1-10
import * as React from 'react';
import MUIAutocomplete, { createFilterOptions } from '@material-ui/lab/Autocomplete';
import TextField from '@material-ui/core/TextField';
import CircularProgress from '@material-ui/core/CircularProgress';
import { EntityWithName } from '../../services/api';
import { FilterOptionsState } from '@material-ui/lab/useAutocomplete';
import { useDebounce } from '../../services/use-debounce';
// Re-export MUI's filter-options factory so consumers can build a custom
// `defaultFilter` without importing @material-ui/lab directly.
export const createCustomFilterOptions = createFilterOptions;
// Props accepted by the generic Autocomplete component below.
export type AutocompleteProps<T> = {
  // Disables all user interaction with the control.
  disabled?: boolean;
  // Candidate options currently available for selection.
  options: T[];
  // Currently selected option, or null when nothing is selected.
  selected: T | null;
  // Callback invoked when the user picks (or clears) an option.
  setSelected(element: T | null): void;
  // True while options are being fetched; switches the label to loadingText
  // and shows a progress spinner.
  isLoading: boolean;
  // Template entity used to synthesize a "Create <name>" option when no
  // existing option matches the typed text.
  emptyElement: T;
  // Callback invoked (debounced) with the user's typed filter text.
  changeFilter(filter: string): void;
  // Client-side filter applied to the options before rendering.
  defaultFilter?: (options: T[], state: FilterOptionsState<T>) => T[];
  // Accessibility label for the underlying MUI Autocomplete.
  ariaLabel?: string;
  // DOM id assigned to the underlying MUI Autocomplete.
  elementId?: string;
  // Input label shown while not loading.
  placeholderText?: string;
  // Input label shown while isLoading is true.
  loadingText?: string;
  // Optional CSS class applied to the root element.
  className?: string;
};
/**
 * Generic autocomplete control built on MUI's Autocomplete.
 *
 * Data fetching is delegated to the caller: the component reports the
 * (debounced) filter text through `changeFilter` and renders whatever
 * `options` it receives. When nothing matches the typed text, a synthetic
 * entry based on `emptyElement` is appended and rendered with a
 * "Create " prefix so the user can create a new entity.
 */
export function Autocomplete<T extends EntityWithName>({
  disabled = false,
  options,
  selected,
  setSelected,
  isLoading,
  emptyElement,
  changeFilter,
  defaultFilter = createFilterOptions<T>(),
  ariaLabel = 'element name',
  elementId = 'element-autocomplete',
  placeholderText = 'Search a element',
  loadingText = 'Loading elements',
  className
}: AutocompleteProps<T>) {
  // Forward the chosen option (or null on clear) to the caller.
  const handleChange = React.useCallback(
    (ev: React.ChangeEvent<unknown>, element: T | null) => {
      setSelected(element);
    },
    [setSelected]
  );

  // Options are labeled by their entity name.
  const handleGetOptions = React.useCallback((option: T) => {
    return option.name;
  }, []);

  // Options sharing emptyElement's id are the synthetic "create" entries
  // produced in handleFilterOptions; prefix them with "Create ".
  const handleRenderOptions = React.useCallback(
    (option: T) => {
      return (option.id !== emptyElement.id ? '' : 'Create ') + option.name;
    },
    [emptyElement]
  );

  // Apply the caller's filter, then append a synthetic "create" option when
  // the typed text matches nothing and loading has finished.
  const handleFilterOptions = React.useCallback(
    (options: T[], params: FilterOptionsState<T>) => {
      const filtered = defaultFilter(options, params);
      // If there are not matchs...
      if (!isLoading && options.length === 0 && filtered.length === 0 && params.inputValue !== '') {
        const newElement = { ...emptyElement, name: params.inputValue };
        filtered.push(newElement);
      }
      return filtered;
    },
    [isLoading, emptyElement, defaultFilter]
  );

  // Debounce filter: changeFilter only fires after the user pauses typing.
  const [filter, setFilter] = useDebounce({
    callback: changeFilter,
    initialValue: ''
  });

  // Track every keystroke locally; the debounce hook forwards it upstream.
  const handleInputChange = React.useCallback(
    (ev: React.ChangeEvent<unknown>, name: string) => {
      setFilter(name);
    },
    [setFilter]
  );

  return (
    <MUIAutocomplete
      disabled={disabled}
      autoComplete={true}
      fullWidth={true}
      id={elementId}
      aria-label={ariaLabel}
      multiple={false}
      value={selected}
      className={className}
      options={options}
      filterOptions={handleFilterOptions}
      getOptionLabel={handleGetOptions}
      renderOption={handleRenderOptions}
      onChange={handleChange}
      loading={isLoading}
      onInputChange={handleInputChange}
      inputValue={filter}
      renderInput={params => {
        return (
          <TextField
            {...params}
            label={isLoading ? loadingText : placeholderText}
            InputProps={{
              ...params.InputProps,
              endAdornment: (
                <React.Fragment>
                  {/* Spinner while options load, then the default adornment. */}
                  {isLoading ? <CircularProgress color='primary' size={20} /> : null}
                  {params.InputProps.endAdornment}
                </React.Fragment>
              )
            }}
          />
        );
      }}
    />
  );
}
export default Autocomplete;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.