text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
###############################################################################
# Twitter
###############################################################################
# macOS `defaults` tweaks for the official Twitter desktop client.
# Each write is idempotent; restart the app for changes to take effect.
# Disable smart quotes as it’s annoying for code tweets
defaults write com.twitter.twitter-mac AutomaticQuoteSubstitutionEnabled -bool false
# Show the app window when clicking the menu bar icon
defaults write com.twitter.twitter-mac MenuItemBehavior -int 1
# Enable the hidden ‘Develop’ menu
defaults write com.twitter.twitter-mac ShowDevelopMenu -bool true
# Open links in the background
defaults write com.twitter.twitter-mac openLinksInBackground -bool true
# Allow closing the ‘new tweet’ window by pressing `Esc`
defaults write com.twitter.twitter-mac ESCClosesComposeWindow -bool true
# Show full names rather than Twitter handles
defaults write com.twitter.twitter-mac ShowFullNames -bool true
# Hide the app in the background if it’s not the front-most window
defaults write com.twitter.twitter-mac HideInBackground -bool true
|
#!/bin/bash
# Emit the column header for a memory report (USS/PSS samples over time).
echo 'TIME USS PSS'
|
<filename>src/gatsby-node.js
const webpack = require("webpack")
const fs = require(`fs`)
const flatten = require("flat")
// Normalize a page path for use as a messages-file key: strip trailing
// "/" characters, then a trailing ".html" extension.
// The ".html" replacement is anchored to the end of the string; the old
// unanchored string replace mangled paths that merely CONTAIN ".html".
const removeFileEndings = fileName =>
  fileName.replace(/\/+$/, "").replace(/\.html$/, "")
exports.onCreateWebpackConfig = ({ actions, plugins }, pluginOptions) => {
const { redirectComponent = null, languages, defaultLanguage } = pluginOptions
if (!languages.includes(defaultLanguage)) {
languages.push(defaultLanguage)
}
// ['en', 'de', 'en-US'] -> /en|de|en/
const regex = new RegExp(languages.map(l => l.split("-")[0]).join("|"))
actions.setWebpackConfig({
plugins: [
plugins.define({
GATSBY_INTL_REDIRECT_COMPONENT_PATH: JSON.stringify(redirectComponent),
}),
new webpack.ContextReplacementPlugin(
/@formatjs[/\\]intl-relativetimeformat[/\\]dist[/\\]locale-data$/,
regex
),
new webpack.ContextReplacementPlugin(
/@formatjs[/\\]intl-pluralrules[/\\]dist[/\\]locale-data$/,
regex
),
],
})
}
exports.onCreatePage = async ({ page, actions }, pluginOptions) => {
//Exit if the page has already been processed.
if (typeof page.context.intl === "object") {
return
}
const { createPage, deletePage } = actions
const {
path = ".",
languages = ["en"],
defaultLanguage = "en",
redirect = false,
sharedMessages = "common.json",
messagesMustBeSplit = false,
} = pluginOptions
const getMessages = (path, language, file) => {
const languagePath = `${path}/${language}`
const filePath = `${languagePath}${removeFileEndings(file)}.json`
const sharedMessagePath = `${languagePath}/${sharedMessages}`
let messages
try {
messages = require(filePath)
} catch (error) {
if (messagesMustBeSplit) {
if (typeof messages === "undefined") {
// && messagesMustBeSplit){
console.error(
`[gatsby-plugin-intl] couldn't read file "${filePath}", messages is undefined.`
)
}
if (error.code === "MODULE_NOT_FOUND") {
process.env.NODE_ENV !== "test" &&
console.error(
`[gatsby-plugin-intl] couldn't find file "${filePath}"`
)
}
}
}
if (fs.existsSync(`${sharedMessagePath}`)) {
messages = messages
? Object.assign(messages, require(`${sharedMessagePath}`))
: require(`${sharedMessagePath}`)
}
if (Object.keys(messages).length === 0 && messages.constructor === Object) {
console.error(
`No translations for language '${language}': Could neither find common file (${sharedMessagePath}) nor file "${filePath}"`
)
return {}
} else {
return flatten(messages)
}
}
const generatePage = (routed, language) => {
const fileName = page.path !== "/" ? page.path : "/index"
const messages = getMessages(path, language, fileName)
const newPath = routed ? `/${language}${page.path}` : page.path
return {
...page,
path: newPath,
context: {
...page.context,
language,
intl: {
language,
languages,
messages,
routed,
originalPath: page.path,
redirect,
defaultLanguage,
},
},
}
}
const newPage = generatePage(false, defaultLanguage)
deletePage(page)
createPage(newPage)
languages.forEach(language => {
const localePage = generatePage(true, language)
const regexp = new RegExp("/404/?$")
if (regexp.test(localePage.path)) {
localePage.matchPath = `/${language}/*`
}
createPage(localePage)
})
}
exports.onPreInit = () => {
console.log("Loading Plugin 'Gatsby-Plugin-Intl'")
}
|
<reponame>xwf20050250/SmallUtils
package com.smallcake.utils;
import android.content.Context;
/**
 * Static helpers for converting between density-independent pixels (dp)
 * and device pixels (px). The single-argument overloads use the
 * application-wide context supplied by {@code SmallUtils.getApp()}.
 */
public class DpPxUtils {
    /** Utility class; not instantiable. */
    private DpPxUtils() {
        /* cannot be instantiated */
        throw new UnsupportedOperationException("cannot be instantiated");
    }

    /** Convert dp to px using the given context's display density (rounded). */
    public static int dp2px(Context context, float dpValue) {
        float density = context.getResources().getDisplayMetrics().density;
        return (int) (dpValue * density + 0.5f);
    }

    /** Convert dp to px using the app-wide context's display density (rounded). */
    public static int dp2px( float dpValue) {
        float density = SmallUtils.getApp().getResources().getDisplayMetrics().density;
        return (int) (dpValue * density + 0.5f);
    }

    /** Convert px to dp using the app-wide context's display density (rounded). */
    public static int px2dp( float pxValue) {
        float density = SmallUtils.getApp().getResources().getDisplayMetrics().density;
        return (int) (pxValue / density + 0.5f);
    }
}
|
<filename>node_modules/react-icons-kit/icomoon/underline.js<gh_stars>1-10
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.underline = void 0;
var underline = {
"viewBox": "0 0 16 16",
"children": [{
"name": "path",
"attribs": {
"fill": "#000000",
"d": "M11 1h2v6.5c0 2.485-2.239 4.5-5 4.5s-5-2.015-5-4.5v-6.5h2v6.5c0 0.628 0.285 1.23 0.802 1.695 0.577 0.519 1.357 0.805 2.198 0.805s1.621-0.286 2.198-0.805c0.517-0.466 0.802-1.068 0.802-1.695v-6.5zM3 13h10v2h-10z"
}
}]
};
exports.underline = underline; |
package org.apache.tapestry5.integration.app1.base;
import org.apache.tapestry5.annotations.Log;
/**
 * Base class for integration-test pages; appears intended to verify that
 * {@code @Log} advice on a render-phase method is inherited by subclasses
 * (TODO confirm against the test that uses it).
 */
public abstract class InheritBase
{
    /** Render-phase method; intentionally empty — only its @Log advice matters. */
    @Log
    public void setupRender()
    {
    }
}
|
<reponame>moyudyz/gin_vue_admin_test
package core
import (
"fmt"
"gin-vue-admin/config"
"gin-vue-admin/global"
"gin-vue-admin/utils"
"io"
"os"
"strings"
"time"
"github.com/gin-gonic/gin"
rotatelogs "github.com/lestrrat/go-file-rotatelogs"
oplogging "github.com/op/go-logging"
)
// Logging constants: output directory, the "latest" symlink name, and the
// go-logging module name used for per-module level configuration.
const (
logDir = "log"
logSoftLink = "latest_log"
module = "gin-vue-admin"
)
// defaultFormatter is the go-logging pattern: timestamp, source location,
// a bold colored level marker, the message, then a color reset.
var (
defaultFormatter = `%{time:2006/01/02 - 15:04:05.000} %{longfile} %{color:bold}▶ [%{level:.6s}] %{message}%{color:reset}`
)
// init wires up the global logger: a stdout backend (when configured)
// plus an optional rotating file backend. The configured logger is
// published via global.GVA_LOG; gin's default writer is mirrored into
// the log file when file logging is active.
func init() {
	c := global.GVA_CONFIG.Log
	if c.Prefix == "" {
		// Bug fix: the old code built an error with fmt.Errorf and then
		// discarded it (`_ =`), so the misconfiguration was silent.
		fmt.Println("logger prefix not found")
	}
	logger := oplogging.MustGetLogger(module)
	var backends []oplogging.Backend
	registerStdout(c, &backends)
	if fileWriter := registerFile(c, &backends); fileWriter != nil {
		// Mirror gin's own output into the log file as well as stdout.
		gin.DefaultWriter = io.MultiWriter(fileWriter, os.Stdout)
	}
	oplogging.SetBackend(backends...)
	global.GVA_LOG = logger
}
// registerStdout appends a stdout backend at the level named by c.Stdout.
// A level-parse failure is reported but, matching the original behavior,
// the backend is still registered with whatever level value was returned.
func registerStdout(c config.Log, backends *[]oplogging.Backend) {
	if c.Stdout == "" {
		return
	}
	level, err := oplogging.LogLevel(c.Stdout)
	if err != nil {
		fmt.Println(err)
	}
	*backends = append(*backends, createBackend(os.Stdout, c, level))
}
// registerFile appends a rotating-file backend when file logging is
// configured (c.File non-empty) and returns the file writer so the caller
// can tee other output into it. Returns nil when file logging is disabled
// or the rotating writer could not be created.
func registerFile(c config.Log, backends *[]oplogging.Backend) io.Writer {
	if c.File == "" {
		return nil
	}
	if ok, _ := utils.PathExists(logDir); !ok {
		// directory not exist
		fmt.Println("create log directory")
		_ = os.Mkdir(logDir, os.ModePerm)
	}
	fileWriter, err := rotatelogs.New(
		logDir+string(os.PathSeparator)+"%Y-%m-%d-%H-%M.log",
		// generate soft link, point to latest log file
		rotatelogs.WithLinkName(logSoftLink),
		// maximum time to save log files
		rotatelogs.WithMaxAge(7*24*time.Hour),
		// time period of log file switching
		rotatelogs.WithRotationTime(24*time.Hour),
	)
	if err != nil {
		// Bug fix: the old code logged this error but still registered a
		// backend around a nil writer and returned it; bail out instead.
		fmt.Println(err)
		return nil
	}
	level, err := oplogging.LogLevel(c.File)
	if err != nil {
		fmt.Println(err)
	}
	*backends = append(*backends, createBackend(fileWriter, c, level))
	return fileWriter
}
// createBackend builds a leveled, formatted go-logging backend for the
// given writer. Stdout gets the colorized format; other writers get the
// plain pattern (see getLogFormatter).
func createBackend(w io.Writer, c config.Log, level oplogging.Level) oplogging.Backend {
	raw := oplogging.NewLogBackend(w, c.Prefix, 0)
	isStdout := w == os.Stdout
	formatted := oplogging.NewBackendFormatter(raw, getLogFormatter(c, isStdout))
	leveled := oplogging.AddModuleLevel(formatted)
	leveled.SetLevel(level, module)
	return leveled
}
// getLogFormatter derives the format pattern from defaultFormatter:
// the %{color:*} tags are kept only for console (stdout) output, and the
// %{longfile} source-location tag is kept only when c.LogFile is set.
func getLogFormatter(c config.Log, stdoutWriter bool) oplogging.Formatter {
	pattern := defaultFormatter
	if !stdoutWriter {
		// Color escape tags only make sense on a terminal.
		for _, tag := range []string{"%{color:bold}", "%{color:reset}"} {
			pattern = strings.Replace(pattern, tag, "", -1)
		}
	}
	if !c.LogFile {
		// Drop the %{longfile} (source file:line) tag when disabled.
		pattern = strings.Replace(pattern, "%{longfile}", "", -1)
	}
	return oplogging.MustStringFormatter(pattern)
}
|
/* eslint global-require: off */
import electron, { app, clipboard, BrowserWindow } from 'electron';
import { autoUpdater } from 'electron-updater';
import log from 'electron-log';
import MenuBuilder from './utils/menu';
import tray from './utils/tray';
import config, { installExtensions } from './config';
import {
onActivate,
onBeforeQuit,
onWillQuit,
initShortcuts,
onBlur,
onClose,
onClosed,
onDidFinishLoad,
onWindowAllClosed,
initClipboard
} from './helpers';
import receiver from './helpers/receiver';
// Wires electron-updater to electron-log and immediately starts a
// background update check that notifies the user when one is available.
export default class AppUpdater {
  constructor() {
    autoUpdater.logger = log;
    log.transports.file.level = 'info';
    autoUpdater.checkForUpdatesAndNotify();
  }
}
// robotjs is a native module, loaded via require (global-require is
// disabled for this file via the eslint pragma at the top).
let robot = require('robotjs');
let mainWindow = null;
let trayBuilder = null;
// Run config side effects before any app event fires (presumably env /
// path setup — see ./config; confirm).
config();
/**
 * Add event listeners...
 */
// Menu-bar style app: keep it out of the macOS Dock.
app.dock.hide();
// Main startup: create the (initially hidden) browser window, wire all
// window/app lifecycle handlers, shortcuts, clipboard, tray and updater.
app.on('ready', async () => {
  // Install devtools extensions for development / debug-production runs.
  if (
    process.env.NODE_ENV === 'development' ||
    process.env.DEBUG_PROD === 'true'
  ) {
    await installExtensions();
  }
  mainWindow = new BrowserWindow({
    show: false,
    frame: true,
    width: 200,
    height: 300
    // titleBarStyle: 'hiddenInset'
  });
  mainWindow.loadURL(`file://${__dirname}/app.html`);
  mainWindow.webContents.on('did-finish-load', () => {
    onDidFinishLoad(mainWindow);
  });
  // Lifecycle events are delegated to the helpers module.
  app.on('activate', () => onActivate(mainWindow));
  mainWindow.on('blur', () => onBlur(mainWindow));
  app.on('before-quit', () => onBeforeQuit());
  mainWindow.on('close', event => onClose(mainWindow, event));
  app.on('will-quit', () => onWillQuit());
  mainWindow.on('closed', () => onClosed(mainWindow));
  initShortcuts(mainWindow);
  initClipboard(mainWindow);
  // When the receiver delivers a payload: hide this app, copy the payload
  // to the clipboard, then synthesize Cmd+V (key down, 100ms, key up) to
  // paste it into whatever app is now frontmost.
  receiver(data => {
    console.log(data.data);
    mainWindow.hide();
    app.hide();
    // robot.typeString(data.data);
    // TODO: payload needs base64 conversion
    clipboard.write({ text: data.data });
    setTimeout(() => {
      robot.keyToggle('v', 'down', 'command');
      setTimeout(() => {
        robot.keyToggle('v', 'up', 'command');
      }, 100);
    }, 200);
  });
  const menuBuilder = new MenuBuilder(mainWindow);
  menuBuilder.buildMenu();
  // eslint-disable-next-line new-cap
  trayBuilder = new tray();
  trayBuilder.buildTray();
  console.log(process.env.ALWAYS_SHOW);
  // ALWAYS_SHOW forces the window visible at startup (any non-empty value).
  if (process.env.ALWAYS_SHOW) {
    mainWindow.show();
    mainWindow.focus();
  }
  // eslint-disable-next-line
  new AppUpdater();
});
// Delegated: the helper decides whether to quit when all windows close.
app.on('window-all-closed', () => onWindowAllClosed());
|
/*****
License
--------------
Copyright © 2017 Bill & Melinda Gates Foundation
The Mojaloop files are made available by the Bill & Melinda Gates Foundation under the Apache License, Version 2.0 (the "License") and you may not use these files except in compliance with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, the Mojaloop files are distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Contributors
--------------
This is the official list (alphabetical ordering) of the Mojaloop project contributors for this file.
Names of the original copyright holders (individuals or organizations)
should be listed with a '*' in the first column. People who have
contributed from an organization can be listed under the organization
that actually holds the copyright for their contributions (see the
Gates Foundation organization for an example). Those individuals should have
their names indented and be marked with a '-'. Email address can be added
optionally within square brackets <email>.
* Gates Foundation
- Name Surname <<EMAIL>>
* Crosslake
- <NAME> <<EMAIL>>
--------------
******/
"use strict"
import express from "express";
import {
CannotCreateDuplicateAppPrivilegesError,
CannotCreateDuplicateRoleError,
CannotOverrideAppPrivilegesError, CannotStorePlatformRoleError,
CouldNotStoreAppPrivilegesError,
InvalidAppPrivilegesError,
InvalidPlatformRoleError,
NewRoleWithPrivsUsersOrAppsError, PlatformRoleNotFoundError, PrivilegeNotFoundError
} from "../domain/errors";
import {AllPrivilegesResp} from "../domain/types";
import {ILogger} from "@mojaloop/logging-bc-client-lib";
import {AppPrivileges, PlatformRole} from "@mojaloop/security-bc-public-types-lib";
import {AuthorizationAggregate} from "../domain/authorization_agg";
/**
 * Express routing layer for the authorization service.
 *
 * Exposes three routers:
 *  - MainRouter:       POST /appbootstrap                      (register an app's privileges)
 *  - PrivilegesRouter: GET  /                                  (list all privileges)
 *  - RolesRouter:      GET / , POST / , POST /:roleId/associatePrivileges
 *
 * Every handler delegates to the AuthorizationAggregate and maps known
 * domain errors onto 400/404 responses; anything unexpected becomes 500.
 */
export class ExpressRoutes {
    private _logger:ILogger;
    private _authorizationAggregate: AuthorizationAggregate;
    private _mainRouter = express.Router();
    private _privilegesRouter = express.Router();
    private _rolesRouter = express.Router();

    constructor(authorizationAggregate: AuthorizationAggregate, logger:ILogger) {
        this._logger = logger;
        this._authorizationAggregate = authorizationAggregate;
        // main
        this._mainRouter.post("/appbootstrap", this.postAppbootstrap.bind(this));
        // privileges
        this._privilegesRouter.get("/", this.getAllAppPrivileges.bind(this));
        // roles
        this._rolesRouter.get("/", this.getAllPlatformRole.bind(this));
        this._rolesRouter.post("/", this.postPlatformRole.bind(this));
        this._rolesRouter.post("/:roleId/associatePrivileges", this.postAssociatePrivsToPlatformRole.bind(this));
    }

    get MainRouter():express.Router{
        return this._mainRouter;
    }

    get PrivilegesRouter():express.Router{
        return this._privilegesRouter;
    }

    get RolesRouter():express.Router{
        return this._rolesRouter;
    }

    /**
     * POST /appbootstrap — register an application's privilege set.
     * 400 for invalid, duplicate or version-downgraded privileges;
     * 500 for storage or unknown failures.
     */
    private async postAppbootstrap(req: express.Request, res: express.Response, next: express.NextFunction){
        const data: AppPrivileges = req.body as AppPrivileges;
        this._logger.debug(data);
        await this._authorizationAggregate.processAppBootstrap(data).then(()=>{
            return res.status(200).send();
        }).catch((error: Error)=>{
            if (error instanceof InvalidAppPrivilegesError) {
                return res.status(400).json({
                    status: "error",
                    msg: "Received invalid AppPrivileges"
                });
            } else if (error instanceof CannotCreateDuplicateAppPrivilegesError) {
                return res.status(400).json({
                    status: "error",
                    msg: "Received duplicate AppPrivileges"
                });
            } else if (error instanceof CannotOverrideAppPrivilegesError) {
                return res.status(400).json({
                    status: "error",
                    msg: "Received AppPrivileges with lower version than latest"
                });
            } else if (error instanceof CouldNotStoreAppPrivilegesError) {
                return res.status(500).json({
                    status: "error",
                    msg: "Could not store appPrivileges"
                });
            } else {
                return res.status(500).json({
                    status: "error",
                    msg: "unknown error"
                });
            }
        });
    }

    /** GET / (privileges) — list every registered privilege. */
    private async getAllAppPrivileges(req: express.Request, res: express.Response, next: express.NextFunction){
        await this._authorizationAggregate.getAllPrivileges().then((resp:AllPrivilegesResp[])=>{
            return res.send(resp);
        }).catch(()=>{
            // Fixed log-message typo ("getAllApprPrivileged").
            this._logger.error("error in getAllAppPrivileges route");
            return res.status(500).json({
                status: "error",
                msg: "unknown error"
            });
        });
    }

    // roles

    /** GET / (roles) — list every platform role. */
    private async getAllPlatformRole(req: express.Request, res: express.Response, next: express.NextFunction){
        await this._authorizationAggregate.getAllRoles().then((resp:PlatformRole[])=>{
            return res.send(resp);
        }).catch(()=>{
            this._logger.error("error in getAllPlatformRole route");
            return res.status(500).json({
                status: "error",
                msg: "unknown error"
            });
        });
    }

    /**
     * POST / (roles) — create a local role; responds with { roleId }.
     * 400 for invalid roles, roles created with members/privileges
     * attached, or duplicates.
     */
    private async postPlatformRole(req: express.Request, res: express.Response, next: express.NextFunction){
        const data: PlatformRole = req.body as PlatformRole;
        this._logger.debug(data);
        await this._authorizationAggregate.createLocalRole(data).then((roleId:string)=>{
            return res.status(200).send({roleId: roleId});
        }).catch((error: Error)=>{
            if (error instanceof InvalidPlatformRoleError) {
                return res.status(400).json({
                    status: "error",
                    msg: "Received invalid PlatformRole"
                });
            } else if (error instanceof NewRoleWithPrivsUsersOrAppsError) {
                return res.status(400).json({
                    status: "error",
                    msg: "New roles cannot have privileges, member users or member apps"
                });
            } else if (error instanceof CannotCreateDuplicateRoleError) {
                return res.status(400).json({
                    status: "error",
                    msg: "Received duplicate PlatformRole"
                });
            } else {
                return res.status(500).json({
                    status: "error",
                    msg: "unknown error"
                });
            }
        });
    }

    /**
     * POST /:roleId/associatePrivileges — attach privileges to a role.
     * Body must be a non-empty array of privilege id strings.
     * 404 when the role is unknown, 400 for a bad body or unknown privilege.
     */
    private async postAssociatePrivsToPlatformRole(req: express.Request, res: express.Response, next: express.NextFunction){
        const roleId = req.params["roleId"] ?? null;
        // body is supposed to be array of strings
        const data: string[] = req.body as string[];
        this._logger.debug(data);
        if(!roleId){
            return res.status(400).json({
                status: "error",
                msg: "invalid PlatformRole"
            });
        }
        if(!Array.isArray(data) || data.length<=0){
            return res.status(400).json({
                status: "error",
                msg: "invalid privilege id list in body"
            });
        }
        await this._authorizationAggregate.associatePrivilegesToRole(data, roleId).then(()=>{
            return res.status(200).send();
        }).catch((error: Error)=>{
            if (error instanceof PlatformRoleNotFoundError) {
                return res.status(404).json({
                    status: "error",
                    msg: "PlatformRole not found"
                });
            } else if (error instanceof PrivilegeNotFoundError) {
                return res.status(400).json({
                    status: "error",
                    msg: "Privilege not found"
                });
            } else {
                return res.status(500).json({
                    status: "error",
                    msg: "unknown error"
                });
            }
        });
        return;
    }
}
|
#!/bin/bash
set -e
# 6.52. Ninja-1.10.0
# Ninja is a small build system with a focus on speed.
echo "Building Ninja..."
echo "Approximate build time: 0.3 SBU"
echo "Required disk space: 89 MB"
# Unpack the source into /tmp/ninja and build from there.
tar -xf /sources/ninja-*.tar.* -C /tmp/ \
&& mv /tmp/ninja-* /tmp/ninja \
&& pushd /tmp/ninja
# Add the capability to use the environment variable NINJAJOBS:
sed -i '/int Guess/a \
int j = 0;\
char* jobs = getenv( "NINJAJOBS" );\
if ( jobs != NULL ) j = atoi( jobs );\
if ( j > 0 ) return j;\
' src/ninja.cc
# Build Ninja:
python3 configure.py --bootstrap
# Test the results:
if [ "$LFS_TEST" -eq 1 ]; then
  ./ninja ninja_test
  ./ninja_test --gtest_filter=-SubprocessTest.SetWithLots
fi
# Install the package:
install -vm755 ninja /usr/bin/
install -vDm644 misc/bash-completion /usr/share/bash-completion/completions/ninja
install -vDm644 misc/zsh-completion /usr/share/zsh/site-functions/_ninja
# cleanup — bug fix: the original removed /tmp/python (a copy/paste slip
# from another package script) and left the ninja build tree behind.
popd \
&& rm -rf /tmp/ninja
|
#!/bin/bash
# Integration smoke test: create a virtual null-modem (socat PTY pair
# modem0 <-> modem1), run the Node server on one end and the C client on
# the other, then dump each process's captured stdout/stderr.
rm cliStdo.log &> /dev/null
# socat self-terminates after 8s; -x -v logs transferred bytes.
timeout 8 socat -x -v PTY,link=modem0 PTY,link=modem1&
sleep 1
# Server gets SIGINT after 6s, client after 4s — staggered so the server
# outlives the client.
timeout -s SIGINT 6 nodejs ServerUIntCfg/main.js > srvStdo.log 2> srvStdr.log&
sleep 3
timeout -s SIGINT 4 ./ClientUIntCfg/ClientUIntCfg > cliStdo.log 2> cliStdr.log&
sleep 5
# Client stdout goes to our stdout; everything else to stderr.
#printf "Cli stdo:\n\n"
cat cliStdo.log
#printf "\nCli stdr:\n\n"
>&2 cat cliStdr.log
#printf "\n\nSrv stdo:\n\n"
>&2 cat srvStdo.log
#printf "\nSrv stdr:\n\n"
>&2 cat srvStdr.log
import { DynamoDB } from "aws-sdk";
import { News, NewsSource } from "../../common/models";
import { Event } from "./models";
import { atob, logWarmState } from "./utils";
// Environment variables
// (table names default to "" so they are always strings; the remaining
// values stay possibly-undefined and are coerced where used)
const {
  REGION,
  NEWS_TABLE_NAME = "",
  SOURCES_TABLE_NAME = "",
  MEDIA_URL,
  DEFAULT_LIMIT,
  MAX_LIMIT,
  SUMMARY_LENGTH,
} = process.env;
// Init DynamoDB document client
const docClient = new DynamoDB.DocumentClient({
  service: new DynamoDB({ region: REGION }),
});
// Initial warm state
// Module-level flag: persists across invocations inside one Lambda
// container, so it distinguishes cold starts from warm invocations.
let warm = false;
/**
 * Lambda handler: query news items for a source (optionally a single
 * CreatedAt), embed source metadata, truncate summaries, prefix media
 * URLs, and return the DynamoDB query result as the response body.
 */
export const handler = async (event: Event) => {
  if (event.warm) {
    // Lambda warmed, return ASAP.
    logWarmState("warmer", warm);
    warm = true;
    return;
  }
  logWarmState("user", warm);
  warm = true;
  // Request parameters
  const {
    Source,
    CreatedAt,
    ProjectionExpression,
    Limit = DEFAULT_LIMIT,
    ExclusiveStartKey,
  } = event.queryStringParameters;
  // Fetch news and sources
  // Partition key is Source; CreatedAt (sort key) narrows to one item.
  // "#Source" alias is needed because Source is a DynamoDB reserved word.
  let KeyConditionExpression = "#Source = :source";
  let ExpressionAttributeValues: DynamoDB.DocumentClient.ExpressionAttributeValueMap = {
    ":source": Source,
  };
  if (CreatedAt) {
    KeyConditionExpression += " AND CreatedAt = :createdAt";
    ExpressionAttributeValues = {
      ...ExpressionAttributeValues,
      ":createdAt": CreatedAt,
    };
  }
  const newsParams: DynamoDB.DocumentClient.QueryInput = {
    TableName: NEWS_TABLE_NAME,
    KeyConditionExpression,
    ExpressionAttributeNames: { "#Source": "Source" },
    ExpressionAttributeValues,
    ProjectionExpression,
    // NOTE(review): if DEFAULT_LIMIT/MAX_LIMIT env vars are unset this is
    // Math.min(NaN, NaN) = NaN — confirm both are always configured.
    Limit: Math.min(Number(Limit), Number(MAX_LIMIT)),
    ScanIndexForward: false,
  };
  if (ExclusiveStartKey) {
    // Pagination cursor arrives base64-encoded JSON.
    newsParams.ExclusiveStartKey = JSON.parse(atob(ExclusiveStartKey));
  }
  let newsResult: DynamoDB.DocumentClient.QueryOutput;
  let sourcesResult: DynamoDB.DocumentClient.QueryOutput;
  try {
    newsResult = await docClient.query(newsParams).promise();
    sourcesResult = await docClient
      .scan({ TableName: SOURCES_TABLE_NAME })
      .promise();
  } catch (e) {
    // NOTE(review): `e` is returned as the raw body object; API Gateway
    // presumably expects a string body — confirm serialization downstream.
    return {
      statusCode: 500,
      body: e,
    };
  }
  // Hash the sources for embedding
  const sources: { [id: string]: Partial<NewsSource> } = {};
  const sourceItems = sourcesResult.Items ?? [];
  sourceItems.forEach((item) => {
    const source: NewsSource = item as NewsSource;
    const { Id, Name, Avatar } = source;
    sources[Id] = {
      Name,
      // Add base url to avatar
      Avatar: `${MEDIA_URL}/${Avatar}`,
    };
  });
  // Map the output
  const newsItems = newsResult.Items ?? [];
  newsResult.Items = newsItems.map((item) => {
    const news = item as News;
    // Embed the source
    // NOTE(review): looks up sources[Source] (the query parameter) rather
    // than sources[news.Source]; equivalent here only because the query is
    // keyed on Source — confirm if other access paths are added.
    if (news.Source) {
      news.Source = sources[Source];
    }
    // Truncate the summary
    if (news.Summary) {
      news.Summary = `${news.Summary.slice(0, Number(SUMMARY_LENGTH))} ...`;
    }
    // Add base url to image
    if (news.Image) {
      news.Image = `${MEDIA_URL}/${news.Image}`;
    }
    return news;
  });
  return {
    statusCode: 200,
    body: JSON.stringify(newsResult),
  };
};
|
<reponame>FOCONIS/ebean<gh_stars>1-10
package io.ebean.bean;
/**
 * Holds information on mutable values (like plain beans stored as json).
 * <p>
 * Used internally in EntityBeanIntercept for dirty detection on mutable values.
 * Typically dirty detection is based on a hash/checksum of json content or the
 * original json content itself.
 * <p>
 * Refer to the mapping options {@code @DbJson(dirtyDetection)} and {@code @DbJson(keepSource)}.
 */
public interface MutableValueInfo {
  /**
   * Compares the given json returning null if deemed unchanged or returning
   * the MutableValueNext to use if deemed dirty/changed.
   * <p>
   * Returning MutableValueNext allows an implementation based on hash/checksum
   * to only perform that computation once.
   *
   * @param json the current json content of the property
   * @return Null if deemed unchanged or the MutableValueNext if deemed changed.
   */
  MutableValueNext nextDirty(String json);
  /**
   * Compares the given object to an internal value.
   * <p>
   * This is used to support changelog/beanState. The implementation can serialize the
   * object into json form and compare it against the original json.
   *
   * @param obj the candidate value to compare against the held state
   * @return true if the object is deemed equal to the internal value
   */
  boolean isEqualToObject(Object obj);
  /**
   * Creates a new instance from the internal json string.
   * <p>
   * This is used to provide an original/old value for change logging / persist listeners.
   * This is only available for properties that have {@code @DbJson(keepSource=true)};
   * the default implementation returns null.
   */
  default Object get() {
    return null;
  }
}
|
def simplify(numerator, denominator):
    """Reduce a fraction to lowest terms.

    Args:
        numerator: Fraction numerator (int).
        denominator: Fraction denominator (int, non-zero).

    Returns:
        Tuple ``(numerator, denominator)`` with both divided by their
        greatest common divisor.
    """
    # Bug fix: the original did `gcd = gcd(numerator, denominator)`, which
    # shadows the (nonexistent) gcd function with a local and raised
    # UnboundLocalError on every call. Use math.gcd instead; the division
    # is exact after dividing by the gcd, so floor division is safe.
    divisor = math.gcd(numerator, denominator)
    return numerator // divisor, denominator // divisor
#include <immintrin.h>
#include <stdio.h>
/*
 * Lane-wise signed 64-bit "greater than": lane i of the result is all
 * ones (-1) where a[i] > b[i] and zero otherwise (AVX2
 * _mm256_cmpgt_epi64; requires an AVX2-capable CPU and -mavx2).
 */
__m256i compareVectors(__m256i a, __m256i b) {
__m256i greaterThanMask = _mm256_cmpgt_epi64(a, b);
return greaterThanMask;
}
int main() {
/* _mm256_set_epi64x takes lanes high-to-low: lane3=10 ... lane0=40. */
__m256i vectorA = _mm256_set_epi64x(10, 20, 30, 40);
__m256i vectorB = _mm256_set_epi64x(25, 15, 35, 45);
__m256i result = compareVectors(vectorA, vectorB);
/* NOTE(review): printf requires <stdio.h>, which this file did not
 * include. Reading the vector through a long long* works on common
 * compilers but is strict-aliasing-sensitive in general. */
long long int* resArray = (long long int*)&result;
/* High lane printed first; for these inputs expect: 0 -1 0 0. */
printf("Comparison result: %lld %lld %lld %lld\n", resArray[3], resArray[2], resArray[1], resArray[0]);
return 0;
}
/**
 * Arithmetic mean of an array of numbers.
 * Uses reduce instead of a mutable forEach accumulator (idiomatic, same
 * result). An empty array yields NaN (0 / 0), matching the original.
 * @param {number[]} arr - values to average
 * @returns {number} the mean of `arr`
 */
const average = (arr) => arr.reduce((sum, num) => sum + num, 0) / arr.length;
console.log(average([2, 4, 6, 8])); // Prints 5
#! /bin/bash
# Run the service locally against the dev environment (region "sh",
# zone "sh001"), with glog output sent to stderr.
go run . -region=sh -zone=sh001 -deploy.env=dev -logtostderr=true
#!/bin/sh -e
#
# Copyright (c) 2009-2015 Robert Nelson <robertcnelson@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Host architecture decides whether a cross toolchain is needed at all
# (native armv7l builds use the system gcc).
ARCH=$(uname -m)
DIR=$PWD
# system.sh supplies user overrides (notably CC).
. ${DIR}/system.sh
# version.sh selects which ${toolchain} case to use below.
. ${DIR}/version.sh
# Download and unpack the selected Linaro toolchain into ${DIR}/dl, then
# point CC at its bin/ prefix (or leave CC empty for native armv7l).
# Relies on globals set by gcc_toolchain(): site, version, filename,
# directory, datestamp, binary, toolchain_name.
dl_gcc_generic () {
	WGET="wget -c --directory-prefix=${DIR}/dl/"
	if [ ! -f ${DIR}/dl/${directory}/${datestamp} ] ; then
		echo "Installing: ${toolchain_name}"
		echo "-----------------------------"
		# Fix: the archive name was garbled here ("$(unknown)"); every
		# case branch defines ${filename}, so download and extract that.
		${WGET} ${site}/${version}/${filename}
		if [ -d ${DIR}/dl/${directory} ] ; then
			rm -rf ${DIR}/dl/${directory} || true
		fi
		tar -xf ${DIR}/dl/${filename} -C ${DIR}/dl/
		# Stamp the unpack as complete only if gcc actually exists there.
		if [ -f ${DIR}/dl/${directory}/${binary}gcc ] ; then
			touch ${DIR}/dl/${directory}/${datestamp}
		fi
	fi
	if [ "x${ARCH}" = "xarmv7l" ] ; then
		#using native gcc
		CC=
	else
		CC="${DIR}/dl/${directory}/${binary}"
	fi
}
# Map the ${toolchain} name chosen in version.sh to Linaro download
# metadata (site path, tarball name, unpack directory, datestamp marker,
# bin/ prefix), then fetch it via dl_gcc_generic.
gcc_toolchain () {
	site="https://releases.linaro.org/archive"
	case "${toolchain}" in
	gcc_linaro_eabi_4_8)
		#
		#https://releases.linaro.org/14.04/components/toolchain/binaries/gcc-linaro-arm-none-eabi-4.8-2014.04_linux.tar.xz
		#
		gcc_version="4.8"
		release="2014.04"
		toolchain_name="gcc-linaro-arm-none-eabi"
		version="14.04/components/toolchain/binaries"
		directory="${toolchain_name}-${gcc_version}-${release}_linux"
		filename="${directory}.tar.xz"
		datestamp="${release}-${toolchain_name}"
		binary="bin/arm-none-eabi-"
		;;
	gcc_linaro_eabi_4_9_i686)
		# 32-bit-host fallback build of the eabi 4.9 toolchain.
		#
		#https://releases.linaro.org/14.09/components/toolchain/binaries/gcc-linaro-arm-none-eabi-4.9-2014.09_linux.tar.xz
		#
		gcc_version="4.9"
		release="2014.09"
		toolchain_name="gcc-linaro-arm-none-eabi"
		version="14.09/components/toolchain/binaries"
		directory="${toolchain_name}-${gcc_version}-${release}_linux"
		filename="${directory}.tar.xz"
		datestamp="${release}-${toolchain_name}"
		binary="bin/arm-none-eabi-"
		;;
	gcc_linaro_eabi_4_9)
		# Note: 14.11+ releases changed the URL and naming scheme.
		#
		#https://releases.linaro.org/14.11/components/toolchain/binaries/arm-none-eabi/gcc-linaro-4.9-2014.11-x86_64_arm-eabi.tar.xz
		#
		gcc_version="4.9"
		release="14.11"
		target="arm-none-eabi"
		version="${release}/components/toolchain/binaries/${target}"
		filename="gcc-linaro-${gcc_version}-20${release}-x86_64_arm-eabi.tar.xz"
		directory="gcc-linaro-${gcc_version}-20${release}-x86_64_arm-eabi"
		datestamp="${gcc_version}-20${release}-${target}"
		binary="bin/${target}-"
		;;
	gcc_linaro_gnueabi_4_6)
		#
		#https://releases.linaro.org/12.03/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabi-2012.03-20120326_linux.tar.bz2
		#
		release="2012.03"
		toolchain_name="gcc-linaro-arm-linux-gnueabi"
		version="12.03/components/toolchain/binaries"
		version_date="20120326"
		directory="${toolchain_name}-${release}-${version_date}_linux"
		filename="${directory}.tar.bz2"
		datestamp="${version_date}-${toolchain_name}"
		binary="bin/arm-linux-gnueabi-"
		;;
	gcc_linaro_gnueabihf_4_7)
		#
		#https://releases.linaro.org/13.04/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabihf-4.7-2013.04-20130415_linux.tar.xz
		#
		gcc_version="4.7"
		release="2013.04"
		toolchain_name="gcc-linaro-arm-linux-gnueabihf"
		version="13.04/components/toolchain/binaries"
		version_date="20130415"
		directory="${toolchain_name}-${gcc_version}-${release}-${version_date}_linux"
		filename="${directory}.tar.xz"
		datestamp="${version_date}-${toolchain_name}"
		binary="bin/arm-linux-gnueabihf-"
		;;
	gcc_linaro_gnueabihf_4_8)
		#
		#https://releases.linaro.org/14.04/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabihf-4.8-2014.04_linux.tar.xz
		#
		gcc_version="4.8"
		release="2014.04"
		toolchain_name="gcc-linaro-arm-linux-gnueabihf"
		version="14.04/components/toolchain/binaries"
		directory="${toolchain_name}-${gcc_version}-${release}_linux"
		filename="${directory}.tar.xz"
		datestamp="${release}-${toolchain_name}"
		binary="bin/arm-linux-gnueabihf-"
		;;
	gcc_linaro_gnueabihf_4_9_i686)
		# 32-bit-host fallback build of the gnueabihf 4.9 toolchain.
		#
		#https://releases.linaro.org/14.09/components/toolchain/binaries/gcc-linaro-arm-linux-gnueabihf-4.9-2014.09_linux.tar.xz
		#
		gcc_version="4.9"
		release="2014.09"
		toolchain_name="gcc-linaro-arm-linux-gnueabihf"
		version="14.09/components/toolchain/binaries"
		directory="${toolchain_name}-${gcc_version}-${release}_linux"
		filename="${directory}.tar.xz"
		datestamp="${release}-${toolchain_name}"
		binary="bin/arm-linux-gnueabihf-"
		;;
	gcc_linaro_gnueabihf_4_9)
		#
		#https://releases.linaro.org/14.11/components/toolchain/binaries/arm-linux-gnueabihf/gcc-linaro-4.9-2014.11-x86_64_arm-linux-gnueabihf.tar.xz
		#
		gcc_version="4.9"
		release="14.11"
		target="arm-linux-gnueabihf"
		version="${release}/components/toolchain/binaries/${target}"
		filename="gcc-linaro-${gcc_version}-20${release}-x86_64_${target}.tar.xz"
		directory="gcc-linaro-${gcc_version}-20${release}-x86_64_${target}"
		datestamp="${gcc_version}-20${release}-${target}"
		binary="bin/${target}-"
		;;
	*)
		echo "bug: maintainer forgot to set:"
		echo "toolchain=\"xzy\" in version.sh"
		exit 1
		;;
	esac
	dl_gcc_generic
}
if [ "x${CC}" = "x" ] && [ "x${ARCH}" != "xarmv7l" ] ; then
if [ "x${ARCH}" = "xi686" ] ; then
echo ""
echo "Warning: 32bit is no longer supported by linaro..."
if [ "x${toolchain}" = "xgcc_linaro_eabi_4_9" ] ; then
echo ""
echo "Warning: 32bit is no longer supported by linaro, using old 14.09 gcc-4.9 release..."
echo ""
toolchain="gcc_linaro_eabi_4_9_i686"
fi
if [ "x${toolchain}" = "xgcc_linaro_gnueabihf_4_9" ] ; then
echo ""
echo "Warning: 32bit is no longer supported by linaro, using old 14.09 gcc-4.9 release..."
echo ""
toolchain="gcc_linaro_gnueabihf_4_9_i686"
fi
fi
gcc_toolchain
fi
GCC_TEST=$(LC_ALL=C ${CC}gcc -v 2>&1 | grep "Target:" | grep arm || true)
if [ "x${GCC_TEST}" = "x" ] ; then
echo "-----------------------------"
echo "scripts/gcc: Error: The GCC ARM Cross Compiler you setup in system.sh (CC variable) is invalid."
echo "-----------------------------"
gcc_toolchain
fi
echo "-----------------------------"
echo "scripts/gcc: Using: `LC_ALL=C ${CC}gcc --version`"
echo "-----------------------------"
echo "CC=${CC}" > ${DIR}/.CC
|
<gh_stars>1-10
# ActiveRecord model for an application user.
class User < ActiveRecord::Base
  # One user owns many associated Restore records.
  has_many :restores
  # Authlogic: adds authentication (login/password/session) behavior.
  acts_as_authentic
end
|
<filename>src/main/java/frc/robot/commands/moveArm.java<gh_stars>0
package frc.robot.commands;
import edu.wpi.first.wpilibj.command.Command;
import edu.wpi.first.wpilibj.DigitalInput;
import frc.robot.OI;
import frc.robot.subsystems.Arm;
/**
 * Teleop command that drives the Arm subsystem from joystick buttons:
 * button 1 lowers the arm, button 2 raises it while the limit switch
 * permits, and otherwise the arm is commanded to stop. The command never
 * finishes on its own.
 */
public class moveArm extends Command {
    private OI oi;
    private Arm arm;
    // Limit switch on DIO port 0. Assumes get() == true means travel is
    // still allowed (normally-closed wiring) — TODO confirm on the robot.
    DigitalInput armLimitSwitch = new DigitalInput(0);

    //Getting OI and Arm instances to run commands with.
    public moveArm(){
        oi = OI.getInstance();
        arm = Arm.getInstance();
        requires(arm);
    }

    //Execute runs repeatedly while the robot is active.
    // Maps the OI button state to an arm speed each iteration.
    @Override // added for consistency with isFinished (was missing)
    public void execute(){
        if(oi.getLeftButton(1)) {
            arm.setSpeed(-0.25);
        } else if (oi.getLeftButton(2) && armLimitSwitch.get()) {
            arm.setSpeed(0.25);
        }
        else {
            arm.setSpeed(0);
        }
    }

    //We never want this command to stop, so we return false for the abstract isFinished method.
    @Override
    protected boolean isFinished() {
        return false;
    }
}
|
<filename>client/src/settings.js
// Client-wide UI settings: color palette and typography for the app.
export default {
  colors: {
    // Theme definitions consumed by Vuetify's theming system.
    vuetify_themes: {
      light: {
        // Core surfaces.
        background: "#FFFFFF",
        header: "#FFFFFF",
        footer: "#F1F1F1",
        // Standard Vuetify semantic colors.
        primary: "#1967C0",
        secondary: "#424242",
        accent: "#82B1FF",
        success: "#4CAF50",
        info: "#1967C0",
        warning: "#FF550A",
        error: "#FF0000",
        // Muted variants used for subtle backgrounds.
        soft_primary: "#e8f0fe",
        soft_secondary: "#F7F8F9",
        wizard_navigator: "#F7F7F7",
        // Status-chip colors: background/foreground pairs per state.
        in_progress_status__bg: "#e8f0fe",
        in_progress_status__color: "#333333",
        in_execution_status__bg: "#e5e5e5",
        in_execution_status__color: "#333333",
        error_status__bg: "#ffcacb",
        error_status__color: "#a00000",
        completed_status__bg: "#c1feaf",
        completed_status__color: "#333333"
      }
    }
  },
  // Web-font stylesheet loaded at startup and the family name to apply.
  google_fonts_url:
    "https://fonts.googleapis.com/css2?family=Nunito+Sans:ital,wght@0,400;0,600;0,700;1,400;1,600;1,700&display=swap",
  google_fonts_name: "Nunito Sans"
};
|
class Customer:
    """A customer contact record holding a name and a phone number."""

    def __init__(self, name, phone_number):
        """Store the customer's name and phone number as given."""
        self.name = name
        self.phone_number = phone_number

    def get_name(self):
        """Return the customer's name."""
        return self.name

    def get_phone_number(self):
        """Return the customer's phone number."""
        return self.phone_number
#include "Includes.hpp"
// Pool-aware, tagged placement operator new for kernel objects.
// Allocates from the requested pool type and zero-initializes the block.
// Returns nullptr on allocation failure.
void* __cdecl operator new(size_t size, POOL_TYPE pool, ULONG tag)
{
    PVOID newAddress;

    newAddress = ExAllocatePoolWithTag(pool, size, tag);

    //
    // Zero the block to remove remnants from previous use.
    //
    if (newAddress)
    {
        memset(newAddress, 0, size);
    }

    return newAddress;
}
// Matching operator delete for the pool-allocating operator new.
// NOTE(review): frees with ExFreePool rather than ExFreePoolWithTag, so the
// allocation tag is not checked on release — confirm this is intentional.
void __cdecl operator delete(void* p, unsigned __int64)
{
    ExFreePool(p);
}
// Storage for CSysList's static head/tail pointers — one doubly linked list
// per tracked entity type (process, thread, module, blacklist, driver).
PLIST_PROCESS CSysList::ProcessHead;
PLIST_PROCESS CSysList::ProcessLast;
PLIST_THREAD CSysList::ThreadHead;
PLIST_THREAD CSysList::ThreadLast;
PLIST_MODULE CSysList::ModuleHead;
PLIST_MODULE CSysList::ModuleLast;
PLIST_BLACKLIST CSysList::BlacklistHead;
PLIST_BLACKLIST CSysList::BlacklistLast;
PLIST_DRIVER CSysList::DriverHead;
PLIST_DRIVER CSysList::DriverLast;
// Initializes every list to the empty state (null head and tail).
CSysList::CSysList()
{
    CSysList::ProcessHead = nullptr;
    CSysList::ProcessLast = nullptr;
    CSysList::ThreadHead = nullptr;
    CSysList::ThreadLast = nullptr;
    CSysList::ModuleHead = nullptr;
    CSysList::ModuleLast = nullptr;
    CSysList::BlacklistHead = nullptr;
    CSysList::BlacklistLast = nullptr;
    CSysList::DriverHead = nullptr;
    CSysList::DriverLast = nullptr;
}
// NOTE(review): only the blacklist is released here; the process, thread,
// module and driver lists are presumably erased elsewhere — confirm they are
// not leaked on teardown.
CSysList::~CSysList()
{
    CSysList::Erease(ID::BLACKLIST);
}
// Thin wrapper over ExAllocatePoolWithTag.  Unlike the global operator new
// above, the returned memory is NOT zeroed.
void* CSysList::Alloc(size_t size, POOL_TYPE pool, ULONG Tag)
{
    return ExAllocatePoolWithTag(pool, size, Tag);
}
// Thin wrapper over ExFreePoolWithTag; Tag must match the one used by Alloc.
void CSysList::Free(void* p, ULONG Tag)
{
    ExFreePoolWithTag(p, Tag);
}
// Copies the caller's entry into a freshly allocated node and appends it to
// the tail of the list selected by ID.  Any ID other than
// PROCESS/DRIVER/BLACKLIST/THREAD is treated as MODULE (final else branch).
// On allocation failure the entry is silently dropped.
//
// Fix: the head-initialization paths now explicitly null the new node's Last
// link.  RtlCopyMemory copies the caller's whole struct — including whatever
// Last/Next values it held — so the first node's Last could previously be
// left dangling, which Remove() would later dereference.
void CSysList::Insert(void* pEntry, USHORT ID)
{
    if (ID == ID::PROCESS)
    {
        auto tmpProc = (PLIST_PROCESS)CSysList::Alloc(sizeof(LIST_PROCESS), NonPagedPool, TAG::PROCESS);
        if (!tmpProc)
            return;
        RtlCopyMemory(tmpProc, pEntry, sizeof(LIST_PROCESS));
        if (!CSysList::ProcessHead)
        {
            // First node: clear both links copied in from the caller's struct.
            tmpProc->Last = nullptr;
            tmpProc->Next = nullptr;
            CSysList::ProcessHead = tmpProc;
            CSysList::ProcessLast = tmpProc;
            return;
        }
        // Append at the tail.
        tmpProc->Next = nullptr;
        tmpProc->Last = CSysList::ProcessLast;
        CSysList::ProcessLast->Next = tmpProc;
        CSysList::ProcessLast = tmpProc;
    }
    else if (ID == ID::DRIVER)
    {
        auto tmpDriver = (PLIST_DRIVER)CSysList::Alloc(sizeof(LIST_DRIVER), NonPagedPool, TAG::DRIVER);
        if (!tmpDriver)
            return;
        RtlCopyMemory(tmpDriver, pEntry, sizeof(LIST_DRIVER));
        if (!CSysList::DriverHead)
        {
            tmpDriver->Last = nullptr;
            tmpDriver->Next = nullptr;
            CSysList::DriverHead = tmpDriver;
            CSysList::DriverLast = tmpDriver;
            return;
        }
        tmpDriver->Next = nullptr;
        tmpDriver->Last = CSysList::DriverLast;
        CSysList::DriverLast->Next = tmpDriver;
        CSysList::DriverLast = tmpDriver;
    }
    else if (ID == ID::BLACKLIST)
    {
        auto tmpBlacklist = (PLIST_BLACKLIST)CSysList::Alloc(sizeof(LIST_BLACKLIST), NonPagedPool, TAG::BLACKLIST);
        if (!tmpBlacklist)
            return;
        RtlCopyMemory(tmpBlacklist, pEntry, sizeof(LIST_BLACKLIST));
        if (!CSysList::BlacklistHead)
        {
            tmpBlacklist->Last = nullptr;
            tmpBlacklist->Next = nullptr;
            CSysList::BlacklistHead = tmpBlacklist;
            CSysList::BlacklistLast = tmpBlacklist;
            return;
        }
        tmpBlacklist->Next = nullptr;
        tmpBlacklist->Last = CSysList::BlacklistLast;
        CSysList::BlacklistLast->Next = tmpBlacklist;
        CSysList::BlacklistLast = tmpBlacklist;
    }
    else if (ID == ID::THREAD)
    {
        auto tmpTh = (PLIST_THREAD)CSysList::Alloc(sizeof(LIST_THREAD), NonPagedPool, TAG::THREAD);
        if (!tmpTh)
            return;
        RtlCopyMemory(tmpTh, pEntry, sizeof(LIST_THREAD));
        if (!CSysList::ThreadHead)
        {
            tmpTh->Last = nullptr;
            tmpTh->Next = nullptr;
            CSysList::ThreadHead = tmpTh;
            CSysList::ThreadLast = tmpTh;
            return;
        }
        tmpTh->Next = nullptr;
        tmpTh->Last = CSysList::ThreadLast;
        CSysList::ThreadLast->Next = tmpTh;
        CSysList::ThreadLast = tmpTh;
    }
    else
    {
        auto tmpMod = (PLIST_MODULE)CSysList::Alloc(sizeof(LIST_MODULE), NonPagedPool, TAG::MODULE);
        if (!tmpMod)
            return;
        RtlCopyMemory(tmpMod, pEntry, sizeof(LIST_MODULE));
        if (!CSysList::ModuleHead)
        {
            tmpMod->Last = nullptr;
            tmpMod->Next = nullptr;
            CSysList::ModuleHead = tmpMod;
            CSysList::ModuleLast = tmpMod;
            return;
        }
        tmpMod->Next = nullptr;
        tmpMod->Last = CSysList::ModuleLast;
        CSysList::ModuleLast->Next = tmpMod;
        CSysList::ModuleLast = tmpMod;
    }
}
// Frees every node in the list selected by ID and resets its head/tail to
// null.  (Spelling "Erease" kept — callers elsewhere depend on the name.)
void CSysList::Erease(USHORT ID)
{
    // Empty or unknown list: nothing to release.
    if (!CSysList::GetHead(ID) || !CSysList::GetLast(ID))
        return;
    if (ID == ID::PROCESS)
    {
        auto currProcEntry = CSysList::ProcessHead;
        while (currProcEntry)
        {
            // Read Next before freeing the node we stand on.
            auto nextEntry = currProcEntry->Next;
            CSysList::Free(currProcEntry, TAG::PROCESS);
            currProcEntry = nextEntry;
        }
    }
    else if (ID == ID::DRIVER)
    {
        auto currDriverEntry = CSysList::DriverHead;
        while (currDriverEntry)
        {
            auto nextEntry = currDriverEntry->Next;
            CSysList::Free(currDriverEntry, TAG::DRIVER);
            currDriverEntry = nextEntry;
        }
    }
    else if (ID == ID::BLACKLIST)
    {
        auto currBlacklistEntry = CSysList::BlacklistHead;
        while (currBlacklistEntry)
        {
            auto nextEntry = currBlacklistEntry->Next;
            CSysList::Free(currBlacklistEntry, TAG::BLACKLIST);
            currBlacklistEntry = nextEntry;
        }
    }
    else if(ID == ID::THREAD)
    {
        auto currThreadEntry = CSysList::ThreadHead;
        while (currThreadEntry)
        {
            auto nextEntry = currThreadEntry->Next;
            CSysList::Free(currThreadEntry, TAG::THREAD);
            currThreadEntry = nextEntry;
        }
    }
    else
    {
        // Any other ID is treated as MODULE.
        auto currModEntry = CSysList::ModuleHead;
        while (currModEntry)
        {
            auto nextEntry = currModEntry->Next;
            CSysList::Free(currModEntry, TAG::MODULE);
            currModEntry = nextEntry;
        }
    }
    // Mark the list empty again.
    CSysList::SetHead(ID, nullptr);
    CSysList::SetLast(ID, nullptr);
}
// Walks the list selected by ID and returns the number of nodes in it.
// Returns 0 for an empty list (or an ID whose head is null).  Any ID other
// than PROCESS/DRIVER/BLACKLIST/THREAD is treated as MODULE (final else).
//
// Fix: the DRIVER/BLACKLIST/THREAD comparisons used unqualified enumerators,
// inconsistent with the ID::-qualified form used everywhere else in this file.
ULONG CSysList::GetCount(USHORT ID)
{
    ULONG ListCount = 0;
    if (!CSysList::GetHead(ID))
        return ListCount;
    if (ID == ID::PROCESS)
    {
        auto currProcEntry = CSysList::ProcessHead;
        while (currProcEntry)
        {
            ListCount++;
            currProcEntry = currProcEntry->Next;
        }
    }
    else if (ID == ID::DRIVER)
    {
        auto currDriverEntry = CSysList::DriverHead;
        while (currDriverEntry)
        {
            ListCount++;
            currDriverEntry = currDriverEntry->Next;
        }
    }
    else if (ID == ID::BLACKLIST)
    {
        auto currBlacklistEntry = CSysList::BlacklistHead;
        while (currBlacklistEntry)
        {
            ListCount++;
            currBlacklistEntry = currBlacklistEntry->Next;
        }
    }
    else if (ID == ID::THREAD)
    {
        auto currThreadEntry = CSysList::ThreadHead;
        while (currThreadEntry)
        {
            ListCount++;
            currThreadEntry = currThreadEntry->Next;
        }
    }
    else
    {
        auto currModEntry = CSysList::ModuleHead;
        while (currModEntry)
        {
            ListCount++;
            currModEntry = currModEntry->Next;
        }
    }
    return ListCount;
}
// Returns the node at zero-based position `index` in the list selected by
// ID, or nullptr if the list is empty or index is out of range.
//
// Fix: there was no ID::BLACKLIST branch, so blacklist lookups fell through
// to the final else and walked the MODULE list instead (every other member
// function — Insert, Erease, Remove, GetHead — handles BLACKLIST explicitly).
void* CSysList::Get(ULONG index, USHORT ID)
{
    void* foundEntry = nullptr;
    if (!CSysList::GetHead(ID))
        return foundEntry;
    if (ID == ID::PROCESS)
    {
        auto currProcEntry = CSysList::ProcessHead;
        for (ULONG i = 0; currProcEntry; i++)
        {
            if (i == index)
            {
                foundEntry = currProcEntry;
                break;
            }
            currProcEntry = currProcEntry->Next;
        }
    }
    else if (ID == ID::DRIVER)
    {
        auto currDriverEntry = CSysList::DriverHead;
        for (ULONG i = 0; currDriverEntry; i++)
        {
            if (i == index)
            {
                foundEntry = currDriverEntry;
                break;
            }
            currDriverEntry = currDriverEntry->Next;
        }
    }
    else if (ID == ID::BLACKLIST)
    {
        auto currBlacklistEntry = CSysList::BlacklistHead;
        for (ULONG i = 0; currBlacklistEntry; i++)
        {
            if (i == index)
            {
                foundEntry = currBlacklistEntry;
                break;
            }
            currBlacklistEntry = currBlacklistEntry->Next;
        }
    }
    else if(ID == ID::THREAD)
    {
        auto currThreadEntry = CSysList::ThreadHead;
        for (ULONG i = 0; currThreadEntry; i++)
        {
            if (i == index)
            {
                foundEntry = currThreadEntry;
                break;
            }
            currThreadEntry = currThreadEntry->Next;
        }
    }
    else
    {
        auto currModEntry = CSysList::ModuleHead;
        for (ULONG i = 0; currModEntry; i++)
        {
            if (i == index)
            {
                foundEntry = currModEntry;
                break;
            }
            currModEntry = currModEntry->Next;
        }
    }
    return foundEntry;
}
// Unlinks and frees the first node matching `Entry` from the list selected
// by ID.  The meaning of Entry depends on the list:
//   PROCESS   -> PHANDLE, matched against PID
//   DRIVER    -> image base address, matched against ImageBase
//   BLACKLIST -> wide-string name, matched with wcscmp
//   THREAD    -> PHANDLE, matched against TID
//   other     -> treated as MODULE; base address matched against DllBase
// No-op if the list is empty or no node matches.
void CSysList::Remove(void* Entry, USHORT ID)
{
    if (!CSysList::GetHead(ID))
        return;
    // Single-node list: Erease handles freeing and resetting head/tail, so
    // the unlink code below can assume the list has at least two nodes.
    if (CSysList::GetHead(ID) == CSysList::GetLast(ID))
    {
        CSysList::Erease(ID);
        return;
    }
    if (ID == ID::PROCESS)
    {
        auto ProcID = *(PHANDLE)Entry;
        auto currProcEntry = CSysList::ProcessHead;
        while (currProcEntry)
        {
            if (currProcEntry->PID == ProcID)
            {
                auto NodeLast = currProcEntry->Last;
                auto NodeNext = currProcEntry->Next;
                if (!NodeNext)
                {
                    // Removing the tail.
                    NodeLast->Next = nullptr;
                    CSysList::ProcessLast = NodeLast;
                }
                else if (!NodeLast)
                {
                    // Removing the head.
                    NodeNext->Last = nullptr;
                    CSysList::ProcessHead = NodeNext;
                }
                else
                {
                    // Removing an interior node.
                    NodeLast->Next = NodeNext;
                    NodeNext->Last = NodeLast;
                }
                CSysList::Free(currProcEntry, TAG::PROCESS);
                currProcEntry = nullptr;
                break;
            }
            currProcEntry = currProcEntry->Next;
        }
    }
    else if (ID == ID::DRIVER)
    {
        auto drvBase = (uintptr_t)Entry;
        auto currDriverEntry = CSysList::DriverHead;
        while (currDriverEntry)
        {
            if (currDriverEntry->ImageBase == drvBase)
            {
                auto NodeLast = currDriverEntry->Last;
                auto NodeNext = currDriverEntry->Next;
                if (!NodeNext)
                {
                    NodeLast->Next = nullptr;
                    CSysList::DriverLast = NodeLast;
                }
                else if (!NodeLast)
                {
                    NodeNext->Last = nullptr;
                    CSysList::DriverHead = NodeNext;
                }
                else
                {
                    NodeLast->Next = NodeNext;
                    NodeNext->Last = NodeLast;
                }
                CSysList::Free(currDriverEntry, TAG::DRIVER);
                currDriverEntry = nullptr;
                break;
            }
            currDriverEntry = currDriverEntry->Next;
        }
    }
    else if (ID == ID::BLACKLIST)
    {
        auto wName = (wchar_t*)Entry;
        auto currBlacklistEntry = CSysList::BlacklistHead;
        while (currBlacklistEntry)
        {
            if (wcscmp(wName, currBlacklistEntry->wName) == 0)
            {
                auto NodeLast = currBlacklistEntry->Last;
                auto NodeNext = currBlacklistEntry->Next;
                if (!NodeNext)
                {
                    NodeLast->Next = nullptr;
                    CSysList::BlacklistLast = NodeLast;
                }
                else if (!NodeLast)
                {
                    NodeNext->Last = nullptr;
                    CSysList::BlacklistHead = NodeNext;
                }
                else
                {
                    NodeLast->Next = NodeNext;
                    NodeNext->Last = NodeLast;
                }
                CSysList::Free(currBlacklistEntry, TAG::BLACKLIST);
                currBlacklistEntry = nullptr;
                break;
            }
            currBlacklistEntry = currBlacklistEntry->Next;
        }
    }
    // NOTE(review): unqualified THREAD here vs ID::THREAD elsewhere in this
    // file — confirm both resolve to the same enumerator.
    else if(ID == THREAD)
    {
        auto ThreadID = *(PHANDLE)Entry;
        auto currThreadEntry = CSysList::ThreadHead;
        while (currThreadEntry)
        {
            if (currThreadEntry->TID == ThreadID)
            {
                auto NodeLast = currThreadEntry->Last;
                auto NodeNext = currThreadEntry->Next;
                if (!NodeNext)
                {
                    NodeLast->Next = nullptr;
                    CSysList::ThreadLast = NodeLast;
                }
                else if (!NodeLast)
                {
                    NodeNext->Last = nullptr;
                    CSysList::ThreadHead = NodeNext;
                }
                else
                {
                    NodeLast->Next = NodeNext;
                    NodeNext->Last = NodeLast;
                }
                CSysList::Free(currThreadEntry, TAG::THREAD);
                currThreadEntry = nullptr;
                break;
            }
            currThreadEntry = currThreadEntry->Next;
        }
    }
    else
    {
        auto modBase = (uintptr_t)Entry;
        auto currModEntry = CSysList::ModuleHead;
        while (currModEntry)
        {
            if (currModEntry->DllBase == modBase)
            {
                auto NodeLast = currModEntry->Last;
                auto NodeNext = currModEntry->Next;
                if (!NodeNext)
                {
                    NodeLast->Next = nullptr;
                    CSysList::ModuleLast = NodeLast;
                }
                else if (!NodeLast)
                {
                    NodeNext->Last = nullptr;
                    CSysList::ModuleHead = NodeNext;
                }
                else
                {
                    NodeLast->Next = NodeNext;
                    NodeNext->Last = NodeLast;
                }
                CSysList::Free(currModEntry, TAG::MODULE);
                currModEntry = nullptr;
                break;
            }
            currModEntry = currModEntry->Next;
        }
    }
}
// Returns the head pointer of the list selected by ID, or nullptr for an
// unrecognized ID.
void* CSysList::GetHead(USHORT ID)
{
    switch (ID)
    {
    case ID::BLACKLIST:
        return CSysList::BlacklistHead;
    case ID::DRIVER:
        return CSysList::DriverHead;
    case ID::MODULE:
        return CSysList::ModuleHead;
    case ID::THREAD:
        return CSysList::ThreadHead;
    case ID::PROCESS:
        return CSysList::ProcessHead;
    default:
        return nullptr;
    }
}
// Returns the tail pointer of the list selected by ID, or nullptr for an
// unrecognized ID.
void* CSysList::GetLast(USHORT ID)
{
    switch (ID)
    {
    case ID::BLACKLIST:
        return CSysList::BlacklistLast;
    case ID::DRIVER:
        return CSysList::DriverLast;
    case ID::MODULE:
        return CSysList::ModuleLast;
    case ID::THREAD:
        return CSysList::ThreadLast;
    case ID::PROCESS:
        return CSysList::ProcessLast;
    default:
        return nullptr;
    }
}
// Replaces the head pointer of the list selected by ID; no-op for an
// unrecognized ID.  Templated so callers can pass nullptr or a typed node.
template <typename T>
void CSysList::SetHead(USHORT ID, T p)
{
    switch (ID)
    {
    case ID::BLACKLIST:
        CSysList::BlacklistHead = p;
        break;
    case ID::DRIVER:
        CSysList::DriverHead = p;
        break;
    case ID::MODULE:
        CSysList::ModuleHead = p;
        break;
    case ID::THREAD:
        CSysList::ThreadHead = p;
        break;
    case ID::PROCESS:
        CSysList::ProcessHead = p;
        break;
    default:
        break;
    }
}
// Replaces the tail pointer of the list selected by ID; no-op for an
// unrecognized ID.  Templated so callers can pass nullptr or a typed node.
// (Template parameter renamed X -> T for consistency with SetHead.)
template <typename T>
void CSysList::SetLast(USHORT ID, T p)
{
    switch (ID)
    {
    case ID::BLACKLIST:
        CSysList::BlacklistLast = p;
        break;
    case ID::DRIVER:
        CSysList::DriverLast = p;
        break;
    case ID::MODULE:
        CSysList::ModuleLast = p;
        break;
    case ID::THREAD:
        CSysList::ThreadLast = p;
        break;
    case ID::PROCESS:
        CSysList::ProcessLast = p;
        break;
    default:
        break;
    }
}
|
var stgr;
stgr = stgr || {};
stgr.updateView = (function() {
  'use strict';

  // Hook invoked before a view update; currently a no-op.
  function beforeUpdate(request) {}

  // Records a pageview with Google Analytics (classic _gaq API).
  function trackGA(req) {
    return _gaq.push(['_trackPageview', req]);
  }

  // Adds or removes the given classes on <body> via the jQuery method name.
  function setBodyClasses(method, classesArr) {
    return stgr.cache.$body[method](classesArr.join(' '));
  }

  // Drops the class belonging to the page we are navigating away from.
  function clearBodyClasses() {
    if (stgr.model.settings.currentPage) {
      return setBodyClasses('removeClass', [stgr.model.settings.currentPage.type]);
    }
  }

  // Remembers the page type we just rendered on the shared model.
  function rememberCurrentPage(type) {
    return stgr.model.settings.currentPage = _.extend(stgr.model.settings.currentPage || {}, {
      type: type
    });
  }

  // Page title shown in the <title> tag and the <h1>.
  function pageTitle(type) {
    return 'stgr';
  }

  // Wires up accordion toggling after the container HTML is replaced.
  function bindEventListeners() {
    return $('.accordian-unit').on('click', function(e) {
      e.stopPropagation();
      if (!$(e.target).is('a')) {
        return $(this).toggleClass('expanded');
      }
    });
  }

  // Renders the view for the given page type (defaults to 'root'):
  // swaps body classes, updates titles, re-renders the main template and
  // re-attaches event handlers.
  function update(type) {
    if (type == null) {
      type = 'root';
    }
    var currentPage = stgr.model[type];
    clearBodyClasses();
    setBodyClasses('addClass', [type]);
    rememberCurrentPage(type);
    stgr.cache.$title.add(stgr.cache.$h1).text(pageTitle(type));
    stgr.cache.$dynamicContainer.html(stgr.template.primaryTemplate({
      data: stgr.model,
      currentType: type,
      currentPage: currentPage
    }));
    return bindEventListeners();
  }

  return {
    beforeUpdate: beforeUpdate,
    update: update,
    trackGA: trackGA
  };
})();
|
package com.dg.examples.restclientdemo;
import android.app.Activity;
import android.os.Bundle;
import android.widget.TextView;
import android.widget.Toast;
import com.dg.examples.restclientdemo.communication.GoogleService;
import com.dg.examples.restclientdemo.communication.requests.PatchRequest;
import com.dg.examples.restclientdemo.domain.ResponseModel;
import com.dg.libs.rest.callbacks.HttpCallback;
import com.dg.libs.rest.domain.ResponseStatus;
/**
 * Demo activity for the REST client library: on startup it fires an async
 * Google Blogs search request and an async PATCH request, rendering or
 * toasting the results.
 */
public class MainActivity extends Activity {

    // Displays the blogs response payload.
    private TextView textViewResponse;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        textViewResponse = (TextView) findViewById(R.id.textViewResponse);

        // Async GET: Google blogs search, handled by GoogleBlogsCallback below.
        GoogleService.getGoogleBlogsRequest(getApplicationContext(), "Official Google Blogs", new GoogleBlogsCallback())
            .executeAsync();

        // Async PATCH demo: outcome is reported via toasts only.
        new PatchRequest("Hello", new HttpCallback<Void>() {
            @Override
            public void onSuccess(Void responseData, ResponseStatus responseStatus) {
                Toast.makeText(getApplicationContext(), "Success patch", Toast.LENGTH_LONG).show();
            }

            @Override
            public void onHttpError(ResponseStatus responseStatus) {
                Toast.makeText(getApplicationContext(), "FAIL patch", Toast.LENGTH_LONG).show();
            }
        }).executeAsync();
    }

    // Renders the blogs response text, or toasts the HTTP error code/message.
    private final class GoogleBlogsCallback implements HttpCallback<ResponseModel> {
        @Override
        public void onSuccess(ResponseModel responseData, ResponseStatus status) {
            textViewResponse.setText(responseData.toString());
        }

        @Override
        public void onHttpError(ResponseStatus responseCode) {
            Toast.makeText(getApplicationContext(),
                responseCode.getStatusCode() + " " + responseCode.getStatusMessage(),
                Toast.LENGTH_LONG).show();
        }
    }
}
|
// Grab the element to animate.
const elem = document.querySelector('#elem');

// Keyframes for the two animation stages.
const stageOneFrames = [
  {transform: 'scale(1)'},
  {transform: 'scale(1.5)'},
];
const stageTwoFrames = [
  {transform: 'scale(1.5)'},
  {transform: 'scale(2)'},
];

// Both stages share the same timing: half a second, holding the final frame.
const timing = {
  duration: 500,
  fill: 'forwards'
};

// Stage one: scale from 1x to 1.5x.
const animation1 = elem.animate(stageOneFrames, timing);

// When stage one finishes, wait 500ms, then run stage two (1.5x -> 2x).
animation1.addEventListener('finish', () => {
  setTimeout(() => {
    elem.animate(stageTwoFrames, timing);
  }, 500);
});
#!/usr/bin/with-contenv bash
# ==============================================================================
# Community Hass.io Add-ons: SSH & Web Terminal
# Configures the SSH daemon
# ==============================================================================
# shellcheck disable=SC1091
source /usr/lib/hassio-addons/base.sh
readonly SSH_CONFIG_PATH=/etc/ssh/sshd_config
declare username
declare port
# Don't execute this when SSH is disabled
if hass.config.false 'ssh.enable'; then
exit 0
fi
# Port
port=$(hass.config.get 'ssh.port')
if hass.config.has_value 'ssh.port'; then
sed -i "s/Port\\ .*/Port\\ ${port}/" "${SSH_CONFIG_PATH}" \
|| hass.die 'Failed configuring port'
else
sed -i "s/Port\\ .*/Port\\ 22/" "${SSH_CONFIG_PATH}" \
|| hass.die 'Failed configuring port'
fi
# SFTP access
if hass.config.true 'ssh.sftp'; then
sed -i '/Subsystem sftp/s/^#//g' "${SSH_CONFIG_PATH}"
hass.log.notice 'SFTP access is enabled'
fi
# Allow specified user to log in
username=$(hass.config.get 'ssh.username')
username=$(hass.string.lower "${username}")
if [[ "${username}" != "root" ]]; then
sed -i "s/AllowUsers\\ .*/AllowUsers\\ ${username}/" "${SSH_CONFIG_PATH}" \
|| hass.die 'Failed opening SSH for the configured user'
else
sed -i "s/PermitRootLogin\\ .*/PermitRootLogin\\ yes/" "${SSH_CONFIG_PATH}" \
|| hass.die 'Failed opening SSH for the root user'
fi
# Enable password authentication when password is set
if hass.config.has_value 'ssh.password'; then
sed -i "s/PasswordAuthentication.*/PasswordAuthentication\\ yes/" \
"${SSH_CONFIG_PATH}" \
|| hass.die 'Failed to setup SSH password authentication'
fi
# This enabled less strict ciphers, macs, and keyx.
if hass.config.true 'ssh.compatibility_mode'; then
sed -i '/Ciphers\ .*/s/^/#/g' "${SSH_CONFIG_PATH}"
sed -i '/MACs\ .*/s/^/#/g' "${SSH_CONFIG_PATH}"
sed -i '/KexAlgorithms\.* /s/^/#/g' "${SSH_CONFIG_PATH}"
hass.log.notice 'SSH is running in compatibility mode!'
hass.log.warning 'Compatibility mode is less secure!'
hass.log.warning 'Please only enable it when you know what you are doing!'
fi
# Enable Agent forwarding
if hass.config.true 'ssh.allow_agent_forwarding'; then
sed -i "s/AllowAgentForwarding.*/AllowAgentForwarding\\ yes/" \
"${SSH_CONFIG_PATH}" \
|| hass.die 'Failed to setup SSH Agent Forwarding'
fi
# Allow remote port forwarding
if hass.config.true 'ssh.allow_remote_port_forwarding'; then
sed -i "s/GatewayPorts.*/GatewayPorts\\ yes/" \
"${SSH_CONFIG_PATH}" \
|| hass.die 'Failed to setup remote port forwarding'
fi
# Allow TCP forewarding
if hass.config.true 'ssh.allow_tcp_forwarding'; then
sed -i "s/AllowTcpForwarding.*/AllowTcpForwarding\\ yes/" \
"${SSH_CONFIG_PATH}" \
|| hass.die 'Failed to setup SSH TCP Forwarding'
fi
|
<reponame>NIRALUser/BatchMake
// Id
/***************************************************************
* FLU - FLTK Utility Widgets
* Copyright (C) 2002 Ohio Supercomputer Center, Ohio State University
*
* This file and its content is protected by a software license.
* You should have received a copy of this license with this file.
* If not, please contact the Ohio Supercomputer Center immediately:
* Attn: <NAME>: FLU 1224 Kinnear Rd, Columbus, Ohio 43212
*
***************************************************************/
#ifndef _FLU_COMBO_TREE_H
#define _FLU_COMBO_TREE_H

#include "FLU/Flu_Combo_Box.h"
#include "FLU/Flu_Tree_Browser.h"

//! Just like the Fl_Choice widget except the input area is editable and it can display a tree instead of a list (using Flu_Tree_Browser)
class FLU_EXPORT Flu_Combo_Tree : public Flu_Combo_Box
{
public:

  //! Normal FLTK widget constructor
  Flu_Combo_Tree( int x, int y, int w, int h, const char *l = 0 );

  //! Default destructor
  ~Flu_Combo_Tree();

  //! Publicly exposed tree widget (instance of Flu_Tree_Browser)
  Flu_Tree_Browser tree;

protected:

  // Flu_Combo_Box popup hooks — presumably set/select and navigate entries
  // in the tree; confirm the exact contract against Flu_Combo_Box.h.
  bool _value( const char *v );
  const char* _next();
  const char* _previous();
  void _hilight( int x, int y );

  // Static trampoline forwarding FLTK C-style callbacks to the member cb().
  inline static void _cb( Fl_Widget *w, void *arg )
    { ((Flu_Combo_Tree*)arg)->cb(); }
  void cb();

};

#endif
|
<gh_stars>1-10
var Service, Characteristic, DoorState // set in the module.exports, from homebridge
var pfio = require("piface-node-12")
pfio.init()
// Homebridge entry point: capture the HAP classes and register the accessory
// under the "homebridge-garage-piface" plugin / "GaragePiFace" accessory name.
module.exports = function(homebridge) {
  Service = homebridge.hap.Service
  Characteristic = homebridge.hap.Characteristic
  DoorState = homebridge.hap.Characteristic.CurrentDoorState
  homebridge.registerAccessory("homebridge-garage-piface", "GaragePiFace", GaragePiFaceAccessory)
}
// Accessory constructor: reads the plugin config, logs the effective
// settings, and builds the HomeKit services via initService().
function GaragePiFaceAccessory(log, config) {
  this.log = log
  this.version = require('./package.json').version
  log("GaragePiFaceAccessory version " + this.version)

  this.name = config.name
  // PiFace output driving the door-opener relay.
  this.doorSwitchOutput = config.switchOutput
  // NOTE(review): `|| 1` means a configured switchValue of 0 falls back to 1
  // — confirm 0 (ACTIVE_LOW) is meant to be configurable here.
  this.relayOn = config.switchValue || 1
  this.relayOff = 1-this.relayOn //opposite of relayOn (O/1)
  // How long the relay stays energized per "button press".
  this.doorSwitchPressTimeInMs = config.switchPressTimeInMs || 1000
  // Optional sensor inputs reporting the closed/open positions.
  this.closedDoorSensorInput = config.closedSensorInput
  this.openDoorSensorInput = config.openSensorInput
  this.sensorPollInMs = config.pollInMs || 4000
  // How long a full open/close travel is assumed to take.
  this.doorOpensInSeconds = config.opensInSeconds || 10
  this.closedDoorSensorValue = config.closedSensorValue
  this.openDoorSensorValue = config.openSensorValue

  // Log the effective configuration for troubleshooting.
  log("Switch Output: " + this.doorSwitchOutput)
  log("Switch Val: " + (this.relayOn == 1 ? "ACTIVE_HIGH" : "ACTIVE_LOW"))
  log("Switch Active Time in ms: " + this.doorSwitchPressTimeInMs)
  if (this.hasClosedSensor()) {
    log("Closed Sensor: Configured")
    log("    Closed Sensor Input: " + this.closedDoorSensorInput)
    log("    Closed Sensor Val: " + (this.closedDoorSensorValue == 1 ? "ACTIVE_HIGH" : "ACTIVE_LOW"))
  } else {
    log("Closed Sensor: Not Configured")
  }
  if(this.hasOpenSensor()) {
    log("Open Sensor: Configured")
    log("    Open Sensor Input: " + this.openDoorSensorInput)
    log("    Open Sensor Val: " + (this.openDoorSensorValue == 1 ? "ACTIVE_HIGH" : "ACTIVE_LOW"))
  } else {
    log("Open Sensor: Not Configured")
  }
  if (!this.hasClosedSensor() && !this.hasOpenSensor()) {
    // Without sensors we can only track the last commanded state.
    this.wasClosed = true //Set a valid initial state
    log("NOTE: Neither Open nor Closed sensor is configured. Will be unable to determine what state the garage is in, and will rely on last known state.")
  }
  log("Sensor Poll in ms: " + this.sensorPollInMs)
  log("Opens in seconds: " + this.doorOpensInSeconds)
  this.initService()
}
GaragePiFaceAccessory.prototype = {

  // Derives the HomeKit door state from whichever sensors are configured.
  determineCurrentDoorState: function() {
    if (this.isClosed()) {
      return DoorState.CLOSED
    } else if (this.hasOpenSensor()) {
      // Not closed, and we can check "open" directly; neither -> STOPPED.
      return this.isOpen() ? DoorState.OPEN : DoorState.STOPPED
    } else {
      // No open sensor: "not closed" is the best we know, so report OPEN.
      return DoorState.OPEN
    }
  },

  // Human-readable name for a DoorState value (logging only).
  doorStateToString: function(state) {
    switch (state) {
      case DoorState.OPEN:
        return "OPEN"
      case DoorState.CLOSED:
        return "CLOSED"
      case DoorState.STOPPED:
        return "STOPPED"
      default:
        return "UNKNOWN"
    }
  },

  // Poll loop: pushes state changes to HomeKit when the door moves outside
  // this plugin's control (skipped while we are operating the door ourselves).
  monitorDoorState: function() {
    var isClosed = this.isClosed()
    // NOTE(review): isOpen is read but never used below — confirm it is safe
    // to drop (digital_read appears side-effect free).
    var isOpen = this.isOpen()
    if (isClosed != this.wasClosed) {
      var state = this.determineCurrentDoorState()
      if (!this.operating) {
        this.log("State changed to " + this.doorStateToString(state))
        this.wasClosed = isClosed
        this.currentDoorState.setValue(state)
        this.targetState = state
      }
    }
    setTimeout(this.monitorDoorState.bind(this), this.sensorPollInMs)
  },

  hasOpenSensor: function() {
    return this.openDoorSensorInput != null
  },

  hasClosedSensor: function() {
    return this.closedDoorSensorInput != null
  },

  // Builds the GarageDoorOpener and AccessoryInformation services, wires the
  // characteristic get/set handlers, and starts sensor polling if possible.
  initService: function() {
    this.garageDoorOpener = new Service.GarageDoorOpener(this.name,this.name)
    this.currentDoorState = this.garageDoorOpener.getCharacteristic(DoorState)
    this.currentDoorState.on('get', this.getState.bind(this))
    this.targetDoorState = this.garageDoorOpener.getCharacteristic(Characteristic.TargetDoorState)
    this.targetDoorState.on('set', this.setState.bind(this))
    this.targetDoorState.on('get', this.getTargetState.bind(this))
    var isClosed = this.isClosed()
    this.wasClosed = isClosed
    this.operating = false

    this.infoService = new Service.AccessoryInformation()
    this.infoService
      .setCharacteristic(Characteristic.Manufacturer, "Opensource Community")
      .setCharacteristic(Characteristic.Model, "RaspPi PiFace GarageDoor")
      .setCharacteristic(Characteristic.SerialNumber, this.version)

    if (this.hasOpenSensor() || this.hasClosedSensor()) {
      this.log(this.name + " have a sensor, monitoring state enabled.")
      setTimeout(this.monitorDoorState.bind(this), this.sensorPollInMs)
    }

    this.log("Initial State: " + (isClosed ? "CLOSED" : "OPEN"))
    this.currentDoorState.setValue(isClosed ? DoorState.CLOSED : DoorState.OPEN)
    this.targetDoorState.setValue(isClosed ? DoorState.CLOSED : DoorState.OPEN)
  },

  getTargetState: function(callback) {
    callback(null, this.targetState)
  },

  // PiFace GPIO helpers.
  readPin: function(pin) {
    return pfio.digital_read(pin)
  },

  writePin: function(pin,val) {
    pfio.digital_write(pin, val)
  },

  // Closed/open detection: prefer the dedicated sensor, fall back to the
  // inverse of the other sensor, and finally to the last known state.
  isClosed: function() {
    if (this.hasClosedSensor()) {
      return this.readPin(this.closedDoorSensorInput) == this.closedDoorSensorValue
    } else if (this.hasOpenSensor()) {
      return !this.isOpen()
    } else {
      return this.wasClosed
    }
  },

  isOpen: function() {
    if (this.hasOpenSensor()) {
      return this.readPin(this.openDoorSensorInput) == this.openDoorSensorValue
    } else if (this.hasClosedSensor()) {
      return !this.isClosed()
    } else {
      return !this.wasClosed
    }
  },

  // Pulses the relay: energize, then schedule switchOff after the configured
  // press time.
  switchOn: function() {
    this.writePin(this.doorSwitchOutput, this.relayOn)
    this.log("Turning on " + this.name + " (Relay " + this.doorSwitchOutput + ") = " + this.relayOn)
    setTimeout(this.switchOff.bind(this), this.doorSwitchPressTimeInMs)
  },

  switchOff: function() {
    this.writePin(this.doorSwitchOutput, this.relayOff)
    this.log("Turning off " + this.name + " (Relay " + this.doorSwitchOutput + ") = " + this.relayOff)
  },

  // Runs doorOpensInSeconds after the relay pulse; decides whether the move
  // succeeded and publishes the final state.
  setFinalDoorState: function() {
    if (!this.hasClosedSensor() && !this.hasOpenSensor()) {
      // Sensorless mode: assume the door toggled away from its last known
      // state (isClosed()/isOpen() just reflect wasClosed here).
      var isClosed = !this.isClosed()
      var isOpen = this.isClosed()
    } else {
      var isClosed = this.isClosed()
      var isOpen = this.isOpen()
    }
    if ( (this.targetState == DoorState.CLOSED && !isClosed) || (this.targetState == DoorState.OPEN && !isOpen) ) {
      this.log("Was trying to " + (this.targetState == DoorState.CLOSED ? "CLOSE" : "OPEN") + " " + this.name + " , but it is still " + (isClosed ? "CLOSED":"OPEN"))
      this.currentDoorState.setValue(DoorState.STOPPED)
    } else {
      this.log("Set current state to " + (this.targetState == DoorState.CLOSED ? "CLOSED" : "OPEN"))
      this.wasClosed = this.targetState == DoorState.CLOSED
      this.currentDoorState.setValue(this.targetState)
    }
    this.operating = false
  },

  // TargetDoorState 'set' handler: pulse the relay only when the request
  // actually changes the door's state, and report OPENING/CLOSING meanwhile.
  setState: function(state, callback) {
    this.log("Setting state to " + state)
    this.targetState = state
    var isClosed = this.isClosed()
    if ((state == DoorState.OPEN && isClosed) || (state == DoorState.CLOSED && !isClosed)) {
      this.log("Triggering Relay")
      this.operating = true
      if (state == DoorState.OPEN) {
        this.currentDoorState.setValue(DoorState.OPENING)
      } else {
        this.currentDoorState.setValue(DoorState.CLOSING)
      }
      setTimeout(this.setFinalDoorState.bind(this), this.doorOpensInSeconds * 1000)
      this.switchOn()
    }
    callback()
    return true
  },

  // CurrentDoorState 'get' handler.
  getState: function(callback) {
    var isClosed = this.isClosed()
    var isOpen = this.isOpen()
    var state = isClosed ? DoorState.CLOSED : isOpen ? DoorState.OPEN : DoorState.STOPPED
    this.log(this.name + (isClosed ? " is CLOSED ("+DoorState.CLOSED+")" : isOpen ? " is OPEN ("+DoorState.OPEN+")" : " is STOPPED (" + DoorState.STOPPED + ")"))
    callback(null, state)
  },

  getServices: function() {
    return [this.infoService, this.garageDoorOpener]
  }
}
|
#!/bin/bash
# Installs the syscall-searcher tools into the user's ~/.local/bin.

# Absolute directory containing this script.
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"

# Install Python dependencies when a requirements file ships with the repo.
if [ -e "$SCRIPT_DIR/requirements.txt" ];then
    pip install -r "$SCRIPT_DIR/requirements.txt"
fi

# Fix: ensure the target directory exists — install(1) into a missing
# directory fails otherwise.
mkdir -p "$HOME/.local/bin"

install "$SCRIPT_DIR/syscall_searcher.py" "$HOME/.local/bin"
install "$SCRIPT_DIR/syscall_sigs.sh" "$HOME/.local/bin"
|
// This file is part of SWGANH which is released under the MIT license.
// See file LICENSE or go to http://swganh.com/LICENSE
#pragma once
#include <exception>
#include <functional>
#include <memory>
#include <unordered_map>
#include <boost/noncopyable.hpp>
#ifdef WIN32
#include <concurrent_unordered_map.h>
#else
#include <tbb/concurrent_unordered_map.h>
namespace Concurrency {
using ::tbb::concurrent_unordered_map;
}
#endif
#include "swganh/observer/observer_interface.h"
#include "swganh_core/messages/obj_controller_message.h"
#include "swganh_core/messages/controllers/show_fly_text.h"
namespace swganh {
namespace connection {
class ConnectionClientInterface;
}}
namespace swganh {
namespace messages {
class OutOfBand;
}}
namespace swganh {
namespace object {

    class Object;

    // Binds a game Object to the remote client connection controlling it and
    // forwards update notifications to that client.
    class ObjectControllerInterface : public swganh::observer::ObserverInterface, public std::enable_shared_from_this<ObjectControllerInterface>
    {
    public:
        typedef std::runtime_error InvalidControllerMessage;

    public:
        ObjectControllerInterface(
            std::shared_ptr<Object> object,
            std::shared_ptr<swganh::connection::ConnectionClientInterface> client);

        // NOTE(review): non-virtual destructor on a class deleted through
        // shared_ptr is safe (shared_ptr captures the deleter), but confirm
        // whether ObserverInterface declares a virtual destructor for other
        // ownership paths.
        ~ObjectControllerInterface();

        uint64_t GetId() const;

        /**
         * @return Handle to the object this controller manages.
         */
        std::shared_ptr<Object> GetObject() const;

        /**
         * @return Handle to the remote client controlling the object.
         */
        std::shared_ptr<swganh::connection::ConnectionClientInterface> GetRemoteClient();

        /**
         * Sets handle to the remote client controlling the object.
         *
         * @param remote_client the new remote client for this controller.
         */
        void SetRemoteClient(std::shared_ptr<swganh::connection::ConnectionClientInterface> remote_client);

        using swganh::observer::ObserverInterface::Notify;

        /**
         * Notifies the controller when the object has been updated.
         *
         * @param message The message to be delivered to the remote client.
         */
        void Notify(swganh::messages::BaseSwgMessage* message);

    private:
        // Default construction is disallowed: a controller always needs an
        // object and a client.
        ObjectControllerInterface();

        std::shared_ptr<Object> object_;
        std::shared_ptr<swganh::connection::ConnectionClientInterface> client_;
    };

}} // namespace swganh::object
// Sorts the array in place using insertion sort (stable; O(n^2) worst case).
static void insertionSort(int[] arr)
{
    // Everything left of i is already sorted; insert arr[i] among it.
    for (int i = 1; i < arr.Length; i++)
    {
        int current = arr[i];
        int pos = i;
        // Shift larger sorted elements one slot right to open a gap.
        while (pos > 0 && arr[pos - 1] > current)
        {
            arr[pos] = arr[pos - 1];
            pos--;
        }
        arr[pos] = current;
    }
}
// Demo: sort a sample array and print the elements space-separated.
int[] arr = {2, 4, 1, 6, 8};
insertionSort(arr);
for (int i = 0; i < arr.Length; i++) {
    Console.Write(arr[i] + " ");
}
// Output: 1 2 4 6 8
#!/usr/bin/env bash
# local version: 1.2.0.0
# Bats test suite for grains.sh (grains-on-a-chessboard exercise: square n
# holds 2^(n-1) grains).  Every test except the first is guarded by
# BATS_RUN_SKIPPED so tests can be enabled one at a time while solving.

@test "1" {
    # Guard intentionally commented out: the first test always runs.
    #[[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 1
    [[ $status -eq 0 ]]
    [[ $output == "1" ]]
}

@test "2" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 2
    [[ $status -eq 0 ]]
    [[ $output == "2" ]]
}

@test "3" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 3
    [[ $status -eq 0 ]]
    [[ $output == "4" ]]
}

@test "4" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 4
    [[ $status -eq 0 ]]
    [[ $output == "8" ]]
}

@test "16" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 16
    [[ $status -eq 0 ]]
    [[ $output == "32768" ]]
}

@test "32" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 32
    [[ $status -eq 0 ]]
    [[ $output == "2147483648" ]]
}

@test "64" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 64
    [[ $status -eq 0 ]]
    [[ $output == "9223372036854775808" ]]
}

# Inputs outside 1..64 must fail with status 1 and an error message.
@test "square 0 raises an exception" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 0
    [[ $status -eq 1 ]]
    [[ $output == "Error: invalid input" ]]
}

@test "negative square raises an exception" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh -1
    [[ $status -eq 1 ]]
    [[ $output == "Error: invalid input" ]]
}

@test "square greater than 64 raises an exception" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh 65
    [[ $status -eq 1 ]]
    [[ $output == "Error: invalid input" ]]
}

# "total" sums all 64 squares: 2^64 - 1.
@test "returns the total number of grains on the board" {
    [[ $BATS_RUN_SKIPPED == true ]] || skip
    run bash grains.sh total
    [[ $status -eq 0 ]]
    [[ $output == "18446744073709551615" ]]
}
|
/* Copyright 2021 freecodeformat.com */
package com.littlejenny.gulimall.order.to.paypal.create;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
/* Time: 2021-08-28 20:19:7 @author freecodeformat.com @website http://www.freecodeformat.com/json2javabean.php */
/** PayPal "create payment" transaction element, deserialized from JSON. */
@Data
public class Transactions {
    // NOTE(review): PayPal's related_resources entries are typically objects
    // (sale/refund wrappers), not plain strings — confirm List<String>
    // matches the actual payload this code receives.
    @JsonProperty("related_resources")
    private List<String> relatedResources;
    // Monetary amount of the transaction.
    private Amount amount;
    // Free-text description shown to the payer.
    private String description;
}
'use strict';
/**
 * Serialize a single noise-module instance into a plain, JSON-friendly object.
 *
 * Always includes `uid` and `type`; adds `children` (the uids of the source
 * modules) when the module has inputs, plus whatever type-specific parameters
 * the module's kind carries. Kinds with no extra parameters (and unknown
 * kinds) fall through the switch untouched.
 *
 * @param {Object} module - Module exposing `uid`, `type`, `sourceModuleCount`,
 *   optionally `sourceModules`, and its type-specific fields.
 * @returns {Object} Plain-object snapshot of the module.
 */
function toObj(module) {
  const obj = {
    uid: module.uid,
    type: module.type,
  };
  if (module.sourceModuleCount > 0) {
    // Record only the uids; the child objects themselves are emitted
    // separately by toObjArray when a whole tree is serialized.
    obj.children = Array.from(
      { length: module.sourceModuleCount },
      (_, i) => module.sourceModules[i].uid
    );
  }
  switch (module.type) {
    // Generators
    case 'Constant':
      obj.value = module.value;
      break;
    case 'Echo':
      obj.arg = module.arg;
      break;
    case 'Simplex':
      obj.seed = module.seed;
      break;
    case 'Voronoi':
      obj.seed = module.seed;
      obj.meanPoints = module.meanPoints;
      break;
    // Modifiers
    case 'Clamp':
      obj.min = module.min;
      obj.max = module.max;
      break;
    case 'Exponent':
      obj.exponent = module.exponent;
      break;
    case 'FBM':
      obj.octaves = module.octaves;
      obj.persistence = module.persistence;
      obj.lacunarity = module.lacunarity;
      break;
    case 'ScaleBias':
      obj.scale = module.scale;
      obj.bias = module.bias;
      break;
    // Selectors
    case 'Blend':
      // TODO
      // obj.ease = module.ease;
      break;
    case 'Select':
      obj.threshold = module.threshold;
      obj.edgeFalloff = module.edgeFalloff;
      // TODO
      // obj.ease = module.ease;
      break;
    // Transformers
    case 'ScalePoint':
      obj.scaleX = module.scaleX;
      obj.scaleY = module.scaleY;
      obj.scaleZ = module.scaleZ;
      break;
    case 'TranslatePoint':
      obj.transX = module.transX;
      obj.transY = module.transY;
      obj.transZ = module.transZ;
      break;
  }
  return obj;
}
/**
 * Flatten a module graph into an array of plain objects: the root first,
 * then each source module's subtree depth-first.
 *
 * Fix: tolerate leaf modules whose `sourceModules` is undefined — toObj
 * only guards on `sourceModuleCount`, so a leaf may legitimately omit the
 * array, and the original `module.sourceModules.forEach(...)` would throw.
 *
 * @param {Object} module - Root module of the (sub)graph.
 * @returns {Object[]} toObj snapshots of the root and all descendants.
 */
function toObjArray(module) {
  const objs = [toObj(module)];
  const sources = module.sourceModules || [];
  sources.forEach((child) => {
    objs.push(...toObjArray(child));
  });
  return objs;
}
/**
 * Serialize a module to a JSON string.
 *
 * @param {Object} module - Root module to serialize.
 * @param {boolean} mkTree - Truthy: emit the whole graph as
 *   `{ root, collection }`; falsy: emit only the root module's snapshot.
 * @returns {string} JSON text.
 */
function serialize(module, mkTree) {
  return mkTree
    ? JSON.stringify({
        root: module.uid,
        collection: toObjArray(module),
      })
    : JSON.stringify(toObj(module));
}

module.exports = serialize;
|
import React from 'react'
import reactCSS from 'reactcss'
import { Eyedropper } from '../common'
import ColorizeIcon from './ColorizeIcon'
// Eyedropper button for the Chrome-style picker: renders a colorize icon and
// an <Eyedropper> control (from ../common) that reports picked colors via
// `onChange`. NOTE(review): presumably <Eyedropper> overlays the icon and
// captures the click — confirm in the Eyedropper implementation.
export const ChromeEyedropper = (props) => {
  // reactCSS resolves the active style set; only 'default' is defined here.
  const styles = reactCSS({
    'default': {
      wrap: {
        position: 'relative',
        display: 'flex',
        justifyContent: 'flex-start',
        alignItems: 'center',
        width: '32px',
        height: '32px',
        cursor: 'pointer',
      },
      icon: {
        width: '18px',
        height: '18px',
        fill: '#364364',
      },
    },
  })

  return (
    <div className="eyedropper-wrap" style={ styles.wrap }>
      <ColorizeIcon style={ styles.icon } />
      {/* Props are forwarded untouched: rootElement, useScreenCaptureAPI, onChange. */}
      <Eyedropper rootElement={ props.rootElement } useScreenCaptureAPI={ props.useScreenCaptureAPI } onChange={ props.onChange } />
    </div>
  )
}

export default ChromeEyedropper
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build-phase helper: copies pod frameworks into
# the app bundle, strips foreign architectures, and re-signs as needed.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Location of the Swift runtime dylibs for the current platform/toolchain.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
# ("P" filter = protect rsync's in-flight temp files from --delete.)
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies one framework into the app's Frameworks folder, strips architectures
# the current build doesn't target, and re-signs when the build requires it.
#   $1: framework path (absolute, or relative to BUILT_PRODUCTS_DIR)
install_framework()
{
  # Resolve the source: full path under BUILT_PRODUCTS_DIR, then just its
  # basename there, then the argument taken literally.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Dereference symlinks so rsync copies the real bundle contents.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Some vendored products copy as a bare binary rather than a .framework dir.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # `exit ${PIPESTATUS[0]}` propagates otool's status through the pipeline.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies the dSYM of a vendored framework
install_dsym() {
  # $1: path to a vendored framework's .dSYM bundle.
  # Silently a no-op when the bundle is missing or unreadable.
  local source="$1"
  if [ -r "$source" ]; then
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DWARF_DSYM_FOLDER_PATH}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DWARF_DSYM_FOLDER_PATH}"
  fi
}
# Signs a framework with the provided identity
# Signs $1 with EXPANDED_CODE_SIGN_IDENTITY unless the build disables signing.
code_sign_if_enabled() {
  # POSIX marks `[ expr1 -a expr2 ]` obsolescent and it mis-parses some
  # operands; chain separate `[ ]` tests with && instead.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" ] && [ "${CODE_SIGNING_REQUIRED}" != "NO" ] && [ "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code sign identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"

    # Background the codesign call when parallel signing is enabled; the
    # script's final `wait` collects these jobs.
    # (`=` instead of bash-only `==` keeps the test portable under /bin/sh.)
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" = "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes from $1 (a Mach-O binary) every architecture slice that is not in
# the build's $ARCHS, so fat simulator/device frameworks pass App Store checks.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file: lipo prints "... are: arm64 x86_64";
  # rev/cut/rev extracts the text after the last ':'.
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  # Report removals so build logs explain any binary size change.
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Debug and Release embedded identical framework lists; collapse the two
# byte-identical branches into one.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/BiometricAuthenticator/BiometricAuthenticator.framework"
fi
# Wait for any codesign jobs backgrounded by code_sign_if_enabled.
# (`=` instead of bash-only `==` keeps the test portable under /bin/sh.)
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" = "true" ]; then
  wait
fi
|
def statistics(data):
    """Print and return the minimum, maximum, and mean of ``data``.

    Args:
        data: A non-empty iterable of numbers.

    Returns:
        Tuple ``(min_val, max_val, mean_val)``. Returning the values is new
        and backward-compatible (the function previously returned ``None``);
        the three lines are still printed exactly as before.

    Raises:
        ValueError: If ``data`` is empty (previously surfaced as an opaque
            ``min() arg is an empty sequence`` / ``ZeroDivisionError``).
    """
    values = list(data)  # materialize so generators are consumed only once
    if not values:
        raise ValueError("statistics() requires at least one data point")
    min_val = min(values)
    max_val = max(values)
    mean_val = sum(values) / len(values)
    # Print the result
    print("Minimum:", min_val)
    print("Maximum:", max_val)
    print("Mean:", mean_val)
    return min_val, max_val, mean_val
# Sample run over a small data set.
data = [11, 4, 8, 10, 5, 9, 6]
statistics(data)
# Expected output:
# Minimum: 4
# Maximum: 11
# Mean: 7.571428571428571
<filename>offer/src/main/java/com/java/study/zuo/vedio/basic/chapter2/ShortestEnd.java
package com.java.study.zuo.vedio.basic.chapter2;
/**
* <Description>
* 给定一个字符串,往字符串后添加字符编程str2,要求整体回文,且最短
*
* @author hushiye
* @since 2020-08-20 22:43
*/
public class ShortestEnd {

    /**
     * Interleaves '#' separators into {@code str} ("ab" -> "#a#b#") so that
     * odd- and even-length palindromes are handled uniformly by Manacher's
     * algorithm.
     *
     * @param str non-null source string
     * @return array of length {@code 2 * str.length() + 1}
     */
    public static char[] manacherString(String str) {
        char[] charArr = str.toCharArray();
        char[] res = new char[str.length() * 2 + 1];
        int index = 0;
        for (int i = 0; i != res.length; i++) {
            // Even slots hold the separator, odd slots the next original char.
            res[i] = (i & 1) == 0 ? '#' : charArr[index++];
        }
        return res;
    }

    /**
     * Returns the shortest string that, appended to {@code str}, makes the
     * result a palindrome. Runs Manacher's algorithm until some palindrome's
     * right boundary reaches the end of the string (i.e. the longest
     * palindromic suffix is found); the characters before that suffix,
     * reversed, are exactly what must be appended.
     *
     * @param str input string
     * @return the suffix to append ("" when {@code str} is already a
     *         palindrome), or {@code null} for null/empty input
     */
    public static String shortestEnd(String str) {
        if (str == null || str.length() == 0) {
            return null;
        }
        char[] charArr = manacherString(str);
        int[] pArr = new int[charArr.length]; // palindrome radii
        int index = -1;                       // center of rightmost-reaching palindrome
        int pR = -1;                          // its right boundary (exclusive)
        int maxContainsEnd = -1;              // radius of the palindrome touching the end
        for (int i = 0; i != charArr.length; i++) {
            // Reuse the mirrored radius when i lies inside the known palindrome.
            pArr[i] = pR > i ? Math.min(pArr[2 * index - i], pR - i) : 1;
            while (i + pArr[i] < charArr.length && i - pArr[i] > -1) {
                if (charArr[i + pArr[i]] == charArr[i - pArr[i]])
                    pArr[i]++;
                else {
                    break;
                }
            }
            if (i + pArr[i] > pR) {
                pR = i + pArr[i];
                index = i;
            }
            // The first palindrome whose boundary hits the end is the longest
            // palindromic suffix; nothing later can do better, so stop.
            if (pR == charArr.length) {
                maxContainsEnd = pArr[i];
                break;
            }
        }
        // Reverse the prefix that is not covered by the palindromic suffix.
        // charArr[i * 2 + 1] maps back to the original character at index i.
        char[] res = new char[str.length() - maxContainsEnd + 1];
        for (int i = 0; i < res.length; i++) {
            res[res.length - 1 - i] = charArr[i * 2 + 1];
        }
        return String.valueOf(res);
    }

    public static void main(String[] args) {
        // "abba" is already a palindrome, so the suffix to append is "".
        String str2 = "abba";
        System.out.println(shortestEnd(str2));
    }

    // NOTE: ~110 lines of commented-out alternative implementations
    // (shortestEnd2, getManacherArray, duplicate main methods, ...) were
    // removed as dead code; the active implementation above is unchanged.
}
|
# sliderdemo.py
# Demo of the slider control courtesy of <NAME>.
import win32con, win32ui
from pywin.mfc import dialog
class MyDialog(dialog.Dialog):
    '''
    Example using simple controls: a static dialog template holding a Close
    button, plus a slider control created programmatically in OnInitDialog.
    '''
    # Window styles for the dialog frame itself.
    _dialogstyle = (win32con.WS_MINIMIZEBOX | win32con.WS_DLGFRAME |
        win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE |
        win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT )
    # Styles shared by push buttons declared in the template.
    _buttonstyle = (win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP |
        win32con.WS_CHILD | win32con.WS_VISIBLE)
    ### The static template, contains all "normal" dialog items
    DIALOGTEMPLATE = [
        # the dialog itself is the first element in the template
        ["Example slider", (0, 0, 50, 43), _dialogstyle, None, (8, "MS SansSerif")],
        # rest of elements are the controls within the dialog
        # standard "Close" button
        [128, "Close", win32con.IDCANCEL, (0, 30, 50, 13), _buttonstyle], ]
    ### ID of the control to be created during dialog initialisation
    IDC_SLIDER = 9500

    def __init__(self):
        # Build the dialog from the static template above.
        dialog.Dialog.__init__(self, self.DIALOGTEMPLATE)

    def OnInitDialog(self):
        """Create the slider control and hook its scroll notifications."""
        rc = dialog.Dialog.OnInitDialog(self)
        # now initialise your controls that you want to create
        # programmatically, including those which are OLE controls
        # those created directly by win32ui.Create*
        # and your "custom controls" which are subclasses/whatever
        win32ui.EnableControlContainer()
        self.slider = win32ui.CreateSliderCtrl()
        self.slider.CreateWindow(win32con.WS_TABSTOP | win32con.WS_VISIBLE,
                                 (0, 0, 100, 30),
                                 self._obj_,
                                 self.IDC_SLIDER)
        # WM_HSCROLL is sent as the slider thumb moves.
        self.HookMessage(self.OnSliderMove, win32con.WM_HSCROLL)
        return rc

    def OnSliderMove(self, params):
        # Called for every WM_HSCROLL notification from the slider.
        print("Slider moved")

    def OnCancel(self):
        # Report the final slider position before delegating the close.
        print("The slider control is at position", self.slider.GetPos())
        self._obj_.OnCancel()
###
def demo():
    """Show the example slider dialog modally until the user closes it."""
    dia = MyDialog()
    dia.DoModal()

if __name__ == "__main__":
    demo()
#!/bin/bash
# Print /etc/resolv.conf line by line.
file=/etc/resolv.conf
# IFS= preserves leading/trailing whitespace; -r keeps backslashes literal.
while IFS= read -r line
do
  # Quote the expansion to prevent word splitting and glob expansion; use
  # printf instead of echo so lines that look like echo options (e.g. "-n")
  # are printed verbatim.
  printf '%s\n' "$line"
done < "$file"
|
# Resolve this script's directory so sibling paths work from any CWD.
script_dir=$(dirname "$(readlink -f "$0")")
export KB_DEPLOYMENT_CONFIG=$script_dir/../deploy.cfg
# Job working directory; must contain input.json and an auth token file.
WD=/kb/module/work
if [ -f $WD/token ]; then
  # xargs appends the token's contents as the final argument after the
  # input/output paths.
  cat $WD/token | xargs sh $script_dir/../bin/run_AtomicRegulonInference_async_job.sh $WD/input.json $WD/output.json
else
  echo "File $WD/token doesn't exist, aborting."
  exit 1
fi
|
<reponame>Fronikuniu/LavarcBT-portfolio
import { useForm } from 'react-hook-form';
import { Link } from 'react-router-dom';
import { BsFacebook } from 'react-icons/bs';
import { FcGoogle } from 'react-icons/fc';
import { signInWithEmailAndPassword } from 'firebase/auth';
import { useState } from 'react';
import AuthImages from '../helpers/AuthImages';
import { LoginData, LoginErrors } from '../../types';
import { logInWithFacebook, logInWithGoogle } from './loginProviders';
import { auth } from '../configuration/firebase';
import { UseUpdateDoc } from '../hooks/useManageDoc';
import loginErrors from '../helpers/loginErrors';
// Login page: email/password sign-in via Firebase Auth, plus Facebook/Google
// provider buttons, with react-hook-form driving validation.
function Login() {
  // User-facing message for the most recent failed sign-in ('' = no error).
  const [loginError, setLoginError] = useState('');
  const {
    register,
    handleSubmit,
    formState: { errors },
  } = useForm<LoginData>();

  // Sign in, then mark the user's document online; on failure map the
  // Firebase error code to a readable message from loginErrors.
  const onSubmit = (loginData: LoginData) =>
    signInWithEmailAndPassword(auth, loginData.email, loginData.password)
      .then(async (userCredential) => {
        const { user } = userCredential;
        await UseUpdateDoc('users', [user.uid], { isOnline: true });
      })
      .catch(({ code }: { code: keyof LoginErrors }) => {
        const errorCode = code;
        setLoginError(loginErrors[errorCode]);
      });

  return (
    <div className="container">
      <div className="login">
        <AuthImages />
        <div className="auth__form">
          <div className="auth__form-login">
            <h1>Login</h1>
            <p>Welcome again! We hope you will stay with us for longer!</p>
            <div className="auth__form-login__socials">
              <BsFacebook onClick={logInWithFacebook} role="button" />
              <FcGoogle onClick={logInWithGoogle} role="button" />
            </div>
            <div className="horizontal-or" />
            <form onSubmit={handleSubmit(onSubmit)}>
              <label htmlFor="login">
                Email
                <input
                  type="email"
                  id="email"
                  className={
                    errors.email?.type === 'required' || errors.email?.type === 'pattern'
                      ? 'input-error'
                      : ''
                  }
                  placeholder={errors.email?.type === 'required' ? 'Email is required!' : 'Email'}
                  autoComplete="email"
                  {...register('email', { required: true, pattern: /^\S+@\S+$/i })}
                />
              </label>
              <label htmlFor="password">
                Password
                <input
                  type="password"
                  id="password"
                  className={
                    errors.password?.type === 'required' || errors.password?.type === 'minLength'
                      ? 'input-error'
                      : ''
                  }
                  placeholder={
                    errors.password?.type === 'required' ? 'Password is required!' : 'Password'
                  }
                  autoComplete="current-password"
                  {...register('password', { required: true, minLength: 6 })}
                />
              </label>
              {loginError ? <p className="par-error">{loginError}</p> : ''}
              <p>
                Have you forgotten your password? <Link to="/reset-password">Click!</Link>
              </p>
              {/* Fix: this submits the sign-IN form; the label previously read "Sign up!". */}
              <input type="submit" value="Sign in!" />
            </form>
            <p>
              {" Don't have an account?"} <Link to="/auth/register">Sign up.</Link>
            </p>
          </div>
        </div>
      </div>
    </div>
  );
}

export default Login;
|
/**
 * Bit-flag mask for kernel profiling info; values are OR-able.
 * NOTE(review): naming matches SPIR-V/OpenCL-style enums — confirm against
 * the specification this file mirrors.
 */
export enum KernelProfilingInfoMask {
  None = 0,
  // 0x1: profile command execution time.
  CmdExecTime = 0x00000001,
}
# Expand % escapes (e.g. %n, %~) in prompt strings.
setopt prompt_percent
# Allow $ expansion in prompts (parameter/command substitution on each draw).
setopt prompt_subst
# Initialize zsh's prompt theme system.
autoload -U promptinit
promptinit
|
<gh_stars>0
import React, { Component } from "react";
import CardList from "./components/CardList";
///Implement card class
class App extends Component {
constructor(props) {
super(props);
this.state = {
json: null
};
}
componentDidMount() {
let endPoint = "https://api.weather.gov/gridpoints/MLB/25,69/forecast";
fetch(endPoint)
.then(response => response.json())
.then(data => {
//Once the promise resolves and we have the data we can now set the apps new state.
this.setState({
json: data
});
});
}
render() {
if (this.state.json) {
let list = (
<CardList data={this.state.json.properties.periods}></CardList>
);
return list;
} else {
//When the json is null don't render the list.
return <div></div>;
}
}
}
export default App;
|
# Generated by Powerlevel10k configuration wizard on 2021-02-27 at 01:22 CET.
# Based on romkatv/powerlevel10k/config/p10k-rainbow.zsh, checksum 59290.
# Wizard options: ascii, rainbow, 24h time, 2 lines, solid, lightest-ornaments, sparse,
# fluent, transient_prompt, instant_prompt=verbose.
# Type `p10k configure` to generate another config.
#
# Config for Powerlevel10k with powerline prompt style with colorful background.
# Type `p10k configure` to generate your own config based on it.
#
# Tip: Looking for a nice color? Here's a one-liner to print colormap.
#
# for i in {0..255}; do print -Pn "%K{$i} %k%F{$i}${(l:3::0:)i}%f " ${${(M)$((i%6)):#3}:+$'\n'}; done
# Temporarily change options.
# Quoting 'builtin' bypasses any alias or function shadowing these builtins.
'builtin' 'local' '-a' 'p10k_config_opts'
# Remember which options are currently enabled so they can be restored
# after the config has been sourced.
[[ ! -o 'aliases' ]] || p10k_config_opts+=('aliases')
[[ ! -o 'sh_glob' ]] || p10k_config_opts+=('sh_glob')
[[ ! -o 'no_brace_expand' ]] || p10k_config_opts+=('no_brace_expand')
'builtin' 'setopt' 'no_aliases' 'no_sh_glob' 'brace_expand'
() {
emulate -L zsh -o extended_glob
# Unset all configuration options. This allows you to apply configuration changes without
# restarting zsh. Edit ~/.p10k.zsh and type `source ~/.p10k.zsh`.
unset -m '(POWERLEVEL9K_*|DEFAULT_USER)~POWERLEVEL9K_GITSTATUS_DIR'
# Zsh >= 5.1 is required.
autoload -Uz is-at-least && is-at-least 5.1 || return
# The list of segments shown on the left. Fill it with the most important segments.
typeset -g POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(
# =========================[ Line #1 ]=========================
# os_icon # os identifier
anaconda # conda environment (https://conda.io/)
context # user@hostname
dir # current directory
vcs # git status
# =========================[ Line #2 ]=========================
newline # \n
prompt_char # prompt symbol
)
# The list of segments shown on the right. Fill it with less important segments.
# Right prompt on the last prompt line (where you are typing your commands) gets
# automatically hidden when the input line reaches it. Right prompt above the
# last prompt line gets hidden if it would overlap with left prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(
# =========================[ Line #1 ]=========================
status # exit code of the last command
command_execution_time # duration of the last command
background_jobs # presence of background jobs
direnv # direnv status (https://direnv.net/)
asdf # asdf version manager (https://github.com/asdf-vm/asdf)
virtualenv # python virtual environment (https://docs.python.org/3/library/venv.html)
# anaconda # conda environment (https://conda.io/)
pyenv # python environment (https://github.com/pyenv/pyenv)
goenv # go environment (https://github.com/syndbg/goenv)
nodenv # node.js version from nodenv (https://github.com/nodenv/nodenv)
nvm # node.js version from nvm (https://github.com/nvm-sh/nvm)
nodeenv # node.js environment (https://github.com/ekalinin/nodeenv)
# node_version # node.js version
# go_version # go version (https://golang.org)
# rust_version # rustc version (https://www.rust-lang.org)
# dotnet_version # .NET version (https://dotnet.microsoft.com)
# php_version # php version (https://www.php.net/)
# laravel_version # laravel php framework version (https://laravel.com/)
# java_version # java version (https://www.java.com/)
# package # name@version from package.json (https://docs.npmjs.com/files/package.json)
rbenv # ruby version from rbenv (https://github.com/rbenv/rbenv)
rvm # ruby version from rvm (https://rvm.io)
fvm # flutter version management (https://github.com/leoafarias/fvm)
luaenv # lua version from luaenv (https://github.com/cehoffman/luaenv)
jenv # java version from jenv (https://github.com/jenv/jenv)
plenv # perl version from plenv (https://github.com/tokuhirom/plenv)
phpenv # php version from phpenv (https://github.com/phpenv/phpenv)
scalaenv # scala version from scalaenv (https://github.com/scalaenv/scalaenv)
haskell_stack # haskell version from stack (https://haskellstack.org/)
kubecontext # current kubernetes context (https://kubernetes.io/)
terraform # terraform workspace (https://www.terraform.io)
aws # aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html)
aws_eb_env # aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/)
azure # azure account name (https://docs.microsoft.com/en-us/cli/azure)
gcloud # google cloud cli account and project (https://cloud.google.com/)
google_app_cred # google application credentials (https://cloud.google.com/docs/authentication/production)
#context # user@hostname
nordvpn # nordvpn connection status, linux only (https://nordvpn.com/)
ranger # ranger shell (https://github.com/ranger/ranger)
nnn # nnn shell (https://github.com/jarun/nnn)
vim_shell # vim shell indicator (:sh)
midnight_commander # midnight commander shell (https://midnight-commander.org/)
nix_shell # nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html)
# vi_mode # vi mode (you don't need this if you've enabled prompt_char)
# vpn_ip # virtual private network indicator
# load # CPU load
# disk_usage # disk usage
# ram # free RAM
# swap # used swap
todo # todo items (https://github.com/todotxt/todo.txt-cli)
timewarrior # timewarrior tracking status (https://timewarrior.net/)
taskwarrior # taskwarrior task count (https://taskwarrior.org/)
time # current time
# =========================[ Line #2 ]=========================
newline
ram # free RAM
# ip # ip address and bandwidth usage for a specified network interface
# public_ip # public IP address
# proxy # system-wide http/https/ftp proxy
battery # internal battery
# wifi # wifi speed
# example # example user-defined segment (see prompt_example function below)
)
# Defines character set used by powerlevel10k. It's best to let `p10k configure` set it for you.
typeset -g POWERLEVEL9K_MODE=ascii
# When set to `moderate`, some icons will have an extra space after them. This is meant to avoid
# icon overlap when using non-monospace fonts. When set to `none`, spaces are not added.
typeset -g POWERLEVEL9K_ICON_PADDING=none
# When set to true, icons appear before content on both sides of the prompt. When set
# to false, icons go after content. If empty or not set, icons go before content in the left
# prompt and after content in the right prompt.
#
# You can also override it for a specific segment:
#
# POWERLEVEL9K_STATUS_ICON_BEFORE_CONTENT=false
#
# Or for a specific segment in specific state:
#
# POWERLEVEL9K_DIR_NOT_WRITABLE_ICON_BEFORE_CONTENT=false
typeset -g POWERLEVEL9K_ICON_BEFORE_CONTENT=
# Add an empty line before each prompt.
typeset -g POWERLEVEL9K_PROMPT_ADD_NEWLINE=true
# Connect left prompt lines with these symbols. You'll probably want to use the same color
# as POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND below.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX=
# Connect right prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_SUFFIX=
# Filler between left and right prompt on the first prompt line. You can set it to ' ', '·' or
# '-'. The last two make it easier to see the alignment between left and right prompt and to
# separate prompt from command output. You might want to set POWERLEVEL9K_PROMPT_ADD_NEWLINE=false
# for more compact prompt if using using this option.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR='-'
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_BACKGROUND=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_GAP_BACKGROUND=
if [[ $POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR != ' ' ]]; then
# The color of the filler. You'll probably want to match the color of POWERLEVEL9K_MULTILINE
# ornaments defined above.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND=244
# Start filler from the edge of the screen if there are no left segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_FIRST_SEGMENT_END_SYMBOL='%{%}'
# End filler on the edge of the screen if there are no right segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='%{%}'
fi
# Separator between same-color segments on the left.
typeset -g POWERLEVEL9K_LEFT_SUBSEGMENT_SEPARATOR='|'
# Separator between same-color segments on the right.
typeset -g POWERLEVEL9K_RIGHT_SUBSEGMENT_SEPARATOR='|'
# Separator between different-color segments on the left.
typeset -g POWERLEVEL9K_LEFT_SEGMENT_SEPARATOR=''
# Separator between different-color segments on the right.
typeset -g POWERLEVEL9K_RIGHT_SEGMENT_SEPARATOR=''
# The right end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=''
# The left end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL=''
# The left end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=''
# The right end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL=''
# Left prompt terminator for lines without any segments.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=
#################################[ os_icon: os identifier ]##################################
# OS identifier color.
typeset -g POWERLEVEL9K_OS_ICON_FOREGROUND=232
typeset -g POWERLEVEL9K_OS_ICON_BACKGROUND=7
# Custom icon.
# typeset -g POWERLEVEL9K_OS_ICON_CONTENT_EXPANSION='⭐'
################################[ prompt_char: prompt symbol ]################################
# Transparent background.
typeset -g POWERLEVEL9K_PROMPT_CHAR_BACKGROUND=
# Green prompt symbol if the last command succeeded.
typeset -g POWERLEVEL9K_PROMPT_CHAR_OK_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=76
# Red prompt symbol if the last command failed.
typeset -g POWERLEVEL9K_PROMPT_CHAR_ERROR_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=196
# Default prompt symbol.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIINS_CONTENT_EXPANSION='>'
# Prompt symbol in command vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VICMD_CONTENT_EXPANSION='<'
# Prompt symbol in visual vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIVIS_CONTENT_EXPANSION='V'
# Prompt symbol in overwrite vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIOWR_CONTENT_EXPANSION='^'
typeset -g POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE=true
# No line terminator if prompt_char is the last segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=
# No line introducer if prompt_char is the first segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
# No surrounding whitespace.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_{LEFT,RIGHT}_WHITESPACE=
##################################[ dir: current directory ]##################################
# Current directory background color.
typeset -g POWERLEVEL9K_DIR_BACKGROUND=4
# Default current directory foreground color.
typeset -g POWERLEVEL9K_DIR_FOREGROUND=254
# If directory is too long, shorten some of its segments to the shortest possible unique
# prefix. The shortened directory can be tab-completed to the original.
typeset -g POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique
# Replace removed segment suffixes with this symbol.
typeset -g POWERLEVEL9K_SHORTEN_DELIMITER=
# Color of the shortened directory segments.
typeset -g POWERLEVEL9K_DIR_SHORTENED_FOREGROUND=250
# Color of the anchor directory segments. Anchor segments are never shortened. The first
# segment is always an anchor.
typeset -g POWERLEVEL9K_DIR_ANCHOR_FOREGROUND=255
# Display anchor directory segments in bold.
typeset -g POWERLEVEL9K_DIR_ANCHOR_BOLD=true
# Don't shorten directories that contain any of these files. They are anchors.
local anchor_files=(
.bzr
.citc
.git
.hg
.node-version
.python-version
.go-version
.ruby-version
.lua-version
.java-version
.perl-version
.php-version
.tool-version
.shorten_folder_marker
.svn
.terraform
CVS
Cargo.toml
composer.json
go.mod
package.json
stack.yaml
)
typeset -g POWERLEVEL9K_SHORTEN_FOLDER_MARKER="(${(j:|:)anchor_files})"
# If set to "first" ("last"), remove everything before the first (last) subdirectory that contains
# files matching $POWERLEVEL9K_SHORTEN_FOLDER_MARKER. For example, when the current directory is
# /foo/bar/git_repo/nested_git_repo/baz, prompt will display git_repo/nested_git_repo/baz (first)
# or nested_git_repo/baz (last). This assumes that git_repo and nested_git_repo contain markers
# and other directories don't.
#
# Optionally, "first" and "last" can be followed by ":<offset>" where <offset> is an integer.
# This moves the truncation point to the right (positive offset) or to the left (negative offset)
# relative to the marker. Plain "first" and "last" are equivalent to "first:0" and "last:0"
# respectively.
typeset -g POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=false
# Don't shorten this many last directory segments. They are anchors.
typeset -g POWERLEVEL9K_SHORTEN_DIR_LENGTH=1
# Shorten directory if it's longer than this even if there is space for it. The value can
# be either absolute (e.g., '80') or a percentage of terminal width (e.g, '50%'). If empty,
# directory will be shortened only when prompt doesn't fit or when other parameters demand it
# (see POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS and POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT below).
# If set to `0`, directory will always be shortened to its minimum length.
typeset -g POWERLEVEL9K_DIR_MAX_LENGTH=80
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least this
# many columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS=40
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least
# COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT=50
# If set to true, embed a hyperlink into the directory. Useful for quickly
# opening a directory in the file manager simply by clicking the link.
# Can also be handy when the directory is shortened, as it allows you to see
# the full directory that was used in previous commands.
typeset -g POWERLEVEL9K_DIR_HYPERLINK=false
# Enable special styling for non-writable and non-existent directories. See POWERLEVEL9K_LOCK_ICON
# and POWERLEVEL9K_DIR_CLASSES below.
typeset -g POWERLEVEL9K_DIR_SHOW_WRITABLE=v3
# The default icon shown next to non-writable and non-existent directories when
# POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3.
# typeset -g POWERLEVEL9K_LOCK_ICON='⭐'
# POWERLEVEL9K_DIR_CLASSES allows you to specify custom icons and colors for different
# directories. It must be an array with 3 * N elements. Each triplet consists of:
#
# 1. A pattern against which the current directory ($PWD) is matched. Matching is done with
# extended_glob option enabled.
# 2. Directory class for the purpose of styling.
# 3. An empty string.
#
# Triplets are tried in order. The first triplet whose pattern matches $PWD wins.
#
# If POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3, non-writable and non-existent directories
# acquire class suffix _NOT_WRITABLE and NON_EXISTENT respectively.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=(
# '~/work(|/*)' WORK ''
# '~(|/*)' HOME ''
# '*' DEFAULT '')
#
# Whenever the current directory is ~/work or a subdirectory of ~/work, it gets styled with one
# of the following classes depending on its writability and existence: WORK, WORK_NOT_WRITABLE or
# WORK_NON_EXISTENT.
#
# Simply assigning classes to directories doesn't have any visible effects. It merely gives you an
# option to define custom colors and icons for different directory classes.
#
# # Styling for WORK.
# typeset -g POWERLEVEL9K_DIR_WORK_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_BACKGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_FOREGROUND=254
# typeset -g POWERLEVEL9K_DIR_WORK_SHORTENED_FOREGROUND=250
# typeset -g POWERLEVEL9K_DIR_WORK_ANCHOR_FOREGROUND=255
#
# # Styling for WORK_NOT_WRITABLE.
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_BACKGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND=254
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_SHORTENED_FOREGROUND=250
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_ANCHOR_FOREGROUND=255
#
# # Styling for WORK_NON_EXISTENT.
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_BACKGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_FOREGROUND=254
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_SHORTENED_FOREGROUND=250
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_ANCHOR_FOREGROUND=255
#
# If a styling parameter isn't explicitly defined for some class, it falls back to the classless
# parameter. For example, if POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND is not set, it falls
# back to POWERLEVEL9K_DIR_FOREGROUND.
#
typeset -g POWERLEVEL9K_DIR_CLASSES=()
# Custom prefix.
# typeset -g POWERLEVEL9K_DIR_PREFIX='in '
#####################################[ vcs: git status ]######################################
# Version control system colors.
typeset -g POWERLEVEL9K_VCS_CLEAN_BACKGROUND=2
typeset -g POWERLEVEL9K_VCS_MODIFIED_BACKGROUND=3
typeset -g POWERLEVEL9K_VCS_UNTRACKED_BACKGROUND=2
typeset -g POWERLEVEL9K_VCS_CONFLICTED_BACKGROUND=3
typeset -g POWERLEVEL9K_VCS_LOADING_BACKGROUND=8
# Branch icon. Set this parameter to '\uF126 ' for the popular Powerline branch icon.
typeset -g POWERLEVEL9K_VCS_BRANCH_ICON=
# Untracked files icon. It's really a question mark, your font isn't broken.
# Change the value of this parameter to show a different icon.
typeset -g POWERLEVEL9K_VCS_UNTRACKED_ICON='?'
# Formatter for Git status.
#
# Example output: master <42>42 *42 merge ~42 +42 !42 ?42.
#
# You can edit the function to customize how Git status looks.
#
# VCS_STATUS_* parameters are set by gitstatus plugin. See reference:
# https://github.com/romkatv/gitstatus/blob/master/gitstatus.plugin.zsh.
function my_git_formatter() {
emulate -L zsh
if [[ -n $P9K_CONTENT ]]; then
# If P9K_CONTENT is not empty, use it. It's either "loading" or from vcs_info (not from
# gitstatus plugin). VCS_STATUS_* parameters are not available in this case.
typeset -g my_git_format=$P9K_CONTENT
return
fi
# Styling for different parts of Git status.
local meta='%7F' # white foreground
local clean='%0F' # black foreground
local modified='%0F' # black foreground
local untracked='%0F' # black foreground
local conflicted='%1F' # red foreground
local res
if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then
local branch=${(V)VCS_STATUS_LOCAL_BRANCH}
# If local branch name is at most 32 characters long, show it in full.
# Otherwise show the first 12 .. the last 12.
# Tip: To always show local branch name in full without truncation, delete the next line.
(( $#branch > 32 )) && branch[13,-13]=".." # <-- this line
res+="${clean}${(g::)POWERLEVEL9K_VCS_BRANCH_ICON}${branch//\%/%%}"
fi
if [[ -n $VCS_STATUS_TAG
# Show tag only if not on a branch.
# Tip: To always show tag, delete the next line.
&& -z $VCS_STATUS_LOCAL_BRANCH # <-- this line
]]; then
local tag=${(V)VCS_STATUS_TAG}
# If tag name is at most 32 characters long, show it in full.
# Otherwise show the first 12 .. the last 12.
# Tip: To always show tag name in full without truncation, delete the next line.
(( $#tag > 32 )) && tag[13,-13]=".." # <-- this line
res+="${meta}#${clean}${tag//\%/%%}"
fi
# Display the current Git commit if there is no branch and no tag.
# Tip: To always display the current Git commit, delete the next line.
[[ -z $VCS_STATUS_LOCAL_BRANCH && -z $VCS_STATUS_LOCAL_BRANCH ]] && # <-- this line
res+="${meta}@${clean}${VCS_STATUS_COMMIT[1,8]}"
# Show tracking branch name if it differs from local branch.
if [[ -n ${VCS_STATUS_REMOTE_BRANCH:#$VCS_STATUS_LOCAL_BRANCH} ]]; then
res+="${meta}:${clean}${(V)VCS_STATUS_REMOTE_BRANCH//\%/%%}"
fi
# <42 if behind the remote.
(( VCS_STATUS_COMMITS_BEHIND )) && res+=" ${clean}<${VCS_STATUS_COMMITS_BEHIND}"
# >42 if ahead of the remote; no leading space if also behind the remote: <42>42.
(( VCS_STATUS_COMMITS_AHEAD && !VCS_STATUS_COMMITS_BEHIND )) && res+=" "
(( VCS_STATUS_COMMITS_AHEAD )) && res+="${clean}>${VCS_STATUS_COMMITS_AHEAD}"
# <-42 if behind the push remote.
(( VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" ${clean}<-${VCS_STATUS_PUSH_COMMITS_BEHIND}"
(( VCS_STATUS_PUSH_COMMITS_AHEAD && !VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" "
# ->42 if ahead of the push remote; no leading space if also behind: <-42->42.
(( VCS_STATUS_PUSH_COMMITS_AHEAD )) && res+="${clean}->${VCS_STATUS_PUSH_COMMITS_AHEAD}"
# *42 if have stashes.
(( VCS_STATUS_STASHES )) && res+=" ${clean}*${VCS_STATUS_STASHES}"
# 'merge' if the repo is in an unusual state.
[[ -n $VCS_STATUS_ACTION ]] && res+=" ${conflicted}${VCS_STATUS_ACTION}"
# ~42 if have merge conflicts.
(( VCS_STATUS_NUM_CONFLICTED )) && res+=" ${conflicted}~${VCS_STATUS_NUM_CONFLICTED}"
# +42 if have staged changes.
(( VCS_STATUS_NUM_STAGED )) && res+=" ${modified}+${VCS_STATUS_NUM_STAGED}"
# !42 if have unstaged changes.
(( VCS_STATUS_NUM_UNSTAGED )) && res+=" ${modified}!${VCS_STATUS_NUM_UNSTAGED}"
# ?42 if have untracked files. It's really a question mark, your font isn't broken.
# See POWERLEVEL9K_VCS_UNTRACKED_ICON above if you want to use a different icon.
# Remove the next line if you don't want to see untracked files at all.
(( VCS_STATUS_NUM_UNTRACKED )) && res+=" ${untracked}${(g::)POWERLEVEL9K_VCS_UNTRACKED_ICON}${VCS_STATUS_NUM_UNTRACKED}"
# "-" if the number of unstaged files is unknown. This can happen due to
# POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY (see below) being set to a non-negative number lower
# than the number of files in the Git index, or due to bash.showDirtyState being set to false
# in the repository config. The number of staged and untracked files may also be unknown
# in this case.
(( VCS_STATUS_HAS_UNSTAGED == -1 )) && res+=" ${modified}-"
typeset -g my_git_format=$res
}
functions -M my_git_formatter 2>/dev/null
# Don't count the number of unstaged, untracked and conflicted files in Git repositories with
# more than this many files in the index. Negative value means infinity.
#
# If you are working in Git repositories with tens of millions of files and seeing performance
# sagging, try setting POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY to a number lower than the output
# of `git ls-files | wc -l`. Alternatively, add `bash.showDirtyState = false` to the repository's
# config: `git config bash.showDirtyState false`.
typeset -g POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY=-1
# Don't show Git status in prompt for repositories whose workdir matches this pattern.
# For example, if set to '~', the Git repository at $HOME/.git will be ignored.
# Multiple patterns can be combined with '|': '~(|/foo)|/bar/baz/*'.
typeset -g POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN='~'
# Disable the default Git status formatting.
typeset -g POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING=true
# Install our own Git status formatter.
typeset -g POWERLEVEL9K_VCS_CONTENT_EXPANSION='${$((my_git_formatter()))+${my_git_format}}'
# Enable counters for staged, unstaged, etc.
typeset -g POWERLEVEL9K_VCS_{STAGED,UNSTAGED,UNTRACKED,CONFLICTED,COMMITS_AHEAD,COMMITS_BEHIND}_MAX_NUM=-1
# Custom icon.
typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
typeset -g POWERLEVEL9K_VCS_PREFIX='on '
# Show status of repositories of these types. You can add svn and/or hg if you are
# using them. If you do, your prompt may become slow even when your current directory
# isn't in an svn or hg reposotiry.
typeset -g POWERLEVEL9K_VCS_BACKENDS=(git)
##########################[ status: exit code of the last command ]###########################
# Enable OK_PIPE, ERROR_PIPE and ERROR_SIGNAL status states to allow us to enable, disable and
# style them independently from the regular OK and ERROR state.
typeset -g POWERLEVEL9K_STATUS_EXTENDED_STATES=true
# Status on success. No content, just an icon. No need to show it if prompt_char is enabled as
# it will signify success by turning green.
typeset -g POWERLEVEL9K_STATUS_OK=false
typeset -g POWERLEVEL9K_STATUS_OK_VISUAL_IDENTIFIER_EXPANSION='ok'
typeset -g POWERLEVEL9K_STATUS_OK_FOREGROUND=2
typeset -g POWERLEVEL9K_STATUS_OK_BACKGROUND=0
# Status when some part of a pipe command fails but the overall exit status is zero. It may look
# like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_OK_PIPE=true
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_VISUAL_IDENTIFIER_EXPANSION='ok'
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_FOREGROUND=2
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_BACKGROUND=0
# Status when it's just an error code (e.g., '1'). No need to show it if prompt_char is enabled as
# it will signify error by turning red.
typeset -g POWERLEVEL9K_STATUS_ERROR=false
typeset -g POWERLEVEL9K_STATUS_ERROR_VISUAL_IDENTIFIER_EXPANSION='err'
typeset -g POWERLEVEL9K_STATUS_ERROR_FOREGROUND=3
typeset -g POWERLEVEL9K_STATUS_ERROR_BACKGROUND=1
# Status when the last command was terminated by a signal.
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL=true
# Use terse signal names: "INT" instead of "SIGINT(2)".
typeset -g POWERLEVEL9K_STATUS_VERBOSE_SIGNAME=false
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_VISUAL_IDENTIFIER_EXPANSION=
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_FOREGROUND=3
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_BACKGROUND=1
# Status when some part of a pipe command fails and the overall exit status is also non-zero.
# It may look like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE=true
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_VISUAL_IDENTIFIER_EXPANSION='err'
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_FOREGROUND=3
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_BACKGROUND=1
###################[ command_execution_time: duration of the last command ]###################
# Execution time color.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FOREGROUND=0
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_BACKGROUND=3
# Show duration of the last command if takes at least this many seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
# Show this many fractional digits. Zero means round to seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
# Duration format: 1d 2h 3m 4s.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT='d h m s'
# Custom icon.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PREFIX='took '
#######################[ background_jobs: presence of background jobs ]#######################
# Background jobs color.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_FOREGROUND=6
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_BACKGROUND=0
# Don't show the number of background jobs.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE=false
# Custom icon.
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ direnv: direnv status (https://direnv.net/) ]########################
# Direnv color.
typeset -g POWERLEVEL9K_DIRENV_FOREGROUND=3
typeset -g POWERLEVEL9K_DIRENV_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_DIRENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ asdf: asdf version manager (https://github.com/asdf-vm/asdf) ]###############
# Default asdf color. Only used to display tools for which there is no color override (see below).
# Tip: Override these parameters for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_FOREGROUND and
# POWERLEVEL9K_ASDF_${TOOL}_BACKGROUND.
typeset -g POWERLEVEL9K_ASDF_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_BACKGROUND=7
# There are four parameters that can be used to hide asdf tools. Each parameter describes
# conditions under which a tool gets hidden. Parameters can hide tools but not unhide them. If at
# least one parameter decides to hide a tool, that tool gets hidden. If no parameter decides to
# hide a tool, it gets shown.
#
# Special note on the difference between POWERLEVEL9K_ASDF_SOURCES and
# POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW. Consider the effect of the following commands:
#
# asdf local python 3.8.1
# asdf global python 3.8.1
#
# After running both commands the current python version is 3.8.1 and its source is "local" as
# it takes precedence over "global". If POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW is set to false,
# it'll hide python version in this case because 3.8.1 is the same as the global version.
# POWERLEVEL9K_ASDF_SOURCES will hide python version only if the value of this parameter doesn't
# contain "local".
# Hide tool versions that don't come from one of these sources.
#
# Available sources:
#
# - shell `asdf current` says "set by ASDF_${TOOL}_VERSION environment variable"
# - local `asdf current` says "set by /some/not/home/directory/file"
# - global `asdf current` says "set by /home/username/file"
#
# Note: If this parameter is set to (shell local global), it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SOURCES.
typeset -g POWERLEVEL9K_ASDF_SOURCES=(shell local global)
# If set to false, hide tool versions that are the same as global.
#
# Note: The name of this parameter doesn't reflect its meaning at all.
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_PROMPT_ALWAYS_SHOW.
typeset -g POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW=false
# If set to false, hide tool versions that are equal to "system".
#
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_SYSTEM.
typeset -g POWERLEVEL9K_ASDF_SHOW_SYSTEM=true
# If set to non-empty value, hide tools unless there is a file matching the specified file pattern
# in the current directory, or its parent directory, or its grandparent directory, and so on.
#
# Note: If this parameter is set to empty value, it won't hide tools.
# Note: SHOW_ON_UPGLOB isn't specific to asdf. It works with all prompt segments.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_ON_UPGLOB.
#
# Example: Hide nodejs version when there is no package.json and no *.js files in the current
# directory, in `..`, in `../..` and so on.
#
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.js|package.json'
typeset -g POWERLEVEL9K_ASDF_SHOW_ON_UPGLOB=
# Ruby version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUBY_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_RUBY_BACKGROUND=1
# typeset -g POWERLEVEL9K_ASDF_RUBY_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUBY_SHOW_ON_UPGLOB='*.foo|*.bar'
# Python version from asdf.
typeset -g POWERLEVEL9K_ASDF_PYTHON_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_PYTHON_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_PYTHON_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PYTHON_SHOW_ON_UPGLOB='*.foo|*.bar'
# Go version from asdf.
typeset -g POWERLEVEL9K_ASDF_GOLANG_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_GOLANG_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_GOLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_GOLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Node.js version from asdf.
typeset -g POWERLEVEL9K_ASDF_NODEJS_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_NODEJS_BACKGROUND=2
# typeset -g POWERLEVEL9K_ASDF_NODEJS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.foo|*.bar'
# Rust version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUST_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_RUST_BACKGROUND=208
# typeset -g POWERLEVEL9K_ASDF_RUST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUST_SHOW_ON_UPGLOB='*.foo|*.bar'
# .NET Core version from asdf.
typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_BACKGROUND=5
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_SHOW_ON_UPGLOB='*.foo|*.bar'
# Flutter version from asdf.
typeset -g POWERLEVEL9K_ASDF_FLUTTER_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_FLUTTER_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_SHOW_ON_UPGLOB='*.foo|*.bar'
# Lua version from asdf.
typeset -g POWERLEVEL9K_ASDF_LUA_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_LUA_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_LUA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_LUA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Java version from asdf.
typeset -g POWERLEVEL9K_ASDF_JAVA_FOREGROUND=1
typeset -g POWERLEVEL9K_ASDF_JAVA_BACKGROUND=7
# typeset -g POWERLEVEL9K_ASDF_JAVA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JAVA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Perl version from asdf.
typeset -g POWERLEVEL9K_ASDF_PERL_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_PERL_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_PERL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PERL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Erlang version from asdf.
typeset -g POWERLEVEL9K_ASDF_ERLANG_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_ERLANG_BACKGROUND=1
# typeset -g POWERLEVEL9K_ASDF_ERLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ERLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Elixir version from asdf.
typeset -g POWERLEVEL9K_ASDF_ELIXIR_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_ELIXIR_BACKGROUND=5
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_SHOW_ON_UPGLOB='*.foo|*.bar'
# Postgres version from asdf.
typeset -g POWERLEVEL9K_ASDF_POSTGRES_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_POSTGRES_BACKGROUND=6
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_SHOW_ON_UPGLOB='*.foo|*.bar'
# PHP version from asdf.
typeset -g POWERLEVEL9K_ASDF_PHP_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_PHP_BACKGROUND=5
# typeset -g POWERLEVEL9K_ASDF_PHP_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PHP_SHOW_ON_UPGLOB='*.foo|*.bar'
# Haskell version from asdf.
typeset -g POWERLEVEL9K_ASDF_HASKELL_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_HASKELL_BACKGROUND=3
# typeset -g POWERLEVEL9K_ASDF_HASKELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_HASKELL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Julia version from asdf.
typeset -g POWERLEVEL9K_ASDF_JULIA_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_JULIA_BACKGROUND=2
# typeset -g POWERLEVEL9K_ASDF_JULIA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JULIA_SHOW_ON_UPGLOB='*.foo|*.bar'
##########[ nordvpn: nordvpn connection status, linux only (https://nordvpn.com/) ]###########
# NordVPN connection indicator color.
typeset -g POWERLEVEL9K_NORDVPN_FOREGROUND=7
typeset -g POWERLEVEL9K_NORDVPN_BACKGROUND=4
# Hide NordVPN connection indicator when not connected.
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_CONTENT_EXPANSION=
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_VISUAL_IDENTIFIER_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NORDVPN_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ ranger: ranger shell (https://github.com/ranger/ranger) ]##################
# Ranger shell color.
typeset -g POWERLEVEL9K_RANGER_FOREGROUND=3
typeset -g POWERLEVEL9K_RANGER_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_RANGER_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################[ nnn: nnn shell (https://github.com/jarun/nnn) ]#######################
# Nnn shell color.
typeset -g POWERLEVEL9K_NNN_FOREGROUND=0
typeset -g POWERLEVEL9K_NNN_BACKGROUND=6
# Custom icon.
# typeset -g POWERLEVEL9K_NNN_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########################[ vim_shell: vim shell indicator (:sh) ]###########################
# Vim shell indicator color.
typeset -g POWERLEVEL9K_VIM_SHELL_FOREGROUND=0
typeset -g POWERLEVEL9K_VIM_SHELL_BACKGROUND=2
# Custom icon.
# typeset -g POWERLEVEL9K_VIM_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
######[ midnight_commander: midnight commander shell (https://midnight-commander.org/) ]######
# Midnight Commander shell color.
typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_FOREGROUND=3
typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ nix_shell: nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) ]##
# Nix shell color.
typeset -g POWERLEVEL9K_NIX_SHELL_FOREGROUND=0
typeset -g POWERLEVEL9K_NIX_SHELL_BACKGROUND=4
# Tip: If you want to see just the icon without "pure" and "impure", uncomment the next line.
# typeset -g POWERLEVEL9K_NIX_SHELL_CONTENT_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NIX_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ disk_usage: disk usage ]##################################
# Colors for different levels of disk usage.
typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_FOREGROUND=3
typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_BACKGROUND=0
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_FOREGROUND=0
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_BACKGROUND=3
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_FOREGROUND=7
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_BACKGROUND=1
# Thresholds for different levels of disk usage (percentage points).
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL=90
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL=95
# If set to true, hide disk usage when below $POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL percent.
typeset -g POWERLEVEL9K_DISK_USAGE_ONLY_WARNING=false
# Custom icon.
# typeset -g POWERLEVEL9K_DISK_USAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ vi_mode: vi mode (you don't need this if you've enabled prompt_char) ]###########
# Foreground color.
typeset -g POWERLEVEL9K_VI_MODE_FOREGROUND=0
# Text and color for normal (a.k.a. command) vi mode.
typeset -g POWERLEVEL9K_VI_COMMAND_MODE_STRING=NORMAL
typeset -g POWERLEVEL9K_VI_MODE_NORMAL_BACKGROUND=2
# Text and color for visual vi mode.
typeset -g POWERLEVEL9K_VI_VISUAL_MODE_STRING=VISUAL
typeset -g POWERLEVEL9K_VI_MODE_VISUAL_BACKGROUND=4
# Text and color for overtype (a.k.a. overwrite and replace) vi mode.
typeset -g POWERLEVEL9K_VI_OVERWRITE_MODE_STRING=OVERTYPE
typeset -g POWERLEVEL9K_VI_MODE_OVERWRITE_BACKGROUND=3
# Text and color for insert vi mode.
typeset -g POWERLEVEL9K_VI_INSERT_MODE_STRING=
typeset -g POWERLEVEL9K_VI_MODE_INSERT_FOREGROUND=8
######################################[ ram: free RAM ]#######################################
# RAM color.
typeset -g POWERLEVEL9K_RAM_FOREGROUND=0
typeset -g POWERLEVEL9K_RAM_BACKGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='⭐'
typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='RAM'
#####################################[ swap: used swap ]######################################
# Swap color.
typeset -g POWERLEVEL9K_SWAP_FOREGROUND=0
typeset -g POWERLEVEL9K_SWAP_BACKGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_SWAP_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ load: CPU load ]######################################
# Show average CPU load over this many last minutes. Valid values are 1, 5 and 15.
typeset -g POWERLEVEL9K_LOAD_WHICH=5
# Load color when load is under 50%.
typeset -g POWERLEVEL9K_LOAD_NORMAL_FOREGROUND=0
typeset -g POWERLEVEL9K_LOAD_NORMAL_BACKGROUND=2
# Load color when load is between 50% and 70%.
typeset -g POWERLEVEL9K_LOAD_WARNING_FOREGROUND=0
typeset -g POWERLEVEL9K_LOAD_WARNING_BACKGROUND=3
# Load color when load is over 70%.
typeset -g POWERLEVEL9K_LOAD_CRITICAL_FOREGROUND=0
typeset -g POWERLEVEL9K_LOAD_CRITICAL_BACKGROUND=1
# Custom icon.
# typeset -g POWERLEVEL9K_LOAD_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ todo: todo items (https://github.com/todotxt/todo.txt-cli) ]################
# Todo color.
typeset -g POWERLEVEL9K_TODO_FOREGROUND=0
typeset -g POWERLEVEL9K_TODO_BACKGROUND=8
# Hide todo when the total number of tasks is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL=true
# Hide todo when the number of tasks after filtering is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED=false
# Todo format. The following parameters are available within the expansion.
#
# - P9K_TODO_TOTAL_TASK_COUNT The total number of tasks.
# - P9K_TODO_FILTERED_TASK_COUNT The number of tasks after filtering.
#
# These variables correspond to the last line of the output of `todo.sh -p ls`:
#
# TODO: 24 of 42 tasks shown
#
# Here 24 is P9K_TODO_FILTERED_TASK_COUNT and 42 is P9K_TODO_TOTAL_TASK_COUNT.
#
# typeset -g POWERLEVEL9K_TODO_CONTENT_EXPANSION='$P9K_TODO_FILTERED_TASK_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TODO_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ timewarrior: timewarrior tracking status (https://timewarrior.net/) ]############
# Timewarrior color.
typeset -g POWERLEVEL9K_TIMEWARRIOR_FOREGROUND=255
typeset -g POWERLEVEL9K_TIMEWARRIOR_BACKGROUND=8
# If the tracked task is longer than 24 characters, truncate and append "..".
# Tip: To always display tasks without truncation, delete the following parameter.
# Tip: To hide task names and display just the icon when time tracking is enabled, set the
# value of the following parameter to "".
typeset -g POWERLEVEL9K_TIMEWARRIOR_CONTENT_EXPANSION='${P9K_CONTENT:0:24}${${P9K_CONTENT:24}:+..}'
# Custom icon.
# typeset -g POWERLEVEL9K_TIMEWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ taskwarrior: taskwarrior task count (https://taskwarrior.org/) ]##############
# Taskwarrior color.
typeset -g POWERLEVEL9K_TASKWARRIOR_FOREGROUND=0
typeset -g POWERLEVEL9K_TASKWARRIOR_BACKGROUND=6
# Taskwarrior segment format. The following parameters are available within the expansion.
#
# - P9K_TASKWARRIOR_PENDING_COUNT The number of pending tasks: `task +PENDING count`.
# - P9K_TASKWARRIOR_OVERDUE_COUNT The number of overdue tasks: `task +OVERDUE count`.
#
# Zero values are represented as empty parameters.
#
# The default format:
#
# '${P9K_TASKWARRIOR_OVERDUE_COUNT:+"!$P9K_TASKWARRIOR_OVERDUE_COUNT/"}$P9K_TASKWARRIOR_PENDING_COUNT'
#
# typeset -g POWERLEVEL9K_TASKWARRIOR_CONTENT_EXPANSION='$P9K_TASKWARRIOR_PENDING_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TASKWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ context: user@hostname ]##################################
# Context color when running with privileges.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_FOREGROUND=1
typeset -g POWERLEVEL9K_CONTEXT_ROOT_BACKGROUND=0
# Context color in SSH without privileges.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_FOREGROUND=3
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_BACKGROUND=0
# Default context color (no privileges, no SSH).
typeset -g POWERLEVEL9K_CONTEXT_FOREGROUND=3
typeset -g POWERLEVEL9K_CONTEXT_BACKGROUND=0
# Context format when running with privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_TEMPLATE='%n@%m'
# Context format when in SSH without privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_TEMPLATE='%n@%m'
# Default context format (no privileges, no SSH): user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_TEMPLATE='%n@%m'
# Don't show context unless running with privileges or in SSH.
# Tip: Remove the next line to always show context.
# typeset -g POWERLEVEL9K_CONTEXT_{DEFAULT,SUDO}_{CONTENT,VISUAL_IDENTIFIER}_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
#typeset -g POWERLEVEL9K_CONTEXT_PREFIX='with '
typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='💻'
###[ virtualenv: python virtual environment (https://docs.python.org/3/library/venv.html) ]###
# Python virtual environment color.
typeset -g POWERLEVEL9K_VIRTUALENV_FOREGROUND=0
typeset -g POWERLEVEL9K_VIRTUALENV_BACKGROUND=4
# Don't show Python version next to the virtual environment name.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION=false
# If set to "false", won't show virtualenv if pyenv is already shown.
# If set to "if-different", won't show virtualenv if it's the same as pyenv.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV=false
# Separate environment name from Python version only with a space.
typeset -g POWERLEVEL9K_VIRTUALENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_VIRTUALENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ anaconda: conda environment (https://conda.io/) ]######################
# Anaconda environment color.
typeset -g POWERLEVEL9K_ANACONDA_FOREGROUND=0
typeset -g POWERLEVEL9K_ANACONDA_BACKGROUND=4
# Anaconda segment format. The following parameters are available within the expansion.
#
# - CONDA_PREFIX Absolute path to the active Anaconda/Miniconda environment.
# - CONDA_DEFAULT_ENV Name of the active Anaconda/Miniconda environment.
# - CONDA_PROMPT_MODIFIER Configurable prompt modifier (see below).
# - P9K_ANACONDA_PYTHON_VERSION Current python version (python --version).
#
# CONDA_PROMPT_MODIFIER can be configured with the following command:
#
# conda config --set env_prompt '({default_env}) '
#
# The last argument is a Python format string that can use the following variables:
#
# - prefix The same as CONDA_PREFIX.
# - default_env The same as CONDA_DEFAULT_ENV.
# - name The last segment of CONDA_PREFIX.
# - stacked_env Comma-separated list of names in the environment stack. The first element is
# always the same as default_env.
#
# Note: '({default_env}) ' is the default value of env_prompt.
#
# The default value of POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION expands to $CONDA_PROMPT_MODIFIER
# without the surrounding parentheses, or to the last path component of CONDA_PREFIX if the former
# is empty.
typeset -g POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION='${${${${CONDA_PROMPT_MODIFIER#\(}% }%\)}:-${CONDA_PREFIX:t}}'
# Custom icon.
# typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='⭐'
typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='🐍'
################[ pyenv: python environment (https://github.com/pyenv/pyenv) ]################
# Pyenv color.
typeset -g POWERLEVEL9K_PYENV_FOREGROUND=0
typeset -g POWERLEVEL9K_PYENV_BACKGROUND=4
# Hide python version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PYENV_SOURCES=(shell local global)
# If set to false, hide python version if it's the same as global:
# $(pyenv version-name) == $(pyenv global).
typeset -g POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide python version if it's equal to "system".
typeset -g POWERLEVEL9K_PYENV_SHOW_SYSTEM=true
# Pyenv segment format. The following parameters are available within the expansion.
#
# - P9K_CONTENT Current pyenv environment (pyenv version-name).
# - P9K_PYENV_PYTHON_VERSION Current python version (python --version).
#
# The default format has the following logic:
#
# 1. Display "$P9K_CONTENT $P9K_PYENV_PYTHON_VERSION" if $P9K_PYENV_PYTHON_VERSION is not
# empty and unequal to $P9K_CONTENT.
# 2. Otherwise display just "$P9K_CONTENT".
typeset -g POWERLEVEL9K_PYENV_CONTENT_EXPANSION='${P9K_CONTENT}${${P9K_PYENV_PYTHON_VERSION:#$P9K_CONTENT}:+ $P9K_PYENV_PYTHON_VERSION}'
# Custom icon.
# typeset -g POWERLEVEL9K_PYENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ goenv: go environment (https://github.com/syndbg/goenv) ]################
# Goenv color.
typeset -g POWERLEVEL9K_GOENV_FOREGROUND=0
typeset -g POWERLEVEL9K_GOENV_BACKGROUND=4
# Hide go version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_GOENV_SOURCES=(shell local global)
# If set to false, hide go version if it's the same as global:
# $(goenv version-name) == $(goenv global).
typeset -g POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide go version if it's equal to "system".
typeset -g POWERLEVEL9K_GOENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_GOENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ nodenv: node.js version from nodenv (https://github.com/nodenv/nodenv) ]##########
# Nodenv color.
typeset -g POWERLEVEL9K_NODENV_FOREGROUND=2
typeset -g POWERLEVEL9K_NODENV_BACKGROUND=0
# Hide node version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_NODENV_SOURCES=(shell local global)
# If set to false, hide node version if it's the same as global:
# $(nodenv version-name) == $(nodenv global).
typeset -g POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide node version if it's equal to "system".
typeset -g POWERLEVEL9K_NODENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ nvm: node.js version from nvm (https://github.com/nvm-sh/nvm) ]###############
# Nvm color.
typeset -g POWERLEVEL9K_NVM_FOREGROUND=0
typeset -g POWERLEVEL9K_NVM_BACKGROUND=5
# Custom icon.
# typeset -g POWERLEVEL9K_NVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ nodeenv: node.js environment (https://github.com/ekalinin/nodeenv) ]############
# Nodeenv color.
typeset -g POWERLEVEL9K_NODEENV_FOREGROUND=2
typeset -g POWERLEVEL9K_NODEENV_BACKGROUND=0
# Don't show Node version next to the environment name.
typeset -g POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION=false
# Separate environment name from Node version only with a space.
typeset -g POWERLEVEL9K_NODEENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_NODEENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############################[ node_version: node.js version ]###############################
# Node version color.
typeset -g POWERLEVEL9K_NODE_VERSION_FOREGROUND=7
typeset -g POWERLEVEL9K_NODE_VERSION_BACKGROUND=2
# Show node version only when in a directory tree containing package.json.
typeset -g POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODE_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ go_version: go version (https://golang.org) ]########################
# Go version color.
typeset -g POWERLEVEL9K_GO_VERSION_FOREGROUND=255
typeset -g POWERLEVEL9K_GO_VERSION_BACKGROUND=2
# Show go version only when in a go project subdirectory.
typeset -g POWERLEVEL9K_GO_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_GO_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ rust_version: rustc version (https://www.rust-lang.org) ]##################
# Rust version color.
typeset -g POWERLEVEL9K_RUST_VERSION_FOREGROUND=0
typeset -g POWERLEVEL9K_RUST_VERSION_BACKGROUND=208
# Show rust version only when in a rust project subdirectory.
typeset -g POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_RUST_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ dotnet_version: .NET version (https://dotnet.microsoft.com) ]################
# .NET version color.
typeset -g POWERLEVEL9K_DOTNET_VERSION_FOREGROUND=7
typeset -g POWERLEVEL9K_DOTNET_VERSION_BACKGROUND=5
# Show .NET version only when in a .NET project subdirectory.
typeset -g POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_DOTNET_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ php_version: php version (https://www.php.net/) ]######################
# PHP version color.
typeset -g POWERLEVEL9K_PHP_VERSION_FOREGROUND=0
typeset -g POWERLEVEL9K_PHP_VERSION_BACKGROUND=5
# Show PHP version only when in a PHP project subdirectory.
typeset -g POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHP_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ laravel_version: laravel php framework version (https://laravel.com/) ]###########
# Laravel version color.
typeset -g POWERLEVEL9K_LARAVEL_VERSION_FOREGROUND=1
typeset -g POWERLEVEL9K_LARAVEL_VERSION_BACKGROUND=7
# Custom icon.
# typeset -g POWERLEVEL9K_LARAVEL_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ rbenv: ruby version from rbenv (https://github.com/rbenv/rbenv) ]##############
# Rbenv color.
typeset -g POWERLEVEL9K_RBENV_FOREGROUND=0
typeset -g POWERLEVEL9K_RBENV_BACKGROUND=1
# Hide ruby version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_RBENV_SOURCES=(shell local global)
# If set to false, hide ruby version if it's the same as global:
# $(rbenv version-name) == $(rbenv global).
typeset -g POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide ruby version if it's equal to "system".
typeset -g POWERLEVEL9K_RBENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_RBENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
####################[ java_version: java version (https://www.java.com/) ]####################
# Java version color.
typeset -g POWERLEVEL9K_JAVA_VERSION_FOREGROUND=1
typeset -g POWERLEVEL9K_JAVA_VERSION_BACKGROUND=7
# Show java version only when in a java project subdirectory.
typeset -g POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY=true
# Show brief version.
typeset -g POWERLEVEL9K_JAVA_VERSION_FULL=false
# Custom icon.
# typeset -g POWERLEVEL9K_JAVA_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###[ package: name@version from package.json (https://docs.npmjs.com/files/package.json) ]####
# Package color.
typeset -g POWERLEVEL9K_PACKAGE_FOREGROUND=0
typeset -g POWERLEVEL9K_PACKAGE_BACKGROUND=6
# Package format. The following parameters are available within the expansion.
#
# - P9K_PACKAGE_NAME The value of `name` field in package.json.
# - P9K_PACKAGE_VERSION The value of `version` field in package.json.
#
# typeset -g POWERLEVEL9K_PACKAGE_CONTENT_EXPANSION='${P9K_PACKAGE_NAME//\%/%%}@${P9K_PACKAGE_VERSION//\%/%%}'
# Custom icon.
# typeset -g POWERLEVEL9K_PACKAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ rvm: ruby version from rvm (https://rvm.io) ]########################
# Rvm color.
typeset -g POWERLEVEL9K_RVM_FOREGROUND=0
typeset -g POWERLEVEL9K_RVM_BACKGROUND=240
# Don't show @gemset at the end.
typeset -g POWERLEVEL9K_RVM_SHOW_GEMSET=false
# Don't show ruby- at the front.
typeset -g POWERLEVEL9K_RVM_SHOW_PREFIX=false
# Custom icon.
# typeset -g POWERLEVEL9K_RVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ fvm: flutter version management (https://github.com/leoafarias/fvm) ]############
# Fvm color.
typeset -g POWERLEVEL9K_FVM_FOREGROUND=0
typeset -g POWERLEVEL9K_FVM_BACKGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_FVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ luaenv: lua version from luaenv (https://github.com/cehoffman/luaenv) ]###########
# Lua color.
typeset -g POWERLEVEL9K_LUAENV_FOREGROUND=0
typeset -g POWERLEVEL9K_LUAENV_BACKGROUND=4
# Hide lua version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_LUAENV_SOURCES=(shell local global)
# If set to false, hide lua version if it's the same as global:
# $(luaenv version-name) == $(luaenv global).
typeset -g POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide lua version if it's equal to "system".
typeset -g POWERLEVEL9K_LUAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_LUAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ jenv: java version from jenv (https://github.com/jenv/jenv) ]################
# Java color.
typeset -g POWERLEVEL9K_JENV_FOREGROUND=1
typeset -g POWERLEVEL9K_JENV_BACKGROUND=7
# Hide java version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_JENV_SOURCES=(shell local global)
# If set to false, hide java version if it's the same as global:
# $(jenv version-name) == $(jenv global).
typeset -g POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide java version if it's equal to "system".
typeset -g POWERLEVEL9K_JENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_JENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ plenv: perl version from plenv (https://github.com/tokuhirom/plenv) ]############
# Perl color.
typeset -g POWERLEVEL9K_PLENV_FOREGROUND=0
typeset -g POWERLEVEL9K_PLENV_BACKGROUND=4
# Hide perl version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PLENV_SOURCES=(shell local global)
# If set to false, hide perl version if it's the same as global:
# $(plenv version-name) == $(plenv global).
typeset -g POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide perl version if it's equal to "system".
typeset -g POWERLEVEL9K_PLENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PLENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ phpenv: php version from phpenv (https://github.com/phpenv/phpenv) ]############
# PHP color.
typeset -g POWERLEVEL9K_PHPENV_FOREGROUND=0
typeset -g POWERLEVEL9K_PHPENV_BACKGROUND=5
# Hide php version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PHPENV_SOURCES=(shell local global)
# If set to false, hide php version if it's the same as global:
# $(phpenv version-name) == $(phpenv global).
typeset -g POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide PHP version if it's equal to "system".
typeset -g POWERLEVEL9K_PHPENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHPENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######[ scalaenv: scala version from scalaenv (https://github.com/scalaenv/scalaenv) ]#######
# Scala color.
typeset -g POWERLEVEL9K_SCALAENV_FOREGROUND=0
typeset -g POWERLEVEL9K_SCALAENV_BACKGROUND=1
# Hide scala version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_SCALAENV_SOURCES=(shell local global)
# If set to false, hide scala version if it's the same as global:
# $(scalaenv version-name) == $(scalaenv global).
typeset -g POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide scala version if it's equal to "system".
typeset -g POWERLEVEL9K_SCALAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_SCALAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ haskell_stack: haskell version from stack (https://haskellstack.org/) ]###########
# Haskell color.
typeset -g POWERLEVEL9K_HASKELL_STACK_FOREGROUND=0
typeset -g POWERLEVEL9K_HASKELL_STACK_BACKGROUND=3
# Hide haskell version if it doesn't come from one of these sources.
#
# shell: version is set by STACK_YAML
# local: version is set by stack.yaml up the directory tree
# global: version is set by the implicit global project (~/.stack/global-project/stack.yaml)
typeset -g POWERLEVEL9K_HASKELL_STACK_SOURCES=(shell local)
# If set to false, hide haskell version if it's the same as in the implicit global project.
typeset -g POWERLEVEL9K_HASKELL_STACK_ALWAYS_SHOW=true
# Custom icon.
# typeset -g POWERLEVEL9K_HASKELL_STACK_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ terraform: terraform workspace (https://www.terraform.io) ]#################
# Don't show terraform workspace if it's literally "default".
typeset -g POWERLEVEL9K_TERRAFORM_SHOW_DEFAULT=false
# POWERLEVEL9K_TERRAFORM_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current terraform workspace gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_TERRAFORM_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_TERRAFORM_CLASSES defines the workspace class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' OTHER)
#
# If your current terraform workspace is "project_test", its class is TEST because "project_test"
# doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_FOREGROUND=2
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_BACKGROUND=0
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' OTHER)
typeset -g POWERLEVEL9K_TERRAFORM_OTHER_FOREGROUND=4
typeset -g POWERLEVEL9K_TERRAFORM_OTHER_BACKGROUND=0
# typeset -g POWERLEVEL9K_TERRAFORM_OTHER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ kubecontext: current kubernetes context (https://kubernetes.io/) ]#############
# Show kubecontext only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show kubecontext.
typeset -g POWERLEVEL9K_KUBECONTEXT_SHOW_ON_COMMAND='kubectl|helm|kubens|kubectx|oc|istioctl|kogito|k9s|helmfile|fluxctl|stern'
# Kubernetes context classes for the purpose of using different colors, icons and expansions with
# different contexts.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current kubernetes context gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_KUBECONTEXT_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_KUBECONTEXT_CLASSES defines the context class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current kubernetes context is "deathray-testing/default", its class is TEST
# because "deathray-testing/default" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_FOREGROUND=0
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_BACKGROUND=2
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_FOREGROUND=7
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_BACKGROUND=5
# typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_KUBECONTEXT_CONTENT_EXPANSION to specify the content displayed by kubecontext
# segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# Within the expansion the following parameters are always available:
#
# - P9K_CONTENT The content that would've been displayed if there was no content
# expansion defined.
# - P9K_KUBECONTEXT_NAME The current context's name. Corresponds to column NAME in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_CLUSTER The current context's cluster. Corresponds to column CLUSTER in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_NAMESPACE The current context's namespace. Corresponds to column NAMESPACE
# in the output of `kubectl config get-contexts`. If there is no
# namespace, the parameter is set to "default".
# - P9K_KUBECONTEXT_USER The current context's user. Corresponds to column AUTHINFO in the
# output of `kubectl config get-contexts`.
#
# If the context points to Google Kubernetes Engine (GKE) or Elastic Kubernetes Service (EKS),
# the following extra parameters are available:
#
# - P9K_KUBECONTEXT_CLOUD_NAME Either "gke" or "eks".
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT Account/project ID.
# - P9K_KUBECONTEXT_CLOUD_ZONE Availability zone.
# - P9K_KUBECONTEXT_CLOUD_CLUSTER Cluster.
#
# P9K_KUBECONTEXT_CLOUD_* parameters are derived from P9K_KUBECONTEXT_CLUSTER. For example,
# if P9K_KUBECONTEXT_CLUSTER is "gke_my-account_us-east1-a_my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=gke
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=my-account
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east1-a
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
#
# If P9K_KUBECONTEXT_CLUSTER is "arn:aws:eks:us-east-1:123456789012:cluster/my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=eks
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=123456789012
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east-1
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
# Start with an empty content expansion and build it up incrementally with += below.
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION=
# Show P9K_KUBECONTEXT_CLOUD_CLUSTER if it's not empty, or fall back to P9K_KUBECONTEXT_NAME.
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${P9K_KUBECONTEXT_CLOUD_CLUSTER:-${P9K_KUBECONTEXT_NAME}}'
# Append the current context's namespace if it's not "default".
# ${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default} expands to "/<namespace>" unless that
# string is exactly "/default", in which case it expands to nothing.
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default}'
# Custom prefix.
typeset -g POWERLEVEL9K_KUBECONTEXT_PREFIX='at '
#[ aws: aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) ]#
# Show aws only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show aws.
typeset -g POWERLEVEL9K_AWS_SHOW_ON_COMMAND='aws|awless|terraform|pulumi|terragrunt'
# POWERLEVEL9K_AWS_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current AWS profile gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_AWS_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_AWS_CLASSES defines the profile class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current AWS profile is "company_test", its class is TEST
# because "company_test" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_AWS_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_AWS_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_AWS_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_AWS_DEFAULT_FOREGROUND=7
typeset -g POWERLEVEL9K_AWS_DEFAULT_BACKGROUND=1
# typeset -g POWERLEVEL9K_AWS_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws_eb_env: aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) ]#
# AWS Elastic Beanstalk environment color.
typeset -g POWERLEVEL9K_AWS_EB_ENV_FOREGROUND=2
typeset -g POWERLEVEL9K_AWS_EB_ENV_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_AWS_EB_ENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ azure: azure account name (https://docs.microsoft.com/en-us/cli/azure) ]##########
# Show azure only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show azure.
typeset -g POWERLEVEL9K_AZURE_SHOW_ON_COMMAND='az|terraform|pulumi|terragrunt'
# Azure account name color.
typeset -g POWERLEVEL9K_AZURE_FOREGROUND=7
typeset -g POWERLEVEL9K_AZURE_BACKGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_AZURE_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ gcloud: google cloud account and project (https://cloud.google.com/) ]###########
# Show gcloud only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show gcloud.
typeset -g POWERLEVEL9K_GCLOUD_SHOW_ON_COMMAND='gcloud|gcs'
# Google cloud color.
typeset -g POWERLEVEL9K_GCLOUD_FOREGROUND=7
typeset -g POWERLEVEL9K_GCLOUD_BACKGROUND=4
# Google cloud format. Change the value of POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION and/or
# POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION if the default is too verbose or not informative
# enough. You can use the following parameters in the expansions. Each of them corresponds to the
# output of `gcloud` tool.
#
# Parameter | Source
# -------------------------|--------------------------------------------------------------------
# P9K_GCLOUD_CONFIGURATION | gcloud config configurations list --format='value(name)'
# P9K_GCLOUD_ACCOUNT | gcloud config get-value account
# P9K_GCLOUD_PROJECT_ID | gcloud config get-value project
# P9K_GCLOUD_PROJECT_NAME | gcloud projects describe $P9K_GCLOUD_PROJECT_ID --format='value(name)'
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced with '%%'.
#
# Obtaining project name requires sending a request to Google servers. This can take a long time
# and even fail. When project name is unknown, P9K_GCLOUD_PROJECT_NAME is not set and gcloud
# prompt segment is in state PARTIAL. When project name gets known, P9K_GCLOUD_PROJECT_NAME gets
# set and gcloud prompt segment transitions to state COMPLETE.
#
# You can customize the format, icon and colors of gcloud segment separately for states PARTIAL
# and COMPLETE. You can also hide gcloud in state PARTIAL by setting
# POWERLEVEL9K_GCLOUD_PARTIAL_VISUAL_IDENTIFIER_EXPANSION and
# POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION to empty.
typeset -g POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_ID//\%/%%}'
typeset -g POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_NAME//\%/%%}'
# Send a request to Google (by means of `gcloud projects describe ...`) to obtain project name
# this often. Negative value disables periodic polling. In this mode project name is retrieved
# only when the current configuration, account or project id changes.
typeset -g POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS=60
# Custom icon.
# typeset -g POWERLEVEL9K_GCLOUD_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ google_app_cred: google application credentials (https://cloud.google.com/docs/authentication/production) ]#
# Show google_app_cred only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show google_app_cred.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_SHOW_ON_COMMAND='terraform|pulumi|terragrunt'
# Google application credentials classes for the purpose of using different colors, icons and
# expansions with different credentials.
#
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES is an array with even number of elements. The first
# element in each pair defines a pattern against which the current kubernetes context gets
# matched. More specifically, it's P9K_CONTENT prior to the application of context expansion
# (see below) that gets matched. If you unset all POWERLEVEL9K_GOOGLE_APP_CRED_*CONTENT_EXPANSION
# parameters, you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES defines the context class. Patterns are tried in order.
# The first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD
# '*:*test*:*' TEST
# '*' DEFAULT)
#
# If your current Google application credentials string is "service_account:deathray-testing:x@y.com",
# its class is TEST because it doesn't match the pattern '*:*prod*:*' but does match '*:*test*:*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_CONTENT_EXPANSION='$P9K_GOOGLE_APP_CRED_PROJECT_ID'
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD # These values are examples that are unlikely
# '*:*test*:*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_FOREGROUND=7
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_BACKGROUND=4
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_GOOGLE_APP_CRED_CONTENT_EXPANSION to specify the content displayed by
# google_app_cred segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# You can use the following parameters in the expansion. Each of them corresponds to one of the
# fields in the JSON file pointed to by GOOGLE_APPLICATION_CREDENTIALS.
#
# Parameter | JSON key file field
# ---------------------------------+---------------
# P9K_GOOGLE_APP_CRED_TYPE | type
# P9K_GOOGLE_APP_CRED_PROJECT_ID | project_id
# P9K_GOOGLE_APP_CRED_CLIENT_EMAIL | client_email
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced by '%%'.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_CONTENT_EXPANSION='${P9K_GOOGLE_APP_CRED_PROJECT_ID//\%/%%}'
###############################[ public_ip: public IP address ]###############################
# Public IP color.
typeset -g POWERLEVEL9K_PUBLIC_IP_FOREGROUND=7
typeset -g POWERLEVEL9K_PUBLIC_IP_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_PUBLIC_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
########################[ vpn_ip: virtual private network indicator ]#########################
# VPN IP color.
typeset -g POWERLEVEL9K_VPN_IP_FOREGROUND=0
typeset -g POWERLEVEL9K_VPN_IP_BACKGROUND=6
# When on VPN, show just an icon without the IP address.
# Tip: To display the private IP address when on VPN, remove the next line.
typeset -g POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION=
# Regular expression for the VPN network interface. Run `ifconfig` or `ip -4 a show` while on VPN
# to see the name of the interface.
typeset -g POWERLEVEL9K_VPN_IP_INTERFACE='(gpd|wg|(.*tun)|tailscale)[0-9]*'
# If set to true, show one segment per matching network interface. If set to false, show only
# one segment corresponding to the first matching network interface.
# Tip: If you set it to true, you'll probably want to unset POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION.
typeset -g POWERLEVEL9K_VPN_IP_SHOW_ALL=false
# Custom icon.
# typeset -g POWERLEVEL9K_VPN_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ ip: ip address and bandwidth usage for a specified network interface ]###########
# IP color.
typeset -g POWERLEVEL9K_IP_BACKGROUND=4
typeset -g POWERLEVEL9K_IP_FOREGROUND=0
# The following parameters are accessible within the expansion:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_IP_IP | IP address
# P9K_IP_INTERFACE | network interface
# P9K_IP_RX_BYTES | total number of bytes received
# P9K_IP_TX_BYTES | total number of bytes sent
# P9K_IP_RX_RATE | receive rate (since last prompt)
# P9K_IP_TX_RATE | send rate (since last prompt)
typeset -g POWERLEVEL9K_IP_CONTENT_EXPANSION='${P9K_IP_RX_RATE:+<$P9K_IP_RX_RATE }${P9K_IP_TX_RATE:+>$P9K_IP_TX_RATE }$P9K_IP_IP'
# Show information for the first network interface whose name matches this regular expression.
# Run `ifconfig` or `ip -4 a show` to see the names of all network interfaces.
typeset -g POWERLEVEL9K_IP_INTERFACE='[ew].*'
# Custom icon.
# typeset -g POWERLEVEL9K_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
#########################[ proxy: system-wide http/https/ftp proxy ]##########################
# Proxy color.
typeset -g POWERLEVEL9K_PROXY_FOREGROUND=4
typeset -g POWERLEVEL9K_PROXY_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_PROXY_VISUAL_IDENTIFIER_EXPANSION='⭐'
################################[ battery: internal battery ]#################################
# Show battery in red when it's below this level and not connected to power supply.
typeset -g POWERLEVEL9K_BATTERY_LOW_THRESHOLD=20
typeset -g POWERLEVEL9K_BATTERY_LOW_FOREGROUND=1
# Show battery in green when it's charging or fully charged.
typeset -g POWERLEVEL9K_BATTERY_{CHARGING,CHARGED}_FOREGROUND=2
# Show battery in yellow when it's discharging.
typeset -g POWERLEVEL9K_BATTERY_DISCONNECTED_FOREGROUND=3
# Battery pictograms going from low to high level of charge.
typeset -g POWERLEVEL9K_BATTERY_STAGES=('battery')
# Don't show the remaining time to charge/discharge.
typeset -g POWERLEVEL9K_BATTERY_VERBOSE=false
typeset -g POWERLEVEL9K_BATTERY_BACKGROUND=0
typeset -g POWERLEVEL9K_BATTERY_VISUAL_IDENTIFIER_EXPANSION='🔋'
#####################################[ wifi: wifi speed ]#####################################
# WiFi color.
typeset -g POWERLEVEL9K_WIFI_FOREGROUND=0
typeset -g POWERLEVEL9K_WIFI_BACKGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use different colors and icons depending on signal strength ($P9K_WIFI_BARS).
#
# # Wifi colors and icons for different signal strength levels (low to high).
# typeset -g my_wifi_fg=(0 0 0 0 0) # <-- change these values
# typeset -g my_wifi_icon=('WiFi' 'WiFi' 'WiFi' 'WiFi' 'WiFi') # <-- change these values
#
# typeset -g POWERLEVEL9K_WIFI_CONTENT_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}$P9K_WIFI_LAST_TX_RATE Mbps'
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}${my_wifi_icon[P9K_WIFI_BARS+1]}'
#
# The following parameters are accessible within the expansions:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_WIFI_SSID | service set identifier, a.k.a. network name
# P9K_WIFI_LINK_AUTH | authentication protocol such as "wpa2-psk" or "none"; empty if unknown
# P9K_WIFI_LAST_TX_RATE | wireless transmit rate in megabits per second
# P9K_WIFI_RSSI | signal strength in dBm, from -120 to 0
# P9K_WIFI_NOISE | noise in dBm, from -120 to 0
# P9K_WIFI_BARS | signal strength in bars, from 0 to 4 (derived from P9K_WIFI_RSSI and P9K_WIFI_NOISE)
####################################[ time: current time ]####################################
# Current time color.
typeset -g POWERLEVEL9K_TIME_FOREGROUND=0
typeset -g POWERLEVEL9K_TIME_BACKGROUND=7
# Format for the current time: 09:51:02. See `man 3 strftime`.
typeset -g POWERLEVEL9K_TIME_FORMAT='%D{%H:%M:%S}'
# If set to true, time will update when you hit enter. This way prompts for the past
# commands will contain the start times of their commands as opposed to the default
# behavior where they contain the end times of their preceding commands.
typeset -g POWERLEVEL9K_TIME_UPDATE_ON_COMMAND=false
# Custom icon.
typeset -g POWERLEVEL9K_TIME_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
typeset -g POWERLEVEL9K_TIME_PREFIX='at '
# Example of a user-defined prompt segment. Function prompt_example will be called on every
# prompt if `example` prompt segment is added to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or
# POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS. It displays an icon and yellow text on red background
# greeting the user.
#
# Type `p10k help segment` for documentation and a more sophisticated example.
function prompt_example() {
  # Draw one prompt segment: red background (-b 1), yellow foreground (-f 3),
  # '*' as the icon, and 'hello, <user>' as the text (%n is zsh's prompt
  # escape for the current user name).
  p10k segment -b 1 -f 3 -i '*' -t 'hello, %n'
}
# User-defined prompt segments may optionally provide an instant_prompt_* function. Its job
# is to generate the prompt segment for display in instant prompt. See
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
#
# Powerlevel10k will call instant_prompt_* at the same time as the regular prompt_* function
# and will record all `p10k segment` calls it makes. When displaying instant prompt, Powerlevel10k
# will replay these calls without actually calling instant_prompt_*. It is imperative that
# instant_prompt_* always makes the same `p10k segment` calls regardless of environment. If this
# rule is not observed, the content of instant prompt will be incorrect.
#
# Usually, you should either not define instant_prompt_* or simply call prompt_* from it. If
# instant_prompt_* is not defined for a segment, the segment won't be shown in instant prompt.
function instant_prompt_example() {
  # Instant-prompt counterpart of prompt_example. Since prompt_example always
  # makes the same `p10k segment` calls regardless of environment, we can call
  # it from instant_prompt_example. This will give us the same `example`
  # prompt segment in the instant and regular prompts.
  prompt_example
}
# User-defined prompt segments can be customized the same way as built-in segments.
typeset -g POWERLEVEL9K_EXAMPLE_FOREGROUND=3
typeset -g POWERLEVEL9K_EXAMPLE_BACKGROUND=1
# typeset -g POWERLEVEL9K_EXAMPLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Transient prompt works similarly to the builtin transient_rprompt option. It trims down prompt
# when accepting a command line. Supported values:
#
# - off: Don't change prompt when accepting a command line.
# - always: Trim down prompt when accepting a command line.
# - same-dir: Trim down prompt when accepting a command line unless this is the first command
# typed after changing current working directory.
typeset -g POWERLEVEL9K_TRANSIENT_PROMPT=always
# Instant prompt mode.
#
# - off: Disable instant prompt. Choose this if you've tried instant prompt and found
# it incompatible with your zsh configuration files.
# - quiet: Enable instant prompt and don't print warnings when detecting console output
# during zsh initialization. Choose this if you've read and understood
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
# - verbose: Enable instant prompt and print a warning when detecting console output during
# zsh initialization. Choose this if you've never tried instant prompt, haven't
# seen the warning, or if you are unsure what this all means.
typeset -g POWERLEVEL9K_INSTANT_PROMPT=verbose
# Hot reload allows you to change POWERLEVEL9K options after Powerlevel10k has been initialized.
# For example, you can type POWERLEVEL9K_BACKGROUND=red and see your prompt turn red. Hot reload
# can slow down prompt by 1-2 milliseconds, so it's better to keep it turned off unless you
# really need it.
typeset -g POWERLEVEL9K_DISABLE_HOT_RELOAD=true
# If p10k is already loaded, reload configuration.
# This works even with POWERLEVEL9K_DISABLE_HOT_RELOAD=true.
(( ! $+functions[p10k] )) || p10k reload
}
# Tell `p10k configure` which file it should overwrite.
typeset -g POWERLEVEL9K_CONFIG_FILE=${${(%):-%x}:a}
(( ${#p10k_config_opts} )) && setopt ${p10k_config_opts[@]}
'builtin' 'unset' 'p10k_config_opts'
|
<reponame>streamglider/streamglider
//
// StreamCastViewController.h
// StreamCast
//
// Created by <NAME> on 7/15/10.
// Copyright 2010 StreamGlider, Inc. All rights reserved.
//
// This program is free software if used non-commercially: you can redistribute it and/or modify
// it under the terms of the BSD 4 Clause License as published by
// the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// BSD 4 Clause License for more details.
//
// You should have received a copy of the BSD 4 Clause License
// along with this program. If not, see the README.md file with this program.
#import <UIKit/UIKit.h>
#import "LoaderDelegate.h"
#import "PageBarViewController.h"
@class Stream;
@class StreamsTableViewController;
@class EditStreamViewController;
@class Frame;
@class SlideShowViewController;
@class PreviewViewController;
@class StartupAnimationViewController;
// Root view controller for StreamCast. Owns the streams table, stream
// editor, slide show, and preview sub-controllers, and adopts
// LoaderDelegate for content-loading callbacks.
@interface StreamCastViewController : UIViewController <LoaderDelegate>

// Sub-controller backing the streams table.
@property (nonatomic, retain) IBOutlet StreamsTableViewController *tableViewController;
// Sub-controller used to edit a stream.
@property (nonatomic, retain) IBOutlet EditStreamViewController *editViewController;
// Sub-controller that runs the slide show.
@property (nonatomic, retain) IBOutlet SlideShowViewController *slideShowViewController;
// Preview pane controller; declared with an explicit setter name.
@property (nonatomic, retain, setter=setPreviewViewController:) IBOutlet PreviewViewController *previewViewController;
// Views hosting the table UI (container wraps the table view).
@property (nonatomic, retain) IBOutlet UIView *tableView;
@property (nonatomic, retain) IBOutlet UIView *tableViewContainer;
// Whether the preview is currently being displayed.
@property (assign) BOOL displayingPreview;

// Shows the view for the given frame.
- (void)displayViewForFrame:(Frame*)frame;
// Plays the preview animation.
- (void)playPreviewAnimation;
// Opens the in-app browser for a frame's content.
- (void)displayBrowserForFrame:(Frame*)frame;
// Opens the in-app browser for an arbitrary URL request.
- (void)displayBrowserForRequest:(NSURLRequest*)req;
// Interface Builder actions for the toolbar buttons.
- (IBAction)handleSharedStreamsTapped;
- (IBAction)handleEditTapped;
// Refreshes button state.
- (void)updateButtons;
// Pauses/resumes activity in this controller.
- (void)pause;
- (void)resume;
@end
|
#!/bin/bash
#======================================
# Functions...
#--------------------------------------
# Pull in kiwi's image-build helper functions and environment, if present
# (these files are provided by the kiwi image builder inside the chroot).
test -f /.kconfig && . /.kconfig
test -f /.profile && . /.profile
#======================================
# Greeting...
#--------------------------------------
# $kiwi_iname is expected to be set by /.kconfig -- TODO confirm.
echo "Configure image: [$kiwi_iname]..."
#======================================
# Imitate upstream image:
# velero/velero
# same file structure
#--------------------------------------
# Upstream velero images expose the binary at /velero; mirror that layout
# with a symlink to the packaged binary.
ln -sf /usr/bin/velero /velero
exit 0
|
<filename>test/ajax.tests.js
// Integration tests for the CHIM AJAX wrapper (chim.go.*), using chai asserts.
// NOTE(review): these tests call the live httpbin.org service, so they need
// network access and will fail offline -- consider a local stub server.
describe('CHIM AJAX Service', () => {
  const assert = chai.assert;
  // Sample field names and values reused across the request payloads below.
  let field1 = 'field1',
    field2 = 'field2',
    value1 = 'value1',
    value2 = 'value2';
  // Form-style payload: array of {name, value} pairs (URL-encoded on send).
  const formData = [
    { name: field1, value: value1 },
    { name: field2, value: value2 }
  ];
  // Equivalent payload as a plain JSON object.
  const jsonData = {
    field1: value1,
    field2: value2
  };
  /**
   * httpbin.org/VERB sends back your VERB as the response data
   */
  it('sends a GET request', (done) => {
    chim.go.get('http://httpbin.org/get', (response) => {
      const getRequest = response;
      assert.isDefined(getRequest);
      // No query data was sent, so the echoed args object must be empty.
      assert.isEmpty(getRequest.args);
      done();
    });
  });
  it('fails to send a GET request if no URL is provided', (done) => {
    try {
      chim.go.get();
    } catch (err) {
      // Missing URL is expected to throw synchronously.
      assert.isDefined(err);
      done();
    }
  });
  it('sends a GET request with data URL-encoded', (done) => {
    chim.go.get('http://httpbin.org/get', formData, (response) => {
      const getRequest = response;
      assert.isDefined(getRequest);
      // httpbin echoes the query string back under `args`.
      assert.hasAllKeys(getRequest.args, [field1, field2]);
      done();
    });
  });
  it('sends a GET request for a plain text file', (done) => {
    chim.go.get('http://httpbin.org/robots.txt', (textFileContents) => {
      assert.isDefined(textFileContents);
      done();
    });
  });
  it('sends a GET request for an HTML document', (done) => {
    // The trailing 'document' argument selects the XHR responseType.
    chim.go.get('http://httpbin.org/html', (html) => {
      assert.isDefined(html);
      assert.isTrue(html instanceof HTMLDocument);
      done();
    }, 'document');
  });
  it('sends a GET request for an image in binary format (blob)', (done) => {
    chim.go.get('http://httpbin.org/image/png', (image) => {
      assert.isDefined(image);
      done();
    });
  });
  it('sends a POST request with form data URL-encoded', (done) => {
    chim.go.post('http://httpbin.org/post', formData, (response) => {
      const postRequest = response;
      assert.isDefined(postRequest);
      // Form pairs arrive in `args` when URL-encoded.
      assert.hasAllKeys(postRequest.args, [field1, field2]);
      done();
    });
  });
  it('sends a POST request with data in JSON format', (done) => {
    chim.go.post('http://httpbin.org/post', jsonData, (response) => {
      const postRequestWithData = response;
      assert.isDefined(postRequestWithData);
      // JSON bodies are echoed as a string under `data`; parse to compare.
      assert.hasAllKeys(JSON.parse(postRequestWithData.data), [field1, field2]);
      done();
    });
  });
  it('sends a PUT request with data in JSON format', (done) => {
    chim.go.put('http://httpbin.org/put', jsonData, (response) => {
      const putRequest = response;
      assert.isDefined(putRequest);
      assert.hasAllKeys(JSON.parse(putRequest.data), [field1, field2]);
      done();
    });
  });
  // For non-2xx statuses the wrapper invokes the callback with undefined.
  it('fails when server returns 3xx error code', (done) => {
    chim.go.get('http://httpbin.org/status/304', (response) => {
      assert.isUndefined(response);
      done();
    });
  });
  it('fails when server returns 4xx error code', (done) => {
    chim.go.get('http://httpbin.org/status/404', (response) => {
      assert.isUndefined(response);
      done();
    });
  });
  it('fails when server returns 5xx error code', (done) => {
    chim.go.get('http://httpbin.org/status/504', (response) => {
      assert.isUndefined(response);
      done();
    });
  });
});
def sumArray(arr):
    """Return the sum of all numbers in ``arr``.

    Rewritten iteratively: the original recursed with ``arr[1:]``, which
    copies the list on every call (O(n^2) total work) and raises
    RecursionError for long inputs.

    Args:
        arr: A sequence of numbers; may be empty.

    Returns:
        The numeric sum of the elements, or 0 for an empty sequence.
    """
    total = 0
    for value in arr:
        total += value
    return total


if __name__ == "__main__":
    # FIX: the original module-level call was ``print(sumArray(arr))`` with
    # ``arr`` undefined, which raised NameError on import. Demonstrate with a
    # concrete sample list instead, and only when run as a script.
    print(sumArray([1, 2, 3, 4, 5]))
package weixin.lottery.entity;
import org.jeecgframework.core.common.entity.IdEntity;
import javax.persistence.*;
import java.util.Date;
/**
 * Base (parent-table) entity for system marketing activities.
 * Concrete activity types extend this class via JOINED-table inheritance.
 * Created by aa on 2016/1/21.
 */
@Entity
@Table(name = "weixin_commonforhd")
@Inheritance(strategy = InheritanceType.JOINED)
public class WeixinCommonforhdEntity extends IdEntity implements java.io.Serializable {
    private static final long serialVersionUID = 1L;
    // /**
    //  * Primary key (provided by IdEntity)
    //  */
    // private java.lang.String id;
    /**
     * Name of the user who created the activity
     */
    private java.lang.String createName;
    /**
     * Creation date
     */
    private java.util.Date createDate;
    /**
     * Activity title
     */
    private java.lang.String title;
    /**
     * Activity description
     */
    private java.lang.String description;
    /**
     * Start time of the activity
     */
    private java.util.Date starttime;
    /**
     * End time of the activity
     */
    private java.util.Date endtime;
    /**
     * WeChat official account id
     */
    private java.lang.String accountid;
    /**
     * Mobile-data (traffic) type
     */
    private String flowtype; // 1: nationwide data plan  2: provincial data plan

    @Column(name = "CREATENAME", nullable = false, length = 50)
    public String getCreateName() {
        return createName;
    }
    public void setCreateName(String createName) {
        this.createName = createName;
    }
    @Column(name = "CREATEDATE", nullable = false, length = 36)
    public Date getCreateDate() {
        return createDate;
    }
    public void setCreateDate(Date createDate) {
        this.createDate = createDate;
    }
    @Column(name = "TITLE", nullable = false, length = 50)
    public String getTitle() {
        return title;
    }
    public void setTitle(String title) {
        this.title = title;
    }
    @Column(name = "DESCRIPTION", nullable = false, length = 4000)
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    @Column(name = "STARTTIME", nullable = false, length = 36)
    public Date getStarttime() {
        return starttime;
    }
    public void setStarttime(Date starttime) {
        this.starttime = starttime;
    }
    @Column(name = "ENDTIME", nullable = false, length = 36)
    public Date getEndtime() {
        return endtime;
    }
    public void setEndtime(Date endtime) {
        this.endtime = endtime;
    }
    @Column(name = "ACCOUNTID", nullable = false, length = 36)
    public String getAccountid() {
        return accountid;
    }
    public void setAccountid(String accountid) {
        this.accountid = accountid;
    }
    @Column(name = "FLOWTYPE", nullable = false, length = 36)
    public String getFlowtype() {
        return flowtype;
    }
    public void setFlowtype(String flowtype) {
        this.flowtype = flowtype;
    }
}
|
# Grant the most recently created Docker container access to the local X
# server, then start it.
# FIXES: replaced deprecated backquote command substitution with $(...),
# and quoted expansions so an empty/odd container id cannot word-split.
export containerId=$(docker ps -l -q)
xhost +local:"$(docker inspect --format='{{ .Config.Hostname }}' "$containerId")"
docker start "$containerId"
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-shuffled/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-shuffled/1024+0+512-shuffled-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_first_two_thirds_sixth --eval_function penultimate_sixth_eval |
#ifndef CGEN_H_
#define CGEN_H_
#include <stdlib.h>
#include <string.h>
#include <stdio.h>
#include <ctype.h>
#include <stdarg.h>
#include "schema.h"
/* Error-propagating emit helpers. Each macro performs a formatted write or
   string append and, on failure, executes `return <CJobStatus code>` from
   the ENCLOSING function. They are therefore only safe inside functions
   that return CJobStatus; the do/while(0) wrapper makes each macro behave
   as a single statement. */
/* fprintf with I/O-error propagation. */
#define CJOB_FPRINTF(...) do { if (fprintf(__VA_ARGS__) < 0) \
return CJOB_IO_ERROR; } while (0)
/* Append a formatted string to the header-file section of the job. */
#define CJOB_FMT_HEADER_STRING(job, ...) do \
{ if (add_header_string(job, strformat(__VA_ARGS__)) \
!= CJOB_SUCCESS) \
return CJOB_MEM_ERROR; } while (0)
/* Append a formatted string to the bottom of the header file (after the
   function declarations). */
#define CJOB_FMT_HEADER_BOTTOM_STRING(job, ...) do \
{ if (add_header_bottom_string(job, \
strformat(__VA_ARGS__)) != CJOB_SUCCESS) \
return CJOB_MEM_ERROR; } while (0)
/* Append a formatted string to the source-file section of the job. */
#define CJOB_FMT_SOURCE_STRING(job, ...) do \
{ if (add_source_string(job, strformat(__VA_ARGS__)) \
!= CJOB_SUCCESS) \
return CJOB_MEM_ERROR; } while (0)
/* Register a formatted public function definition (a prototype will be
   derived for the header file). */
#define CJOB_FMT_PUB_FUNCTION(job, ...) do \
{ if (add_public_function(job, strformat(__VA_ARGS__)) \
!= CJOB_SUCCESS) \
return CJOB_MEM_ERROR; } while (0)
/* Register a formatted private (static) function definition. */
#define CJOB_FMT_PRIV_FUNCTION(job, ...) do \
{ if (add_private_function(job, strformat(__VA_ARGS__)) \
!= CJOB_SUCCESS) \
return CJOB_MEM_ERROR; } while (0)
/* Main entry point for the C compiler. The main point of interest here is
the cgen_main() function, which accepts as its input the `argv` and `argc`
that were passed to the main() function. This function processes the
command-line arguments and contructs a "CJob", which is a data structure
used to capture arbitrary compilation jobs requested at the command line.
If all the command-line options could be successfully parsed, and
there were no compile-time errors relating to the processing of the schema,
the CJob is then "run" by a static function called "run_cjob".
In sum, a compiled Haris library is made up, roughly, of two parts:
1) The core library. This body of code is largely unchanging, and
contains functions that can be used to construct in-memory C structures,
destroy these same in-memory C structures, and convert between these
structures and small in-memory buffers. In short, the library contains
the code that's necessary for a Haris runtime to work, no matter what
protocol we're generating.
2) The protocol library (libraries). This section builds off of the
core library, and contains the code that will transmit Haris messages
along the chosen protocol.
Information about the content and implementation of these libraries can
be found in the relevant headers.
*/
/* Result codes returned by the compiler's fallible operations. */
typedef enum {
  CJOB_SUCCESS, CJOB_SCHEMA_ERROR, CJOB_JOB_ERROR, CJOB_IO_ERROR,
  CJOB_MEM_ERROR, CJOB_PARSE_ERROR
} CJobStatus;

/* Growable stack of dynamically allocated strings. */
typedef struct {
  int num_strings;   /* number of strings currently stored */
  int strings_alloc; /* allocated capacity of the strings array */
  char **strings;    /* heap array of heap-allocated strings */
} CJobStringStack;

/* A structure that is used to organize the output of a C compilation job.
   In fact, only one function
   actually writes the output to the output files; the rest of the functions
   store strings in this data structure, which are written out to disk later.
   There are four stacks here; which stack you store a string in decides
   A) which file it is written to. Strings in the header_strings stack are
   written to the header file; the rest are written to the source file.
   B) What, if any, action should be taken with the content of the strings.
   If you store a string in the public_ or private_function stacks, then a
   prototype will be adapted from the function definition you've given and
   written to the correct place in either the header file or the source file.
   The advantage of using an additional structure is to make it easier to
   extend the compiler or modify its behavior.
   The strings that are stored MUST be dynamically allocated, as they will
   all be `free`d when the time comes to destroy the CJob. In general, you
   should not try to access a string at all once you push it onto the stack;
   that is, if you have a char *s = ..., and you push s onto one of
   the stacks, you should never try to access s again, as it must be assumed
   to be immediately invalidated. The library will free it later. If you must
   continue to access it after pushing it onto the stack, push a copy
   (that is, push strdup(s)).
*/
typedef struct {
  CJobStringStack header_strings; /* Strings that will be copied verbatim into
                                     the header file */
  CJobStringStack header_bottom_strings; /* Strings that will be copied
                                            verbatim into the header file, but
                                            at the bottom of the file, after
                                            the function declarations */
  CJobStringStack source_strings; /* Strings to copy into the .c file */
  CJobStringStack public_functions; /* Functions that are part of the public
                                       interface of the library */
  CJobStringStack private_functions; /* Functions that are statically
                                        defined */
} CJobStrings;

/* Which output protocols were requested for this job.
   NOTE(review): the three ints appear to be independent flags for the
   buffer/file/fd protocols -- confirm against cgen_main's option parsing. */
typedef struct {
  int buffer;
  int file;
  int fd;
} CJobProtocols;

/* A single compilation job, built from the command-line arguments. */
typedef struct {
  ParsedSchema *schema; /* The schema to be compiled */
  const char *prefix; /* Prefix all global names with this string */
  const char *output; /* Write the output code to a file with this name */
  CJobProtocols protocols;
  CJobStrings strings; /* The strings that we will copy into the result source
                          and header files; this is built up dynamically at
                          compile time */
} CJob;
/* The entry point for the C compiler (same arguments as the true main
function). */
CJobStatus cgen_main(int, char **);
/* A collection of public functions that are used by more than one of the
source files of the C compiler. */
CJobStatus add_header_string(CJob *, char *);
CJobStatus add_header_bottom_string(CJob *, char *);
CJobStatus add_source_string(CJob *, char *);
CJobStatus add_public_function(CJob *, char *);
CJobStatus add_private_function(CJob *, char *);
/* A loose wrapper around asprintf; consumes a format string and a set of
parameters and writes it out to a new dynamically allocated string.
Returns either that new string or NULL if there was a memory or format
error. */
char *strformat(const char *, ...);
int child_is_embeddable(const ChildField *);
int scalar_bit_pattern(ScalarTag type);
int sizeof_scalar(ScalarTag type);
const char *scalar_type_name(ScalarTag);
#endif
|
<reponame>antoniny/codenation-central-error
package com.challenge.service.dto;
import com.challenge.entity.Role;
import com.challenge.entity.User;
import com.fasterxml.jackson.annotation.JsonFormat;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDateTime;
@Data
@NoArgsConstructor
public class UserDto {
    // Fields exposed in API responses.
    private String name;
    private String email;
    // Human-readable status text resolved through StatusUserEnum.
    private String status;
    // Name of one of the user's roles, or null when the user has none.
    private String roleName;
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm")
    private LocalDateTime createdAt;

    /**
     * Builds the DTO from a User entity.
     *
     * NOTE(review): findAny() picks an arbitrary role when the user has
     * several -- confirm that is intended. Also assumes user.getRoles()
     * and user.getStatus() are non-null; a null would throw here.
     */
    public UserDto(User user) {
        this.name = user.getName();
        this.email = user.getEmail();
        this.roleName = user.getRoles().stream().map(Role::getName).findAny().orElse(null);
        this.createdAt = user.getCreatedAt();
        this.status = StatusUserEnum.fromId(user.getStatus()).getDescription();
    }
}
|
<filename>src/main/java/moe/mkx/uimf/groupbuilder/model/LoginUser.java
package moe.mkx.uimf.groupbuilder.model;
import com.fasterxml.jackson.annotation.JsonProperty;
import javax.validation.constraints.NotBlank;
import java.util.UUID;
/**
 * Immutable user payload bound from JSON requests.
 * JSON property names differ from the field names:
 * "id" -> userID, "name" -> username.
 */
public class LoginUser {
    private final UUID userID;
    @NotBlank
    private final String username;
    @NotBlank
    private final String password;
    @NotBlank
    private final String email;

    /** Jackson deserialization constructor. */
    public LoginUser(@JsonProperty("id") UUID userID,
                     @JsonProperty("name") String username,
                     @JsonProperty("password") String password,
                     @JsonProperty("email") String email) {
        this.userID = userID;
        this.username = username;
        this.password = password;
        this.email = email;
    }
    public UUID getUserID(){
        return userID;
    }
    public String getUsername(){
        return username;
    }
    public String getPassword() {
        return password;
    }
    public String getEmail() {
        return email;
    }
}
|
#pragma once
#include "ZZX/Core/Core.h"
// Framebuffer abstraction for the ZZX renderer.
// NOTE(review): this header uses std::vector, std::initializer_list and
// uint32_t without including <vector>/<initializer_list>/<cstdint>;
// presumably they arrive via "ZZX/Core/Core.h" -- confirm.
namespace ZZX
{
    // Texture formats supported for framebuffer attachments.
    enum class FrameBufferTextureFormat
    {
        None = 0,
        // color
        RGBA8,
        RED_INTEGER,
        // depth/stencil
        DEPTH24STENCIL8,
        // defaults
        Depth = DEPTH24STENCIL8
    };
    // Describes a single attachment's texture format.
    struct FrameBufferTextureSpecification
    {
        FrameBufferTextureSpecification() = default;
        FrameBufferTextureSpecification(FrameBufferTextureFormat format)
            : TextureFormat(format)
        {}
        FrameBufferTextureFormat TextureFormat = FrameBufferTextureFormat::None;
        // TODO: filtering/wrap
    };
    // Ordered list of attachment specifications for a framebuffer.
    struct FramebufferAttachmentSpecification
    {
        FramebufferAttachmentSpecification() = default;
        FramebufferAttachmentSpecification(std::initializer_list<FrameBufferTextureSpecification> attachments)
            : Attachments(attachments)
        {}
        std::vector<FrameBufferTextureSpecification> Attachments;
    };
    // Creation parameters for a framebuffer.
    struct FramebufferSpecification
    {
        uint32_t Width, Height;                         // pixel dimensions
        FramebufferAttachmentSpecification Attachments; // attachment layout
        uint32_t Samples = 1;                           // MSAA sample count
        bool SwapChainTarget = false;                   // render to swap chain?
    };
    // Abstract render-target interface; concrete backends implement it and
    // are obtained through the static Create() factory.
    class Framebuffer
    {
    public:
        virtual ~Framebuffer() = default;
        virtual void Bind() = 0;
        virtual void Unbind() = 0;
        // Recreates attachments at the new size.
        virtual void Resize(uint32_t w, uint32_t h) = 0;
        // Reads a single pixel from the given color attachment.
        virtual int ReadPixel(uint32_t attachmentIndex, int x, int y) = 0;
        // Clears one attachment to the given integer value.
        virtual void ClearAttachment(uint32_t attachmentIndex, int value) = 0;
        virtual const FramebufferSpecification& GetSpecification() const = 0;
        virtual uint32_t GetColorAttachmentRendererID(uint32_t index = 0) const = 0;
        // Factory; returns the backend-specific implementation.
        static Ref<Framebuffer> Create(const FramebufferSpecification& spec);
    };
}
|
DJANGO_SETTINGS_MODULE=social_distribution.test_settings python3 manage.py test |
#!/usr/bin/env bash
# buildah-bud-demo.sh
# author : ipbabble
# Interactive demo: build an image with `buildah bud`, run it with Podman,
# then push it into the local Docker daemon and run it with Docker.
# Assumptions: buildah, podman & docker are installed.
# Do NOT start the docker daemon beforehand; this script starts it when needed.
# Set some of the variables below.
demoimg=buildahbuddemo
quayuser=ipbabble
myname="William Henry"
distro=fedora       # not referenced below; kept for other demo variants
distrorelease=28    # not referenced below
pkgmgr=dnf          # switch to yum if using yum (not referenced below)
# Setting up some colors for helping read the demo output.
red=$(tput setaf 1)
green=$(tput setaf 2)
yellow=$(tput setaf 3)
blue=$(tput setaf 4)
cyan=$(tput setaf 6)
reset=$(tput sgr0)
echo -e "Using ${green}GREEN${reset} to introduce Buildah steps"
echo -e "Using ${yellow}YELLOW${reset} to introduce code"
echo -e "Using ${blue}BLUE${reset} to introduce Podman steps"
echo -e "Using ${cyan}CYAN${reset} to introduce bash commands"
echo -e "Using ${red}RED${reset} to introduce Docker commands"
echo -e "Building an image called ${demoimg}"
read -p "${green}Start of the script${reset}"
set -x
# Generate the demo Dockerfile: an nginx server on Fedora serving one page.
DOCKERFILE=./Dockerfile
/bin/cat <<EOM >$DOCKERFILE
FROM docker://docker.io/fedora:latest
MAINTAINER ${myname}
RUN dnf -y update; dnf -y clean all
RUN dnf -y install nginx --setopt install_weak_deps=false; dnf -y clean all
RUN echo "daemon off;" >> /etc/nginx/nginx.conf
RUN echo "nginx on Fedora" > /usr/share/nginx/html/index.html
EXPOSE 80
CMD [ "/usr/sbin/nginx" ]
EOM
read -p "${cyan}Display the Dockerfile:${reset}"
cat $DOCKERFILE
read -p "${green}Create a new container image from Dockerfile${reset}"
buildah bud -t $demoimg .
read -p "${green}List the images we have.${reset}"
buildah images
# FIX: prompt previously ended with ${yellow} (left the terminal yellow);
# every other prompt ends with ${reset}.
read -p "${green}Inspect the container image meta data${reset}"
buildah inspect --type image $demoimg
read -p "${blue}Run the container using Podman.${reset}"
containernum=$(podman run -d -p 80:80 $demoimg)
read -p "${cyan}Check that nginx is up and running with our new page${reset}"
curl localhost
read -p "${blue}Stop the container and rm it${reset}"
podman ps
podman stop $containernum
podman rm $containernum
read -p "${cyan}Check that nginx is down${reset}"
curl localhost
read -p "${cyan}Start the Docker daemon. Using restart incase it is already started${reset}"
systemctl restart docker
read -p "${red}List the Docker images in the repository - should be empty${reset}"
docker images
read -p "${blue}Push the image to the local Docker repository using docker-daemon${reset}"
podman push $demoimg docker-daemon:$quayuser/${demoimg}:latest
read -p "${red}List the Docker images in the repository${reset}"
docker images
read -p "${red}Start the container from the new Docker repo image${reset}"
dockercontainer=$(docker run -d -p 80:80 $quayuser/$demoimg)
read -p "${cyan}Check that nginx is up and running with our new page${reset}"
curl localhost
read -p "${red}Stop the container and remove it and the image${reset}"
docker stop $dockercontainer
docker rm $dockercontainer
# FIX: the image was pushed into Docker as $quayuser/$demoimg:latest, so
# `docker rmi $demoimg` could never match; remove the qualified name.
docker rmi $quayuser/$demoimg
read -p "${cyan}Stop Docker${reset}"
systemctl stop docker
echo -e "${red}We are done!${reset}"
<reponame>s00d/webpack-notifier
import {join} from 'path';
import {contentImageSerializer, reduceArraySerializer, testChangesFlow as _testChangesFlow, PartialTestArguments} from './helpers/utils';
import CustomWarningPlugin from './helpers/CustomWarningPlugin';
import ChildCompilationPlugin from './helpers/ChildCompilationPlugin';
// Register snapshot serializers that normalize arrays and content-image
// paths so snapshots are stable across platforms.
expect.addSnapshotSerializer(reduceArraySerializer);
expect.addSnapshotSerializer(contentImageSerializer);
// Run the whole suite against every supported webpack major version.
describe.each([
  ['1', require('webpack-1/package.json').version, require('webpack-1')],
  ['2', require('webpack-2/package.json').version, require('webpack-2')],
  ['3', require('webpack-3/package.json').version, require('webpack-3')],
  ['4', require('webpack-4/package.json').version, require('webpack-4')],
  ['5', require('webpack-5/package.json').version, require('webpack-5')],
  ['latest', require('webpack-latest/package.json').version, require('webpack-latest')],
])('webpack@%s', (name, webpackVersion, webpack) => {
  // Bind the current webpack version/instance into the shared test driver.
  const testChangesFlow = (...args: PartialTestArguments) => _testChangesFlow(webpackVersion, webpack, ...args);
  describe('WebpackNotifierPlugin', () => {
    describe('one compilation', () => {
      // Each row: [sequence of build outcomes, plugin options].
      test.each([
        [['successful'], undefined],
        [['error'], undefined],
        [['warning'], undefined],
      ])('%j %j', testChangesFlow);
    });
    describe('title', () => {
      test.each([
        [['successful'], {title: 'Webpack'}],
        [['successful'], {title}],
        [['error'], {title}],
        [['warning'], {title}],
      ])('%j %j', testChangesFlow);
      // Legacy title callback: receives {msg} and inspects the message text.
      function title({msg}: {msg: string}) {
        if (msg.startsWith('Error')) return 'build error ❌';
        if (msg.startsWith('Warning')) return 'build warning ⚠️';
        return 'build complete ✅';
      }
      describe('new title function API', () => {
        test.each([
          [['successful'], {title: 'Webpack'}],
          [['successful'], {title}],
          [['error'], {title}],
          [['warning'], {title}],
        ])('%j %j', testChangesFlow);
        // New title callback: receives a {status, message} params object.
        function title(params: {status: string, message: string}) {
          return `Build status is ${params.status} with message ${params.message}`;
        }
      });
    });
    describe('emoji message', () => {
      test.each<PartialTestArguments>([
        [['successful'], {emoji: true}],
        [['error'], {emoji: true}],
        [['warning'], {emoji: true}],
        [['successful'], {emoji: true}, {plugins: [new CustomWarningPlugin()]}],
      ])('%j %j %j', testChangesFlow);
    });
    describe('contentImage', () => {
      // Per-status notification images.
      const contentImage = {
        success: join(__dirname, '../successImage.png'),
        warning: join(__dirname, '../warningsImage.png'),
        error: join(__dirname, '../errorsImage.png')
      };
      test.each([
        [['successful'], {
          contentImage: join(__dirname, '../another-logo.png')
        }],
      ])('%j {contentImage: "../another-logo.png"}', testChangesFlow);
      test.each([
        [['successful'], {contentImage}],
        [['error'], {contentImage}],
        [['warning'], {contentImage}],
      ])('%j {contentImage: {success: "../successImage.png"}, error: "../errorImage.png"}, warning: "../warningImage.png"}}', testChangesFlow);
    });
    describe('verbosity level configuration', () => {
      describe('Default', () => {
        test.each([
          [['successful', 'successful', 'successful'], undefined],
          [['error', 'error', 'successful'], undefined],
        ])('%j %j', testChangesFlow);
      });
      describe('Exclude Warnings', () => {
        test.each([
          [['warning'], {excludeWarnings: true}],
        ])('%j %j', testChangesFlow);
      });
      describe('Always Notify', () => {
        test.each([
          [['successful', 'successful'], {alwaysNotify: true}],
        ])('%j %j', testChangesFlow);
      });
      describe('Notify on error', () => {
        test.each([
          [['successful', 'warning', 'error'], {onlyOnError: true}],
        ])('%j %j', testChangesFlow);
      });
      describe('Skip Notification on the First Build', () => {
        test.each([
          [['successful', 'successful'], {skipFirstNotification: true}],
        ])('%j %j', testChangesFlow);
      });
    });
    describe('custom warning', () => {// TODO maybe deprecated
      test.each([
        [['successful'], undefined, {plugins: [new CustomWarningPlugin()]}],
      ])('%j %j', testChangesFlow);
    });
    describe('child compilation errors', () => {
      // Longer timeout (10s): child compilations are slower.
      test.each([
        [['successful'], undefined, {plugins: [new ChildCompilationPlugin('Warning')]}],
        [['successful'], undefined, {plugins: [new ChildCompilationPlugin('Error')]}],
        [['successful'], undefined, {plugins: [new ChildCompilationPlugin(), new ChildCompilationPlugin('Warning')]}],
      ])('%j %j %j', testChangesFlow, 10e3);
    });
  });
});
const express = require('express');
const mysql = require('mysql');
const parser = require('body-parser');

/**
 * Minimal user CRUD API backed by MySQL.
 *
 * SECURITY FIX: the original interpolated request fields directly into SQL
 * template strings — a textbook SQL-injection vulnerability. Every query now
 * uses `?` placeholders; the mysql driver escapes the bound values.
 *
 * ROBUSTNESS FIX: query errors previously did `throw err` inside an async
 * callback, which crashes the whole process; they now answer HTTP 500.
 */

// NOTE(review): hard-coded root credentials with an empty password — move
// these to environment variables before deploying.
const connection = mysql.createConnection({
  host: 'localhost',
  user: 'root',
  password: '',
  database: 'mydb'
});

connection.connect(function(err) {
  if (err) throw err; // fail fast at startup if the database is unreachable
  console.log('Connected to the database!');
});

const app = express();
app.set('port', process.env.PORT || 8080);
app.use(parser.json());

// Create a user from {name, email} in the JSON body.
app.post('/users', (req, res) => {
  const { name, email } = req.body;
  const sql = 'INSERT INTO users (name, email) VALUES (?, ?)';
  connection.query(sql, [name, email], (err, result) => {
    if (err) {
      return res.status(500).send({ message: 'Database error' });
    }
    res.status(201).send({
      message: `User added with ID ${result.insertId}`
    });
  });
});

// Update an existing user's name/email by id.
app.put('/users/:id', (req, res) => {
  const { id } = req.params;
  const { name, email } = req.body;
  const sql = 'UPDATE users SET name = ?, email = ? WHERE id = ?';
  connection.query(sql, [name, email, id], (err) => {
    if (err) {
      return res.status(500).send({ message: 'Database error' });
    }
    res.send({
      message: `User updated with ID ${id}`
    });
  });
});

// Delete a user by id.
app.delete('/users/:id', (req, res) => {
  const { id } = req.params;
  const sql = 'DELETE FROM users WHERE id = ?';
  connection.query(sql, [id], (err) => {
    if (err) {
      return res.status(500).send({ message: 'Database error' });
    }
    res.send({
      message: `User deleted with ID ${id}`
    });
  });
});

const server = app.listen(app.get('port'), () => {
  console.log(`Express running → PORT ${server.address().port}`);
});
import { IGeofenceGroup } from "@/models/IGeofenceGroup";
import { IPictogram } from "@/models/IPictogram";
import { IStand } from "@/models/IStand";
import { ITerminalResource } from "@/models/ITerminalResource";
import { IVehicle } from "@/models/IVehicle";
/**
 * Serves static model data from the JSON files bundled under ../assets
 * through a small async API. Exported as a module-level singleton.
 */
class BaseModelDataService {
    // Raw JSON payloads loaded once at module load time.
    private _pictogramData = require('../assets/Pictograms.json');
    private _standData = require('../assets/Stands.json');
    private _terminalResourceData = require('../assets/TerminalResources.json');
    private _vehicleData = require('../assets/Vehicles.json');
    private _geofenceGroupsRawData = require('../assets/Geofences.json');
    // Lazily-built camelCased view of the raw geofence data (see below).
    private _geofenceGroups: IGeofenceGroup[] = [];
    private _geofencesInitialized = false;
    // NOTE(review): method name keeps the original "getPictrograms" typo;
    // renaming it would break existing callers.
    public getPictrograms = async (): Promise<IPictogram[]> => {
        return this._pictogramData.data.NativePictograms;
    };
    public getTerminalResources = async (): Promise<ITerminalResource[]> => {
        return this._terminalResourceData.data.TerminalResources;
    };
    public getVehicles = async (): Promise<IVehicle[]> => {
        return this._vehicleData.data.Vehicles;
    };
    public getStands = async (): Promise<IStand[]> => {
        return this._standData.data.Stands;
    };
    // Maps the PascalCase raw geofence JSON into the camelCase
    // IGeofenceGroup shape; the mapping is done once and then cached.
    public getGeofenceGroups = async (): Promise<IGeofenceGroup[]> => {
        if (!this._geofencesInitialized) {
            this._geofencesInitialized = true;
            this._geofenceGroups = this._geofenceGroupsRawData.DataMessage.map(geofencegroup => {
                return {
                    name: geofencegroup.Name,
                    visibleAtStartup: geofencegroup.VisibleAtStartup,
                    geofences: geofencegroup.GeoFences.map(geofence => {
                        return {
                            name: geofence.Name,
                            isClosed: geofence.IsClosed,
                            id: geofence.Id,
                            hasLocation: geofence.HasLocation,
                            geoFencePoints: geofence.GeoFencePoints.map(point => {
                                return {
                                    latitude: point.Latitude,
                                    longitude: point.Longitude,
                                }
                            })
                        }
                    })
                }
            });
        }
        return this._geofenceGroups;
    };
}
export default new BaseModelDataService();
#!/bin/sh
set -eu
# Compile a generated field-arithmetic unit with clang++ at -O3/LTO,
# injecting the curve parameters (modulus bytes, limb layout, a24, etc.)
# as preprocessor definitions. Any extra arguments ("$@") are forwarded
# to the compiler (e.g. the input file and -o target).
clang++ -fbracket-depth=999999 -march=native -mtune=native -std=gnu++11 -O3 -flto -fuse-ld=lld -fomit-frame-pointer -fwrapv -Wno-attributes -fno-strict-aliasing -Da24_hex='0x3039' -Da24_val='12345' -Da_minus_two_over_four_array='{0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30,0x39}' -Dbitwidth='32' -Dlimb_weight_gaps_array='{26,25,25,26,25,25}' -Dmodulus_array='{0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xef}' -Dmodulus_bytes_val='19' -Dmodulus_limbs='6' -Dq_mpz='(1_mpz<<152) - 17' "$@"
public double calculate(double x) {
return Math.pow(x, 2) + (3 * x) + 2;
} |
function install_xcode_cli {
  # Trigger the Xcode Command Line Tools installer dialog.
  echo "Installing Xcode CLI tools..."
  xcode-select --install
}
function install_brew {
echo "Installing Homebrew..."
if !(hash brew 2>/dev/null); then
ruby \
-e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" \
</dev/null
brew doctor
else
echo "Brew was already installed, upgrading"
brew update
brew upgrade
fi
}
function install_brew_cask {
echo "Installing Homebrew Cask..."
brew cask > /dev/null 2>&1;
if [ $? -ne 0 ]; then
brew tap homebrew/cask
brew doctor
else
echo "Brew cask was already installed, upgrading"
brew update
brew upgrade
fi
brew tap homebrew/cask-versions
brew tap homebrew/cask-drivers
brew tap homebrew/cask-fonts
brew tap fishtown-analytics/dbt
}
function setup_brew {
echo "Setting up brew..."
install_brew
install_brew_cask
}
# Install every formula listed (one per line) in OSX/brew-requirements.txt.
function install_brew_deps {
    echo "Installing brew dependencies..."
    # Feed the file straight to xargs; no need to pipe through cat.
    xargs brew install < OSX/brew-requirements.txt
    brew cleanup
    brew doctor
}

# Install every cask listed in OSX/cask-requirements.txt.
function install_brew_cask_deps {
    echo "Installing brew cask dependencies..."
    # NOTE(review): this uses plain `brew install --force` on cask names --
    # confirm whether `brew install --cask` was intended.
    xargs brew install --force < OSX/cask-requirements.txt
    brew cleanup
    brew doctor
}

# Install global npm packages via yarn, if yarn is available.
function install_npm_globals {
    echo "Installing npm globals... using yarn."
    if hash yarn 2>/dev/null; then
        xargs sudo yarn global add < OSX/npm-global-requirements.txt
    fi
}

# Install global python packages.
function install_python_globals {
    echo "Installing python globals..."
    # NOTE(review): easy_install is deprecated; consider `sudo pip install`.
    xargs sudo easy_install < OSX/python-global-requirements.txt
}
# Copy the boilerplate zsh profile (first run only) and install zsh plugins.
function install_dotfiles {
    echo "Installing dotfiles"
    # Copy boilerplate zsh profile and load it into the CURRENT shell.
    # The original wrapped this in backticks, which ran `source` in a
    # subshell and therefore never affected the running shell.
    if [ ! -f ~/.zshrc ]; then
        cp zshrc ~/.zshrc && source ~/.zshrc
    fi
    cd "$ZSH_CUSTOM/plugins" && git clone https://github.com/chrissicool/zsh-256color && cd
    # Use $HOME rather than a quoted ~ so the default path still expands.
    rm -rf "${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}/plugins/zsh-autosuggestions" && git clone https://github.com/zsh-users/zsh-autosuggestions "${ZSH_CUSTOM:-$HOME/.oh-my-zsh/custom}/plugins/zsh-autosuggestions"
}
# Apply a handful of macOS `defaults` tweaks (keyboard, trackpad, Finder).
function setup_mac {
    echo "---> Enable full keyboard access for all controls (e.g. enable Tab in modal dialogs)"
    defaults write NSGlobalDomain AppleKeyboardUIMode -int 3 2>/dev/null
    echo "---> Set a blazingly fast trackpad speed"
    defaults write -g com.apple.trackpad.scaling -int 5 2>/dev/null
    echo "---> Automatically illuminate built-in MacBook keyboard in low light"
    defaults write com.apple.BezelServices kDim -bool true 2>/dev/null
    echo "---> Turn off keyboard illumination when computer is not used for 5 minutes"
    defaults write com.apple.BezelServices kDimTime -int 300 2>/dev/null
    echo "---> Disable the warning when changing a file extension"
    defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false 2>/dev/null
}

#Manually copy needed files such as ssh if present
echo "---> Ask for the administrator password upfront"
sudo -v

# Keep-alive: update existing `sudo` time stamp until finished
# (the background loop exits once this script's PID goes away).
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &

# Main installation sequence, roughly ordered by dependency:
# system tweaks, dotfiles, Xcode CLI, brew, then package sets.
setup_mac
install_dotfiles
install_xcode_cli
setup_brew
install_brew_cask_deps
install_brew_deps
install_npm_globals
install_python_globals
#!/bin/bash
# Submit one sam2bam PBS job per *.sam file found under INDIR.
INDIR=~/kostka-dir/Will_testing-metagenome-assemblers/DeepC_Metagenomes_Mason/trimmed2/size_filtered/paired_ends/CoupledReads/fasta_files

# Use -print0 / read -d '' instead of word-splitting `$(find ...)` so file
# names containing whitespace are passed to qsub intact.
find "$INDIR" -type f -name "*sam" -print0 |
while IFS= read -r -d '' FILE; do
    qsub -v INFILE="$FILE" /nv/hp10/woverholt3/job_scripts/metagenome_scripts/multiple_qsub_sam2bam.pbs
done
|
# Reports the average number of bids per completed auction, formatted by
# the Average value object's #to_s.
class Statistics::AverageBidsPerAuction
  def to_s
    Average.new(
      # Total bids across all completed auctions.
      # NOTE: `flatten` is deep by default; equivalent to flat_map only if
      # each #bids returns a flat collection.
      completed_auctions.map(&:bids).flatten.count,
      completed_auctions.count
    ).to_s
  end

  private

  # Memoized so the query runs at most once per instance.
  def completed_auctions
    @_completed_auctions ||= AuctionQuery.new.completed
  end
end
|
package org.odk.collect.geo;
import static android.app.Activity.RESULT_OK;
import static android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.odk.collect.geo.Constants.EXTRA_RETAIN_MOCK_ACCURACY;
import static org.robolectric.Shadows.shadowOf;
import android.app.Application;
import android.content.Intent;
import android.location.Location;
import androidx.lifecycle.Lifecycle;
import androidx.test.core.app.ActivityScenario;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.odk.collect.externalapp.ExternalAppUtils;
import org.odk.collect.location.LocationClient;
import org.odk.collect.location.LocationClientProvider;
import org.robolectric.shadows.ShadowApplication;
@RunWith(AndroidJUnit4.class)
public class GeoPointActivityTest {

    // Mock injected via LocationClientProvider so the activity under test
    // talks to this client instead of a real one.
    LocationClient locationClient = mock(LocationClient.class);

    @Before
    public void setUp() throws Exception {
        ShadowApplication shadowApplication = shadowOf(ApplicationProvider.<Application>getApplicationContext());
        shadowApplication.grantPermissions("android.permission.ACCESS_FINE_LOCATION");
        shadowApplication.grantPermissions("android.permission.ACCESS_COARSE_LOCATION");
        LocationClientProvider.setTestClient(locationClient);
    }

    @Test
    public void testLocationClientLifecycle() {
        Intent intent = new Intent(ApplicationProvider.getApplicationContext(), GeoPointActivity.class);
        intent.putExtra(GeoPointActivity.EXTRA_ACCURACY_THRESHOLD, 5.0);
        ActivityScenario<GeoPointActivity> scenario = ActivityScenario.launch(intent);
        // Activity.onResume() should call LocationClient.start().
        verify(locationClient).start();
        when(locationClient.isLocationAvailable()).thenReturn(true);
        when(locationClient.getLastLocation()).thenReturn(mock(Location.class));
        // Make sure we're requesting updates and logging our previous location:
        scenario.onActivity(activity -> {
            activity.onClientStart();
            verify(locationClient).requestLocationUpdates(activity);
            verify(locationClient).getLastLocation();
        });
        // Simulate the location updating:
        Location firstLocation = mock(Location.class);
        when(firstLocation.getAccuracy()).thenReturn(0.0f);
        scenario.onActivity(activity -> {
            activity.onLocationChanged(firstLocation);
            // First update should never result in a selected point to avoid network location bug:
            assertFalse(activity.isFinishing());
            assertThat(activity.getDialogMessage(), containsString(activity.getAccuracyMessage(firstLocation)));
        });
        // Second update with poor accuracy (above the 5.0 threshold) should
        // change the dialog message but not finish the activity:
        float poorAccuracy = 6.0f;
        Location secondLocation = mock(Location.class);
        when(secondLocation.getAccuracy()).thenReturn(poorAccuracy);
        scenario.onActivity(activity -> {
            activity.onLocationChanged(secondLocation);
            assertFalse(activity.isFinishing());
            assertThat(activity.getDialogMessage(), containsString(activity.getAccuracyMessage(secondLocation)));
        });
        // Third location with good accuracy should change dialog and finish activity.
        float goodAccuracy = 4.0f;
        Location thirdLocation = mock(Location.class);
        when(thirdLocation.getAccuracy()).thenReturn(goodAccuracy);
        scenario.onActivity(activity -> {
            activity.onLocationChanged(thirdLocation);
            assertTrue(activity.isFinishing());
            assertThat(activity.getDialogMessage(), containsString(activity.getAccuracyMessage(thirdLocation)));
        });
        // assertEquals takes (expected, actual); arguments were previously
        // reversed, which garbles failure messages.
        assertEquals(RESULT_OK, scenario.getResult().getResultCode());
        Intent resultIntent = scenario.getResult().getResultData();
        String resultString = ExternalAppUtils.getReturnedSingleValue(resultIntent);
        scenario.onActivity(activity -> {
            assertEquals(activity.getResultStringForLocation(thirdLocation), resultString);
        });
    }

    @Test
    public void activityShouldOpenSettingsIfLocationUnavailable() {
        when(locationClient.isLocationAvailable()).thenReturn(false);
        Intent intent = new Intent(ApplicationProvider.getApplicationContext(), GeoPointActivity.class);
        ActivityScenario<GeoPointActivity> scenario = ActivityScenario.launch(intent);
        scenario.onActivity(activity -> {
            activity.onClientStart();
            assertTrue(activity.isFinishing());
            Intent nextStartedActivity = shadowOf(activity).getNextStartedActivity();
            assertEquals(ACTION_LOCATION_SOURCE_SETTINGS, nextStartedActivity.getAction());
        });
    }

    @Test
    public void activityShouldOpenSettingsIfLocationClientCantConnect() {
        Intent intent = new Intent(ApplicationProvider.getApplicationContext(), GeoPointActivity.class);
        ActivityScenario<GeoPointActivity> scenario = ActivityScenario.launch(intent);
        scenario.onActivity(activity -> {
            activity.onClientStartFailure();
            assertTrue(activity.isFinishing());
            Intent nextStartedActivity = shadowOf(activity).getNextStartedActivity();
            assertEquals(ACTION_LOCATION_SOURCE_SETTINGS, nextStartedActivity.getAction());
        });
    }

    @Test
    public void activityShouldShutOffLocationClientWhenItPauses() {
        Intent intent = new Intent(ApplicationProvider.getApplicationContext(), GeoPointActivity.class);
        ActivityScenario<GeoPointActivity> scenario = ActivityScenario.launch(intent);
        verify(locationClient).start();
        scenario.moveToState(Lifecycle.State.STARTED);
        verify(locationClient).stop();
    }

    @Test
    public void setsLocationClientRetainMockAccuracyToFalse() {
        Intent intent = new Intent(ApplicationProvider.getApplicationContext(), GeoPointActivity.class);
        ActivityScenario.launch(intent);
        verify(locationClient).setRetainMockAccuracy(false);
    }

    @Test
    public void passingRetainMockAccuracyExtra_showSetItOnLocationClient() {
        Intent intent = new Intent(ApplicationProvider.getApplicationContext(), GeoPointActivity.class);
        intent.putExtra(EXTRA_RETAIN_MOCK_ACCURACY, true);
        ActivityScenario.launch(intent);
        verify(locationClient).setRetainMockAccuracy(true);
        intent.putExtra(EXTRA_RETAIN_MOCK_ACCURACY, false);
        ActivityScenario.launch(intent);
        verify(locationClient).setRetainMockAccuracy(false);
    }
}
|
<gh_stars>0
package com.zhcs.service;
import com.zhcs.entity.InsureEntity;
import java.util.List;
import java.util.Map;
//*****************************************************************************
/**
* <p>Title:InsureService</p>
* <p>Description: 保险管理</p>
* <p>Copyright: Copyright (c) 2017</p>
* <p>Company: 深圳市智慧城市管家信息科技有限公司 </p>
* @author 刘晓东 - Alter
* @version v1.0 2017年2月23日
*/
//*****************************************************************************
public interface InsureService {

    /** Fetches a single insurance record by id. */
    InsureEntity queryObject(Long id);

    /** Lists insurance records matching the given filter map. */
    List<InsureEntity> queryList(Map<String, Object> map);

    /** Alternate list query; see implementation for how it differs from queryList. */
    List<InsureEntity> queryList1(Map<String, Object> map);

    /** Counts records matching the given filter map. */
    int queryTotal(Map<String, Object> map);

    /** Persists a new insurance record. */
    void save(InsureEntity insure);

    /** Updates an existing insurance record. */
    void update(InsureEntity insure);

    /** Deletes the record with the given id. */
    void delete(Long id);

    /** Deletes all records whose ids appear in the array. */
    void deleteBatch(Long[] ids);
}
|
<reponame>mephux/ssm<filename>state.go
package ssm
// StateList is a list of state names.
type StateList []string

// States is a list of State values.
type States []State

// Callback is a no-argument hook invoked around state transitions.
type Callback func()

// State holds state metadata and
// the transition callbacks fired when entering or exiting it.
type State struct {
	Name    string
	Initial bool
	To      StateList
	From    StateList

	// Callbacks
	BeforeEnter Callback
	AfterEnter  Callback
	BeforeExit  Callback
	AfterExit   Callback
}
// CanHaveEvent checks if a passed event is callable from this state:
// either the event's target appears in s.To, or this state's name appears
// in the event's From list.
func (s *State) CanHaveEvent(event *Event) bool {
	for _, localState := range s.To {
		if localState == event.To {
			return true
		}
	}
	for _, fromState := range event.From {
		if s.Name == fromState {
			return true
		}
	}
	return false
}
// CanChangeToState reports whether this state may transition to the passed
// state. An empty To list means every transition is allowed.
func (s *State) CanChangeToState(state *State) bool {
	// len() is never negative; `== 0` is the idiomatic check.
	if len(s.To) == 0 {
		return true
	}
	for _, allowed := range s.To {
		if allowed == state.Name {
			return true
		}
	}
	return false
}
// CanChangeFromState checks if the passed in state lists this state's name
// in its From list. Note: unlike CanChangeToState, an empty From list here
// means the transition is rejected.
func (s *State) CanChangeFromState(state *State) bool {
	for _, localState := range state.From {
		if localState == s.Name {
			return true
		}
	}
	return false
}
|
<reponame>iamfantaser/philosophers
#include "../../includes/main.h"
/*
** Prints a timestamped status line for a philosopher. The write semaphore
** serializes output so lines from different processes do not interleave.
** Timestamp is milliseconds since the simulation started.
*/
void	philo_print(t_philosopher *philo, char *str)
{
	sem_wait(philo->write_sem);
	printf("%lld %d %s", (ft_time() - philo->time_start) / 1000,
		philo->id, str);
	sem_post(philo->write_sem);
}
/*
** Closes the three named semaphores held by the parent process.
*/
void	philo_clear_sem_all(t_info *info)
{
	sem_close(info->write_sem);
	sem_close(info->death);
	sem_close(info->waiters);
}

/*
** Sends SIGTERM to every philosopher child process.
*/
void	philo_terminate(t_info *info)
{
	int	i;

	i = 0;
	while (i < info->count)
	{
		kill(info->phil[i].pid, SIGTERM);
		i++;
	}
}
/*
** Validates command-line arguments: expects 4 or 5 arguments after the
** program name, each made only of decimal digits. Returns 1 when valid,
** 0 after printing "Args ERROR" to stderr otherwise.
** NOTE(review): an empty argument string passes the digit check.
*/
int	philo_validation(int argc, char **argv)
{
	int	i;
	int	j;
	int	res;

	i = 1;
	res = 1;
	if (argc < 5 || argc > 6)
		res = 0;
	while (i < argc)
	{
		j = 0;
		while (argv[i][j])
		{
			if (argv[i][j] < '0' || argv[i][j] > '9')
				res = 0;
			j++;
		}
		i++;
	}
	if (res == 0)
		return (write(2, "Args ERROR\n", 11) == 0);
	else
		return (1);
}
/*
** Dispatches the philosopher's current state to its handler:
** HUNGRY -> eat, SLEEPING -> sleep, THINKING -> think.
** Returns 3 for any unrecognized state.
*/
int	philo_do_action(t_info *info, t_philosopher *philo)
{
	if (philo->state == HUNGRY)
		return (philo_action(info, philo));
	else if (philo->state == SLEEPING)
		return (philo_sleep(philo));
	else if (philo->state == THINKING)
		return (philo_think(philo));
	return (3);
}
|
def find_largest_smallest(numbers):
    """Return a ``(largest, smallest)`` tuple for a sequence of numbers.

    Args:
        numbers: sequence of comparable values (may be empty).

    Returns:
        ``(None, None)`` for an empty sequence, otherwise the maximum and
        minimum values. A single-element sequence yields that element twice.
    """
    if not numbers:
        return (None, None)
    # Builtins replace the manual scan loop; the per-iteration debug print
    # from the original has been removed.
    return (max(numbers), min(numbers))
import os
import sys
from datetime import date, datetime
from libs.database import db
class CrudModel:
    """Thin CRUD layer over the DbLite SQLite helper for the comments table."""

    dblite = None  # a DbLite() object
    db_name = "comments.db"
    db_table = "comments"
    conn = None  # a SQLite database connection handle

    def __init__(self):
        # Open (or create) the database immediately on construction.
        self.dblite = db.DbLite()
        self.conn = self.connect_to_database(self.db_name, self.dblite)
    '''
    usage from e.g. app.py:
    crud = crud_model.CrudModel()
    rows = crud.select_all()
    '''
    def select_all(self, sql = "", db_table = None):
        """Return all rows of `db_table`, newest first, unless `sql` overrides."""
        if db_table == None: db_table = self.db_table
        #read all
        if sql == "": sql = 'SELECT * FROM ' + db_table + ' ORDER BY created DESC'
        rows = self.dblite.read(self.conn, sql)
        return rows
    '''
    usage from e.g. app.py:
    crud = crud_model.CrudModel()
    row = crud.select_record(id=59)
    '''
    def select_record(self, id = None, db_table = None, sql = None, data = None):
        """Fetch a single row by primary key."""
        if db_table == None: db_table = self.db_table
        if sql == None:
            # prepared statement
            sql = 'SELECT * FROM ' + db_table + ' WHERE id = ?'
            # data to inject
            # NOTE(review): (id) is not a tuple -- (id,) was likely intended;
            # `data` is also never used: read() below receives `id` directly.
            data = (id)
        try:
            row = self.dblite.read(self.conn, sql, id)
        except:
            # NOTE(review): if read() raises, `row` is unbound and the
            # `return row` below raises UnboundLocalError.
            print('Status: error reading record')
        return row
    '''
    usage from e.g. app.py:
    crud = crud_model.CrudModel()
    '''
    def insert_record(self, id = None, title = "", content = "", tags = "", created = "", updated = "", deleted = "", db_table = None, sql = None, data = None):
        """Insert a new row; returns its id, or None on failure."""
        if db_table == None: db_table = self.db_table
        if sql == None:
            created = self.get_date()
            updated = ""
            # prepared statement
            sql = 'INSERT INTO ' + db_table + '(title, content, tags, created, updated) VALUES(?,?,?,?,?)'
            # data to inject
            data = (title, content, tags, created, updated)
        try:
            row_id = self.dblite.create(self.conn, sql, data)
            print('Status: successfully saved new article')
            return row_id
        except:
            # NOTE(review): bare except swallows the error and implicitly
            # returns None.
            print('Status: error saving new article')

    def update_record(self, id = None, title = "", content = "", tags = "", db_table = None, sql = None, data = None):
        """Update title/content/tags of the row with the given id."""
        if db_table == None: db_table = self.db_table
        if sql == None:
            updated = self.get_date()
            # prepared statement
            sql = 'UPDATE ' + db_table + ' SET title = ?, content = ?, tags = ?, updated = ? WHERE id = ?'
            # data to inject
            data = (title, content, tags, updated, id)
        try:
            row_id = self.dblite.update(self.conn, sql, data)
            print('Status: successfully updated existing article (id:', id, ')')
            return id
        except:
            print('Error: failed updating existing article')

    def delete_record(self, id = None, db_table = None):
        """Delete the row with the given id."""
        if db_table == None: db_table = self.db_table
        # prepared statement
        sql = 'DELETE FROM ' + db_table + ' WHERE id = ?'
        # data to inject
        # NOTE(review): (id) is a bare value, not a tuple -- confirm what
        # dblite.delete() expects.
        data = (id)
        try:
            row_id = self.dblite.delete(self.conn, sql, data)
            print('Status: successfully deleted article (id:', id, ')')
            return row_id
        except:
            print('Error: failed deleting article')

    def connect_to_database(self, file_name = "", dblite = None):
        """Open (or create) the SQLite file under libs/database/dbfiles/.

        Returns a connection handle, or False on failure.
        """
        #database = db.DbLite() # use our SQLite-db-transactions Class DbLite
        base_dir = self.base_dir()
        db_name = base_dir + "/libs/database/dbfiles/" + file_name
        try:
            #conn = database.connect(db_name) # connect to existing or create and connect to new db
            conn = dblite.connect(db_name) # connect to existing or create and connect to new db
            print('connected to database ', db_name)
            return conn # return connection handle
        except:
            print('Error: could not connect to database ', db_name)
            return False
    '''
    base_directory is the path to \src in our project
    '''
    def base_dir(self):
        pathname = os.path.dirname(sys.argv[0])
        base_dir = os.path.abspath(pathname)
        return base_dir

    def get_date(self):
        # NOTE(review): returns a datetime, not a date, despite the name.
        now = datetime.now() # e.g. 2019-06-04 15:56:19.545652
        return now
#!/usr/bin/env bash
#
# This file detects the C/C++ compiler and exports it to the CC/CXX environment variables
#

# Only arch and compiler are mandatory; the two version parts are optional.
if [[ "$#" -lt 2 ]]; then
    echo "Usage..."
    echo "detect-compiler.sh <Architecture> <compiler> <compiler major version> <compiler minor version>"
    echo "Specify the target architecture."
    echo "Specify the name of compiler (clang or gcc)."
    echo "Specify the major version of compiler."
    echo "Specify the minor version of compiler."
    exit 1
fi

build_arch="$1"
compiler="$2"
cxxCompiler="$compiler++"
majorVersion="$3"
minorVersion="$4"

# clear the existing CC and CXX from environment
CC=
CXX=

# "gcc++" is not a real binary; the C++ driver for gcc is g++.
if [[ "$compiler" == "gcc" ]]; then cxxCompiler="g++"; fi
# Probe PATH for a compiler binary matching major=$1 / minor=$2, trying the
# three common name layouts ("cc-X.Y", "ccXY", "cc-XY"). Echoes the suffix
# to append to the compiler name, or "-1" when no candidate exists.
check_version_exists() {
    desired_version=-1
    # Set up the environment to be used for building with the desired compiler.
    if command -v "$compiler-$1.$2" > /dev/null; then
        desired_version="-$1.$2"
    elif command -v "$compiler$1$2" > /dev/null; then
        desired_version="$1$2"
    elif command -v "$compiler-$1$2" > /dev/null; then
        desired_version="-$1$2"
    fi
    echo "$desired_version"
}
# Resolution order: explicit CLR_CC override wins; otherwise probe known
# versions newest-first; otherwise fall back to whatever is on PATH.
if [[ -z "$CLR_CC" ]]; then

    # Set default versions
    if [[ -z "$majorVersion" ]]; then
        # note: gcc (all versions) and clang versions higher than 6 do not have minor version in file name, if it is zero.
        if [[ "$compiler" == "clang" ]]; then versions=( 9 8 7 6.0 5.0 4.0 3.9 3.8 3.7 3.6 3.5 )
        elif [[ "$compiler" == "gcc" ]]; then versions=( 9 8 7 6 5 4.9 ); fi

        for version in "${versions[@]}"; do
            # Split "X.Y" into major/minor parts.
            parts=(${version//./ })
            desired_version="$(check_version_exists "${parts[0]}" "${parts[1]}")"
            if [[ "$desired_version" != "-1" ]]; then majorVersion="${parts[0]}"; break; fi
        done

        if [[ -z "$majorVersion" ]]; then
            # No versioned binary found; use the unversioned one if present.
            if command -v "$compiler" > /dev/null; then
                if [[ "$(uname)" != "Darwin" ]]; then
                    echo "WARN: Specific version of $compiler not found, falling back to use the one in PATH."
                fi
                CC="$(command -v "$compiler")"
                CXX="$(command -v "$cxxCompiler")"
            else
                echo "ERROR: No usable version of $compiler found."
                exit 1
            fi
        else
            # clang < 5 is not usable for arm/armel builds.
            if [[ "$compiler" == "clang" && "$majorVersion" -lt 5 ]]; then
                if [[ "$build_arch" == "arm" || "$build_arch" == "armel" ]]; then
                    if command -v "$compiler" > /dev/null; then
                        echo "WARN: Found clang version $majorVersion which is not supported on arm/armel architectures, falling back to use clang from PATH."
                        CC="$(command -v "$compiler")"
                        CXX="$(command -v "$cxxCompiler")"
                    else
                        echo "ERROR: Found clang version $majorVersion which is not supported on arm/armel architectures, and there is no clang in PATH."
                        exit 1
                    fi
                fi
            fi
        fi
    else
        # A specific version was requested; it must exist.
        desired_version="$(check_version_exists "$majorVersion" "$minorVersion")"
        if [[ "$desired_version" == "-1" ]]; then
            echo "ERROR: Could not find specific version of $compiler: $majorVersion $minorVersion."
            exit 1
        fi
    fi

    if [[ -z "$CC" ]]; then
        CC="$(command -v "$compiler$desired_version")"
        CXX="$(command -v "$cxxCompiler$desired_version")"
        # Fall back to the unversioned C++ driver if the versioned one is absent.
        if [[ -z "$CXX" ]]; then CXX="$(command -v "$cxxCompiler")"; fi
    fi
else
    # Explicit override: CLR_CC/CLR_CXX must point at existing files.
    if [[ ! -f "$CLR_CC" ]]; then
        echo "ERROR: CLR_CC is set but path '$CLR_CC' does not exist"
        exit 1
    fi
    CC="$CLR_CC"
    CXX="$CLR_CXX"
fi

if [[ -z "$CC" ]]; then
    echo "ERROR: Unable to find $compiler."
    exit 1
fi

SCAN_BUILD_COMMAND="$(command -v "scan-build$desired_version")"

export CC CXX SCAN_BUILD_COMMAND
|
<gh_stars>0
import React from 'react';
import { connect } from 'dva';
import Container from '../components/Container';
import PropTypes from 'prop-types';
import styles from './IndexPage.css';
// Top-level page: renders a Container driven by dva's global loading flag.
class IndexPage extends React.Component {
  render(){
    return (
      <Container loading={this.props.loading}>
      </Container>
    );
  }
}

IndexPage.propTypes = {
  loading: PropTypes.bool
};

// Expose dva's global loading state as the `loading` prop.
const mapStateToProps = (state) => {
  return {
    loading: state.loading.global,
    //ui: state.ui
  }
}

export default connect(mapStateToProps)(IndexPage);
|
import { IGridSeparator } from '../../typings/interfaces'
import { IColumnOperationFactory } from '../../typings/interfaces/grid-column-operation-factory.interface'
import { Operation } from '../operation.abstract'
// Operation returning the separator definitions attached to a column's
// metadata, or an empty array when the column has none.
export class GetColumnSeparators extends Operation {

  constructor(factory: IColumnOperationFactory) { super(factory.gridController) }

  public run(columnKey: string): IGridSeparator[] {
    return this.columnOperations.GetColumnMeta.run(columnKey)?.separators || []
  }
}
|
// 1788. Extended Fibonacci numbers
// 2019.05.18
// Math / implementation
#include<iostream>
using namespace std;

int d[1000001]; // d[i] : the i-th Fibonacci number (kept mod 1e9)

int main()
{
	int n;
	cin >> n;
	int tmp = n;
	if (n < 0) tmp *= -1; // fill the table up to |n|
	d[0] = 0;
	d[1] = 1;
	for (int i = 2; i <= tmp; i++)
	{
		d[i] = d[i - 1] + d[i - 2];
		d[i] %= 1000000000; // keep only the last 9 digits
	}
	// First line prints the sign: F(-n) = (-1)^(n+1) * F(n).
	if (n < 0) // n is negative
	{
		n *= -1;
		if (n % 2 == 0)
		{
			cout << -1 << endl;
		}
		else
		{
			cout << 1 << endl;
		}
	}
	else if (n>0) // n is positive
	{
		cout << 1 << endl;
	}
	else // 0
	{
		cout << 0 << endl;
	}
	cout << d[n] << endl; // magnitude (n has been made non-negative above)
	return 0;
}
|
<filename>extern/typed-geometry/src/typed-geometry/functions/objects/size.hh
#pragma once
#include <typed-geometry/types/size.hh>
#include <typed-geometry/types/objects/aabb.hh>
#include <typed-geometry/types/objects/box.hh>
#include <typed-geometry/detail/operators/ops_pos.hh>
namespace tg
{
// Returns the per-axis extent (max - min) of an axis-aligned bounding box.
template <int D, class ScalarT>
[[nodiscard]] constexpr size<D, ScalarT> size_of(aabb<D, ScalarT> const& b)
{
    return size<D, ScalarT>(b.max - b.min);
}

// TODO: size_of(box)
}
|
import requests
from bs4 import BeautifulSoup

# The URL must include a scheme; a bare 'www.example.com' makes
# requests.get() raise MissingSchema before any request is sent.
url = 'https://www.example.com'

# Bounded timeout so the script cannot hang forever; fail loudly on HTTP errors.
page = requests.get(url, timeout=10)
page.raise_for_status()

soup = BeautifulSoup(page.content, 'html.parser')

# Collect the text content of every element in the document.
data = [element.text for element in soup.find_all()]
print(data)
import UIKit
// Root screen: holds the in-memory list of recorded expenses.
class ViewController: UIViewController {

    // Expenses recorded this session (not persisted anywhere in this file).
    var expenses = [Expense]()

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    // Appends a new expense with the given amount and category label.
    func addExpense(amount: Double, category: String) {
        expenses.append(Expense(amount: amount, category: category))
    }
}
// A single spending record.
struct Expense {
    var amount: Double   // monetary amount
    var category: String // free-form category label
}
<filename>src/containers/artists/ArtistsList.tsx
import { Artist } from '@favid-inc/api';
import { StyleType, ThemedComponentProps, ThemeType, withStyles } from '@kitten/theme';
import { Input, InputProps, List, Text } from '@kitten/ui';
import { SearchIconOutline } from '@src/assets/icons';
import { ArtistCard, ArtistCardProps } from '@src/components/artist/artistCard.component';
import { textStyle } from '@src/components/common';
import React from 'react';
import {
ActivityIndicator,
ListRenderItemInfo,
NativeScrollEvent,
NativeSyntheticEvent,
ScrollView,
View,
} from 'react-native';
// A named category bucket of artists, keyed by the category label.
export interface CategoryOfArtistModel {
  key: string;
  artists: Artist[];
}

// Props supplied by the screen that hosts the list.
interface ComponentProps {
  categoryOfArtists: CategoryOfArtistModel[];
  loading: boolean;
  onDetails: (Artist) => void;
  onSearchStringChange: (text: string) => void;
}

// Local state: index of the pager card currently centered on screen.
interface State {
  selectedExerciseIndex: number;
}

export type ArtistsListComponentProps = ThemedComponentProps & ComponentProps;
// Searchable, category-grouped artist list with dedicated loading and
// empty states; each category renders as a horizontal card pager.
class ArtistListComponent extends React.Component<ArtistsListComponentProps, State> {
  public state: State = {
    selectedExerciseIndex: 0,
  };

  public render() {
    const { themedStyle, categoryOfArtists, loading } = this.props;
    // Loading state: search box stays visible above a spinner.
    if (loading) {
      return (
        <View style={themedStyle.container}>
          {this.renderSearchInput()}
          <ActivityIndicator size='large' />
        </View>
      );
    }
    // Empty state: no categories (or none matched the search).
    if (!categoryOfArtists || !categoryOfArtists.length) {
      return (
        <View style={themedStyle.container}>
          {this.renderSearchInput()}
          <Text style={themedStyle.subtitle} appearance='hint'>
            Nenhum artista encontrado.
          </Text>
        </View>
      );
    }
    return (
      <View style={themedStyle.container}>
        {this.renderSearchInput()}
        <ScrollView contentContainerStyle={themedStyle.container}>
          {categoryOfArtists.map((artistsList) => (
            <View key={artistsList.key}>
              <Text style={themedStyle.pagerLabel} appearance='hint'>
                {artistsList.key}
              </Text>
              <List
                style={themedStyle.pagerContainer}
                horizontal={true}
                renderItem={this.renderPagerCard}
                data={artistsList.artists}
                showsHorizontalScrollIndicator={false}
                onScroll={this.onExerciseListScroll}
              />
            </View>
          ))}
        </ScrollView>
      </View>
    );
  }

  // Tracks which card is centered by dividing the scroll offset by the
  // fixed card width from the theme.
  private onExerciseListScroll = (event: NativeSyntheticEvent<NativeScrollEvent>) => {
    const { themedStyle } = this.props;
    const { x: xOffset } = event.nativeEvent.contentOffset;
    const { width: itemWidth } = themedStyle.pagerCard;
    const selectedExerciseIndex: number = Math.round(xOffset / itemWidth);
    if (selectedExerciseIndex !== this.state.selectedExerciseIndex) {
      this.setState({ selectedExerciseIndex });
    }
  };

  private renderPagerCard = (info: ListRenderItemInfo<Artist>): React.ReactElement<ArtistCardProps> => {
    const { themedStyle } = this.props;
    const marginStyle: StyleType = themedStyle.pagerCardMargin;
    return (
      <ArtistCard
        index={info.index}
        style={[themedStyle.pagerCard, marginStyle]}
        artist={info.item}
        onDetails={this.props.onDetails}
      />
    );
  };

  // Forwards search text changes straight to the parent.
  private onSearchStringChange = (text: string): void => {
    this.props.onSearchStringChange(text);
  };

  private renderSearchInput = (): React.ReactElement<InputProps> | null => {
    const { themedStyle } = this.props;
    return (
      <Input
        style={themedStyle.input}
        textStyle={textStyle.paragraph}
        icon={SearchIconOutline}
        placeholder='Busque um artista...'
        onChangeText={this.onSearchStringChange}
      />
    );
  };
}
// Themed wrapper: injects the style sheet below as `themedStyle`.
// Note: pagerCard.width is also read by onExerciseListScroll to compute
// the selected card index.
export const ArtistList = withStyles(ArtistListComponent, (theme: ThemeType) => ({
  container: {
    paddingHorizontal: 10,
    paddingVertical: 8,
  },
  pagerContainer: {
    marginVertical: 8,
  },
  subtitle: {
    marginVertical: 16,
    textAlign: 'center',
    ...textStyle.subtitle,
  },
  pagerLabel: {
    marginVertical: 16,
    ...textStyle.paragraph,
  },
  pagerCard: {
    width: 226,
  },
  listCard: {
    marginVertical: 8,
  },
  pagerCardMargin: {
    marginRight: 16,
  },
  pagerIndicatorContainer: {
    flexDirection: 'row',
    justifyContent: 'center',
    alignItems: 'center',
    marginVertical: 8,
  },
  pagerIndicatorSelected: {
    backgroundColor: theme['background-basic-color-4'],
  },
  indicatorMarginRight: {
    marginRight: 12,
  },
  input: {
    marginHorizontal: 10,
  },
}));
|
# -*- sh -*-
# Create $ZSH/run/u if it doesn't exist
# (1777 = sticky + world-writable, like /tmp, so any user can add a dir)
[[ -d $ZSH/run/u ]] || {
    mkdir -p $ZSH/run/u
    chmod 1777 $ZSH/run/u
}

# Create per-UID directory (name includes the host for shared homes)
[[ -d $ZSH/run/u/$HOST-$UID ]] || {
    mkdir -p $ZSH/run/u/$HOST-$UID
}
|
<filename>src/main/java/com/crowdin/client/sourcefiles/SourceFilesApi.java
package com.crowdin.client.sourcefiles;
import com.crowdin.client.core.CrowdinApi;
import com.crowdin.client.core.http.HttpRequestConfig;
import com.crowdin.client.core.http.exceptions.HttpBadRequestException;
import com.crowdin.client.core.http.exceptions.HttpException;
import com.crowdin.client.core.model.ClientConfig;
import com.crowdin.client.core.model.Credentials;
import com.crowdin.client.core.model.DownloadLink;
import com.crowdin.client.core.model.DownloadLinkResponseObject;
import com.crowdin.client.core.model.PatchRequest;
import com.crowdin.client.core.model.ResponseList;
import com.crowdin.client.core.model.ResponseObject;
import com.crowdin.client.sourcefiles.model.AddBranchRequest;
import com.crowdin.client.sourcefiles.model.AddDirectoryRequest;
import com.crowdin.client.sourcefiles.model.AddFileRequest;
import com.crowdin.client.sourcefiles.model.Branch;
import com.crowdin.client.sourcefiles.model.BranchResponseList;
import com.crowdin.client.sourcefiles.model.BranchResponseObject;
import com.crowdin.client.sourcefiles.model.Directory;
import com.crowdin.client.sourcefiles.model.DirectoryResponseList;
import com.crowdin.client.sourcefiles.model.DirectoryResponseObject;
import com.crowdin.client.sourcefiles.model.File;
import com.crowdin.client.sourcefiles.model.FileResponseList;
import com.crowdin.client.sourcefiles.model.FileResponseObject;
import com.crowdin.client.sourcefiles.model.FileRevision;
import com.crowdin.client.sourcefiles.model.FileRevisionResponseList;
import com.crowdin.client.sourcefiles.model.FileRevisionResponseObject;
import com.crowdin.client.sourcefiles.model.UpdateOrRestoreFileRequest;
import java.util.List;
import java.util.Map;
import java.util.Optional;
public class SourceFilesApi extends CrowdinApi {
    /** Creates a client using the default client configuration. */
    public SourceFilesApi(Credentials credentials) {
        super(credentials);
    }

    /** Creates a client with an explicit {@link ClientConfig}. */
    public SourceFilesApi(Credentials credentials, ClientConfig clientConfig) {
        super(credentials, clientConfig);
    }
    /**
     * @param projectId project identifier
     * @param name filter by branch name
     * @param limit maximum number of items to retrieve (default 25)
     * @param offset starting offset in the collection (default 0)
     * @return list of branches
     * @throws HttpException propagated from the underlying HTTP client
     * @throws HttpBadRequestException propagated from the underlying HTTP client
     */
    public ResponseList<Branch> listBranches(Long projectId, String name, Integer limit, Integer offset) throws HttpException, HttpBadRequestException {
        // Null parameters are simply omitted from the query string.
        Map<String, Optional<Object>> queryParams = HttpRequestConfig.buildUrlParams(
                "name", Optional.ofNullable(name),
                "limit", Optional.ofNullable(limit),
                "offset", Optional.ofNullable(offset)
        );
        BranchResponseList branchResponseList = this.httpClient.get(this.url + "/projects/" + projectId + "/branches", new HttpRequestConfig(queryParams), BranchResponseList.class);
        return BranchResponseList.to(branchResponseList);
    }

    /**
     * @param projectId project identifier
     * @param request request object
     * @return newly created branch
     * @throws HttpException propagated from the underlying HTTP client
     * @throws HttpBadRequestException propagated from the underlying HTTP client
     */
    public ResponseObject<Branch> addBranch(Long projectId, AddBranchRequest request) throws HttpException, HttpBadRequestException {
        BranchResponseObject branchResponseObject = this.httpClient.post(this.url + "/projects/" + projectId + "/branches", request, new HttpRequestConfig(), BranchResponseObject.class);
        return ResponseObject.of(branchResponseObject.getData());
    }

    /**
     * @param projectId project identifier
     * @param branchId branch identifier
     * @return branch
     * @throws HttpException propagated from the underlying HTTP client
     * @throws HttpBadRequestException propagated from the underlying HTTP client
     */
    public ResponseObject<Branch> getBranch(Long projectId, Long branchId) throws HttpException, HttpBadRequestException {
        BranchResponseObject branchResponseObject = this.httpClient.get(this.url + "/projects/" + projectId + "/branches/" + branchId, new HttpRequestConfig(), BranchResponseObject.class);
        return ResponseObject.of(branchResponseObject.getData());
    }

    /**
     * @param projectId project identifier
     * @param branchId branch identifier
     * @throws HttpException propagated from the underlying HTTP client
     * @throws HttpBadRequestException propagated from the underlying HTTP client
     */
    public void deleteBranch(Long projectId, Long branchId) throws HttpException, HttpBadRequestException {
        this.httpClient.delete(this.url + "/projects/" + projectId + "/branches/" + branchId, new HttpRequestConfig(), Void.class);
    }
/**
* @param projectId project identifier
* @param branchId branch identifier
* @param request request object
* @return updated branch
*/
public ResponseObject<Branch> editBranch(Long projectId, Long branchId, List<PatchRequest> request) throws HttpException, HttpBadRequestException {
BranchResponseObject groupResponseObject = this.httpClient.patch(this.url + "/projects/" + projectId + "/branches/" + branchId, request, new HttpRequestConfig(), BranchResponseObject.class);
return ResponseObject.of(groupResponseObject.getData());
}
    /**
     * @param projectId project identifier
     * @param branchId filter by branch id
     * @param directoryId filter by directory id
     * @param recursion use to list directories recursively
     * @param limit maximum number of items to retrieve (default 25)
     * @param offset starting offset in the collection (default 0)
     * @return list of directories
     * @throws HttpException propagated from the underlying HTTP client
     * @throws HttpBadRequestException propagated from the underlying HTTP client
     */
    public ResponseList<Directory> listDirectories(Long projectId, Long branchId, Long directoryId, Object recursion, Integer limit, Integer offset) throws HttpException, HttpBadRequestException {
        // Null parameters are simply omitted from the query string.
        Map<String, Optional<Object>> queryParams = HttpRequestConfig.buildUrlParams(
                "branchId", Optional.ofNullable(branchId),
                "directoryId", Optional.ofNullable(directoryId),
                "recursion", Optional.ofNullable(recursion),
                "limit", Optional.ofNullable(limit),
                "offset", Optional.ofNullable(offset)
        );
        DirectoryResponseList directoryResponseList = this.httpClient.get(this.url + "/projects/" + projectId + "/directories", new HttpRequestConfig(queryParams), DirectoryResponseList.class);
        return DirectoryResponseList.to(directoryResponseList);
    }
/**
 * Creates a new directory in the given project.
 *
 * @param projectId project identifier
 * @param request request object describing the directory to create
 * @return newly created directory
 */
public ResponseObject<Directory> addDirectory(Long projectId, AddDirectoryRequest request) throws HttpException, HttpBadRequestException {
    String directoriesUrl = this.url + "/projects/" + projectId + "/directories";
    DirectoryResponseObject directoryResponse = this.httpClient.post(directoriesUrl, request, new HttpRequestConfig(), DirectoryResponseObject.class);
    return ResponseObject.of(directoryResponse.getData());
}
/**
 * Fetches a single directory by id.
 *
 * @param projectId project identifier
 * @param directoryId directory identifier
 * @return directory
 */
public ResponseObject<Directory> getDirectory(Long projectId, Long directoryId) throws HttpException, HttpBadRequestException {
    String directoryUrl = this.url + "/projects/" + projectId + "/directories/" + directoryId;
    DirectoryResponseObject directoryResponse = this.httpClient.get(directoryUrl, new HttpRequestConfig(), DirectoryResponseObject.class);
    return ResponseObject.of(directoryResponse.getData());
}
/**
 * Deletes a directory by id.
 *
 * @param projectId project identifier
 * @param directoryId directory identifier
 */
public void deleteDirectory(Long projectId, Long directoryId) throws HttpException, HttpBadRequestException {
    String directoryUrl = this.url + "/projects/" + projectId + "/directories/" + directoryId;
    this.httpClient.delete(directoryUrl, new HttpRequestConfig(), Void.class);
}
/**
 * Applies a JSON-patch style update to an existing directory.
 *
 * @param projectId project identifier
 * @param directoryId directory identifier
 * @param request list of patch operations to apply
 * @return updated directory
 */
public ResponseObject<Directory> editDirectory(Long projectId, Long directoryId, List<PatchRequest> request) throws HttpException, HttpBadRequestException {
    String directoryUrl = this.url + "/projects/" + projectId + "/directories/" + directoryId;
    DirectoryResponseObject directoryResponse = this.httpClient.patch(directoryUrl, request, new HttpRequestConfig(), DirectoryResponseObject.class);
    return ResponseObject.of(directoryResponse.getData());
}
/**
 * Lists files in the given project, optionally filtered.
 *
 * @param projectId project identifier
 * @param branchId filter by branch id
 * @param directoryId filter by directory id
 * @param recursion use to list files recursively
 * @param limit maximum number of items to retrieve (default 25)
 * @param offset starting offset in the collection (default 0)
 * @return list of files
 */
public ResponseList<File> listFiles(Long projectId, Long branchId, Long directoryId, Object recursion, Integer limit, Integer offset) throws HttpException, HttpBadRequestException {
    // Absent (null) filters are simply omitted from the query string.
    Map<String, Optional<Object>> urlParams = HttpRequestConfig.buildUrlParams(
        "branchId", Optional.ofNullable(branchId),
        "directoryId", Optional.ofNullable(directoryId),
        "recursion", Optional.ofNullable(recursion),
        "limit", Optional.ofNullable(limit),
        "offset", Optional.ofNullable(offset)
    );
    String filesUrl = this.url + "/projects/" + projectId + "/files";
    FileResponseList fileList = this.httpClient.get(filesUrl, new HttpRequestConfig(urlParams), FileResponseList.class);
    return FileResponseList.to(fileList);
}
/**
 * Creates a new file in the given project.
 *
 * @param projectId project identifier
 * @param request request object describing the file to create
 * @return newly created file
 */
public ResponseObject<File> addFile(Long projectId, AddFileRequest request) throws HttpException, HttpBadRequestException {
    String filesUrl = this.url + "/projects/" + projectId + "/files";
    FileResponseObject fileResponse = this.httpClient.post(filesUrl, request, new HttpRequestConfig(), FileResponseObject.class);
    return ResponseObject.of(fileResponse.getData());
}
/**
 * Fetches a single file by id.
 *
 * @param projectId project identifier
 * @param fileId file identifier
 * @return file
 */
public ResponseObject<File> getFile(Long projectId, Long fileId) throws HttpException, HttpBadRequestException {
    String fileUrl = this.url + "/projects/" + projectId + "/files/" + fileId;
    FileResponseObject fileResponse = this.httpClient.get(fileUrl, new HttpRequestConfig(), FileResponseObject.class);
    return ResponseObject.of(fileResponse.getData());
}
/**
 * Replaces a file's content or restores it to a previous revision (HTTP PUT).
 *
 * @param projectId project identifier
 * @param fileId file identifier
 * @param request request object
 * @return updated file
 */
public ResponseObject<File> updateOrRestoreFile(Long projectId, Long fileId, UpdateOrRestoreFileRequest request) throws HttpException, HttpBadRequestException {
    String fileUrl = this.url + "/projects/" + projectId + "/files/" + fileId;
    FileResponseObject fileResponse = this.httpClient.put(fileUrl, request, new HttpRequestConfig(), FileResponseObject.class);
    return ResponseObject.of(fileResponse.getData());
}
/**
 * Deletes a file by id.
 *
 * @param projectId project identifier
 * @param fileId file identifier
 */
public void deleteFile(Long projectId, Long fileId) throws HttpException, HttpBadRequestException {
    String fileUrl = this.url + "/projects/" + projectId + "/files/" + fileId;
    this.httpClient.delete(fileUrl, new HttpRequestConfig(), Void.class);
}
/**
 * Applies a JSON-patch style update to an existing file.
 *
 * @param projectId project identifier
 * @param fileId file identifier
 * @param request list of patch operations to apply
 * @return updated file
 */
public ResponseObject<File> editFile(Long projectId, Long fileId, List<PatchRequest> request) throws HttpException, HttpBadRequestException {
    String fileUrl = this.url + "/projects/" + projectId + "/files/" + fileId;
    FileResponseObject fileResponse = this.httpClient.patch(fileUrl, request, new HttpRequestConfig(), FileResponseObject.class);
    return ResponseObject.of(fileResponse.getData());
}
/**
 * Requests a temporary download link for a file.
 *
 * @param projectId project identifier
 * @param fileId file identifier
 * @return file download link
 */
public ResponseObject<DownloadLink> downloadFile(Long projectId, Long fileId) throws HttpException, HttpBadRequestException {
    String downloadUrl = this.url + "/projects/" + projectId + "/files/" + fileId + "/download";
    DownloadLinkResponseObject downloadLinkResponse = this.httpClient.get(downloadUrl, new HttpRequestConfig(), DownloadLinkResponseObject.class);
    return ResponseObject.of(downloadLinkResponse.getData());
}
/**
 * Lists revisions of a file, paginated.
 *
 * @param projectId project identifier
 * @param fileId file identifier
 * @param limit maximum number of items to retrieve (default 25)
 * @param offset starting offset in the collection (default 0)
 * @return list of file revisions
 */
public ResponseList<FileRevision> listFileRevisions(Long projectId, Long fileId, Integer limit, Integer offset) throws HttpException, HttpBadRequestException {
    Map<String, Optional<Object>> urlParams = HttpRequestConfig.buildUrlParams(
        "limit", Optional.ofNullable(limit),
        "offset", Optional.ofNullable(offset)
    );
    String revisionsUrl = this.url + "/projects/" + projectId + "/files/" + fileId + "/revisions";
    FileRevisionResponseList revisionList = this.httpClient.get(revisionsUrl, new HttpRequestConfig(urlParams), FileRevisionResponseList.class);
    return FileRevisionResponseList.to(revisionList);
}
/**
 * Fetches a single file revision by id.
 *
 * @param projectId project identifier
 * @param fileId file identifier
 * @param revisionId revision identifier
 * @return file revision
 */
public ResponseObject<FileRevision> getFileRevision(Long projectId, Long fileId, Long revisionId) throws HttpException, HttpBadRequestException {
    String revisionUrl = this.url + "/projects/" + projectId + "/files/" + fileId + "/revisions/" + revisionId;
    FileRevisionResponseObject revisionResponse = this.httpClient.get(revisionUrl, new HttpRequestConfig(), FileRevisionResponseObject.class);
    return ResponseObject.of(revisionResponse.getData());
}
}
|
import { defineAsyncComponent } from 'vue'
// Auto-generated VuePress route-to-component map (lives under .vuepress/.temp —
// do not edit by hand; regenerated on every build).
// Each value must stay a literal dynamic import() carrying a webpackChunkName
// magic comment so the bundler can statically emit one chunk per page;
// the "v-…" keys are VuePress page hashes referenced by the router.
export const pagesComponents = {
  // path: /
  "v-8daa1a0e": defineAsyncComponent(() => import(/* webpackChunkName: "v-8daa1a0e" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/index.html.vue")),
  // path: /aboutMe/aboutMe.html
  "v-586fde37": defineAsyncComponent(() => import(/* webpackChunkName: "v-586fde37" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/aboutMe/aboutMe.html.vue")),
  // path: /aboutMe/eduStory.html
  "v-fa178d12": defineAsyncComponent(() => import(/* webpackChunkName: "v-fa178d12" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/aboutMe/eduStory.html.vue")),
  // path: /changeLog/completed.html
  "v-4e4fe93c": defineAsyncComponent(() => import(/* webpackChunkName: "v-4e4fe93c" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/changeLog/completed.html.vue")),
  // path: /changeLog/todo.html
  "v-6a79bc42": defineAsyncComponent(() => import(/* webpackChunkName: "v-6a79bc42" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/changeLog/todo.html.vue")),
  // path: /cicada/aboutCicada.html
  "v-6921f140": defineAsyncComponent(() => import(/* webpackChunkName: "v-6921f140" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/cicada/aboutCicada.html.vue")),
  // path: /cicada/calculus.html
  "v-776ce17a": defineAsyncComponent(() => import(/* webpackChunkName: "v-776ce17a" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/cicada/calculus.html.vue")),
  // path: /coding/blog.html
  "v-94fb19ea": defineAsyncComponent(() => import(/* webpackChunkName: "v-94fb19ea" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/coding/blog.html.vue")),
  // path: /coding/navigation.html
  "v-0bc490d9": defineAsyncComponent(() => import(/* webpackChunkName: "v-0bc490d9" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/coding/navigation.html.vue")),
  // path: /coding/vue.html
  "v-14bbabff": defineAsyncComponent(() => import(/* webpackChunkName: "v-14bbabff" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/coding/vue.html.vue")),
  // path: /en/
  "v-2d0a870d": defineAsyncComponent(() => import(/* webpackChunkName: "v-2d0a870d" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/en/index.html.vue")),
  // path: /404.html
  "v-3706649a": defineAsyncComponent(() => import(/* webpackChunkName: "v-3706649a" */"/Users/bytedance/yaoshen/yaoshenwang/docs/.vuepress/.temp/pages/404.html.vue")),
}
|
package com.ahmetkilic.ealocationhelper;
import android.support.annotation.IntDef;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import static com.ahmetkilic.ealocationhelper.FunctionType.LAST_LOCATION;
import static com.ahmetkilic.ealocationhelper.FunctionType.LOCATION_UPDATES;
/**
* Created by <NAME> on 15.02.2019.
* Copyright © 2019. All rights reserved.
* For the full copyright and license information,
* please view the LICENSE file that was distributed with this source code.
*/
/**
 * Compile-time restricted "int enum": parameters annotated with
 * {@code @FunctionType} may only take one of the constants below.
 * Source-retention only — no runtime footprint.
 */
@Retention(RetentionPolicy.SOURCE)
@IntDef({LOCATION_UPDATES, LAST_LOCATION})
public @interface FunctionType {
    /** Mode constant: continuous location updates (per the constant's name — see helper usage). */
    int LOCATION_UPDATES = 1;
    /** Mode constant: single last-known-location request (per the constant's name — see helper usage). */
    int LAST_LOCATION = 2;
}
|
#!/bin/bash
#
# run_ci_tasks.sh [OPTIONS] [PATH TO SAMPLE APP]
# where OPTIONS are:
#   -a to run Android CI tasks.
#   -i to run iOS CI tasks.
# Defaults to -a -i.
#
# Builds the Cordova sample app for the selected platforms, creating the
# sample app first if an existing one is not supplied.
set -euxo pipefail

SCRIPT_DIRECTORY=$(dirname "$0")
SCRIPT_NAME=$(basename "$0")

# Platforms to build; toggled by -a / -i below.
ANDROID=false
IOS=false

# Parse arguments (-d is accepted by the optstring but currently unused).
OPTS=$(getopt haid "$@")
if [ $? != 0 ] ; then echo "Failed parsing options." >&2 ; exit 1 ; fi
eval set -- "$OPTS"

if [ "$1" == "--" ]; then
  # No flags were given: fall back to building both platforms.
  eval set -- "-a" "-i" $@
fi

while true; do
  case "${1:-}" in
    -h ) echo -ne "\n${SCRIPT_NAME} [OPTIONS] [PATH TO SAMPLE APP]\nwhere OPTIONS are:\n -a to run Android CI tasks.\n -i to run iOS CI tasks.\n Defaults to -a -i. \n"; exit 0;;
    -a ) ANDROID=true;;
    -i ) IOS=true;;
    -- ) ;;
    * ) break ;;
  esac
  shift
done

SAMPLE_APP_PATH=${1:-}

if [ "$ANDROID" = "true" ] || [ "$IOS" = "true" ]; then
  # Create the sample app for building if a path was not provided.
  if [ -z "$SAMPLE_APP_PATH" ]; then
    SAMPLE_APP_PATH=$(mktemp -d /tmp/cordova-sample-app-XXXXX)
  fi
  # If the sample app doesn't already exist, create it.
  if [[ ! -d "$SAMPLE_APP_PATH/test" ]]; then
    "${SCRIPT_DIRECTORY}/create_sample.sh" "$SAMPLE_APP_PATH"
  fi
  cd "$SAMPLE_APP_PATH/test"
fi

if [ "$ANDROID" = "true" ]; then
  # Make sure google-services.json exists; materialize it from the
  # GOOGLE_SERVICES_JSON env var when absent.
  GOOGLE_SERVICES_FILE_PATH="$(pwd)/platforms/android/app/google-services.json"
  if [[ ! -f "${GOOGLE_SERVICES_FILE_PATH}" ]]; then
    if [[ "${GOOGLE_SERVICES_JSON:-}" == "" ]]; then
      echo "ERROR: You must provide ${GOOGLE_SERVICES_FILE_PATH}."
      exit 1
    else
      # Quoted so the JSON's whitespace survives and globs don't expand.
      echo "$GOOGLE_SERVICES_JSON" > "${GOOGLE_SERVICES_FILE_PATH}"
    fi
  fi

  # Build android (the ---gradleArg triple dash is passed through verbatim;
  # presumably required by the cordova arg forwarding — confirm before changing).
  cordova build android -- ---gradleArg=-PuaInternalJava6CompileOptions=true 2>&1 | tee -a /tmp/CORDOVA-$$.out

  # Check the captured log for failures (cordova can exit 0 on some failures).
  if grep "BUILD FAILED" /tmp/CORDOVA-$$.out; then
    echo "ANDROID BUILD FAILED"
    exit 1
  else
    echo "ANDROID BUILD SUCCEEDED"
  fi
fi

if [ "$IOS" = "true" ]; then
  # Build ios
  cordova build ios --emulator 2>&1 | tee -a /tmp/CORDOVA-$$.out

  # Check the captured log for failures.
  if grep "BUILD FAILED" /tmp/CORDOVA-$$.out; then
    echo "iOS BUILD FAILED"
    exit 1
  fi
  if grep "Failed to install 'com.urbanairship.cordova'" /tmp/CORDOVA-$$.out; then
    echo "iOS BUILD FAILED"
    exit 1
  fi
  echo "iOS BUILD SUCCEEDED"
fi

echo "CI TASKS SUCCEEDED"
|
#!/bin/bash
# this script expects to be ran from root of
# quay repository.
# Removes generated/downloaded local-dev artifacts so the checkout is clean.
set -e
Files=(
  'util/ipresolver/aws-ip-ranges.json'
  'revision_head'
  'local-dev/jwtproxy_conf.yaml'
  'local-dev/mitm.cert'
  'local-dev/mitm.key'
  'local-dev/quay.kid'
  'local-dev/quay.pem'
  'local-dev/supervisord.conf'
  'local-dev/__pycache__'
  # NOTE(review): the leading "/" makes this an absolute path (/local-dev/*.sock);
  # presumably 'local-dev/*.sock' relative to the repo root was intended — confirm.
  '/local-dev/*.sock'
  'node_modules'
  'static/build'
  'supervisord.log'
  'supervisord.pid'
)
for file in "${Files[@]}"; do
  # $file is deliberately left unquoted so glob entries (e.g. *.sock) expand.
  rm -rf $file
done
|
import java.util.Arrays;
public class MaxNumbers {
    /**
     * Returns the 10 largest values of {@code arr} in descending order.
     * <p>
     * Fix: the input array is no longer mutated (previously {@code Arrays.sort}
     * reordered the caller's array in place). If the array holds fewer than 10
     * elements, the remaining slots stay 0, matching the original behavior.
     *
     * @param arr input values (not modified)
     * @return an array of length 10 holding the largest values, descending
     */
    public static int[] getMaxNumbers(int[] arr) {
        return getMaxNumbers(arr, 10);
    }

    /**
     * Generalized form: returns the {@code count} largest values, descending.
     *
     * @param arr input values (not modified)
     * @param count number of top values to return
     * @return an array of length {@code count}; trailing slots are 0 when
     *         {@code arr.length < count}
     */
    public static int[] getMaxNumbers(int[] arr, int count) {
        int[] top = new int[count];
        // Sort a copy so the caller's array is left untouched.
        int[] sorted = arr.clone();
        Arrays.sort(sorted);
        // Walk the sorted copy from the largest element downward.
        for (int i = sorted.length - 1, j = 0; j < count && i >= 0; i--, j++) {
            top[j] = sorted[i];
        }
        return top;
    }

    public static void main(String[] args) {
        int[] arr = {2, 9, 15, 11, 3, 8, 5, 10, 7, 21, 13, 4};
        int[] result = getMaxNumbers(arr);
        System.out.println(Arrays.toString(result)); // [21, 15, 13, 11, 10, 9, 8, 7, 5, 4]
    }
}
<filename>azure/store.go
package azure
import (
"bufio"
"encoding/base64"
"encoding/binary"
"fmt"
"io"
"os"
"path"
"strings"
"time"
az "github.com/Azure/azure-sdk-for-go/storage"
"github.com/araddon/gou"
"github.com/lytics/cloudstorage"
"github.com/pborman/uuid"
"golang.org/x/net/context"
"golang.org/x/sync/errgroup"
)
const (
	// StoreType = "azure" this is used to define the storage type to create
	// from cloudstorage.NewStore(config)
	StoreType = "azure"
	// Configuration Keys. These are the names of keys
	// to look for in the json map[string]string to extract for config.
	// ConfKeyAuthKey config key name of the azure api key for auth
	ConfKeyAuthKey = "azure_key"
	// Authentication Source's
	// AuthKey is for using azure api key
	AuthKey cloudstorage.AuthMethod = "azure_key"
)
var (
	// Retries number of times to retry upon failures.
	Retries = 3
	// PageSize is default page size
	// NOTE(review): NewStore hard-codes FS.PageSize to 10000 and never reads
	// this variable — confirm which default is intended.
	PageSize = 2000
	// ErrNoAzureSession no valid session
	ErrNoAzureSession = fmt.Errorf("no valid azure session was created")
	// ErrNoAccessKey error for no azure_key
	ErrNoAccessKey = fmt.Errorf("no settings.azure_key")
	// ErrNoAuth error for no findable auth
	ErrNoAuth = fmt.Errorf("No auth provided")
)
// init registers the azure driver with the cloudstorage registry so that
// cloudstorage.NewStore(config) can construct it by StoreType.
func init() {
	// Register this Driver (azure) in cloudstorage driver registry.
	cloudstorage.Register(StoreType, func(conf *cloudstorage.Config) (cloudstorage.Store, error) {
		client, sess, err := NewClient(conf)
		if err != nil {
			return nil, err
		}
		return NewStore(client, sess, conf)
	})
}
type (
	// FS Simple wrapper for accessing azure blob files, it doesn't currently implement a
	// Reader/Writer interface so not useful for stream reading of large files yet.
	FS struct {
		PageSize   int    // max results per list call
		ID         string // unique store id, used to namespace local cache paths
		baseClient *az.Client
		client     *az.BlobStorageClient
		endpoint   string
		bucket     string // azure container name
		cachepath  string // local directory for cached copies of objects
	}
	// object is a single blob plus its locally cached copy.
	object struct {
		fs         *FS
		o          *az.Blob
		cachedcopy *os.File // local file backing Read/Write
		rc         io.ReadCloser
		name       string    // aka "id" in azure
		updated    time.Time // LastModified in azure
		metadata   map[string]string
		bucket     string
		readonly   bool
		opened     bool
		cachepath  string
		//infoOnce sync.Once
		infoErr error
	}
)
// NewClient creates a new Azure blob storage client pair (base + blob service).
// (The previous comment said "AWS s3"; it was copied from another driver.)
// Uses cloudstorage.Config to read necessary config settings such as
// bucket, project (storage account), and auth.
func NewClient(conf *cloudstorage.Config) (*az.Client, *az.BlobStorageClient, error) {
	switch conf.AuthMethod {
	case AuthKey:
		accessKey := conf.Settings.String(ConfKeyAuthKey)
		if accessKey == "" {
			return nil, nil, ErrNoAccessKey
		}
		// conf.Project holds the azure storage account name here.
		basicClient, err := az.NewBasicClient(conf.Project, accessKey)
		if err != nil {
			gou.Warnf("could not get azure client %v", err)
			return nil, nil, err
		}
		client := basicClient.GetBlobService()
		return &basicClient, &client, err
	}
	return nil, nil, ErrNoAuth
}
// NewStore creates an Azure blob storage client of type cloudstorage.Store.
// (The previous comment said "AWS S3"; it was copied from another driver.)
// Requires conf.TmpDir for the local object cache directory.
func NewStore(c *az.Client, blobClient *az.BlobStorageClient, conf *cloudstorage.Config) (*FS, error) {
	if conf.TmpDir == "" {
		return nil, fmt.Errorf("unable to create cachepath. config.tmpdir=%q", conf.TmpDir)
	}
	err := os.MkdirAll(conf.TmpDir, 0775)
	if err != nil {
		return nil, fmt.Errorf("unable to create cachepath. config.tmpdir=%q err=%v", conf.TmpDir, err)
	}
	// Random per-store id; used to keep cache paths unique across stores.
	uid := uuid.NewUUID().String()
	uid = strings.Replace(uid, "-", "", -1)
	return &FS{
		baseClient: c,
		client:     blobClient,
		bucket:     conf.Bucket,
		cachepath:  conf.TmpDir,
		ID:         uid,
		PageSize:   10000, // NOTE(review): ignores the package-level PageSize default (2000) — confirm intended.
	}, nil
}
// Type of store = "azure"
func (f *FS) Type() string {
	return StoreType
}
// Client gets access to the underlying azure blob storage client.
// (The previous comment said "google cloud storage"; copied from another driver.)
func (f *FS) Client() interface{} {
	return f.client
}
// String function to provide azure://..../file path
func (f *FS) String() string {
	return fmt.Sprintf("azure://%s/", f.bucket)
}
// NewObject of Type azure. Fails with cloudstorage.ErrObjectExists if the
// object already exists remotely; the returned object is write-only local
// state until Sync/Close uploads it.
func (f *FS) NewObject(objectname string) (cloudstorage.Object, error) {
	obj, err := f.Get(context.Background(), objectname)
	if err != nil && err != cloudstorage.ErrObjectNotFound {
		return nil, err
	} else if obj != nil {
		// Found an existing remote blob: refuse to create a duplicate.
		return nil, cloudstorage.ErrObjectExists
	}
	cf := cloudstorage.CachePathObj(f.cachepath, objectname, f.ID)
	return &object{
		fs:         f,
		name:       objectname,
		metadata:   map[string]string{cloudstorage.ContentTypeKey: cloudstorage.ContentType(objectname)},
		bucket:     f.bucket,
		cachedcopy: nil,
		cachepath:  cf,
	}, nil
}
// Get fetches a single object's handle/metadata, mapping a missing blob to
// cloudstorage.ErrObjectNotFound.
func (f *FS) Get(ctx context.Context, objectpath string) (cloudstorage.Object, error) {
	obj, err := f.getObject(ctx, objectpath)
	switch {
	case err != nil:
		return nil, err
	case obj == nil:
		return nil, cloudstorage.ErrObjectNotFound
	default:
		return obj, nil
	}
}
// getObject fetches a single blob's properties and wraps it in *object.
// Returns cloudstorage.ErrObjectNotFound when the service reports a 404.
func (f *FS) getObject(ctx context.Context, objectname string) (*object, error) {
	blob := f.client.GetContainerReference(f.bucket).GetBlobReference(objectname)
	err := blob.GetProperties(nil)
	if err != nil {
		// The SDK surfaces HTTP status only in the error text here.
		if strings.Contains(err.Error(), "404") {
			return nil, cloudstorage.ErrObjectNotFound
		}
		return nil, err
	}
	o := &object{
		name: objectname,
		fs:   f,
		o:    blob,
	}
	o.o.Properties.Etag = cloudstorage.CleanETag(o.o.Properties.Etag)
	o.updated = time.Time(o.o.Properties.LastModified)
	o.cachepath = cloudstorage.CachePathObj(f.cachepath, o.name, f.ID)
	return o, nil
	//return newObjectFromHead(f, objectname, res), nil
}
// getOpenObject opens a streaming reader for the named blob, translating a
// 404 into cloudstorage.ErrObjectNotFound.
func (f *FS) getOpenObject(ctx context.Context, objectname string) (io.ReadCloser, error) {
	blobRef := f.client.GetContainerReference(f.bucket).GetBlobReference(objectname)
	rc, err := blobRef.Get(nil)
	if err != nil {
		if strings.Contains(err.Error(), "404") {
			return nil, cloudstorage.ErrObjectNotFound
		}
		return nil, err
	}
	return rc, nil
}
// convertMetaData flattens a map of optional string pointers into a plain
// string map, lower-casing every key; nil values become empty strings.
// The error return is always nil today but kept for interface stability.
func convertMetaData(m map[string]*string) (map[string]string, error) {
	out := make(map[string]string, len(m))
	for k, v := range m {
		var s string
		if v != nil {
			s = *v
		}
		out[strings.ToLower(k)] = s
	}
	return out, nil
}
// List objects from this store, one page per call; pagination state comes in
// via q.Marker and goes out via the returned ObjectsResponse.NextMarker.
func (f *FS) List(ctx context.Context, q cloudstorage.Query) (*cloudstorage.ObjectsResponse, error) {
	itemLimit := uint(f.PageSize)
	if q.PageSize > 0 {
		itemLimit = uint(q.PageSize)
	}
	params := az.ListBlobsParameters{
		Prefix:     q.Prefix,
		MaxResults: itemLimit,
		Marker:     q.Marker,
	}
	blobs, err := f.client.GetContainerReference(f.bucket).ListBlobs(params)
	if err != nil {
		return nil, err
	}
	objResp := &cloudstorage.ObjectsResponse{
		Objects: make(cloudstorage.Objects, len(blobs.Blobs)),
	}
	for i, o := range blobs.Blobs {
		objResp.Objects[i] = newObject(f, &o)
	}
	objResp.NextMarker = blobs.NextMarker
	// NOTE(review): q is passed by value, so this assignment is invisible to
	// the caller — confirm it can be removed; NextMarker above is the real output.
	q.Marker = blobs.NextMarker
	return objResp, nil
}
// Objects returns an iterator over the objects in the azure container that match the Query q.
// If q is nil, no filtering is done. Paging is delegated to List via the iterator.
func (f *FS) Objects(ctx context.Context, q cloudstorage.Query) (cloudstorage.ObjectIterator, error) {
	return cloudstorage.NewObjectPageIterator(ctx, f, q), nil
}
// Folders get folders list: lists blob name prefixes under q.Prefix using "/"
// as delimiter. Only the first page of prefixes is returned (no marker loop).
func (f *FS) Folders(ctx context.Context, q cloudstorage.Query) ([]string, error) {
	q.Delimiter = "/"
	// Think we should just put 1 here right?
	itemLimit := uint(f.PageSize)
	if q.PageSize > 0 {
		itemLimit = uint(q.PageSize)
	}
	params := az.ListBlobsParameters{
		Prefix:     q.Prefix,
		MaxResults: itemLimit,
		Delimiter:  "/",
	}
	// The for/select exists only to honor ctx cancellation; every branch returns.
	for {
		select {
		case <-ctx.Done():
			// If has been closed
			return nil, ctx.Err()
		default:
			// if q.Marker != "" {
			// 	params.Marker = &q.Marker
			// }
			blobs, err := f.client.GetContainerReference(f.bucket).ListBlobs(params)
			if err != nil {
				gou.Warnf("leaving %v", err)
				return nil, err
			}
			if len(blobs.BlobPrefixes) > 0 {
				return blobs.BlobPrefixes, nil
			}
			return nil, nil
		}
	}
}
/*
// Copy from src to destination
func (f *FS) Copy(ctx context.Context, src, des cloudstorage.Object) error {
so, ok := src.(*object)
if !ok {
return fmt.Errorf("Copy source file expected s3 but got %T", src)
}
do, ok := des.(*object)
if !ok {
return fmt.Errorf("Copy destination expected s3 but got %T", des)
}
oh := so.b.Object(so.name)
dh := do.b.Object(do.name)
_, err := dh.CopierFrom(oh).Run(ctx)
return err
}
// Move which is a Copy & Delete
func (f *FS) Move(ctx context.Context, src, des cloudstorage.Object) error {
so, ok := src.(*object)
if !ok {
return fmt.Errorf("Move source file expected s3 but got %T", src)
}
do, ok := des.(*object)
if !ok {
return fmt.Errorf("Move destination expected s3 but got %T", des)
}
oh := so.b.Object(so.name)
dh := do.b.Object(des.name)
if _, err := dh.CopierFrom(oh).Run(ctx); err != nil {
return err
}
return oh.Delete(ctx)
}
*/
// NewReader create file reader.
func (f *FS) NewReader(o string) (io.ReadCloser, error) {
	return f.NewReaderWithContext(context.Background(), o)
}
// NewReaderWithContext create new File reader with context.
// Note: ctx is accepted for interface parity but not forwarded — the azure SDK
// call here does not take a context.
func (f *FS) NewReaderWithContext(ctx context.Context, objectname string) (io.ReadCloser, error) {
	ioc, err := f.client.GetContainerReference(f.bucket).GetBlobReference(objectname).Get(nil)
	if err != nil {
		// translate the string error to typed error
		if strings.Contains(err.Error(), "404") {
			return nil, cloudstorage.ErrObjectNotFound
		}
		return nil, err
	}
	return ioc, nil
}
// NewWriter create Object Writer.
func (f *FS) NewWriter(objectName string, metadata map[string]string) (io.WriteCloser, error) {
	return f.NewWriterWithContext(context.Background(), objectName, metadata)
}
// NewWriterWithContext create writer with provided context and metadata.
// Writes are streamed to azure via a background goroutine; Close flushes.
func (f *FS) NewWriterWithContext(ctx context.Context, name string, metadata map[string]string, opts ...cloudstorage.Opts) (io.WriteCloser, error) {
	if len(opts) > 0 && opts[0].IfNotExists {
		return nil, fmt.Errorf("options IfNotExists not supported for store type")
	}
	// Spaces are replaced with "+" in blob names here — TODO confirm this is
	// the intended escaping rather than URL-encoding.
	name = strings.Replace(name, " ", "+", -1)
	o := &object{name: name, metadata: metadata}
	rwc := newAzureWriteCloser(ctx, f, o)
	return rwc, nil
}
// azureWriteCloser - manages data and go routines used to pipe data to azure, calling Close
// will flush data to azure and block until all inflight data has been written or
// we get an error.
type azureWriteCloser struct {
	pr *io.PipeReader
	pw *io.PipeWriter
	wc *bufio.Writer // buffers writes before they hit the pipe
	g  *errgroup.Group
}
// newAzureWriteCloser returns an io.WriteCloser that manages the azure connection pipe; when Close is called
// it blocks until all data is flushed to azure via a background go routine call to uploadMultiPart.
func newAzureWriteCloser(ctx context.Context, f *FS, obj *object) io.WriteCloser {
	pr, pw := io.Pipe()
	bw := bufio.NewWriter(pw)
	g, _ := errgroup.WithContext(ctx)
	g.Go(func() error {
		// Upload the file to azure.
		// Do a multipart upload; blocks until the pipe writer side is closed.
		err := f.uploadMultiPart(obj, pr)
		if err != nil {
			gou.Warnf("could not upload %v", err)
			return err
		}
		return nil
	})
	return azureWriteCloser{
		pr, pw, bw, g,
	}
}
// Write writes data to our write buffer, which writes to the backing io pipe.
// If an error is encountered while writing we may not see it here; it
// typically surfaces when Close drains the error group.
func (bc azureWriteCloser) Write(p []byte) (nn int, err error) {
	return bc.wc.Write(p)
}
// Close and block until we flush inflight data to azure.
func (bc azureWriteCloser) Close() error {
	//Flush buffered data to the backing pipe writer.
	if err := bc.wc.Flush(); err != nil {
		return err
	}
	//Close the pipe writer so that the pipe reader will return EOF,
	// doing so will cause uploadMultiPart to complete and return.
	if err := bc.pw.Close(); err != nil {
		return err
	}
	//Use the error group's Wait method to block until uploadMultiPart has completed
	if err := bc.g.Wait(); err != nil {
		return err
	}
	return nil
}
const (
	// constants related to chunked uploads
	initialChunkSize = 4 * 1024 * 1024
	// NOTE(review): maxChunkSize and maxParts are not referenced by
	// uploadMultiPart (no dynamic chunk sizing yet) — confirm intended limits.
	maxChunkSize = 100 * 1024 * 1024
	maxParts     = 50000
)
// makeBlockID encodes a block sequence number as the base64 string the azure
// block-blob API requires (little-endian 8-byte id -> std base64).
func makeBlockID(id uint64) string {
	var raw [8]byte
	binary.LittleEndian.PutUint64(raw[:], id)
	return base64.StdEncoding.EncodeToString(raw[:])
}
// uploadMultiPart uploads r to the named blob in initialChunkSize pieces via
// the block-blob PutBlock/PutBlockList API, then applies the object metadata.
//
// Fix: the previous version discarded a final short chunk whenever the reader
// returned n > 0 together with io.EOF — which the io.Reader contract permits —
// silently truncating the upload. Any bytes returned alongside EOF are now
// uploaded before the loop terminates.
func (f *FS) uploadMultiPart(o *object, r io.Reader) error {
	var buf = make([]byte, initialChunkSize)
	var blocks []az.Block
	var rawID uint64
	blob := f.client.GetContainerReference(f.bucket).GetBlobReference(o.name)
	// TODO: performance improvement to mange uploads in separate
	// go-routine than the reader
	for {
		n, err := r.Read(buf)
		if n > 0 {
			// Upload whatever was read, even if err == io.EOF.
			blockID := makeBlockID(rawID)
			if perr := blob.PutBlock(blockID, buf[:n], nil); perr != nil {
				return perr
			}
			blocks = append(blocks, az.Block{
				ID:     blockID,
				Status: az.BlockStatusLatest,
			})
			rawID++
		}
		if err == io.EOF {
			break
		}
		if err != nil {
			gou.Warnf("unknown err=%v", err)
			return err
		}
	}
	// Commit the uploaded blocks as the blob's content.
	err := blob.PutBlockList(blocks, nil)
	if err != nil {
		gou.Warnf("could not put block list %v", err)
		return err
	}
	err = blob.GetProperties(nil)
	if err != nil {
		gou.Warnf("could not load blob properties %v", err)
		return err
	}
	blob.Metadata = o.metadata
	err = blob.SetMetadata(nil)
	if err != nil {
		gou.Warnf("can't set metadata err=%v", err)
		return err
	}
	return nil
}
// Delete removes the named blob, translating a 404 into
// cloudstorage.ErrObjectNotFound.
func (f *FS) Delete(ctx context.Context, name string) error {
	blobRef := f.client.GetContainerReference(f.bucket).GetBlobReference(name)
	if err := blobRef.Delete(nil); err != nil {
		if strings.Contains(err.Error(), "404") {
			return cloudstorage.ErrObjectNotFound
		}
		return err
	}
	return nil
}
// newObject wraps an azure blob listing entry in *object, normalizing its
// ETag and deriving the local cache path.
func newObject(f *FS, o *az.Blob) *object {
	obj := &object{
		fs:        f,
		o:         o,
		name:      o.Name,
		bucket:    f.bucket,
		cachepath: cloudstorage.CachePathObj(f.cachepath, o.Name, f.ID),
	}
	obj.o.Properties.Etag = cloudstorage.CleanETag(obj.o.Properties.Etag)
	return obj
}
/*
func newObjectFromHead(f *FS, name string, o *s3.HeadObjectOutput) *object {
obj := &object{
fs: f,
name: name,
bucket: f.bucket,
cachepath: cloudstorage.CachePathObj(f.cachepath, name, f.ID),
}
if o.LastModified != nil {
obj.updated = *o.LastModified
}
// metadata?
obj.metadata, _ = convertMetaData(o.Metadata)
return obj
}
*/
// StorageSource reports the driver type ("azure").
func (o *object) StorageSource() string {
	return StoreType
}
// Name returns the blob name (the azure "id").
func (o *object) Name() string {
	return o.name
}
func (o *object) String() string {
	return o.name
}
// Updated returns the blob's LastModified time (zero until fetched).
func (o *object) Updated() time.Time {
	return o.updated
}
func (o *object) MetaData() map[string]string {
	return o.metadata
}
func (o *object) SetMetaData(meta map[string]string) {
	o.metadata = meta
}
// Delete removes the remote blob backing this object.
func (o *object) Delete() error {
	return o.fs.Delete(context.Background(), o.name)
}
// Open downloads the remote blob (if it exists) into a local cached-copy file
// and returns an *os.File over it, retrying failed downloads up to Retries
// times with backoff. ReadOnly re-opens the cache read-only; otherwise the
// handle is positioned at offset 0 for read/write.
func (o *object) Open(accesslevel cloudstorage.AccessLevel) (*os.File, error) {
	if o.opened {
		return nil, fmt.Errorf("the store object is already opened. %s", o.name)
	}
	var errs []error = make([]error, 0)
	var cachedcopy *os.File = nil
	var err error
	var readonly = accesslevel == cloudstorage.ReadOnly
	err = os.MkdirAll(path.Dir(o.cachepath), 0775)
	if err != nil {
		return nil, fmt.Errorf("error occurred creating cachedcopy dir. cachepath=%s object=%s err=%v", o.cachepath, o.name, err)
	}
	err = cloudstorage.EnsureDir(o.cachepath)
	if err != nil {
		return nil, fmt.Errorf("error occurred creating cachedcopy's dir. cachepath=%s err=%v", o.cachepath, err)
	}
	cachedcopy, err = os.Create(o.cachepath)
	if err != nil {
		return nil, fmt.Errorf("error occurred creating file. local=%s err=%v", o.cachepath, err)
	}
	for try := 0; try < Retries; try++ {
		if o.rc == nil {
			rc, err := o.fs.getOpenObject(context.Background(), o.name)
			if err != nil {
				if err == cloudstorage.ErrObjectNotFound {
					// New, this is fine
				} else {
					// lets re-try
					errs = append(errs, fmt.Errorf("error getting object err=%v", err))
					cloudstorage.Backoff(try)
					continue
				}
			}
			if rc != nil {
				o.rc = rc
			}
		}
		if o.rc != nil {
			// we have a preexisting object, so lets download it..
			// NOTE: this defer runs at function exit, not per loop iteration.
			defer o.rc.Close()
			if _, err := cachedcopy.Seek(0, os.SEEK_SET); err != nil {
				return nil, fmt.Errorf("error seeking to start of cachedcopy err=%v", err) //don't retry on local fs errors
			}
			_, err = io.Copy(cachedcopy, o.rc)
			if err != nil {
				errs = append(errs, fmt.Errorf("error coping bytes. err=%v", err))
				//recreate the cachedcopy file incase it has incomplete data
				if err := os.Remove(o.cachepath); err != nil {
					return nil, fmt.Errorf("error resetting the cachedcopy err=%v", err) //don't retry on local fs errors
				}
				if cachedcopy, err = os.Create(o.cachepath); err != nil {
					return nil, fmt.Errorf("error creating a new cachedcopy file. local=%s err=%v", o.cachepath, err)
				}
				cloudstorage.Backoff(try)
				continue
			}
		}
		if readonly {
			// Reopen the fully-downloaded cache read-only.
			cachedcopy.Close()
			cachedcopy, err = os.Open(o.cachepath)
			if err != nil {
				name := "unknown"
				if cachedcopy != nil {
					name = cachedcopy.Name()
				}
				return nil, fmt.Errorf("error opening file. local=%s object=%s tfile=%v err=%v", o.cachepath, o.name, name, err)
			}
		} else {
			if _, err := cachedcopy.Seek(0, os.SEEK_SET); err != nil {
				return nil, fmt.Errorf("error seeking to start of cachedcopy err=%v", err) //don't retry on local fs errors
			}
		}
		o.cachedcopy = cachedcopy
		o.readonly = readonly
		o.opened = true
		return o.cachedcopy, nil
	}
	return nil, fmt.Errorf("fetch error retry cnt reached: obj=%s tfile=%v errs:[%v]", o.name, o.cachepath, errs)
}
// File exposes the local cached-copy handle (nil until Open/Write).
func (o *object) File() *os.File {
	return o.cachedcopy
}
// Read reads from the local cached copy (requires a prior Open).
func (o *object) Read(p []byte) (n int, err error) {
	return o.cachedcopy.Read(p)
}
// Write lazily opens the object read-write on first use, then appends to the
// local cached copy; data reaches azure on Sync/Close.
func (o *object) Write(p []byte) (n int, err error) {
	if o.cachedcopy == nil {
		_, err := o.Open(cloudstorage.ReadWrite)
		if err != nil {
			return 0, err
		}
	}
	return o.cachedcopy.Write(p)
}
// Sync uploads the local cached copy back to azure. The object must be opened
// read-write; the cache file is re-opened by path so partially-flushed
// handles don't interfere.
func (o *object) Sync() error {
	if !o.opened {
		return fmt.Errorf("object isn't opened object:%s", o.name)
	}
	if o.readonly {
		return fmt.Errorf("trying to Sync a readonly object:%s", o.name)
	}
	cachedcopy, err := os.OpenFile(o.cachepath, os.O_RDWR, 0664)
	if err != nil {
		return fmt.Errorf("couldn't open localfile for sync'ing. local=%s err=%v", o.cachepath, err)
	}
	defer cachedcopy.Close()
	if _, err := cachedcopy.Seek(0, os.SEEK_SET); err != nil {
		return fmt.Errorf("error seeking to start of cachedcopy err=%v", err) //don't retry on local filesystem errors
	}
	// Upload the file
	if err = o.fs.uploadMultiPart(o, cachedcopy); err != nil {
		gou.Warnf("could not upload %v", err)
		return fmt.Errorf("failed to upload file, %v", err)
	}
	return nil
}
// Close flushes and closes the local cached copy, uploads it via Sync when
// the object was writable, then removes the cache file. Safe to call on an
// unopened object (no-op).
func (o *object) Close() error {
	if !o.opened {
		return nil
	}
	// Cleanup runs regardless of upload outcome.
	defer func() {
		os.Remove(o.cachepath)
		o.cachedcopy = nil
		o.opened = false
	}()
	if !o.readonly {
		err := o.cachedcopy.Sync()
		if err != nil {
			return err
		}
	}
	err := o.cachedcopy.Close()
	if err != nil {
		// Tolerate double-close; anything else is a real failure.
		if !strings.Contains(err.Error(), os.ErrClosed.Error()) {
			return err
		}
	}
	// o.opened is still true here — the deferred reset runs at return.
	// Sync reopens the cache file by path, so closing the handle first is fine.
	if o.opened && !o.readonly {
		err := o.Sync()
		if err != nil {
			gou.Errorf("error on sync %v", err)
			return err
		}
	}
	return nil
}
// Release closes the cached-copy handle (if any) and deletes the local cache
// file; the remote blob is untouched.
func (o *object) Release() error {
	if o.cachedcopy == nil {
		os.Remove(o.cachepath)
		return nil
	}
	gou.Debugf("release %q vs %q", o.cachedcopy.Name(), o.cachepath)
	o.cachedcopy.Close()
	return os.Remove(o.cachepath)
}
|
<filename>MedasIoT/medas-iot-rbac/src/main/java/com/foxconn/iot/dto/DeviceTypeDto.java
package com.foxconn.iot.dto;
import java.util.Date;
import javax.validation.constraints.NotBlank;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.annotation.JsonFormat.Shape;
/**
 * Data-transfer object describing a device type (model) in the
 * MedasIoT RBAC service. Validation messages are user-facing and
 * intentionally localized (zh-CN).
 */
public class DeviceTypeDto {
    /** Serialized as a JSON string so large longs survive JavaScript clients. */
    @JsonFormat(shape = Shape.STRING)
    private long id;
    // Validation message: "model must not be blank"
    @NotBlank(message = "型号不能为空")
    private String model;
    // Validation message: "name must not be blank"
    @NotBlank(message = "名称不能为空")
    private String name;
    /** Optional free-form description. */
    private String details;
    /** Creation timestamp, rendered as "yyyy-MM-dd HH:mm:ss" in GMT+8. */
    @JsonFormat(shape = Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    private Date createOn;

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getModel() {
        return model;
    }

    public void setModel(String model) {
        this.model = model;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getDetails() {
        return details;
    }

    public void setDetails(String details) {
        this.details = details;
    }

    public Date getCreateOn() {
        return createOn;
    }

    public void setCreateOn(Date createOn) {
        this.createOn = createOn;
    }
}
|
#!/usr/bin/env bash
# Run Liquibase with the project classpath, debug logging, and the
# azor-shop defaults file; any extra CLI arguments are passed through.
./liquibase --classpath=scripts --logLevel debug --defaultsFile=azor-shop.properties "$@"
# Copyright (C) 2011, 2012, 2015 Internet Systems Consortium, Inc. ("ISC")
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# $Id: clean.sh,v 1.2 2011/03/02 04:20:33 marka Exp $
rm -f test.*
|
import Foundation
/// Wraps a string and produces its reverse.
class ReverseString {
    /// The string supplied at construction time; never mutated.
    let originalString: String

    init(originalString: String) {
        self.originalString = originalString
    }

    /// Returns the characters of `originalString` in reverse order.
    ///
    /// Uses the standard library's `reversed()` view, which builds the
    /// result in O(n); the previous implementation prepended one
    /// character at a time, copying the accumulator each step (O(n²)).
    func reversedString() -> String {
        return String(originalString.reversed())
    }
}
// Demo usage (playground-style): the result is not captured, only shown.
let reverseString = ReverseString(originalString: "Hello World!")
reverseString.reversedString() // !dlroW olleH
package com.lgq.servlet;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
/**
 * Minimal front-controller servlet: both GET and POST funnel into
 * {@link #doDispatch}, which subclasses/later versions are expected
 * to extend with real routing.
 *
 * @author lgq
 * @date 2019/10/25
 */
public class FrameworkServlet extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        try {
            doDispatch(req, resp);
        } catch (Exception e) {
            // NOTE(review): exceptions are only printed to stderr, so the
            // client still receives a 200 with an empty body — confirm
            // this swallow-and-continue behavior is intended.
            e.printStackTrace();
        }
    }

    @Override
    protected void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
        try {
            doDispatch(req, resp);
        } catch (Exception e) {
            // Same swallow behavior as doGet — see note above in doGet.
            e.printStackTrace();
        }
    }

    /**
     * Central dispatch hook for all HTTP verbs. Currently only sets the
     * response content type; no routing is performed yet.
     */
    public void doDispatch(HttpServletRequest request, HttpServletResponse response) throws Exception {
        response.setContentType("text/plain;charset=utf-8");
    }
}
|
import csv
def parse_table(data):
    """Parse a pipe-delimited table string into a list of row dicts.

    The first line supplies the column headers; every following
    non-blank line becomes one dict mapping header -> cell value.

    Args:
        data: Table text such as ``"|a|b|\\n|1|2|"``.

    Returns:
        List of dicts, one per data row, keyed by the header names.
    """
    table = []
    lines = data.split("\n")
    # The leading/trailing "|" produce empty edge fields; [1:-1] strips them.
    headers = lines[0].split("|")[1:-1]
    for row in lines[1:]:
        # Fix: skip blank lines (e.g. from a trailing newline) — the
        # original appended a spurious empty dict for each of them.
        if not row.strip():
            continue
        cells = row.split("|")[1:-1]
        table.append({key: value for key, value in zip(headers, cells)})
    return table
# Print the result
# NOTE(review): `data` is not defined anywhere in this file — running it
# as-is raises NameError. Presumably `data` is supplied by an enclosing
# context (notebook/snippet); confirm before shipping.
for row in parse_table(data):
    print(row)
#!/bin/sh
# WARNING: REQUIRES /bin/sh
#
# Install puppet-agent with shell... how hard can it be?
#
# 0.0.1a - Here Be Dragons
#
# Set up colours
# Only emit ANSI colors when attached to a terminal; pipes/logs get
# plain text. Existing RED/GREEN/... env vars are honored as overrides.
if tty -s;then
  RED=${RED:-$(tput setaf 1)}
  GREEN=${GREEN:-$(tput setaf 2)}
  YLW=${YLW:-$(tput setaf 3)}
  BLUE=${BLUE:-$(tput setaf 4)}
  RESET=${RESET:-$(tput sgr0)}
else
  RED=
  GREEN=
  YLW=
  BLUE=
  RESET=
fi
# Timestamp
# Current time as HH:MM:SS with numeric timezone, used as the log prefix.
now () {
  date +'%H:%M:%S %z'
}
# Logging functions instead of echo
# log prints "<timestamp> <message>"; info/warn/critical add a colored level tag.
log () {
  echo "${BLUE}`now`${RESET} ${1}"
}
info () {
  log "${GREEN}INFO${RESET}: ${1}"
}
warn () {
  log "${YLW}WARN${RESET}: ${1}"
}
critical () {
  log "${RED}CRIT${RESET}: ${1}"
}
# Fallback for Ubuntu 14.10 (utopic): no dedicated package exists, so the
# trusty (14.04) package is substituted; sets $deb_codename accordingly.
utopic () {
  warn "There is no utopic release yet, see https://tickets.puppetlabs.com/browse/CPR-92 for progress";
  warn "We'll use the trusty package for now";
  deb_codename="trusty";
}
# Check whether a command exists - returns 0 if it does, non-zero if it does not.
exists() {
  # Fix: quote "$1" so the lookup is safe for names that would otherwise
  # be subject to word splitting or globbing. The status of `command -v`
  # is returned directly instead of being re-mapped through if/else.
  command -v "$1" >/dev/null 2>&1
}
# Helper bug-reporting text
# Prints the detected environment ($version/$platform/$platform_version/
# $machine/$os globals) so users can file an actionable issue.
report_bug() {
  critical "Please file a bug report at https://github.com/petems/puppet-install-shell/"
  critical ""
  critical "Version: $version"
  critical "Platform: $platform"
  critical "Platform Version: $platform_version"
  critical "Machine: $machine"
  critical "OS: $os"
  critical ""
  critical "Please detail your operating system type, version and any other relevant details"
}
# Get command line arguments
# -v version, -f downloaded filename, -d download directory, -h help.
while getopts v:f:d:h opt
do
  case "$opt" in
    v) version="$OPTARG";;
    f) cmdline_filename="$OPTARG";;
    d) cmdline_dl_dir="$OPTARG";;
    h) echo >&2 \
"install_puppet_agent.sh - A shell script to install Puppet Agent > 5.0.0, assuming no dependencies
usage:
-v version version to install, defaults to \$latest_version
-f filename filename for downloaded file, defaults to original name
-d download_dir filename for downloaded file, defaults to /tmp/(random-number)"
      exit 0;;
    \?) # unknown flag
      echo >&2 \
"unknown option
usage: $0 [-v version] [-f filename | -d download_dir]"
      exit 1;;
  esac
done
# Drop the parsed options; record hardware and OS for platform detection.
shift `expr $OPTIND - 1`
machine=`uname -m`
os=`uname -s`
# Retrieve Platform and Platform Version
# Probes well-known release files (and falls back to uname) to set the
# globals $platform, $platform_version and sometimes $machine. Order
# matters: lsb-release is most specific and is checked first.
if test -f "/etc/lsb-release" && grep -q DISTRIB_ID /etc/lsb-release; then
  platform=`grep DISTRIB_ID /etc/lsb-release | cut -d "=" -f 2 | tr '[A-Z]' '[a-z]'`
  platform_version=`grep DISTRIB_RELEASE /etc/lsb-release | cut -d "=" -f 2`
elif test -f "/etc/debian_version"; then
  platform="debian"
  platform_version=`cat /etc/debian_version`
elif test -f "/etc/redhat-release"; then
  platform=`sed 's/^\(.\+\) release.*/\1/' /etc/redhat-release | tr '[A-Z]' '[a-z]'`
  platform_version=`sed 's/^.\+ release \([.0-9]\+\).*/\1/' /etc/redhat-release`
  #If /etc/redhat-release exists, we act like RHEL by default. Except for fedora
  if test "$platform" = "fedora"; then
    platform="fedora"
  else
    platform="el"
  fi
elif test -f "/etc/system-release"; then
  platform=`sed 's/^\(.\+\) release.\+/\1/' /etc/system-release | tr '[A-Z]' '[a-z]'`
  platform_version=`sed 's/^.\+ release \([.0-9]\+\).*/\1/' /etc/system-release | tr '[A-Z]' '[a-z]'`
  # amazon is built off of fedora, so act like RHEL
  if test "$platform" = "amazon linux ami"; then
    platform="el"
    platform_version="6.0"
  fi
# Apple OS X
elif test -f "/usr/bin/sw_vers"; then
  platform="mac_os_x"
  # Matching the tab-space with sed is error-prone
  platform_version=`sw_vers | awk '/^ProductVersion:/ { print $2 }'`
  # 10.7/10.8/10.9 all map to the 10.7 build.
  major_version=`echo $platform_version | cut -d. -f1,2`
  case $major_version in
    "10.6") platform_version="10.6" ;;
    "10.7"|"10.8"|"10.9") platform_version="10.7" ;;
    *) echo "No builds for platform: $major_version"
       report_bug
       exit 1
       ;;
  esac
  # x86_64 Apple hardware often runs 32-bit kernels (see OHAI-63)
  x86_64=`sysctl -n hw.optional.x86_64`
  if test $x86_64 -eq 1; then
    machine="x86_64"
  fi
elif test -f "/etc/release"; then
  platform="solaris2"
  machine=`/usr/bin/uname -p`
  platform_version=`/usr/bin/uname -r`
elif test -f "/etc/SuSE-release"; then
  # SLES has "Enterprise" in its release file; openSUSE does not.
  if grep -q 'Enterprise' /etc/SuSE-release;
  then
    platform="sles"
    platform_version=`awk '/^VERSION/ {V = $3}; /^PATCHLEVEL/ {P = $3}; END {print V "." P}' /etc/SuSE-release`
  else
    platform="suse"
    platform_version=`awk '/^VERSION =/ { print $3 }' /etc/SuSE-release`
  fi
elif test -f "/etc/arch-release"; then
  platform="archlinux"
  platform_version=`/usr/bin/uname -r`
elif test "x$os" = "xFreeBSD"; then
  platform="freebsd"
  platform_version=`uname -r | sed 's/-.*//'`
elif test "x$os" = "xAIX"; then
  platform="aix"
  platform_version=`uname -v`
  machine="ppc"
fi
# Nothing matched: bail out with diagnostics rather than guessing.
if test "x$platform" = "x"; then
  critical "Unable to determine platform version!"
  report_bug
  exit 1
fi
# Default to "latest" when -v was not given; otherwise reject major
# versions this script cannot install (3/4/5 have their own installers).
if test "x$version" = "x"; then
  version="latest";
  info "Version parameter not defined, assuming latest";
else
  case "$version" in
    3*)
      critical "Cannot install Puppet 3 with this script. Puppet 3 is EOL, and you should upgrade. you need to use install_puppet_agent.sh"
      report_bug
      exit 1
      ;;
    4*)
      critical "Cannot install Puppet 4 with this script, you need to use install_puppet_agent.sh"
      report_bug
      exit 1
      ;;
    5*)
      critical "Cannot install Puppet 5 with this script, you need to use install_puppet_5_agent.sh"
      report_bug
      exit 1
      ;;
    *)
      info "Version parameter defined: $version";
      ;;
  esac
fi
# Mangle $platform_version to pull the correct build
# for various platforms
# Most platforms publish packages per major version only; Debian 5-7 and
# Fedora 23 additionally map onto older package streams.
major_version=`echo $platform_version | cut -d. -f1`
case $platform in
  "el")
    platform_version=$major_version
    ;;
  "fedora")
    case $major_version in
      "23") platform_version="22";;
      *) platform_version=$major_version;;
    esac
    ;;
  "debian")
    case $major_version in
      "5") platform_version="6";;
      "6") platform_version="6";;
      "7") platform_version="6";;
    esac
    ;;
  "freebsd")
    platform_version=$major_version
    ;;
  "sles")
    platform_version=$major_version
    ;;
  "suse")
    platform_version=$major_version
    ;;
esac
if test "x$platform_version" = "x"; then
  critical "Unable to determine platform version!"
  report_bug
  exit 1
fi
if test "x$platform" = "xsolaris2"; then
  # hack up the path on Solaris to find wget
  PATH=/usr/sfw/bin:$PATH
  export PATH
fi
# Fatal-error helpers: both print diagnostics via report_bug and abort.
checksum_mismatch() {
  critical "Package checksum mismatch!"
  report_bug
  exit 1
}
unable_to_retrieve_package() {
  critical "Unable to retrieve a valid package!"
  report_bug
  exit 1
}
# Emit a small random integer read from /dev/urandom (2 bytes, decimal).
random_hexdump () {
  hexdump -n 2 -e '/2 "%u"' /dev/urandom
}
# Pick the temp root: honor $TMPDIR, fall back to /tmp.
if test "x$TMPDIR" = "x"; then
  tmp="/tmp"
else
  tmp=$TMPDIR
fi
# Random function since not all shells have $RANDOM
if exists hexdump; then
  # Fix: the original read `random_number=random_hexdump`, which assigns
  # the literal string "random_hexdump" instead of running the function.
  # Command substitution is required to get an actual random value.
  random_number=`random_hexdump`
else
  random_number="`date +%N`"
fi
tmp_dir="$tmp/install.sh.$$.$random_number"
(umask 077 && mkdir $tmp_dir) || exit 1
tmp_stderr="$tmp/stderr.$$.$random_number"
capture_tmp_stderr() {
  # spool up tmp_stderr from all the commands we called
  if test -f $tmp_stderr; then
    output=`cat ${tmp_stderr}`
    stderr_results="${stderr_results}\nSTDERR from $1:\n\n$output\n"
  fi
}
# NOTE(review): "$1" in the trap string expands when the trap is
# *defined*, i.e. to the script's first positional argument, not to any
# signal-related value — confirm the intended exit status.
trap "rm -f $tmp_stderr; rm -rf $tmp_dir; exit $1" 1 2 15
# do_wget URL FILENAME
# Download URL to FILENAME with wget. Aborts the script on HTTP 404;
# returns 1 on other failures so the caller can try the next tool.
do_wget() {
  info "Trying wget..."
  wget -O "$2" "$1" 2>$tmp_stderr
  rc=$?
  # check for 404
  # Fix: the original used `2>&1 >/dev/null`, which duplicates stderr to
  # the *terminal's* stdout and only silences stdout. Redirections apply
  # left-to-right, so `>/dev/null 2>&1` is needed to silence both.
  grep "ERROR 404" $tmp_stderr >/dev/null 2>&1
  if test $? -eq 0; then
    critical "ERROR 404"
    unable_to_retrieve_package
  fi
  # check for bad return status or empty output
  if test $rc -ne 0 || test ! -s "$2"; then
    capture_tmp_stderr "wget"
    return 1
  fi
  return 0
}
# do_curl URL FILENAME
# Download URL to FILENAME with curl (headers spooled to $tmp_stderr).
# Aborts on HTTP 404; returns 1 on other failures so the caller can
# fall through to the next download tool.
do_curl() {
  info "Trying curl..."
  curl -1 -sL -D $tmp_stderr "$1" > "$2"
  rc=$?
  # check for 404
  # Fix: redirections apply left-to-right; `2>&1 >/dev/null` leaked
  # stderr to the terminal. `>/dev/null 2>&1` silences both streams.
  grep "404 Not Found" $tmp_stderr >/dev/null 2>&1
  if test $? -eq 0; then
    critical "ERROR 404"
    unable_to_retrieve_package
  fi
  # check for bad return status or empty output
  if test $rc -ne 0 || test ! -s "$2"; then
    capture_tmp_stderr "curl"
    return 1
  fi
  return 0
}
# do_fetch URL FILENAME
# Download with BSD fetch(1). No 404 detection here (unlike wget/curl);
# any non-zero status simply returns 1 so the caller tries the next tool.
do_fetch() {
  info "Trying fetch..."
  fetch -o "$2" "$1" 2>$tmp_stderr
  # check for bad return status
  test $? -ne 0 && return 1
  return 0
}
# do_perl URL FILENAME
# Last-resort download via perl's LWP::Simple. Aborts on HTTP 404;
# returns 1 on other failures (including LWP::Simple being absent).
do_perl() {
  info "Trying perl..."
  perl -e 'use LWP::Simple; getprint($ARGV[0]);' "$1" > "$2" 2>$tmp_stderr
  rc=$?
  # check for 404
  # Fix: redirections apply left-to-right; `2>&1 >/dev/null` leaked
  # stderr to the terminal. `>/dev/null 2>&1` silences both streams.
  grep "404 Not Found" $tmp_stderr >/dev/null 2>&1
  if test $? -eq 0; then
    critical "ERROR 404"
    unable_to_retrieve_package
  fi
  # check for bad return status or empty output
  if test $rc -ne 0 || test ! -s "$2"; then
    capture_tmp_stderr "perl"
    return 1
  fi
  return 0
}
# do_checksum FILE SHA256 MD5
# Verify FILE against $2 (sha256) when a sha256 tool exists, otherwise
# against $3 (md5). Aborts via checksum_mismatch on failure; only warns
# when no checksum tool is available at all.
do_checksum() {
  if exists sha256sum; then
    checksum=`sha256sum $1 | awk '{ print $1 }'`
    if test "x$checksum" != "x$2"; then
      checksum_mismatch
    else
      info "Checksum compare with sha256sum succeeded."
    fi
  elif exists shasum; then
    checksum=`shasum -a 256 $1 | awk '{ print $1 }'`
    if test "x$checksum" != "x$2"; then
      checksum_mismatch
    else
      info "Checksum compare with shasum succeeded."
    fi
  elif exists md5sum; then
    # md5 fallbacks compare against the third argument, not the second.
    checksum=`md5sum $1 | awk '{ print $1 }'`
    if test "x$checksum" != "x$3"; then
      checksum_mismatch
    else
      info "Checksum compare with md5sum succeeded."
    fi
  elif exists md5; then
    # BSD md5 prints "MD5 (file) = <sum>", hence field 4.
    checksum=`md5 $1 | awk '{ print $4 }'`
    if test "x$checksum" != "x$3"; then
      checksum_mismatch
    else
      info "Checksum compare with md5 succeeded."
    fi
  else
    warn "Could not find a valid checksum program, pre-install shasum, md5sum or md5 in your O/S image to get valdation..."
  fi
}
# do_download URL FILENAME
# Try each available download tool in order until one succeeds; abort
# the script if every tool fails.
do_download() {
  info "Downloading $1"
  info "  to file $2"
  # we try all of these until we get success.
  # perl, in particular may be present but LWP::Simple may not be installed
  # Fix: quote "$1"/"$2" so URLs or paths containing spaces survive
  # word splitting on the way into the helpers.
  if exists wget; then
    do_wget "$1" "$2" && return 0
  fi
  if exists curl; then
    do_curl "$1" "$2" && return 0
  fi
  if exists fetch; then
    do_fetch "$1" "$2" && return 0
  fi
  if exists perl; then
    do_perl "$1" "$2" && return 0
  fi
  unable_to_retrieve_package
}
# install_file TYPE FILENAME
# TYPE is "rpm", "deb", "solaris", or "sh"
# Install the downloaded repo package ($2) and then puppet-agent itself
# via the platform's package manager. $version and $deb_codename are
# globals set earlier.
install_file() {
  case "$1" in
    "rpm")
      info "installing puppetlabs yum repo with rpm..."
      if test -f "/etc/yum.repos.d/puppetlabs-pc1.repo"; then
        info "existing puppetlabs yum repo found, moving to old location"
        mv /etc/yum.repos.d/puppetlabs-pc1.repo /etc/yum.repos.d/puppetlabs-pc1.repo.old
      fi
      rpm -Uvh --oldpackage --replacepkgs "$2"
      if test "$version" = 'latest'; then
        yum install -y puppet-agent
      else
        yum install -y "puppet-agent-${version}"
      fi
      ;;
    "deb")
      info "installing puppetlabs apt repo with dpkg..."
      dpkg -i "$2"
      apt-get update -y
      if test "$version" = 'latest'; then
        apt-get install -y puppet-agent
      else
        # Pinned versions need the codename suffix when one is known.
        if test "x$deb_codename" != "x"; then
          apt-get install -y "puppet-agent=${version}-1${deb_codename}"
        else
          apt-get install -y "puppet-agent=${version}"
        fi
      fi
      ;;
    "solaris")
      critical "Solaris not supported yet"
      ;;
    "dmg" )
      critical "Puppet-Agent Not Supported Yet: $1"
      ;;
    *)
      critical "Unknown filetype: $1"
      report_bug
      exit 1
      ;;
  esac
  # NOTE(review): $? here reflects only the *last* command of the chosen
  # case branch (e.g. apt-get install), not earlier steps like dpkg -i —
  # confirm that partial failures should be allowed through.
  if test $? -ne 0; then
    critical "Installation failed"
    report_bug
    exit 1
  fi
}
#Platforms that do not need downloads are in *, the rest get their own entry.
# Main driver: pick the repo package filename/URL for the detected
# platform, download it, and hand it to install_file.
# NOTE(review): the "$(unknown)" fragments in the download URLs look like
# placeholders/extraction artifacts, not runnable shell — at runtime they
# would execute a command named "unknown". The intended value is almost
# certainly the $filename variable; confirm against the upstream script.
case $platform in
  "archlinux")
    critical "Not got Puppet-agent not supported on Arch yet"
    ;;
  "freebsd")
    critical "Not got Puppet-agent not supported on freebsd yet"
    ;;
  *)
    info "Downloading Puppet $version for ${platform}..."
    case $platform in
      "el")
        info "Red hat like platform! Lets get you an RPM..."
        filetype="rpm"
        filename="puppet6-release-el-${platform_version}.noarch.rpm"
        download_url="http://yum.puppetlabs.com/puppet6/$(unknown)"
        ;;
      "fedora")
        info "Fedora platform! Lets get the RPM..."
        filetype="rpm"
        filename="puppet6-release-fedora-${platform_version}.noarch.rpm"
        download_url="http://yum.puppetlabs.com/puppet6/$(unknown)"
        ;;
      "debian")
        info "Debian platform! Lets get you a DEB..."
        case $major_version in
          "5") deb_codename="lenny";;
          "6") deb_codename="squeeze";;
          "7") deb_codename="wheezy";;
          "8") deb_codename="jessie";;
          "9") deb_codename="stretch";;
        esac
        filetype="deb"
        filename="puppet6-release-${deb_codename}.deb"
        download_url="http://apt.puppetlabs.com/$(unknown)"
        ;;
      "ubuntu")
        info "Ubuntu platform! Lets get you a DEB..."
        case $platform_version in
          "12.04") deb_codename="precise";;
          "12.10") deb_codename="quantal";;
          "13.04") deb_codename="raring";;
          "13.10") deb_codename="saucy";;
          "14.04") deb_codename="trusty";;
          "15.04") deb_codename="vivid";;
          "15.10") deb_codename="wily";;
          "16.04") deb_codename="xenial";;
          "16.10") deb_codename="yakkety";;
          "17.04") deb_codename="zesty";;
          "18.04") deb_codename="bionic";;
          "14.10") utopic;;
        esac
        filetype="deb"
        filename="puppet6-release-${deb_codename}.deb"
        download_url="http://apt.puppetlabs.com/$(unknown)"
        ;;
      "mac_os_x")
        critical "Script doesn't Puppet-agent not supported on OSX yet"
        ;;
      *)
        critical "Sorry $platform is not supported yet!"
        report_bug
        exit 1
        ;;
    esac
    # Resolve the final download path from -f/-d overrides.
    if test "x$cmdline_filename" != "x"; then
      download_filename=$cmdline_filename
    else
      download_filename=$filename
    fi
    if test "x$cmdline_dl_dir" != "x"; then
      download_filename="$cmdline_dl_dir/$download_filename"
    else
      download_filename="$tmp_dir/$download_filename"
    fi
    do_download "$download_url" "$download_filename"
    install_file $filetype "$download_filename"
    ;;
esac
#Cleanup
if test "x$tmp_dir" != "x"; then
  rm -r "$tmp_dir"
fi
#!/bin/bash
# Bootstrap a Redis cluster: find every running Docker container carrying
# the network alias "redis-cluster" and feed their IP:6379 endpoints to
# redis-trib.rb, creating masters with one replica each.
chmod +x /usr/local/bin/redis-trib.rb
VALID_REDIS_CONTAINERS=()
for i in `docker ps -q`; do
  # Inspect the first two network aliases; either slot may hold the match.
  FIRST_ALIAS=`docker inspect --format '{{range .NetworkSettings.Networks}}{{(index (index .Aliases 0))}}{{end}}' "$i"`
  SECOND_ALIAS=`docker inspect --format '{{range .NetworkSettings.Networks}}{{(index (index .Aliases 1))}}{{end}}' "$i"`
  if [[ $FIRST_ALIAS == "redis-cluster" || $SECOND_ALIAS == "redis-cluster" ]]; then
    IP=`docker inspect --format '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' "$i"`
    VALID_REDIS_CONTAINERS+=($IP":6379");
  fi
done
echo "Valid Redis containers:" ${VALID_REDIS_CONTAINERS[*]}
redis-trib.rb create --replicas 1 ${VALID_REDIS_CONTAINERS[*]}
|
#!/bin/bash
# simple-duplicator: find duplicate files under the directory given as $1
# by xxh64 checksum and move the extra copies into a "duplicates" folder.
declare -a xxh64sums
declare -a duplicates
declare -a originals
mainindex=0
workspace="$1"
filecount_raw=0
duplicates_folder=""$workspace"/duplicates_found_by_simple_duplicator"
#bu fonksiyon bir dizinin icindeki her bir elemani alip bakiyor.
#eger dosya ise sumini alip kaydediyor
#eger klasorse; bos ise omit ediyor, dolu ise klasorun ici icin fonksiyonu
#yeniden cagiriyor (recursive)
#notlar: 2>/dev/null ile xxh64sum'dan gelen error mesajlarini gizliyoruz.
# xxh64sum bize hata verip bos string dondururse onu kaydetmiyoruz.
# bashte fonksiyonlar icin global variable kullanmak zorundayiz. o
# sebepten, xxh64sums arrayinin indexini tutan integer (mainindex)
# fonksiyonun disinda. cunku fonksiyona parametre olarak array vere-
# miyoruz. (kazmaca ama kullanisli)
#####
function add_files {
for entry in "$1"/*
do
if [ ! -d "$entry" ]
then
xxh64return=$(xxh64sum "$entry" 2>/dev/null)
if [[ ! -z $xxh64return ]]; then
xxh64sums+=("$xxh64return")
fi
let "mainindex++"
elif [ -d "$entry" ]; then
if [ ! -z "$(ls -A "$entry")" ]; then
add_files "$entry"
fi
fi
echo -ne "\e[31m\e[1mStep 1: \e[0m $mainindex / $filecount_raw \r"
done
}
# (translated) If the parameter passed to the script is not a folder, or
# is empty, print an error message.
# TODO: in the future the error message should also explain how to use
# the program.
if [[ ! -d "$workspace" ]]; then
  echo -e "\e[31m\e[1mERROR: \e[0mThere is not such a folder! Please check again."
  exit
fi
# (translated) Part one: take the checksums of all files.
tput civis
echo "Starting Job..."
filecount_raw=$(find "$workspace" -type f | wc -l)
add_files "$workspace"
echo -ne "\r\e[31m\e[1mStep 1: \e[0mDone! \n"
# (translated) Interim prep: sort the checksums, create the duplicates folder.
filecount=${#xxh64sums[@]}
mkdir -p "$duplicates_folder"
counter=0
aim=2
echo -ne "\e[31m\e[1mStep 2: \e[0m0% \e[32m░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░\e[0m"
IFS=$'\n'
sortedsums=($(sort <<< "${xxh64sums[*]}"))
echo -ne "\r\e[31m\e[1mStep 2: \e[0m0% \e[32m█░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░\e[0m"
unset IFS
#####
# (translated) Part two:
# From the sorted checksums, the first occurrence of each file that
# exists more than once goes into the "originals" array; the second and
# later copies go into the "duplicates" array.
# There is also a progress bar, just for show.
#####
while [ $counter -lt $(($filecount-1)) ]
do
  rawsum_current=${sortedsums[$counter]}
  rawsum_next=${sortedsums[$(($counter + 1))]}
  # The first 16 characters are the xxh64 digest; equal digests on
  # adjacent sorted entries mean duplicate content.
  if [ ${rawsum_current:0:16} = ${rawsum_next:0:16} ]; then
    if [[ $counter -gt 0 ]]; then
      rawsum_prev=${sortedsums[$(($counter - 1))]}
      if [[ ! ${rawsum_current:0:16} = ${rawsum_prev:0:16} ]]; then
        originals+=("$rawsum_current")
      fi
    fi
    duplicates+=("$rawsum_next")
  fi
  progress=$(echo "scale=2; ($counter/$filecount*100)/1" | bc -l )
  progress=${progress%%.*}
  let "counter++"
  if [ $progress -gt $aim ]; then
    echo -ne "\r\e[31m\e[1mStep 2: \e[0m $progress% "
    for (( i = 0; i < ( $progress / 2 ); i++ )); do
      echo -ne "\e[32m█"
    done
    aim=$(($aim+1))
  fi
done
echo -ne "\r\e[31m\e[1mStep 2:\e[0m Done! \nCompleting the job...\n"
#####
# (translated) Final actions.
# If there are no duplicates at all, delete the duplicates folder we
# created so we don't litter. (If we created the folder on an earlier
# run and there are no duplicates this run, no panic: the folder is not
# deleted when it is non-empty.)
# If there are duplicates, collect them in the duplicates directory of
# the main folder the function was run on. A log file listing the
# original copy and the duplicates is also placed in the main folder.
#####
if [ ${#duplicates[@]} = 0 ]
then
  if [ -z "$(ls -A "$duplicates_folder")" ]
  then
    rm -r "$duplicates_folder"
  fi
  echo "No duplicates found!"
else
  echo "Generated by simple-duplicator. ${#duplicates[@]} duplicate files found. $(date "+%Y-%m-%d-%H:%M:%S")" >> "$workspace/simple-duplicator_log"
  echo "Files moved to $duplicates_folder:" >> "$workspace/simple-duplicator_log"
  echo "========================================" >> "$workspace/simple-duplicator_log"
  for (( i = 0; i < ${#duplicates[@]}; i++ )); do
    # Offset 18 skips the 16-char digest plus two separator characters,
    # leaving the file path portion of the xxh64sum output line.
    filename=${duplicates[$i]:18}
    mv "$filename" "$duplicates_folder"
    echo "$filename" >> "$workspace/simple-duplicator_log"
  done
  echo "${#duplicates[@]} duplicate files found and moved to folder $duplicates_folder"
fi
echo "Done!"
tput cnorm
# (translated) To do in later versions:
# 0 - for now only the names of the original files are kept; they should
#     be added to the log file. (this was supposed to land in this version)
# 1 - run the program only for specific mime types (very important: without
#     this the program may even sweep up system files — think about it, it
#     also collects all files whose content is empty, for example)
# 2 - while filling the folder, only a single copy of same-named files is
#     kept. should all of them be kept? (why)
# 3 - add the ability to first find the files, then ask what to do
#     (delete? collect into a folder? leave alone?)
# 4 - for now it keeps the first file found and collects the rest; an
#     approach for choosing which copy to keep should be developed.
<gh_stars>10-100
/**
* @fileoverview Closure Builder - Closure compiler config
*
* @license Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @author <EMAIL> (<NAME>)
*/
const closureBuilder = require('../../closure-builder');
const pathTools = require('../../tools/path.js');
const glob = closureBuilder.globSupport();
/**
 * Closure compiler test build configurations, keyed by scenario name
 * (e.g. `config.duplicate`, `config.general1`).
 *
 * Used purely as a namespace object — it is never called or
 * constructed — so the original `@constructor`/`@struct`/`@final`
 * annotations were misleading and have been dropped. The binding is
 * never reassigned, hence `const` instead of `let`.
 */
const config = function() {};
// Scenario: sources that include a duplicate provide — exercises the
// compiler's duplicate-input handling.
config.duplicate = {
  name: 'closure_test_duplicate',
  srcs: [
    'test_files/closure_test_1.js',
    'test_files/closure_test_2.js',
    'test_files/closure_test_duplicate.js',
  ],
  deps: glob([
    'test_files/closure_test_*.js',
  ]),
  // NOTE(review): shares its output path with config.general1 below —
  // confirm the collision is intentional.
  out: pathTools.getTempTestPath('closure-test-1'),
};
// Scenario: single-source compile.
config.general1 = {
  name: 'closure_test_1',
  srcs: [
    'test_files/closure_test_1.js',
  ],
  out: pathTools.getTempTestPath('closure-test-1'),
};
// Scenario: two-source compile.
config.general2 = {
  name: 'closure_test_2',
  srcs: [
    'test_files/closure_test_1.js',
    'test_files/closure_test_2.js',
  ],
  out: pathTools.getTempTestPath('closure-test-2'),
};
// Scenario: sources selected via glob pattern.
config.group = {
  name: 'closure_test_group',
  srcs: glob([
    'test_files/closure_test_*.js',
  ]),
  out: pathTools.getTempTestPath('closure-test-group'),
};
// Scenario: goog.module-style requires across globbed sources.
config.module = {
  name: 'closure_test_require_module',
  srcs: glob([
    'test_files/closure_test_*.js',
  ]),
  out: pathTools.getTempTestPath('closure-module'),
};
// Scenario: compile with an externs file.
config.extern = {
  name: 'closure_test_extern',
  srcs: glob([
    'test_files/closure_test_*.js',
  ]),
  externs: [
    'test_files/externs.js',
  ],
  out: pathTools.getTempTestPath('closure-test-extern'),
};
// Scenario: a source that must produce a compile error (testEnv keeps
// the failure observable to the test harness).
config.error = {
  name: 'closure_test_error',
  srcs: glob([
    'test_files/special/closure_error.js',
  ]),
  out: pathTools.getTempTestPath('closure-error'),
  testEnv: true,
};
// Scenario: a source that produces a compiler warning.
// NOTE(review): this and warningDisabled intentionally share the same
// `name` and `out` — they differ only in the `warn` flag.
config.warning = {
  name: 'closure_test_warning',
  srcs: glob([
    'test_files/special/closure_warning.js',
  ]),
  out: pathTools.getTempTestPath('closure-warning'),
};
// Same warning scenario, but with warnings suppressed via warn: false.
config.warningDisabled = {
  name: 'closure_test_warning',
  srcs: glob([
    'test_files/special/closure_warning.js',
  ]),
  warn: false,
  out: pathTools.getTempTestPath('closure-warning'),
};
// Scenario: exported symbols survive compilation.
config.export = {
  name: 'closure_test_export',
  srcs: glob([
    'test_files/special/closure_export.js',
  ]),
  out: pathTools.getTempTestPath('closure-export'),
};
module.exports = config;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.