text
stringlengths 1
1.05M
|
|---|
<reponame>dinhtuyen/PRML01
import numpy as np
class HiddenMarkovModel(object):
    """
    Base class of Hidden Markov models.

    Subclasses are expected to provide two methods used here:
    ``likelihood(seq)`` returning the (N, n_hidden) emission likelihoods of
    the observed sequence, and ``maximize(seq, p_hidden, p_transition)``
    performing the M-step parameter update.
    """

    def __init__(self, initial_proba, transition_proba):
        """
        construct hidden markov model

        Parameters
        ----------
        initial_proba : (n_hidden,) np.ndarray
            initial probability of each hidden state
        transition_proba : (n_hidden, n_hidden) np.ndarray
            transition probability matrix
            (i, j) component denotes the transition probability from i-th to j-th hidden state

        Attribute
        ---------
        n_hidden : int
            number of hidden state
        """
        self.n_hidden = initial_proba.size
        self.initial_proba = initial_proba
        self.transition_proba = transition_proba

    def fit(self, seq, iter_max=100):
        """
        perform EM algorithm to estimate parameter of emission model and hidden variables

        Parameters
        ----------
        seq : (N, ndim) np.ndarray
            observed sequence
        iter_max : int
            maximum number of EM steps

        Returns
        -------
        posterior : (N, n_hidden) np.ndarray
            posterior distribution of each latent variable
        """
        params = np.hstack(
            (self.initial_proba.ravel(), self.transition_proba.ravel()))
        for i in range(iter_max):
            p_hidden, p_transition = self.expect(seq)
            self.maximize(seq, p_hidden, p_transition)
            params_new = np.hstack(
                (self.initial_proba.ravel(), self.transition_proba.ravel()))
            # stop as soon as the parameters stop changing
            if np.allclose(params, params_new):
                break
            else:
                params = params_new
        return self.forward_backward(seq)

    def expect(self, seq):
        """
        estimate posterior distributions of hidden states and
        transition probability between adjacent latent variables

        Parameters
        ----------
        seq : (N, ndim) np.ndarray
            observed sequence

        Returns
        -------
        p_hidden : (N, n_hidden) np.ndarray
            posterior distribution of each hidden variable
        p_transition : (N - 1, n_hidden, n_hidden) np.ndarray
            posterior transition probability between adjacent latent variables
        """
        likelihood = self.likelihood(seq)
        # scaled forward pass: each alpha is normalized and the normalizer
        # (incremental data likelihood) is stored for the backward pass
        f = self.initial_proba * likelihood[0]
        constant = [f.sum()]
        forward = [f / f.sum()]
        for like in likelihood[1:]:
            f = forward[-1] @ self.transition_proba * like
            constant.append(f.sum())
            forward.append(f / f.sum())
        forward = np.asarray(forward)
        constant = np.asarray(constant)
        # scaled backward pass, reusing the forward-pass normalizers
        backward = [np.ones(self.n_hidden)]
        for like, c in zip(likelihood[-1:0:-1], constant[-1:0:-1]):
            backward.insert(0, self.transition_proba @ (like * backward[0]) / c)
        backward = np.asarray(backward)
        # with scaled alpha/beta the element-wise product is the posterior
        p_hidden = forward * backward
        # NOTE(review): p_transition is left unnormalized here; presumably
        # maximize() in the subclass normalizes it — confirm.
        p_transition = self.transition_proba * likelihood[1:, None, :] * backward[1:, None, :] * forward[:-1, :, None]
        return p_hidden, p_transition

    def forward_backward(self, seq):
        """
        estimate posterior distributions of hidden states

        Parameters
        ----------
        seq : (N, ndim) np.ndarray
            observed sequence

        Returns
        -------
        posterior : (N, n_hidden) np.ndarray
            posterior distribution of hidden states
        """
        # The forward-backward pass is identical to the one in expect();
        # reuse it and discard the transition posteriors.
        posterior, _ = self.expect(seq)
        return posterior

    def filtering(self, seq):
        """
        bayesian filtering

        Parameters
        ----------
        seq : (N, ndim) np.ndarray
            observed sequence

        Returns
        -------
        posterior : (N, n_hidden) np.ndarray
            posterior distributions of each latent variables
        """
        likelihood = self.likelihood(seq)
        p = self.initial_proba * likelihood[0]
        posterior = [p / np.sum(p)]
        for like in likelihood[1:]:
            # propagate the previous filtered estimate, weight by the
            # emission likelihood, then renormalize
            p = posterior[-1] @ self.transition_proba * like
            posterior.append(p / np.sum(p))
        posterior = np.asarray(posterior)
        return posterior

    def viterbi(self, seq):
        """
        viterbi algorithm (a.k.a. max-sum algorithm)

        Parameters
        ----------
        seq : (N, ndim) np.ndarray
            observed sequence

        Returns
        -------
        seq_hid : (N,) np.ndarray
            the most probable sequence of hidden variables
        """
        # work with negative log-likelihoods so max-product becomes min-sum
        nll = -np.log(self.likelihood(seq))
        cost_total = nll[0]
        from_list = []
        for i in range(1, len(seq)):
            cost_temp = cost_total[:, None] - np.log(self.transition_proba) + nll[i]
            cost_total = np.min(cost_temp, axis=0)
            index = np.argmin(cost_temp, axis=0)
            from_list.append(index)
        # backtrack from the cheapest final state
        seq_hid = [np.argmin(cost_total)]
        for source in from_list[::-1]:
            seq_hid.insert(0, source[seq_hid[0]])
        # return an ndarray as documented (the original returned a list)
        return np.asarray(seq_hid)
|
def search(num, arr):
    """Return the index of the first occurrence of num in arr, or None."""
    for i, value in enumerate(arr):
        if value == num:
            return i
    # make the "not found" result explicit instead of falling off the end
    return None


arr = [1, 6, 5, 4, 3]
result = search(5, arr)
# Index 0 is falsy, so compare against None explicitly: the original
# `if result:` would print "Number not found" for a match at index 0.
if result is not None:
    print("Number found at index %d" % (result))
else:
    print("Number not found")
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import os
import shutil
import tempfile
from dlrn.config import ConfigOptions
from dlrn import db
from dlrn import shell
from dlrn.tests import base
from dlrn import utils
from six.moves import configparser
def mocked_session(url):
    """Return a DB session for *url* preloaded with the sample commit fixture."""
    s = db.getSession(url)
    utils.loadYAML(s, './dlrn/tests/samples/commits_1.yaml')
    return s
class TestProcessBuildResult(base.TestCase):
    """Tests for dlrn.shell.process_build_result()."""

    def setUp(self):
        """Build a temp-dir config, a sample commit, a fixture DB session
        and a package list for each test."""
        super(TestProcessBuildResult, self).setUp()
        config = configparser.RawConfigParser()
        config.read("projects.ini")
        # Point data/scripts dirs at throwaway temp dirs (removed in tearDown).
        config.set('DEFAULT', 'datadir', tempfile.mkdtemp())
        config.set('DEFAULT', 'scriptsdir', tempfile.mkdtemp())
        config.set('DEFAULT', 'baseurl', "file://%s" % config.get('DEFAULT',
                                                                  'datadir'))
        self.config = ConfigOptions(config)
        self.commit = db.Commit(dt_commit=123, project_name='foo', type="rpm",
                                commit_hash='1c67b1ab8c6fe273d4e175a14f0df5'
                                            'd3cbbd0edf',
                                repo_dir='/home/dlrn/data/foo',
                                distro_hash='c31d1b18eb5ab5aed6721fc4fad06c9'
                                            'bd242490f',
                                dt_distro=123,
                                distgit_dir='/home/dlrn/data/foo_distro',
                                commit_branch='master', dt_build=1441245153)
        self.db_fd, filepath = tempfile.mkstemp()
        self.session = mocked_session("sqlite:///%s" % filepath)
        self.packages = [{'upstream': 'https://github.com/openstack/foo',
                          'name': 'foo', 'maintainers': '<EMAIL>'},
                         {'upstream': 'https://github.com/openstack/test',
                          'name': 'test', 'maintainers': '<EMAIL>'}]

    def tearDown(self):
        """Remove the temp dirs and close the temp DB file descriptor."""
        super(TestProcessBuildResult, self).tearDown()
        shutil.rmtree(self.config.datadir)
        shutil.rmtree(self.config.scriptsdir)
        os.close(self.db_fd)

    # NOTE: mock.patch decorators are applied bottom-up, so the mock
    # arguments arrive in reverse order of the decorator list.
    @mock.patch('os.rename')
    @mock.patch('os.symlink')
    @mock.patch('dlrn.shell.export_commit_yaml')
    @mock.patch('dlrn.shell.genreports')
    @mock.patch('dlrn.shell.sync_repo')
    def test_successful_build(self, rs_mock, gr_mock, ec_mock, sl_mock,
                              rn_mock):
        """A successful build returns 0 and triggers report/sync/symlink."""
        built_rpms = ['foo-1.2.3.rpm']
        status = [self.commit, built_rpms, 'OK', None]
        output = shell.process_build_result(status, self.packages,
                                            self.session, [])
        self.assertEqual(output, 0)
        self.assertEqual(gr_mock.call_count, 1)
        self.assertEqual(rs_mock.call_count, 1)
        self.assertEqual(ec_mock.call_count, 1)
        self.assertEqual(sl_mock.call_count, 1)
        self.assertEqual(rn_mock.call_count, 1)

    @mock.patch('dlrn.shell.export_commit_yaml')
    @mock.patch('dlrn.shell.sendnotifymail')
    @mock.patch('dlrn.shell.genreports')
    @mock.patch('dlrn.shell.sync_repo')
    def test_failed_build(self, rs_mock, gr_mock, sm_mock, ec_mock):
        """A failed build returns 1 and sends a notification mail."""
        error_msg = 'Unit test error'
        status = [self.commit, '', '', error_msg]
        output = shell.process_build_result(status, self.packages,
                                            self.session, [])
        self.assertEqual(output, 1)
        self.assertEqual(gr_mock.call_count, 1)
        self.assertEqual(rs_mock.call_count, 1)
        self.assertEqual(sm_mock.call_count, 1)
        self.assertEqual(ec_mock.call_count, 1)

    @mock.patch('dlrn.shell.submit_review')
    @mock.patch('dlrn.shell.sendnotifymail')
    @mock.patch('dlrn.shell.genreports')
    @mock.patch('dlrn.shell.sync_repo')
    def test_failed_build_gerrit(self, rs_mock, gr_mock, sm_mock, sr_mock):
        """With gerrit enabled, a failed build additionally submits a review."""
        self.config.gerrit = 'yes'
        error_msg = 'Unit test error'
        status = [self.commit, '', '', error_msg]
        output = shell.process_build_result(status, self.packages,
                                            self.session, [])
        self.assertEqual(output, 1)
        self.assertEqual(gr_mock.call_count, 1)
        self.assertEqual(rs_mock.call_count, 1)
        self.assertEqual(sm_mock.call_count, 1)
        self.assertEqual(sr_mock.call_count, 1)
# The class-level patch injects sh_mock (the createrepo_c command) into
# every test method; create=True because sh builds commands dynamically.
@mock.patch('sh.createrepo_c', create=True)
class TestPostBuild(base.TestCase):
    """Tests for dlrn.shell.post_build()."""

    def setUp(self):
        """Build a temp-dir config, a sample commit, a fixture DB session
        and a package list for each test."""
        super(TestPostBuild, self).setUp()
        config = configparser.RawConfigParser()
        config.read("projects.ini")
        # Point data/scripts dirs at throwaway temp dirs (removed in tearDown).
        config.set('DEFAULT', 'datadir', tempfile.mkdtemp())
        config.set('DEFAULT', 'scriptsdir', tempfile.mkdtemp())
        config.set('DEFAULT', 'baseurl', "file://%s" % config.get('DEFAULT',
                                                                  'datadir'))
        self.config = ConfigOptions(config)
        self.commit = db.Commit(dt_commit=123, project_name='foo', type="rpm",
                                commit_hash='1c67b1ab8c6fe273d4e175a14f0df5'
                                            'd3cbbd0edf',
                                repo_dir='/home/dlrn/data/foo',
                                distro_hash='c31d1b18eb5ab5aed6721fc4fad06c9'
                                            'bd242490f',
                                dt_distro=123,
                                distgit_dir='/home/dlrn/data/foo_distro',
                                commit_branch='master', dt_build=1441245153)
        self.db_fd, filepath = tempfile.mkstemp()
        self.session = mocked_session("sqlite:///%s" % filepath)
        self.packages = [{'upstream': 'https://github.com/openstack/foo',
                          'name': 'foo', 'maintainers': '<EMAIL>',
                          'master-distgit':
                          'https://github.com/rdo-packages/foo-distgit.git'},
                         {'upstream': 'https://github.com/openstack/test',
                          'name': 'test', 'maintainers': '<EMAIL>',
                          'master-distgit':
                          'https://github.com/rdo-packages/test-distgit.git'}]

    def tearDown(self):
        """Remove the temp dirs and close the temp DB file descriptor."""
        super(TestPostBuild, self).tearDown()
        shutil.rmtree(self.config.datadir)
        shutil.rmtree(self.config.scriptsdir)
        os.close(self.db_fd)

    def test_successful_build(self, sh_mock):
        """post_build writes versions.csv, runs createrepo, and counts the
        package ('test') that was not built."""
        built_rpms = ['repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.noarch.rpm',
                      'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.src.rpm']
        status = [self.commit, built_rpms, 'OK', None]
        # Create directory for the CSV file
        yumdir = os.path.join(self.config.datadir, "repos",
                              self.commit.getshardedcommitdir())
        os.makedirs(yumdir)
        output = shell.post_build(status, self.packages,
                                  self.session)
        self.assertTrue(os.path.exists(
            os.path.join(self.config.datadir,
                         "repos",
                         self.commit.getshardedcommitdir(),
                         "versions.csv")))
        expected = [mock.call(yumdir)]
        self.assertEqual(sh_mock.call_args_list, expected)
        self.assertEqual(output, 1)  # 1 non-successfully built package

    def test_successful_build_no_failures(self, sh_mock):
        """With only the built package listed, post_build reports 0 failures."""
        packages = [{'upstream': 'https://github.com/openstack/foo',
                     'name': 'foo', 'maintainers': '<EMAIL>',
                     'master-distgit':
                     'https://github.com/rdo-packages/foo-distgit.git'}]
        built_rpms = ['repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.noarch.rpm',
                      'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.src.rpm']
        status = [self.commit, built_rpms, 'OK', None]
        # Create directory for the CSV file
        yumdir = os.path.join(self.config.datadir, "repos",
                              self.commit.getshardedcommitdir())
        os.makedirs(yumdir)
        output = shell.post_build(status, packages, self.session)
        expected = [mock.call(yumdir)]
        self.assertEqual(sh_mock.call_args_list, expected)
        self.assertEqual(output, 0)

    def test_successful_build_no_failures_nosrcrpm(self, sh_mock):
        """When include_srpm_in_repo is off, createrepo excludes *.src.rpm."""
        packages = [{'upstream': 'https://github.com/openstack/foo',
                     'name': 'foo', 'maintainers': '<EMAIL>',
                     'master-distgit':
                     'https://github.com/rdo-packages/foo-distgit.git'}]
        built_rpms = ['repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.noarch.rpm',
                      'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.src.rpm']
        self.config.include_srpm_in_repo = False
        status = [self.commit, built_rpms, 'OK', None]
        # Create directory for the CSV file
        yumdir = os.path.join(self.config.datadir, "repos",
                              self.commit.getshardedcommitdir())
        os.makedirs(yumdir)
        output = shell.post_build(status, packages, self.session)
        expected = [mock.call('-x', '*.src.rpm', yumdir)]
        self.assertEqual(sh_mock.call_args_list, expected)
        self.assertEqual(output, 0)

    def test_successful_build_no_repo(self, sh_mock):
        """With build_repo=False, createrepo must not be invoked at all."""
        packages = [{'upstream': 'https://github.com/openstack/foo',
                     'name': 'foo', 'maintainers': '<EMAIL>',
                     'master-distgit':
                     'https://github.com/rdo-packages/foo-distgit.git'}]
        built_rpms = ['repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.noarch.rpm',
                      'repos/1c/67/1c67b1ab8c6fe273d4e175a14f0df5d3cbbd0edf'
                      '_c31d1b18/foo-1.2.3.el7.centos.src.rpm']
        status = [self.commit, built_rpms, 'OK', None]
        # Create directory for the CSV file
        yumdir = os.path.join(self.config.datadir, "repos",
                              self.commit.getshardedcommitdir())
        os.makedirs(yumdir)
        output = shell.post_build(status, packages, self.session,
                                  build_repo=False)
        # There will be no createrepo call
        expected = []
        self.assertEqual(sh_mock.call_args_list, expected)
        self.assertEqual(output, 0)
|
<gh_stars>0
import React from 'react';
import PropTypes from 'prop-types';
import Includes from './includes';
import Header from './header';
import Footer from './footer';
import './layout.css';
const Layout = ({ siteTitle, parks, children }) => (
<>
<Includes />
<Header siteTitle={siteTitle} parks={parks} />
<main>{children}</main>
<Footer />
</>
);
Layout.propTypes = {
siteTitle: PropTypes.string.isRequired,
parks: PropTypes.array.isRequired,
children: PropTypes.node.isRequired,
};
export default Layout;
|
def standardize(data):
    """Return the z-score of *data*: (data - mean) / standard deviation."""
    return (data - data.mean()) / data.std()
|
# const.py
# Constants shared by the sensor modules below.
DOMAIN = "energy_usage"  # integration domain identifier
DOMAIN_DATA = "domain_data"  # key under which the integration stores its data
ATTRIBUTION = "Data provided by the smart meter"  # data-source attribution text
# DailyUsageSensor.py
from .const import DOMAIN, DOMAIN_DATA, ATTRIBUTION
class FplDailyUsageSensor:
    """Sensor exposing daily energy usage derived from raw meter data."""

    def __init__(self, data):
        # Raw usage data this sensor will process.
        self.data = data

    def get_daily_usage(self):
        """Compute and return the daily usage (not yet implemented)."""
        # Process the data to calculate daily usage
        # Return the daily usage data
        pass
# AverageDailySensor.py
from .const import DOMAIN, DOMAIN_DATA, ATTRIBUTION
class FplAverageDailySensor:
    """Sensor exposing average daily energy usage derived from raw meter data."""

    def __init__(self, data):
        # Raw usage data this sensor will process.
        self.data = data

    def get_average_daily_usage(self):
        """Compute and return the average daily usage (not yet implemented)."""
        # Process the data to calculate average daily usage
        # Return the average daily usage data
        pass
|
#!/bin/bash
# Container entrypoint: clean build artifacts, install PHP dependencies,
# prepare the database, then exec the requested command.
set -e

# Force a clean composer install.
rm -rf vendor

# Clear the Symfony cache/log dirs and hand var/ to the web-server user.
if [ -d "var/" ]; then
    rm -rf var/cache/*
    rm -rf var/logs/*
    chown -R www-data:www-data var/
fi

composer install --prefer-dist --no-interaction --optimize-autoloader -v

# Create the database if missing, then apply pending migrations.
php bin/console doctrine:database:create --no-interaction --if-not-exists
php bin/console doctrine:migrations:migrate --no-interaction

# If the first argument starts with "-", treat it as flags for php and
# prepend the interpreter.
if [ "${1#-}" != "$1" ]; then
    set -- php "$@"
fi

# Replace this shell with the requested command (PID 1 signal handling).
exec "$@"
|
<reponame>carlosmmarques/android-isel
package pt.isel.pdm.li51n.g4.tmdbisel.data.models.schema;
import android.util.Log;
import java.lang.annotation.Annotation;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import pt.isel.pdm.li51n.g4.tmdbisel.data.models.core.Attributes;
import pt.isel.pdm.li51n.g4.tmdbisel.helpers.Logger;
import pt.isel.pdm.li51n.g4.tmdbisel.helpers.ReflectionHelper;
/**
* Class to be used on creation of the DB Schema
*
* @param <T>
*/
/**
 * Class to be used on creation of the DB Schema
 *
 * Builds SQLite DDL (CREATE/DROP TABLE) and column listings for the model
 * class {@code T} by reflecting over its fields and their {@code Attributes}
 * annotations. Fields whose type is not String/Integer/Double are flattened
 * recursively into the parent table.
 *
 * @param <T>
 */
public class TableSchema<T> {
    /**
     * TAG to be used in logs
     */
    private static final String TAG = TableSchema.class.getSimpleName();
    // Model class this schema describes.
    private Class<T> type;
    // Table name, derived from the model class' simple name.
    private String table;
    // Cached column names, filled lazily by getColumns().
    private String[] mColumns;
    // Column flagged with sortOrderDefault() in its annotation; populated as
    // a side effect of getColumnsDDL().
    private String mSortOrderDefault;

    /**
     * Default constructor
     * @param type of the class to be used
     */
    public TableSchema(Class<T> type) {
        this.type = type;
        table = type.getSimpleName();
    }

    /**
     * Private method to get columns name
     * @param type to be used
     * @param prefix prefix to be used on each column name
     * @return Array of Strings with all columns name
     */
    private static String[] getColumns(Class<?> type, String prefix) {
        Log.d(TAG, "getColumns");
        Iterable<Field> fields = ReflectionHelper.getFieldsUpTo(type, Object.class);
        List<String> columns = new ArrayList<>();
        for (Field field : fields) {
            Annotation annotation = field.getAnnotation(Attributes.class);
            Attributes attr = (Attributes) annotation;
            // Fields explicitly marked notMapped are excluded from the schema.
            if (attr != null && attr.notMapped()) {
                continue;
            }
            // Primary-key columns get a "_" marker prepended.
            String columnName = prefix + (attr != null && attr.primaryKey() ? "_" : "") + field.getName();
            // SQLite/Android convention: the row id column is "_id".
            if (columnName.equals("id")) {
                columnName = "_id";
            }
            if (!String.class.isAssignableFrom(field.getType()) &&
                    !Integer.class.isAssignableFrom(field.getType()) &&
                    !Double.class.isAssignableFrom(field.getType())) {
                // Composite field: recurse and flatten its columns, prefixed
                // with the field name.
                // NOTE(review): the prefix is prepended without a separator
                // ("fieldname" + columnName) — confirm this matches the
                // naming produced by getColumnsDDL().
                Collections.addAll(columns, getColumns(field.getType(), field.getName()));
            } else {
                columns.add(columnName);
            }
        }
        return columns.toArray(new String[columns.size()]);
    }

    /**
     * Data Description Language (DDL) to be used on drop DB table
     * @return statement to drop DB table
     */
    public String getDropTableDDL() {
        Log.d(TAG, "getDropTableDDL => " + table);
        return "DROP TABLE IF EXISTS " + table;
    }

    /**
     * Data Description Language (DDL) to be used on create DB table
     * @return statement to create DB table
     */
    public String getCreateTableDDL() {
        Log.d(TAG, "getCreateTableDDL => " + table);
        StringBuilder queryBuilder = new StringBuilder();
        try {
            Iterable<Field> fields = ReflectionHelper.getFieldsUpTo(type, Object.class);
            queryBuilder.append("CREATE TABLE ").append(table).append(" (");
            queryBuilder.append(getColumnsDDL(fields));
            queryBuilder.append(");");
        } catch (Exception e) {
            // Reflection failures are logged; an empty/partial DDL string is
            // returned in that case.
            Logger.e(e);
        }
        return queryBuilder.toString();
    }

    /**
     * Get columns name
     * @return Array of Strings with all columns name
     */
    public String[] getColumns() {
        Log.d(TAG, "getColumns => " + table);
        // Lazily computed and cached; subsequent calls reuse the array.
        if (mColumns != null) {
            return mColumns;
        }
        mColumns = getColumns(type, "");
        return mColumns;
    }

    /**
     * Data Description Language (DDL) to be used on create DB
     * @param fields fields
     * @return statement of DB columns
     */
    public String getColumnsDDL(Iterable<Field> fields) {
        Log.d(TAG, "getColumnsDDL => " + table);
        return getColumnsDDL(fields, "");
    }

    /**
     * Data Description Language (DDL) to be used on create DB
     * @param fields fields
     * @param prefix to be used on fields
     * @return string with columns information on DDL
     */
    private String getColumnsDDL(Iterable<Field> fields, String prefix) {
        Log.d(TAG, "getColumnsDDL => " + table);
        boolean firstField = true;
        StringBuilder queryBuilder = new StringBuilder();
        // Table-level clauses (PRIMARY KEY / FOREIGN KEY) collected here and
        // appended after all column definitions.
        StringBuilder lastLines = new StringBuilder();
        for (Field field : fields) {
            Annotation annotation = field.getAnnotation(Attributes.class);
            Attributes attr = (Attributes) annotation;
            if (attr != null && attr.notMapped()) {
                // Unmapped fields may still carry a composite-key declaration
                // for the table.
                if (!attr.compositeKey().equals("")) {
                    lastLines.append(String.format(
                            ", PRIMARY KEY(%s)",
                            attr.compositeKey()
                    ));
                }
                continue;
            }
            if (!firstField) {
                String currQuery = queryBuilder.toString();
                // trim trailing ", " sequences:
                // NOTE(review): when the builder already ends with ", " this
                // removes it WITHOUT appending a new separator — verify this
                // produces valid DDL after a flattened nested field.
                if (currQuery.lastIndexOf(", ") == currQuery.length()-2 )
                    queryBuilder.setLength(currQuery.length()-2);
                else
                    queryBuilder.append(", ");
            }
            String columnName = (attr != null && attr.primaryKey() ? "_" : "") + field.getName();
            if (columnName.equals("id")) {
                columnName = "_id";
            }
            if (String.class.isAssignableFrom(field.getType())) {
                queryBuilder.append(prefix);
                queryBuilder.append(columnName).append(" ");
                queryBuilder.append("TEXT");
            } else if (Integer.class.isAssignableFrom(field.getType())) {
                queryBuilder.append(prefix);
                queryBuilder.append(columnName).append(" ");
                queryBuilder.append("INTEGER");
            } else if (Double.class.isAssignableFrom(field.getType())) {
                queryBuilder.append(prefix);
                queryBuilder.append(columnName).append(" ");
                queryBuilder.append("REAL");
            } else {
                // Composite field: flatten its columns into this table, using
                // the field name as the prefix, then move on.
                Iterable<Field> fieldFields = ReflectionHelper.getFieldsUpTo(field.getType(), Object.class);
                queryBuilder.append(getColumnsDDL(fieldFields, field.getName()));
                continue;
            }
            if (annotation != null && attr != null) {
                if (attr.primaryKey()) {
                    queryBuilder.append(" PRIMARY KEY");
                    if (attr.autoIncrement()) {
                        queryBuilder.append(" AUTOINCREMENT");
                    }
                } else if (attr.unique()) {
                    queryBuilder.append(" UNIQUE");
                } else if (!attr.foreignTable().equals("")) {
                    // Foreign keys always reference the target table's "_id".
                    lastLines.append(String.format(
                            ", FOREIGN KEY(%s) REFERENCES %s(%s)",
                            columnName,
                            attr.foreignTable(),
                            "_id"
                    ));
                }
                if (attr.sortOrderDefault()) {
                    mSortOrderDefault = columnName;
                }
            }
            firstField = false;
        }
        queryBuilder.append(lastLines.toString());
        return queryBuilder.toString();
    }

    /**
     * Get table name
     * @return table name
     */
    public String getTableName() {
        Log.d(TAG, "getTableName => " + table);
        return table;
    }

    /**
     * Get default sort order
     * @return sort order
     */
    public String getSortOrderDefault() {
        Log.d(TAG, "getSortOrderDefault => " + table);
        // May be null until getColumnsDDL() has run and found a column with
        // sortOrderDefault() set.
        return mSortOrderDefault;
    }

    /**
     * Get columns name separated with comma
     * @return columns name separated with comma
     */
    public String getColumnsString() {
        Log.d(TAG, "getColumnsString => " + table);
        StringBuilder sb = new StringBuilder();
        for(String column : getColumns()){
            String aux = table+"."+column+", ";
            sb.append(aux);
        }
        String res = sb.toString();
        // Strip the trailing ", " separator.
        // NOTE(review): assumes at least one column; substring would throw
        // for a model with no mapped fields.
        res = res.substring(0, res.lastIndexOf(", "));
        return res;
    }
}
|
# Informer experiments on the WTH (weather) dataset.
# Each line trains one configuration; --itr 3 repeats each run three times.

### M
# Multivariate forecasting (--features M) across increasing horizons.
python -u main_informer.py --model informer --data WTH --features M --attn prob --d_layers 2 --e_layers 3 --itr 3 --label_len 168 --pred_len 24 --seq_len 168 --des 'Exp'
python -u main_informer.py --model informer --data WTH --features M --attn prob --d_layers 1 --e_layers 2 --itr 3 --label_len 96 --pred_len 48 --seq_len 96 --des 'Exp'
python -u main_informer.py --model informer --data WTH --features M --attn prob --d_layers 2 --e_layers 3 --itr 3 --label_len 168 --pred_len 168 --seq_len 336 --des 'Exp'
python -u main_informer.py --model informer --data WTH --features M --attn prob --d_layers 2 --e_layers 3 --itr 3 --label_len 168 --pred_len 336 --seq_len 720 --des 'Exp'
python -u main_informer.py --model informer --data WTH --features M --attn prob --d_layers 2 --e_layers 3 --itr 3 --label_len 336 --pred_len 720 --seq_len 720 --des 'Exp'

### S
# Univariate forecasting (--features S) across increasing horizons.
python -u main_informer.py --model informer --data WTH --features S --attn prob --d_layers 1 --e_layers 2 --itr 3 --label_len 168 --pred_len 24 --seq_len 720 --des 'Exp'
python -u main_informer.py --model informer --data WTH --features S --attn prob --d_layers 1 --e_layers 2 --itr 3 --label_len 168 --pred_len 48 --seq_len 720 --des 'Exp'
python -u main_informer.py --model informer --data WTH --features S --attn prob --d_layers 2 --e_layers 3 --itr 3 --label_len 168 --pred_len 168 --seq_len 168 --des 'Exp'
python -u main_informer.py --model informer --data WTH --features S --attn prob --d_layers 2 --e_layers 3 --itr 3 --label_len 336 --pred_len 336 --seq_len 336 --des 'Exp'
python -u main_informer.py --model informer --data WTH --features S --attn prob --d_layers 2 --e_layers 3 --itr 3 --label_len 336 --pred_len 720 --seq_len 720 --des 'Exp'
|
def calculate_molecular_weight(compound: str, atomic_weights: dict) -> float:
    """Return the molecular weight of a chemical formula.

    Parameters
    ----------
    compound : str
        Formula such as "H2O", "CO2" or "CaCl2". An uppercase letter starts
        a new element symbol; lowercase letters continue the current symbol;
        digits form the (possibly multi-digit) count of the current element.
    atomic_weights : dict
        Mapping from element symbol to atomic weight. Unknown symbols
        contribute 0 (same behavior as the original implementation).

    Returns
    -------
    float
        Sum of atomic_weights[element] * count over the formula.
    """
    weight = 0.0
    current_element = ""
    current_count = 0
    for char in compound:
        if char.isalpha():
            if char.isupper():
                # A new element symbol begins: flush the previous one.
                # A missing count means a count of 1 (e.g. the "O" in "H2O").
                if current_element:
                    weight += atomic_weights.get(current_element, 0) * max(current_count, 1)
                current_element = char
                current_count = 0
            else:
                # Lowercase letters extend the current symbol ("Cl", "Mg").
                # The original treated every letter as a new element, which
                # silently broke all two-letter symbols.
                current_element += char
        else:
            # Accumulate multi-digit counts.
            current_count = current_count * 10 + int(char)
    # Flush the final element.
    if current_element:
        weight += atomic_weights.get(current_element, 0) * max(current_count, 1)
    return weight
|
// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "patchpanel/arc_service.h"
#include <fcntl.h>
#include <linux/rtnetlink.h>
#include <net/if.h>
#include <sys/ioctl.h>
#include <sys/utsname.h>
#include <unistd.h>
#include <utility>
#include <vector>
#include <base/bind.h>
#include <base/files/file_path.h>
#include <base/files/file_util.h>
#include <base/files/scoped_file.h>
#include <base/logging.h>
#include <base/strings/string_number_conversions.h>
#include <base/strings/string_util.h>
#include <base/strings/stringprintf.h>
#include <base/system/sys_info.h>
#include <brillo/key_value_store.h>
#include <chromeos/constants/vm_tools.h>
#include "patchpanel/adb_proxy.h"
#include "patchpanel/datapath.h"
#include "patchpanel/mac_address_generator.h"
#include "patchpanel/manager.h"
#include "patchpanel/minijailed_process_runner.h"
#include "patchpanel/net_util.h"
#include "patchpanel/scoped_ns.h"
namespace patchpanel {
namespace {
const int32_t kAndroidRootUid = 655360;
constexpr uint32_t kInvalidId = 0;
constexpr char kArcNetnsName[] = "arc_netns";
constexpr char kArcIfname[] = "arc0";
constexpr char kArcBridge[] = "arcbr0";
constexpr std::array<const char*, 2> kEthernetInterfacePrefixes{{"eth", "usb"}};
constexpr std::array<const char*, 2> kWifiInterfacePrefixes{{"wlan", "mlan"}};
constexpr std::array<const char*, 2> kCellInterfacePrefixes{{"wwan", "rmnet"}};
bool KernelVersion(int* major, int* minor) {
struct utsname u;
if (uname(&u) != 0) {
PLOG(ERROR) << "uname failed";
*major = *minor = 0;
return false;
}
int unused;
if (sscanf(u.release, "%d.%d.%d", major, minor, &unused) != 3) {
LOG(ERROR) << "unexpected release string: " << u.release;
*major = *minor = 0;
return false;
}
return true;
}
// Performs one-time kernel module loading and sysfs ownership setup needed
// by the ARC container. Subsequent calls are no-ops via a function-local
// static flag (NOTE(review): not synchronized — presumably only called from
// a single thread; confirm).
void OneTimeContainerSetup(const Datapath& datapath) {
  static bool done = false;
  if (done)
    return;
  auto& runner = datapath.runner();
  // Load networking modules needed by Android that are not compiled in the
  // kernel. Android does not allow auto-loading of kernel modules.
  // Expected for all kernels.
  if (runner.modprobe_all({
          // The netfilter modules needed by netd for iptables commands.
          "ip6table_filter",
          "ip6t_ipv6header",
          "ip6t_REJECT",
          // The ipsec modules for AH and ESP encryption for ipv6.
          "ah6",
          "esp6",
      }) != 0) {
    LOG(ERROR) << "One or more required kernel modules failed to load."
               << " Some Android functionality may be broken.";
  }
  // The xfrm modules needed for Android's ipsec APIs on kernels < 5.4.
  int major, minor;
  if (KernelVersion(&major, &minor) &&
      (major < 5 || (major == 5 && minor < 4)) &&
      runner.modprobe_all({
          "xfrm4_mode_transport",
          "xfrm4_mode_tunnel",
          "xfrm6_mode_transport",
          "xfrm6_mode_tunnel",
      }) != 0) {
    LOG(ERROR) << "One or more required kernel modules failed to load."
               << " Some Android functionality may be broken.";
  }
  // Optional modules.
  if (runner.modprobe_all({
          // This module is not available in kernels < 3.18
          "nf_reject_ipv6",
          // These modules are needed for supporting Chrome traffic on Android
          // VPN which uses Android's NAT feature. Android NAT sets up
          // iptables
          // rules that use these conntrack modules for FTP/TFTP.
          "nf_nat_ftp",
          "nf_nat_tftp",
          // The tun module is needed by the Android 464xlat clatd process.
          "tun",
      }) != 0) {
    LOG(WARNING) << "One or more optional kernel modules failed to load.";
  }
  // This is only needed for CTS (b/27932574).
  if (runner.chown("655360", "655360", "/sys/class/xt_idletimer") != 0) {
    LOG(ERROR) << "Failed to change ownership of xt_idletimer.";
  }
  // Mark setup complete so later callers return immediately.
  done = true;
}
// Makes Android root the owner of the mtu sysfs file for device |ifname|.
// Enters the mount namespace of process |pid| (the container), chowns the
// sysfs file, then returns to the original mount namespace. All failures
// are logged and the function returns silently (best effort).
void SetContainerSysfsMtuOwner(uint32_t pid,
                               const std::string& ifname,
                               const std::string& basename) {
  const std::string current_mnt_ns = "/proc/self/ns/mnt";
  const std::string target_mnt_ns = "/proc/" + std::to_string(pid) + "/ns/mnt";
  const std::string sysfs_mtu_path =
      "/sys/class/net/" + ifname + "/" + basename;
  // Keep a handle to our own namespace so we can switch back afterwards.
  base::ScopedFD current_ns_fd(open(current_mnt_ns.c_str(), O_RDONLY));
  if (!current_ns_fd.is_valid()) {
    PLOG(ERROR) << " Could not open " << current_mnt_ns;
    return;
  }
  base::ScopedFD target_ns_fd(open(target_mnt_ns.c_str(), O_RDONLY));
  if (!target_ns_fd.is_valid()) {
    PLOG(ERROR) << " Could not open " << target_mnt_ns;
    return;
  }
  if (setns(target_ns_fd.get(), CLONE_NEWNS) == -1) {
    PLOG(ERROR) << "Could not enter " << target_mnt_ns;
    return;
  }
  // kAndroidRootUid is Android root as seen from the host user namespace.
  if (chown(sysfs_mtu_path.c_str(), kAndroidRootUid, kAndroidRootUid) == -1)
    LOG(ERROR) << "Failed to change ownership of " + sysfs_mtu_path;
  if (setns(current_ns_fd.get(), CLONE_NEWNS) == -1)
    PLOG(ERROR) << "Could not re-enter " << current_mnt_ns;
}
// Classifies |ifname| by case-insensitively matching its prefix against the
// known Ethernet, WiFi, and Cellular interface name prefixes.
ArcService::InterfaceType InterfaceTypeFor(const std::string& ifname) {
  auto matches_any = [&ifname](const auto& prefixes) {
    for (const char* prefix : prefixes) {
      if (base::StartsWith(ifname, prefix,
                           base::CompareCase::INSENSITIVE_ASCII)) {
        return true;
      }
    }
    return false;
  };
  if (matches_any(kEthernetInterfacePrefixes))
    return ArcService::InterfaceType::ETHERNET;
  if (matches_any(kWifiInterfacePrefixes))
    return ArcService::InterfaceType::WIFI;
  if (matches_any(kCellInterfacePrefixes))
    return ArcService::InterfaceType::CELL;
  return ArcService::InterfaceType::UNKNOWN;
}
// Returns true if network interface |ifname| currently has the
// IFF_MULTICAST flag set. Opens a throwaway datagram socket (IPv4, falling
// back to IPv6) just to issue the SIOCGIFFLAGS ioctl; returns false on any
// error.
bool IsMulticastInterface(const std::string& ifname) {
  if (ifname.empty()) {
    return false;
  }
  int fd = socket(AF_INET, SOCK_DGRAM, 0);
  if (fd < 0) {
    // If IPv4 fails, try to open a socket using IPv6.
    fd = socket(AF_INET6, SOCK_DGRAM, 0);
    if (fd < 0) {
      LOG(ERROR) << "Unable to create socket";
      return false;
    }
  }
  struct ifreq ifr;
  memset(&ifr, 0, sizeof(ifr));
  // Copy at most IFNAMSIZ - 1 bytes: strncpy() does not NUL-terminate when
  // the source fills the buffer, and the struct was zeroed above, so this
  // guarantees a terminated name even for max-length interface names.
  strncpy(ifr.ifr_name, ifname.c_str(), IFNAMSIZ - 1);
  if (ioctl(fd, SIOCGIFFLAGS, &ifr) < 0) {
    PLOG(ERROR) << "SIOCGIFFLAGS failed for " << ifname;
    close(fd);
    return false;
  }
  close(fd);
  return (ifr.ifr_flags & IFF_MULTICAST);
}
// Returns the ARC management device used for VPN forwarding, ADB-over-TCP.
// Allocates the arc0 IPv4 subnet with the host address at offset 0 and the
// guest address at offset 1; returns nullptr if any allocation fails.
std::unique_ptr<Device> MakeArcDevice(AddressManager* addr_mgr,
                                      GuestMessage::GuestType guest) {
  auto ipv4_subnet = addr_mgr->AllocateIPv4Subnet(AddressManager::Guest::ARC);
  if (!ipv4_subnet) {
    LOG(ERROR) << "Subnet already in use or unavailable";
    return nullptr;
  }
  auto host_ipv4_addr = ipv4_subnet->AllocateAtOffset(0);
  if (!host_ipv4_addr) {
    LOG(ERROR) << "Bridge address already in use or unavailable";
    return nullptr;
  }
  auto guest_ipv4_addr = ipv4_subnet->AllocateAtOffset(1);
  if (!guest_ipv4_addr) {
    LOG(ERROR) << "ARC address already in use or unavailable";
    return nullptr;
  }
  // ARCVM uses a fixed MAC (subnet index 1); the container lets the address
  // manager pick any index.
  int subnet_index = (guest == GuestMessage::ARC_VM) ? 1 : kAnySubnetIndex;
  auto config = std::make_unique<Device::Config>(
      addr_mgr->GenerateMacAddress(subnet_index), std::move(ipv4_subnet),
      std::move(host_ipv4_addr), std::move(guest_ipv4_addr));
  // The management device forwards no multicast/IPv6 traffic and disallows
  // ADB here (ADB-over-TCP is handled separately).
  Device::Options opts{
      .fwd_multicast = false,
      .ipv6_enabled = false,
      .adb_allowed = false,
  };
  return std::make_unique<Device>(kArcIfname, kArcBridge, kArcIfname,
                                  std::move(config), opts);
}
} // namespace
// Creates the arc0 management device, pre-allocates the per-interface
// address config pool, and subscribes to shill device-change notifications
// (triggering an initial scan so existing devices are picked up).
ArcService::ArcService(ShillClient* shill_client,
                       Datapath* datapath,
                       AddressManager* addr_mgr,
                       TrafficForwarder* forwarder,
                       GuestMessage::GuestType guest)
    : shill_client_(shill_client),
      datapath_(datapath),
      addr_mgr_(addr_mgr),
      forwarder_(forwarder),
      guest_(guest),
      id_(kInvalidId) {
  arc_device_ = MakeArcDevice(addr_mgr, guest_);
  AllocateAddressConfigs();
  shill_client_->RegisterDevicesChangedHandler(
      base::Bind(&ArcService::OnDevicesChanged, weak_factory_.GetWeakPtr()));
  shill_client_->ScanDevices();
}
// Stops the service on destruction if it is still running, tearing down
// whatever Start(id_) set up.
ArcService::~ArcService() {
  if (IsStarted()) {
    Stop(id_);
  }
}
// A valid (non-zero) id means Start() succeeded and Stop() has not run.
bool ArcService::IsStarted() const {
  return id_ != kInvalidId;
}
// Pre-allocates the pool of Device::Configs (subnet + host/guest addresses
// + MAC) handed out by AcquireConfig() when shill devices appear.
void ArcService::AllocateAddressConfigs() {
  // The first usable subnet is the "other" ARC device subnet.
  // As a temporary workaround, for ARCVM, allocate fixed MAC addresses.
  uint8_t mac_addr_index = 2;
  // Allocate 2 subnets each for Ethernet and WiFi and 1 for LTE WAN interfaces.
  for (const auto itype :
       {InterfaceType::ETHERNET, InterfaceType::ETHERNET, InterfaceType::WIFI,
        InterfaceType::WIFI, InterfaceType::CELL}) {
    auto ipv4_subnet =
        addr_mgr_->AllocateIPv4Subnet(AddressManager::Guest::ARC_NET);
    if (!ipv4_subnet) {
      LOG(ERROR) << "Subnet already in use or unavailable";
      continue;
    }
    // For here out, use the same slices.
    auto host_ipv4_addr = ipv4_subnet->AllocateAtOffset(0);
    if (!host_ipv4_addr) {
      LOG(ERROR) << "Bridge address already in use or unavailable";
      continue;
    }
    auto guest_ipv4_addr = ipv4_subnet->AllocateAtOffset(1);
    if (!guest_ipv4_addr) {
      LOG(ERROR) << "ARC address already in use or unavailable";
      continue;
    }
    // ARCVM gets deterministic MACs (indices 2, 3, ...); the container gets
    // addresses chosen by the address manager.
    MacAddress mac_addr = (guest_ == GuestMessage::ARC_VM)
                              ? addr_mgr_->GenerateMacAddress(mac_addr_index++)
                              : addr_mgr_->GenerateMacAddress();
    available_configs_[itype].emplace_back(std::make_unique<Device::Config>(
        mac_addr, std::move(ipv4_subnet), std::move(host_ipv4_addr),
        std::move(guest_ipv4_addr)));
  }
  // all_configs_ holds non-owning pointers covering every config, in order.
  for (const auto& kv : available_configs_)
    for (const auto& c : kv.second)
      all_configs_.emplace_back(c.get());
  // Append arc0 config so that the necessary tap device gets created.
  all_configs_.insert(all_configs_.begin(), &arc_device_->config());
}
// Hands out one pre-allocated Device::Config for the interface type of
// |ifname|, or nullptr when the type is unsupported or the pool for that
// type is exhausted. Ownership transfers to the caller; configs come back
// through ReleaseConfig().
std::unique_ptr<Device::Config> ArcService::AcquireConfig(
    const std::string& ifname) {
  const auto itype = InterfaceTypeFor(ifname);
  if (itype == InterfaceType::UNKNOWN) {
    LOG(ERROR) << "Unsupported interface: " << ifname;
    return nullptr;
  }

  auto& pool = available_configs_[itype];
  if (pool.empty()) {
    LOG(ERROR) << "No more addresses available. Cannot make device for "
               << ifname;
    return nullptr;
  }

  auto config = std::move(pool.front());
  pool.pop_front();
  return config;
}
// Returns a Device::Config obtained from AcquireConfig() to the front of
// its interface-type pool so it is the next one reused.
void ArcService::ReleaseConfig(const std::string& ifname,
                               std::unique_ptr<Device::Config> config) {
  const auto itype = InterfaceTypeFor(ifname);
  if (itype != InterfaceType::UNKNOWN) {
    available_configs_[itype].push_front(std::move(config));
    return;
  }
  LOG(ERROR) << "Unsupported interface: " << ifname;
}
// Brings up the ARC datapath for container pid / VM cid |id|: TAP devices
// (ARCVM) or a veth pair into the ARC network namespace (container), plus
// the arc0 management bridge. Returns false on any fatal setup failure.
bool ArcService::Start(uint32_t id) {
  if (IsStarted()) {
    LOG(WARNING) << "Already running - did something crash?"
                 << " Stopping and restarting...";
    Stop(id_);
  }

  std::string arc_device_ifname;
  if (guest_ == GuestMessage::ARC_VM) {
    // Allocate TAP devices for all configs.
    for (auto* config : all_configs_) {
      auto mac = config->mac_addr();
      auto tap = datapath_->AddTAP("" /* auto-generate name */, &mac,
                                   nullptr /* no ipv4 subnet */,
                                   vm_tools::kCrosVmUser);
      if (tap.empty()) {
        LOG(ERROR) << "Failed to create TAP device";
        continue;
      }
      config->set_tap_ifname(tap);
    }
    arc_device_ifname = arc_device_->config().tap_ifname();
  } else {
    OneTimeContainerSetup(*datapath_);
    if (!datapath_->NetnsAttachName(kArcNetnsName, id)) {
      LOG(ERROR) << "Failed to attach name " << kArcNetnsName << " to pid "
                 << id;
      return false;
    }
    arc_device_ifname = ArcVethHostName(arc_device_->guest_ifname());
    if (!datapath_->ConnectVethPair(id, kArcNetnsName, arc_device_ifname,
                                    arc_device_->guest_ifname(),
                                    arc_device_->config().mac_addr(),
                                    arc_device_->config().guest_ipv4_addr(), 30,
                                    arc_device_->options().fwd_multicast)) {
      LOG(ERROR) << "Cannot create virtual link for device "
                 << arc_device_->phys_ifname();
      return false;
    }
  }
  id_ = id;

  // Create the bridge for the management device arc0.
  // Per crbug/1008686 this device cannot be deleted and then re-added.
  // So instead of removing the bridge when the service stops, bring down the
  // device instead and re-up it on restart.
  // Hence this only fails when AddBridge fails AND the (pre-existing)
  // bridge cannot be brought back up either.
  if (!datapath_->AddBridge(kArcBridge, arc_device_->config().host_ipv4_addr(),
                            30) &&
      !datapath_->MaskInterfaceFlags(kArcBridge, IFF_UP)) {
    LOG(ERROR) << "Failed to bring up arc bridge " << kArcBridge;
    return false;
  }

  if (!datapath_->AddToBridge(kArcBridge, arc_device_ifname)) {
    LOG(ERROR) << "Failed to bridge arc device " << arc_device_ifname << " to "
               << kArcBridge;
    return false;
  }
  LOG(INFO) << "Started ARC management device " << *arc_device_.get();

  // Start already known Shill <-> ARC mapped devices.
  for (const auto& ifname : shill_devices_)
    AddDevice(ifname);

  return true;
}
// Tears down the datapath state built by Start(). The arc0 bridge is only
// brought down (not deleted) so it can be re-upped on restart.
void ArcService::Stop(uint32_t id) {
  if (!IsStarted()) {
    LOG(ERROR) << "ArcService was not running";
    return;
  }

  // After the ARC container has stopped, the pid is not known anymore.
  // Only the ARCVM path can still be cross-checked against the cid.
  if (guest_ == GuestMessage::ARC_VM && id_ != id) {
    LOG(ERROR) << "Mismatched ARCVM CIDs " << id_ << " != " << id;
    return;
  }

  // Stop Shill <-> ARC mapped devices.
  for (const auto& ifname : shill_devices_)
    RemoveDevice(ifname);

  // Per crbug/1008686 this device cannot be deleted and then re-added.
  // So instead of removing the bridge, bring it down and mark it. This will
  // allow us to detect if the device is re-added in case of a crash restart
  // and do the right thing.
  if (!datapath_->MaskInterfaceFlags(kArcBridge, IFF_DEBUG, IFF_UP))
    LOG(ERROR) << "Failed to bring down arc bridge "
               << "- it may not restart correctly";

  if (guest_ == GuestMessage::ARC) {
    // NOTE(review): Start() created this veth as
    // ArcVethHostName(arc_device_->guest_ifname()); confirm phys_ifname()
    // and guest_ifname() match for the management device, otherwise the
    // host-side veth leaks here.
    datapath_->RemoveInterface(ArcVethHostName(arc_device_->phys_ifname()));
    if (!datapath_->NetnsDeleteName(kArcNetnsName))
      LOG(WARNING) << "Failed to delete netns name " << kArcNetnsName;
  }

  // Destroy allocated TAP devices if any, including the ARC management device.
  for (auto* config : all_configs_) {
    if (config->tap_ifname().empty())
      continue;
    datapath_->RemoveInterface(config->tap_ifname());
    config->set_tap_ifname("");
  }

  LOG(INFO) << "Stopped ARC management device " << *arc_device_.get();
  id_ = kInvalidId;
}
// Mirrors Shill device change notifications into the tracked device set:
// removals are processed before additions so an interface appearing in
// both sets ends up freshly re-added.
void ArcService::OnDevicesChanged(const std::set<std::string>& added,
                                  const std::set<std::string>& removed) {
  for (const auto& ifname : removed) {
    shill_devices_.erase(ifname);
    RemoveDevice(ifname);
  }

  for (const auto& ifname : added) {
    shill_devices_.insert(ifname);
    AddDevice(ifname);
  }
}
// Creates and plumbs the ARC guest device mapped to Shill device |ifname|:
// acquires an address config, builds the bridge and routing rules, hooks
// up the TAP device (ARCVM) or a new veth pair (container), optionally
// opens ADB port access, and starts traffic forwarding.
void ArcService::AddDevice(const std::string& ifname) {
  if (!IsStarted())
    return;

  if (ifname.empty())
    return;

  if (devices_.find(ifname) != devices_.end()) {
    LOG(DFATAL) << "Attemping to add already tracked device: " << ifname;
    return;
  }

  auto itype = InterfaceTypeFor(ifname);
  Device::Options opts{
      .fwd_multicast = IsMulticastInterface(ifname),
      // TODO(crbug/726815) Also enable |ipv6_enabled| for cellular networks
      // once IPv6 is enabled on cellular networks in Shill.
      .ipv6_enabled =
          (itype == InterfaceType::ETHERNET || itype == InterfaceType::WIFI),
      .adb_allowed =
          (itype == InterfaceType::ETHERNET || itype == InterfaceType::WIFI),
  };

  auto config = AcquireConfig(ifname);
  if (!config) {
    LOG(ERROR) << "Cannot acquire a Config for " << ifname;
    return;
  }

  // |ifname| is used both as the physical and the guest interface name.
  auto device = std::make_unique<Device>(ifname, ArcBridgeName(ifname), ifname,
                                         std::move(config), opts);
  LOG(INFO) << "Starting device " << *device;

  // Create the bridge.
  if (!datapath_->AddBridge(device->host_ifname(),
                            device->config().host_ipv4_addr(), 30)) {
    LOG(ERROR) << "Failed to setup bridge " << device->host_ifname();
    return;
  }

  datapath_->StartRoutingDevice(device->phys_ifname(), device->host_ifname(),
                                device->config().guest_ipv4_addr(),
                                TrafficSource::ARC);

  std::string virtual_device_ifname;
  if (guest_ == GuestMessage::ARC_VM) {
    // ARCVM: reuse the TAP device allocated in Start().
    virtual_device_ifname = device->config().tap_ifname();
    if (virtual_device_ifname.empty()) {
      LOG(ERROR) << "No TAP device for " << *device;
      return;
    }
  } else {
    // Container: create a veth pair into the ARC network namespace.
    virtual_device_ifname = ArcVethHostName(device->guest_ifname());
    if (!datapath_->ConnectVethPair(
            id_, kArcNetnsName, virtual_device_ifname, device->guest_ifname(),
            device->config().mac_addr(), device->config().guest_ipv4_addr(), 30,
            device->options().fwd_multicast)) {
      LOG(ERROR) << "Cannot create veth link for device " << *device;
      return;
    }
    // Allow netd to write to /sys/class/net/<guest_ifname>/mtu (b/169936104).
    SetContainerSysfsMtuOwner(id_, device->guest_ifname(), "mtu");
  }

  if (!datapath_->AddToBridge(device->host_ifname(), virtual_device_ifname)) {
    // Clean up the veth created above; TAP devices outlive this failure.
    if (guest_ == GuestMessage::ARC) {
      datapath_->RemoveInterface(virtual_device_ifname);
    }
    LOG(ERROR) << "Failed to bridge interface " << virtual_device_ifname;
    return;
  }

  // Non-fatal: the device still works without ADB access.
  if (device->options().adb_allowed &&
      !datapath_->AddAdbPortAccessRule(ifname)) {
    LOG(ERROR) << "Failed to add ADB port access rule";
  }

  forwarder_->StartForwarding(device->phys_ifname(), device->host_ifname(),
                              device->options().ipv6_enabled,
                              device->options().fwd_multicast);

  devices_.emplace(ifname, std::move(device));
}
// Undoes AddDevice() for |ifname|: stops forwarding and routing, removes
// the host-side veth (container only), deletes the bridge, revokes ADB
// access, and returns the address config to the pool.
void ArcService::RemoveDevice(const std::string& ifname) {
  if (!IsStarted())
    return;

  const auto it = devices_.find(ifname);
  if (it == devices_.end()) {
    LOG(WARNING) << "Unknown device: " << ifname;
    return;
  }

  const auto* device = it->second.get();
  LOG(INFO) << "Removing device " << *device;

  forwarder_->StopForwarding(device->phys_ifname(), device->host_ifname(),
                             device->options().ipv6_enabled,
                             device->options().fwd_multicast);

  // ARCVM TAP devices are removed in VmImpl::Stop() when the service stops
  // (AddDevice() constructed this Device with phys_ifname == guest_ifname,
  // so this name matches the veth created there).
  if (guest_ == GuestMessage::ARC)
    datapath_->RemoveInterface(ArcVethHostName(device->phys_ifname()));

  datapath_->StopRoutingDevice(device->phys_ifname(), device->host_ifname(),
                               device->config().guest_ipv4_addr(),
                               TrafficSource::ARC);

  datapath_->RemoveBridge(device->host_ifname());

  if (device->options().adb_allowed)
    datapath_->DeleteAdbPortAccessRule(ifname);

  // Recycle the config before dropping the Device itself.
  ReleaseConfig(ifname, it->second->release_config());
  devices_.erase(it);
}
// Returns a read-only view (non-owning pointers) of every Device::Config
// owned by the service, including the arc0 management config.
std::vector<const Device::Config*> ArcService::GetDeviceConfigs() const {
  return std::vector<const Device::Config*>(all_configs_.begin(),
                                            all_configs_.end());
}
} // namespace patchpanel
|
<gh_stars>1-10
/**
 * tweaked forwardRef for supporting `as` prop
 *
 * All credit goes to chakra-ui, Reach UI, Reakit, fluentui for base types
 * & forwardRef function
 */
import React from "react";

/** Removes internally-handled props (plus any extras) from `Target`. */
export type OmitCommonProps<Target, OmitAdditionalProps extends keyof any = never> = Omit<
  Target,
  "transition" | "as" | "color" | OmitAdditionalProps
>;

/** Merges two prop objects, letting `OverrideProps` win on conflicts. */
export type RightJoinProps<
  SourceProps extends object = {},
  OverrideProps extends object = {},
> = OmitCommonProps<SourceProps, keyof OverrideProps> & OverrideProps;

/** Anything usable as a React element type (tag name or component). */
export type As<Props = any> = React.ElementType<Props>;

/**
 * Props of the base component merged with the props of the element
 * rendered via `as`; `AdditionalProps` takes precedence over both.
 */
export type MergeWithAs<
  ComponentProps extends object,
  AsProps extends object,
  AdditionalProps extends object = {},
  AsComponent extends As = As,
> = RightJoinProps<ComponentProps, AdditionalProps> &
  RightJoinProps<AsProps, AdditionalProps> & {
    as?: AsComponent;
  };

/**
 * Call signature of a polymorphic component: generic over the `as`
 * target while keeping the usual static component fields available.
 */
export type ComponentWithAs<Component extends As, Props extends object = {}> = {
  <AsComponent extends As>(
    props: MergeWithAs<
      React.ComponentProps<Component>,
      React.ComponentProps<AsComponent>,
      Props,
      AsComponent
    >,
  ): JSX.Element;

  displayName?: string;
  propTypes?: React.WeakValidationMap<any>;
  contextTypes?: React.ValidationMap<any>;
  defaultProps?: Partial<any>;
  id?: string;
};

/**
 * Extract the props of a React element or component
 */
export type PropsOf<T extends As> = React.ComponentPropsWithoutRef<T> & {
  as?: As;
};

/**
 * `React.forwardRef` retyped so the resulting component supports an `as`
 * prop. The double cast (`as unknown as ...`) is deliberate: the runtime
 * value is unchanged, only the declared type is widened.
 */
export function forwardRef<Props extends object, Component extends As>(
  component: React.ForwardRefRenderFunction<
    any,
    RightJoinProps<PropsOf<Component>, Props> & {
      as?: As;
    }
  >,
) {
  return React.forwardRef(component) as unknown as ComponentWithAs<Component, Props>;
}

/** Props of host element `T` with `as` support; `svg` also drops `children`. */
export type HTMLRevindProps<T extends As> = Omit<
  PropsOf<T>,
  T extends "svg" ? "ref" | "children" : "ref"
> & { as?: As };
|
<reponame>koksyn/hexagonal-java-report-generator<filename>application/src/main/java/com/report/application/entity/FilmCharacter.java<gh_stars>1-10
package com.report.application.entity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
import java.util.UUID;
/**
 * JPA entity: one (film, character, planet) triple belonging to a
 * generated {@code Report}.
 */
@Data
@Entity
@AllArgsConstructor
@NoArgsConstructor
public class FilmCharacter {

    // Primary key; generated just before the first persist (see prePersist).
    @Id
    private UUID uuid;

    // Assigns a fresh random UUID on persist.
    // NOTE(review): this also overwrites any uuid supplied through the
    // all-args constructor — confirm that is intended.
    @PrePersist
    private void prePersist() {
        uuid = UUID.randomUUID();
    }

    // Owning report; fetched lazily.
    // NOTE(review): Lombok @Data generates equals/hashCode/toString that
    // touch this lazy association — verify this is safe with your session
    // handling before relying on those methods.
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "report_id")
    private Report report;

    // Denormalized film reference (id + display name).
    @Column(nullable = false)
    private Long filmId;

    @Column(nullable = false)
    private String filmName;

    // Denormalized character reference (id + display name).
    @Column(nullable = false)
    private Long characterId;

    @Column(nullable = false)
    private String characterName;

    // Denormalized home planet reference (id + display name).
    @Column(nullable = false)
    private Long planetId;

    @Column(nullable = false)
    private String planetName;
}
|
# Collect the odd integers below `limit`.
limit = 8
odds = []
for i in range(limit):
    # Fixed: the original condition was `i % 2 != 0 and i > limit`, but
    # range(limit) never yields a value greater than limit, so `odds`
    # was always empty. Only the oddness check belongs here.
    if i % 2 != 0:
        odds.append(i)
|
require 'sinatra/base'
require 'securerandom'
require 'json'
require 'open3'
# Helpers for generating HAProxy config names and restarting the service.
module HaproxyHelper
  # Assigns random url-safe base64 names to every backend, server and
  # frontend in `config`, wires each frontend's default_backend, and
  # returns the mutated config. The `p` calls also echo each generated
  # name to stdout as a side effect.
  # NOTE(review): backend_title is overwritten on every iteration, so all
  # frontends end up pointing at the *last* backend — confirm this is
  # intended when more than one backend is configured.
  def self.generate_names(config)
    backend_title = ""
    config["backends"].each do |backend|
      backend["title"] = p SecureRandom.urlsafe_base64(6)
      backend_title = backend["title"]
      backend["servers"].each do |server|
        server["name"] = p SecureRandom.urlsafe_base64(6)
      end
    end
    config["frontends"].each do |frontend|
      frontend["title"] = p SecureRandom.urlsafe_base64(6)
      frontend["default_backend"] = backend_title
    end
    config
  end

  # Restarts haproxy and returns a JSON string of the captured output.
  # NOTE(review): Open3.capture3 returns (stdout, stderr, status), so the
  # two-variable assignment puts *stderr* into `status` and discards the
  # real exit status — confirm which fields are actually wanted. Also
  # note this runs `restart`, not a graceful reload, despite the name.
  def self.reload
    make_log, status = Open3.capture3("service haproxy restart")
    hash = { log: make_log, status: status}
    json = JSON.generate(hash)
  end
end
|
package affiliation
import "database/sql"
// Identity contains sortingHat user Identity
// sql.Null* types are used because the corresponding database columns are
// nullable.
type Identity struct {
	ID       sql.NullString
	UUID     sql.NullString
	Name     sql.NullString
	Username sql.NullString
	Email    sql.NullString
	Domain   sql.NullString
	Gender   sql.NullString
	// Confidence value stored in the gender_acc column.
	GenderACC *int `db:"gender_acc"`
	OrgName   sql.NullString
	// True when the identity belongs to a bot account (is_bot column).
	IsBot bool `db:"is_bot"`
	// All organization names associated with this identity.
	// NOTE(review): only GenderACC and IsBot carry explicit `db` tags;
	// the remaining fields are presumably mapped by name — verify against
	// the query layer.
	MultiOrgNames []string
}
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.entity.proxy.nginx;
import java.util.Collection;
import java.util.Map;
import brooklyn.entity.proxy.ProxySslConfig;
import brooklyn.util.ResourceUtils;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.text.Strings;
import brooklyn.util.text.TemplateProcessor;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
/**
 * Processes a FreeMarker template for an {@link NginxController} configuration file.
 */
public class NginxConfigTemplate {

    // Driver supplying the entity whose configuration is being rendered.
    private NginxDriver driver;

    /** Factory method mirroring the private constructor. */
    public static NginxConfigTemplate generator(NginxDriver driver) {
        return new NginxConfigTemplate(driver);
    }

    private NginxConfigTemplate(NginxDriver driver) {
        this.driver = driver;
    }

    /**
     * Renders the nginx server configuration from the entity's configured
     * FreeMarker template.
     *
     * @throws IllegalStateException if SSL is configured without either a
     *         certificate destination or a certificate source URL
     */
    public String configFile() {
        // Check template URL exists
        String templateUrl = driver.getEntity().getConfig(NginxController.SERVER_CONF_TEMPLATE_URL);
        ResourceUtils.create(this).checkUrlExists(templateUrl);

        // Check SSL configuration
        ProxySslConfig ssl = driver.getEntity().getConfig(NginxController.SSL_CONFIG);
        if (ssl != null && Strings.isEmpty(ssl.getCertificateDestination()) && Strings.isEmpty(ssl.getCertificateSourceUrl())) {
            throw new IllegalStateException("ProxySslConfig can't have a null certificateDestination and null certificateSourceUrl. One or both need to be set");
        }

        // For mapping by URL
        // Only mappings that currently have target addresses are grouped
        // by domain; mappings without targets are left out of the multimap.
        Iterable<UrlMapping> mappings = ((NginxController) driver.getEntity()).getUrlMappings();
        Multimap<String, UrlMapping> mappingsByDomain = LinkedHashMultimap.create();
        for (UrlMapping mapping : mappings) {
            Collection<String> addrs = mapping.getAttribute(UrlMapping.TARGET_ADDRESSES);
            if (addrs != null && addrs.size() > 0) {
                mappingsByDomain.put(mapping.getDomain(), mapping);
            }
        }

        // Variables exposed to the FreeMarker template.
        Map<String, Object> substitutions = MutableMap.<String, Object>builder()
                .putIfNotNull("ssl", ssl)
                .put("urlMappings", mappings)
                .put("domainMappings", mappingsByDomain)
                .build();

        // Get template contents and process
        String contents = ResourceUtils.create(driver.getEntity()).getResourceAsString(templateUrl);
        return TemplateProcessor.processTemplateContents(contents, driver, substitutions);
    }
}
|
<reponame>thadcsmith/koa-i18next-detector<filename>src/lookups/session.js
/**
 * koa-i18next language detector backed by the Koa session.
 * Reads/writes the language under the key named by `options.lookupSession`.
 */
function lookup(ctx, options) {
  if (!options.lookupSession || !ctx || !ctx.session) {
    return undefined;
  }
  return ctx.session[options.lookupSession];
}

function cacheUserLanguage(ctx, lng, options = {}) {
  if (options.lookupSession && ctx && ctx.session) {
    ctx.session[options.lookupSession] = lng;
  }
}

export default {
  name: 'session',
  lookup,
  cacheUserLanguage
};
|
class CardGame:
    """Skeleton of a card game tracking players and a shared pile."""

    def __init__(self, nPlayers):
        # Number of players seated at the table.
        self.nPlayers = nPlayers
        # Other initialization code for the card game

    def dealCards(self, peopleSlap):
        """Deal one round of cards based on each player's slap count.

        NOTE(review): this snippet references names not defined here
        (`some_condition`, `k` in the first branch, `self.pile`,
        `self._players`, `self.printStatus`) — it cannot run as-is and
        the missing pieces need to be filled in.
        """
        if some_condition:  # Replace with the actual condition to check
            # NOTE(review): `k` is undefined on this path.
            self._whoDeals = k
        else:
            # Each player with a positive slap count deals one card under
            # the pile.
            for k in range(self.nPlayers):
                if peopleSlap[k] > 0:
                    self.pile.putUnder(self._players[k].deal())
        # NOTE(review): GIVEPILE is assigned but never used afterwards.
        GIVEPILE = self.pile.give2owner()
        self.printStatus(2)
|
<gh_stars>1-10
/**
 * Wires up the custom select field: applies the declared default value,
 * toggles the "empty" modifier class, and keeps it in sync on change.
 */
export default function SelectFieldScript() {
  // Elements
  const wrapper = document.getElementById("iq-select-wrapper");
  const select = document.getElementById("iq-select-field");

  // Seed the control from its declared default.
  select.value = select.getAttribute("defaultvalue");
  if (select.value === "") {
    wrapper.classList.add("iq-select-field--empty");
  }

  // Event Listeners: drop the empty marker once a real value is chosen.
  select.addEventListener("change", () => {
    if (select.value === "") return;
    wrapper.classList.remove("iq-select-field--empty");
  });
}
|
#!/bin/bash
# CocoaPods "Embed Frameworks" build-phase helper.
# Fixed shebang: the original was /bin/sh, but this script relies on
# bash-only features (the RSYNC_PROTECT_TMP_FILES array below, plus
# [[ ]] and ${PIPESTATUS[0]} in the functions that follow), so it must
# run under bash to behave correctly where sh is not bash.
set -e
set -u
set -o pipefail

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path (absolute, or relative to BUILT_PRODUCTS_DIR) of the .framework
# to embed. Copies it into the app's frameworks folder, strips invalid
# architectures, optionally re-signs, and (Xcode < 7) embeds the Swift
# runtime dylibs the binary links against.
install_framework()
{
  # Resolve the source: prefer the build products dir, then its basename
  # there, then the literal path.
  # NOTE(review): if none of the three -r tests match, `source` stays
  # unset and the later ${source} expansion aborts under `set -u` —
  # confirm that is the intended failure mode.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  # Follow a symlinked framework to its real location.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  # Fall back to a bare (non-.framework) binary layout.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to a .framework.dSYM bundle. Copies it into the derived files
# dir, strips invalid architectures, then either moves the stripped dSYM
# into DWARF_DSYM_FOLDER_PATH or touches a placeholder so Xcode's
# input/output tracking does not rerun this phase.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # strip_invalid_archs reports success through STRIP_BINARY_RETVAL.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. No-op unless an expanded code
# sign identity is set and signing is both required and allowed by the
# build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # When parallel signing is enabled, background the job; the `wait`
    # at the end of the script collects all of them.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: binary to strip in place. Removes every architecture not listed in
# $ARCHS. Communicates through the global STRIP_BINARY_RETVAL: 1 on
# success, 0 when the binary shares no architecture with the build.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks for the current build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/HJPodTestLib/HJPodTestLib.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/AFNetworking/AFNetworking.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/HJPodTestLib/HJPodTestLib.framework"
fi
# Wait for any codesign jobs backgrounded by code_sign_if_enabled.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
#!/bin/sh
# Cuts a Debian release of notcurses for the given upstream version:
# bumps debian/changelog, repacks and signs the +dfsg orig tarball,
# uploads it as a GitHub asset, imports it with gbp, and builds/tests
# the source package in pbuilder.
set -e

# export DISTRIBUTION to use something other than unstable (or whatever was
# last used in debian/changelog). see dch(1). can use a DEBVERSION exported
# in the process's environment.

usage() { echo "usage: `basename $0` version" ; }

# Exactly one argument: the upstream version to release.
[ $# -eq 1 ] || { usage >&2 ; exit 1 ; }
VERSION="$1"

# Debian revision defaults to 1 unless provided by the environment.
if [ -z "$DEBVERSION" ] ; then
  DEBVERSION=1
fi

rm -fv debian/files

# Open a new changelog entry for this version, then finalize it.
dch -v $VERSION+dfsg.1-$DEBVERSION
if [ -n "$DISTRIBUTION" ] ; then
  dch -r --distribution "$DISTRIBUTION"
else
  dch -r
fi

# Repack the upstream tarball and detach-sign it.
uscan --repack --compression xz --force
gpg --sign --armor --detach-sign ../notcurses_$VERSION+dfsg.1.orig.tar.xz

# FIXME this seems to upload to $VERSION.dfsg as opposed to $VERSION+dfsg?
github-asset dankamongmen/notcurses upload v$VERSION ../notcurses_$VERSION+dfsg.1.orig.tar.xz ../notcurses_$VERSION+dfsg.1.orig.tar.xz.asc

git commit -m "v$VERSION" -a

echo
echo "Go change the $VERSION.dfsg to $VERSION+dfsg before proceeding, dog"
echo

# Import the new orig tarball and publish tags.
gbp import-orig ../notcurses_$VERSION+dfsg.1.orig.tar.xz
git push --tags

# Build the source package and test-build it in pbuilder.
dpkg-buildpackage --build=source
cd ..
export TERM=xterm-256color && sudo pbuilder build *dsc
cd -

git push
rm debian/files
|
# Run inference with the Caffe model definition and the weights that were
# converted from PyTorch.
python infer_signate.py --model ../../deploy.prototxt \
    --weights ../../converted_from_pytorch.caffemodel
|
// From https://medium.com/@faith__ngetich/locking-down-a-project-to-a-specific-node-version-using-nvmrc-and-or-engines-e5fd19144245
// Fails fast when the running Node.js version does not satisfy the range
// declared under "engines.node" in package.json.
const semver = require('semver');
const { engines } = require('../package');

const required = engines.node;
if (!semver.satisfies(process.version, required)) {
  throw new Error(`The current node version ${process.version} does not satisfy the required version ${required} .`);
}
|
/// Specifies one member of D3D12_QUERY_HEAP_TYPE.
/// Bridges the wrapped raw Direct3D structure's `Type` field to the
/// Swift `D3DQueryHeapType` enum.
/// NOTE(review): the getter wraps the field in `RawValue(Type:)` before
/// converting — confirm `D3DQueryHeapType(rawValue:)` expects that
/// wrapper rather than the plain `Type` value.
public var type: D3DQueryHeapType {
    get {
        return D3DQueryHeapType(rawValue: RawValue(Type: self.rawValue.Type)) // Assuming self is an instance of the enclosing type
    }
    set {
        self.rawValue.Type = newValue.rawValue
    }
}
|
package commands
import (
"errors"
inventoryPkg "github.com/cbuschka/tfvm/internal/inventory"
"github.com/cbuschka/tfvm/internal/util"
"github.com/cbuschka/tfvm/internal/version"
workspacePkg "github.com/cbuschka/tfvm/internal/workspace"
)
// RunTfvmInstallCommand runs tfvm install command.
// It refreshes and saves the release inventory, resolves the requested
// version spec (from args or the workspace), and ensures a matching
// terraform release is present.
func RunTfvmInstallCommand(args []string) error {
	inventory, err := inventoryPkg.GetInventory()
	if err != nil {
		return err
	}

	// Refresh the list of known terraform releases and persist it.
	err = inventory.Update()
	if err != nil {
		return err
	}

	err = inventory.Save()
	if err != nil {
		return err
	}

	versionSpec, err := getTfVersionSpecToInstall(args)
	if err != nil {
		return err
	}

	tfRelease, err := inventory.GetMatchingTerraformRelease(versionSpec)
	if err != nil {
		if version.IsNoSuchTerraformRelease(err) {
			// util.Die presumably terminates the process; the return only
			// satisfies the compiler — verify Die's behavior.
			util.Die(1, "No matching terraform version for %s.", versionSpec.String())
			return err
		}
		return err
	}

	// NOTE(review): the error message implies GetInstalledTerraform
	// installs the release on demand — confirm against its implementation.
	_, err = inventory.GetInstalledTerraform(tfRelease.Version)
	if err != nil {
		util.Die(1, "Installing terraform failed: %s.", err.Error())
		return err
	}

	return nil
}
// getTfVersionSpecToInstall resolves which terraform version spec to
// install: an explicit version from args when given, otherwise the
// version selected for the current workspace. At most one arg is allowed.
func getTfVersionSpecToInstall(args []string) (*version.TerraformVersionSpec, error) {
	if len(args) > 1 {
		// util.Die presumably exits; the return only satisfies the compiler.
		util.Die(1, "Only version to install allowed.")
		return nil, errors.New("unreachable code")
	}

	if len(args) == 1 {
		versionSpec, err := version.ParseTerraformVersionSpec(args[0])
		if err != nil {
			util.Die(1, "Invalid version '%s'.", args[0])
			return nil, errors.New("unreachable code")
		}
		return versionSpec, nil
	}

	// No explicit version: fall back to the workspace's selection.
	workspace, err := workspacePkg.GetWorkspace()
	if err != nil {
		return nil, err
	}

	tfVersionSelection, err := workspace.GetTerraformVersionSelection()
	if err != nil {
		return nil, err
	}

	return tfVersionSelection.VersionSpec(), nil
}
|
import { Pipe, PipeTransform } from '@angular/core';
@Pipe({
  name: 'filterPipe'
})
export class FilterPipe implements PipeTransform {
  /**
   * Keeps only the items whose stringified property values contain
   * `searchText` (case-insensitive). Returns [] for a missing list and
   * the untouched list for an empty search string.
   */
  transform(items: any[], searchText: string): any {
    if (!items) {
      return [];
    }
    if (!searchText) {
      return items;
    }

    const needle = searchText.toLowerCase();
    return items.filter(item =>
      Object.keys(item).some(key =>
        item[key]
          ? item[key].toString().toLowerCase().includes(needle)
          : false
      )
    );
  }
}
|
<filename>src/client/reducers/uploadReducer.js
/**
 * Reducer tracking the lifecycle of a replay upload:
 * started -> success (replay stored) or failure (error stored).
 */
export default function (state = {uploading: false, error: false, replay: null}, action) {
  if (action.type === 'UPLOAD_STARTED') {
    return {...state, uploading: true, error: false};
  }
  if (action.type === 'UPLOAD_FAILURE') {
    return {...state, uploading: false, error: action.payload};
  }
  if (action.type === 'UPLOAD_SUCCESS') {
    return {...state, uploading: false, error: false, replay: action.payload};
  }
  return state;
}
|
#!/bin/bash
# For every asset chain listed in StakedChain's assetchains.json, create a
# pool wallet address (plus privkey and pubkey) and store it in an
# owner-read-only file under ~/kmd_pulp/stomp/wallets.

#Get Asset Chain Names from json file
echo -e "\e[91m WARNING: This script creates addresses to be use in pool config and payment processing"
echo " The address, privkey, and pubkey are stored in a owner read-only file"
echo -e " make sure to encrypt, backup, or delete as required \e[39m"

# Ensure the wallets directory exists.
if [ ! -d ~/kmd_pulp/stomp/wallets ]; then
  mkdir ~/kmd_pulp/stomp/wallets
fi

# Fetch the published asset chain list.
ac_json=$(curl https://raw.githubusercontent.com/StakedChain/StakedNotary/master/assetchains.json 2>/dev/null)

for row in $(echo "${ac_json}" | jq -c -r '.[]'); do
  # Extract a single field from the current row.
  _jq() {
    echo ${row} | jq -r ${1}
  }
  chain=$(_jq '.ac_name')

  # The chain must already be configured locally; abort otherwise.
  if [ ! -d ~/.komodo/${chain} ]; then
    echo -e "\e[91m [ $chain ] CONF FILE DOES NOT EXIST!"
    echo -e "Run ~/Knomp/install/startStaked.sh first \e[39m"
    exit 1
  fi

  # Only create a wallet file once per chain; lock it down before writing.
  if [ ! -f ~/kmd_pulp/stomp/wallets/.${chain}_poolwallet ]; then
    touch ~/kmd_pulp/stomp/wallets/.${chain}_poolwallet
    sudo chmod 600 ~/kmd_pulp/stomp/wallets/.${chain}_poolwallet
    address=$(komodo-cli -ac_name=$chain getnewaddress)
    echo "Created $address for [ $chain ]"
    echo { \"chain\":\"${chain}\", >> ~/kmd_pulp/stomp/wallets/.${chain}_poolwallet
    echo \"addr\":\"${address}\", >> ~/kmd_pulp/stomp/wallets/.${chain}_poolwallet
    echo \"pk\":\"$(komodo-cli -ac_name=${chain} dumpprivkey $address)\", >> ~/kmd_pulp/stomp/wallets/.${chain}_poolwallet
    echo \"pub\":\"$(komodo-cli -ac_name=${chain} validateaddress $address | jq -r '.pubkey')\" } >> ~/kmd_pulp/stomp/wallets/.${chain}_poolwallet
  else
    echo "ADDRESS FOR $chain ALREADY CREATED";
  fi
done

echo -e "\e[92m Finished: Your address info is located in ~/kmd_pulp/stomp/wallets \e[39m"
|
#!/usr/bin/env bash
#
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
set -o errexit # Exit the script if any statement fails.
set -o nounset # Exit the script if any uninitialized variable is used.
# Fix: the default clone URL used the unauthenticated git:// protocol, which
# GitHub has permanently disabled — cloning would always fail. Use https://.
# (The old default word also had a stray leading space that leaked into the value.)
CLONE_URL=${CLONE_URL:-'https://github.com/alexa/avs-device-sdk.git'}
PORT_AUDIO_FILE="pa_stable_v190600_20161030.tgz"
PORT_AUDIO_DOWNLOAD_URL="http://www.portaudio.com/archives/$PORT_AUDIO_FILE"
# NOTE(review): this points at the GitHub HTML blob page, not the raw file —
# confirm a raw.githubusercontent.com URL was intended if this is ever fetched.
TEST_MODEL_DOWNLOAD="https://github.com/Sensory/alexa-rpi/blob/master/models/spot-alexa-rpi-31000.snsr"
BUILD_TESTS=${BUILD_TESTS:-'true'}
CURRENT_DIR="$(pwd)"
INSTALL_BASE=${INSTALL_BASE:-"$CURRENT_DIR"}
SOURCE_FOLDER=${SDK_LOC:-''}
THIRD_PARTY_FOLDER=${THIRD_PARTY_LOC:-'third-party'}
BUILD_FOLDER=${BUILD_FOLDER:-'build'}
SOUNDS_FOLDER=${SOUNDS_FOLDER:-'sounds'}
DB_FOLDER=${DB_FOLDER:-'db'}
# Derived locations, all rooted at INSTALL_BASE.
SOURCE_PATH="$INSTALL_BASE/$SOURCE_FOLDER"
THIRD_PARTY_PATH="$INSTALL_BASE/$THIRD_PARTY_FOLDER"
BUILD_PATH="$INSTALL_BASE/$BUILD_FOLDER"
SOUNDS_PATH="$INSTALL_BASE/$SOUNDS_FOLDER"
DB_PATH="$INSTALL_BASE/$DB_FOLDER"
CONFIG_DB_PATH="$DB_PATH"
UNIT_TEST_MODEL_PATH="$INSTALL_BASE/avs-device-sdk/KWD/inputs/SensoryModels/"
UNIT_TEST_MODEL="$THIRD_PARTY_PATH/alexa-rpi/models/spot-alexa-rpi-31000.snsr"
INPUT_CONFIG_FILE="$SOURCE_PATH/avs-device-sdk/Integration/AlexaClientSDKConfig.json"
OUTPUT_CONFIG_FILE="$BUILD_PATH/Integration/AlexaClientSDKConfig.json"
TEMP_CONFIG_FILE="$BUILD_PATH/Integration/tmp_AlexaClientSDKConfig.json"
TEST_SCRIPT="$INSTALL_BASE/test.sh"
LIB_SUFFIX="a"
ANDROID_CONFIG_FILE=""
# Default device serial number if nothing is specified
DEVICE_SERIAL_NUMBER="123456"
# Default device manufacturer name
DEVICE_MANUFACTURER_NAME=${DEVICE_MANUFACTURER_NAME:-"Test Manufacturer"}
# Default device description
DEVICE_DESCRIPTION=${DEVICE_DESCRIPTION:-"Test Device"}
GSTREAMER_AUDIO_SINK="autoaudiosink"
# Download and build PortAudio under $THIRD_PARTY_PATH (static build, no JACK).
build_port_audio() {
    # build port audio
    echo
    echo "==============> BUILDING PORT AUDIO =============="
    echo
    pushd $THIRD_PARTY_PATH
    # -c resumes a partial download if the script is re-run.
    wget -c $PORT_AUDIO_DOWNLOAD_URL
    tar zxf $PORT_AUDIO_FILE
    pushd portaudio
    # --without-jack: the JACK backend is not needed for the SDK sample app.
    ./configure --without-jack
    make
    popd
    popd
}
# Print a friendly platform name ("Raspberry pi" or "Windows mingw64") on
# stdout; prints nothing for unsupported systems, which the caller treats as
# "unknown platform".
# Fixes: removed the unused `result=""` variable and the useless use of cat
# (`cat file | grep` -> `grep file`); output is unchanged.
get_platform() {
    uname_str=`uname -a`
    if [[ "$uname_str" == "Linux "* ]] && [[ -f /etc/os-release ]]
    then
        sys_id=`grep "^ID=" /etc/os-release`
        if [[ "$sys_id" == "ID=raspbian" ]]
        then
            echo "Raspberry pi"
        fi
    elif [[ "$uname_str" == "MINGW64"* ]]
    then
        echo "Windows mingw64"
    fi
}
# Print usage information for this setup script to stdout.
show_help() {
    echo 'Usage: setup.sh <config-json-file> [OPTIONS]'
    echo 'The <config-json-file> can be downloaded from developer portal and must contain the following:'
    echo '   "clientId": "<OAuth client ID>"'
    echo '   "productId": "<your product name for device>"'
    echo ''
    echo 'Optional parameters'
    echo '  -s <serial-number>    If nothing is provided, the default device serial number is 123456'
    echo '  -a <file-name>        The file that contains Android installation configurations (e.g. androidConfig.txt)'
    echo '  -d <description>      The description of the device.'
    echo '  -m <manufacturer>     The device manufacturer name.'
    echo '  -h                    Display this help and exit'
}
# --- Argument parsing -------------------------------------------------------
if [[ $# -lt 1 ]]; then
    show_help
    exit 1
fi
CONFIG_JSON_FILE=$1
if [ ! -f "$CONFIG_JSON_FILE" ]; then
    echo "Config json file not found!"
    show_help
    exit 1
fi
shift 1
OPTIONS=s:a:m:d:h
while getopts "$OPTIONS" opt ; do
    case $opt in
        s )
            DEVICE_SERIAL_NUMBER="$OPTARG"
            ;;
        a )
            ANDROID_CONFIG_FILE="$OPTARG"
            if [ ! -f "$ANDROID_CONFIG_FILE" ]; then
                echo "Android config file is not found!"
                exit 1
            fi
            # Android config is sourced so it can override variables above.
            source $ANDROID_CONFIG_FILE
            ;;
        d )
            DEVICE_DESCRIPTION="$OPTARG"
            ;;
        m )
            DEVICE_MANUFACTURER_NAME="$OPTARG"
            ;;
        h )
            show_help
            exit 1
            ;;
    esac
done
# NOTE(review): this regex only requires at least one valid character anywhere
# in the string; it does not reject serials that also contain invalid
# characters — confirm whether a full-string match (^...$) was intended.
if [[ ! "$DEVICE_SERIAL_NUMBER" =~ [0-9a-zA-Z_]+ ]]; then
    echo 'Device serial number is invalid!'
    exit 1
fi
# The target platform for the build.
PLATFORM=${PLATFORM:-$(get_platform)}
# Source the platform-specific helpers (install_dependencies, run_os_specifics,
# CMAKE_PLATFORM_SPECIFIC, generate_start_script, generate_test_script).
if [ "$PLATFORM" == "Raspberry pi" ]
then
    source pi.sh
elif [ "$PLATFORM" == "Windows mingw64" ]
then
    source mingw.sh
else
    PLATFORM_LOWER=$(echo "${PLATFORM}" | tr '[:upper:]' '[:lower:]')
    if [ "$PLATFORM_LOWER" == "android" ]
    then
        PLATFORM="Android"
        source android.sh
    else
        echo "The installation script doesn't support current system. (System: $(uname -a))"
        exit 1
    fi
fi
# --- Terms and agreements prompt --------------------------------------------
echo "################################################################################"
echo "################################################################################"
echo ""
echo ""
echo "AVS Device SDK $PLATFORM Script - Terms and Agreements"
echo ""
echo ""
echo "The AVS Device SDK is dependent on several third-party libraries, environments, "
echo "and/or other software packages that are installed using this script from "
echo "third-party sources (\"External Dependencies\"). These are terms and conditions "
echo "associated with the External Dependencies "
echo "(available at https://github.com/alexa/avs-device-sdk/wiki/Dependencies) that "
echo "you need to agree to abide by if you choose to install the External Dependencies."
echo ""
echo ""
echo "If you do not agree with every term and condition associated with the External "
echo "Dependencies, enter \"QUIT\" in the command line when prompted by the installer."
echo "Else enter \"AGREE\"."
echo ""
echo ""
echo "################################################################################"
echo "################################################################################"
read input
# Normalize the answer to lowercase so AGREE/agree/Agree all work.
input=$(echo $input | awk '{print tolower($0)}')
if [ $input == 'quit' ]
then
    exit 1
elif [ $input == 'agree' ]
then
    echo "################################################################################"
    echo "Proceeding with installation"
    echo "################################################################################"
else
    echo "################################################################################"
    echo 'Unknown option'
    echo "################################################################################"
    exit 1
fi
# --- Fetch and build the SDK (full build only on first run) ------------------
if [ ! -d "$BUILD_PATH" ]
then
    # Make sure required packages are installed
    echo "==============> INSTALLING REQUIRED TOOLS AND PACKAGE ============"
    echo
    install_dependencies
    # create / paths
    echo
    echo "==============> CREATING PATHS AND GETTING SOUND FILES ============"
    echo
    mkdir -p $SOURCE_PATH
    mkdir -p $THIRD_PARTY_PATH
    mkdir -p $SOUNDS_PATH
    mkdir -p $DB_PATH
    run_os_specifics
    if [ ! -d "${SOURCE_PATH}/avs-device-sdk" ]
    then
        #get sdk
        echo
        echo "==============> CLONING SDK =============="
        echo
        cd $SOURCE_PATH
        git clone --single-branch $CLONE_URL avs-device-sdk
    fi
    # make the SDK
    echo
    echo "==============> BUILDING SDK =============="
    echo
    mkdir -p $BUILD_PATH
    cd $BUILD_PATH
    cmake "$SOURCE_PATH/avs-device-sdk" \
    -DCMAKE_BUILD_TYPE=DEBUG \
    "${CMAKE_PLATFORM_SPECIFIC[@]}"
    cd $BUILD_PATH
    make SampleApp -j2
else
    # Build dir already exists: incremental rebuild only.
    cd $BUILD_PATH
    make SampleApp -j2
fi
# --- Generate AlexaClientSDKConfig.json --------------------------------------
echo
echo "==============> SAVING CONFIGURATION FILE =============="
echo
# Create configuration file with audioSink configuration at the beginning of the file
cat << EOF > "$OUTPUT_CONFIG_FILE"
{
"gstreamerMediaPlayer":{
"audioSink":"$GSTREAMER_AUDIO_SINK"
},
EOF
cd $INSTALL_BASE
bash genConfig.sh config.json $DEVICE_SERIAL_NUMBER $CONFIG_DB_PATH $SOURCE_PATH/avs-device-sdk $TEMP_CONFIG_FILE \
-DSDK_CONFIG_MANUFACTURER_NAME="$DEVICE_MANUFACTURER_NAME" -DSDK_CONFIG_DEVICE_DESCRIPTION="$DEVICE_DESCRIPTION"
# Delete first line from temp file to remove opening bracket
sed -i -e "1d" $TEMP_CONFIG_FILE
# Append temp file to configuration file
cat $TEMP_CONFIG_FILE >> $OUTPUT_CONFIG_FILE
# Delete temp file
rm $TEMP_CONFIG_FILE
echo
echo "==============> FINAL CONFIGURATION =============="
echo
cat $OUTPUT_CONFIG_FILE
generate_start_script
generate_test_script
echo " **** Completed Configuration/Build ***"
|
<reponame>petercunning/notebook
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
# -*- coding: utf-8 -*-
"""
Map tile acquisition
--------------------
Demonstrates cartopy's ability to draw map tiles which are downloaded on
demand from the MapQuest tile server. Internally these tiles are then combined
into a single image and displayed in the cartopy GeoAxes.
"""
import matplotlib.pyplot as plt
from matplotlib.transforms import offset_copy
import cartopy.crs as ccrs
import cartopy.io.img_tiles as cimgt
%matplotlib inline
def main():
    """Render terrain map tiles around Mount St. Helens and annotate the volcano.

    Tiles are downloaded on demand, combined into a single image and shown in
    a cartopy GeoAxes; a marker and an offset text label mark the summit.
    """
    # Create a Stamen Terrain instance.
    # NOTE(review): the Stamen tile service has been retired; StamenTerrain may
    # no longer fetch tiles with current cartopy — confirm, and consider an
    # alternative tile source (e.g. cimgt.OSM) if this breaks.
    terrain = cimgt.StamenTerrain()
    # Create a GeoAxes in the tile's projection.
    plt.figure(figsize=(10,10))
    ax = plt.axes(projection=terrain.crs)
    # Limit the extent of the map to a small longitude/latitude range.
    ax.set_extent([-122.3, -122, 46.1, 46.3])
    # Add the terrain tiles at zoom level 12.
    ax.add_image(terrain, 12)
    # Add a marker for the Mount Saint Helens volcano.
    plt.plot(-122.189611,46.205868, marker='o', color='yellow', markersize=12,
    alpha=0.7, transform=ccrs.Geodetic())
    # Use the cartopy interface to create a matplotlib transform object
    # for the Geodetic coordinate system. We will use this along with
    # matplotlib's offset_copy function to define a coordinate system which
    # translates the text by 25 pixels to the left.
    geodetic_transform = ccrs.Geodetic()._as_mpl_transform(ax)
    text_transform = offset_copy(geodetic_transform, units='dots', x=-25)
    # Add text 25 pixels to the left of the volcano.
    plt.text(-122.189611,46.205868, u'Mount Saint Helens Volcano',
    verticalalignment='center', horizontalalignment='right',
    transform=text_transform,
    bbox=dict(facecolor='wheat', alpha=0.5, boxstyle='round'))
    # Draw gridlines with labels on the left and bottom edges only.
    gl=ax.gridlines(draw_labels=True)
    gl.xlabels_top = False
    gl.ylabels_right = False
    plt.show()
if __name__ == '__main__':
    main()
# <codecell>
# <codecell>
# <codecell>
|
import { NgModule } from '@angular/core';
import { Routes, RouterModule } from '@angular/router';
import { HomeComponent } from './home/home.component';
import { AuthGuard } from './auth/auth.guard';
import { ServicesComponent } from './services/services.component';
import { CompaniesListComponent } from './companies-list/companies-list.component';
import { FormsModule } from '@angular/forms';
import { CompanyeditComponent } from './companyedit/companyedit.component';
import { ProductListComponent } from './product-list/product-list.component';
import { ProducteditComponent } from './productedit/productedit.component';
import { InventoryComponent } from './inventory/inventory.component';
import { ShoppingcartComponent } from './shoppingcart/shoppingcart.component';
// Route table: the home page is public; every other page is protected by
// AuthGuard.
const routes: Routes = [
  { path: '', component: HomeComponent },
  { path: 'services', component: ServicesComponent, canActivate: [AuthGuard] },
  { path: 'companies-list', component: CompaniesListComponent, canActivate: [AuthGuard] },
  { path: 'companyedit/:id', component: CompanyeditComponent, canActivate: [AuthGuard] },
  { path: 'products-list', component: ProductListComponent, canActivate: [AuthGuard] },
  { path: 'productedit/:id', component: ProducteditComponent, canActivate: [AuthGuard]},
  { path: 'shop', component: InventoryComponent, canActivate: [AuthGuard]},
  { path: 'cart', component: ShoppingcartComponent, canActivate: [AuthGuard]}
];
// NOTE(review): FormsModule in a *routing* module's imports is unusual — it is
// unrelated to routing; confirm whether it belongs in the app/feature module.
@NgModule({
  imports: [RouterModule.forRoot(routes), FormsModule],
  exports: [RouterModule],
  providers: [AuthGuard]
})
export class AppRoutingModule { }
|
/**
 * Thin service wrapper over the users repository.
 */
export class UsersService {
  /**
   * @param {{ usersRepository: { findOrCreate: Function } }} deps injected dependencies
   */
  constructor({ usersRepository }) {
    this.usersRepository = usersRepository;
  }

  /**
   * Find a user matching `options`, creating one if none exists.
   * The repository resolves with an array whose first element is the user
   * record; this resolves with that record alone.
   *
   * @param {object} options repository lookup/creation options
   * @returns {Promise<object>} the found or newly created user
   */
  async findOrCreate(options) {
    const [user] = await this.usersRepository.findOrCreate(options);
    return user;
  }
}
|
require 'spec_helper'
require 'rest_spec_helper'
require 'rhc/commands/setup'
# just test the command runner as we already have extensive wizard tests
describe RHC::Commands::Setup do
  subject{ RHC::Commands::Setup }
  let(:instance){ subject.new }
  let!(:config){ base_config }
  # Expose protected methods so examples can call/stub them directly.
  before{ described_class.send(:public, *described_class.protected_instance_methods) }
  # Isolate each example: fresh fake filesystem, stubbed home dir, no real
  # server-list loading.
  before{ FakeFS::FileSystem.clear }
  before{ RHC::Config.stub(:home_dir).and_return('/home/mock_user') }
  before{ RHC::Servers.any_instance.stub(:load) }
  describe '#run' do
    it{ expects_running('setup').should call(:run).on(instance).with(no_args) }
    let(:arguments) { ['setup', '--config', 'test.conf', '-l', 'test@test.foo', '-p', 'password'] }
    # Replace the interactive wizard with a double so #run's exit code only
    # reflects the wizard's reported success/failure.
    before(:each) do
      @wizard = double('wizard')
      @wizard.stub(:run).and_return(true)
      RHC::RerunWizard.stub(:new){ @wizard }
    end
    context 'when no issues' do
      it "should exit 0" do
        expect { run }.to exit_with_code(0)
      end
    end
    context 'when there is an issue' do
      it "should exit 1" do
        @wizard.stub(:run).and_return(false)
        expect { run }.to exit_with_code(1)
      end
    end
  end
  # NOTE(review): duplicate of the first example inside '#run' above —
  # possibly left over from a refactor.
  it{ expects_running('setup').should call(:run).on(instance).with(no_args) }
  # Option parsing defaults and overrides.
  it{ command_for('setup', '--clean').options.clean.should be_true }
  it{ command_for('setup').options.server.should == 'openshift.redhat.com' }
  it{ command_for('setup').options.create_token.should be_nil }
  it{ command_for('setup', '--server', 'foo.com').options.server.should == 'foo.com' }
  it{ command_for('setup', '--no-create-token').options.create_token.should == false }
  it{ command_for('setup', '--create-token').options.create_token.should == true }
  context "when config has use_authorization_tokens=false" do
    let!(:config){ base_config{ |c, d| d.add('use_authorization_tokens', 'false') } }
    it{ command_for('setup').options.use_authorization_tokens.should == false }
  end
  context "when config has use_authorization_tokens=true" do
    let!(:config){ base_config{ |c, d| d.add('use_authorization_tokens', 'true') } }
    it{ command_for('setup').options.use_authorization_tokens.should be_true }
  end
  # The context below is disabled via a =begin/=end block comment. NOTE: Ruby
  # ignores any text on the =begin and =end lines themselves, so the trailing
  # 'end' on the =end line is inert.
=begin context 'when libra_server is set' do
before{ ENV.stub(:[]).with('LIBRA_SERVER').and_return('bar.com') }
it{ command_for('setup').config['libra_server'].should == 'bar.com' }
it{ command_for('setup').options.server.should == 'bar.com' }
it{ command_for('setup', '--server', 'foo.com').options.server.should == 'foo.com' }
=end end
  context 'when --clean is used' do
    let!(:config){ base_config{ |config, defaults| defaults.add 'libra_server', 'test.com' } }
    it("should ignore a config value"){ command_for('setup', '--clean').options.server.should == 'openshift.redhat.com' }
  end
  context 'when -d is passed' do
    let(:arguments) { ['setup', '-d', '-l', '<EMAIL>'] }
    # 'y' for the password prompt
    let(:input) { ['', 'y', '', ''] }
    let!(:rest_client){ MockRestClient.new }
    it("succeeds"){ FakeFS{ expect { run input }.to exit_with_code 0 } }
    it("the output includes debug output") do
      FakeFS{ run_output( input ).should match 'DEBUG' }
    end
  end
  context 'when -l is used to specify the user name' do
    let(:arguments) { ['setup', '-l', '<EMAIL>'] }
    # 'y' for the password prompt
    let(:input) { ['', 'y', '', ''] }
    let!(:rest_client){ MockRestClient.new }
    it("succeeds"){ FakeFS{ expect { run input }.to exit_with_code 0 } }
    it("sets the user name to the value given by the command line") do
      FakeFS{ run_output( input ).should match '<EMAIL>' }
    end
  end
  describe 'help' do
    let(:arguments) { ['setup', '--help'] }
    context 'help is run' do
      it "should display help" do
        @wizard.stub(:run).and_return(true)
        expect { run }.to exit_with_code(0)
      end
      it('should output usage') { run_output.should match("Connects to an OpenShift server to get you started. Will") }
    end
  end
  describe '--autocomplete' do
    let(:arguments) { ['setup', '--autocomplete'] }
    # Fake the installed gem's autocomplete payload so the command finds it.
    before do
      path = File.join(Gem.loaded_specs['rhc'].full_gem_path, "autocomplete")
      FakeFS::FileUtils.mkdir_p(path)
      FakeFS::FileUtils.touch(File.join(path, "rhc_bash"))
    end
    context 'is passed' do
      it('should output information') { FakeFS{ run_output.should match("To enable tab-completion") } }
      it('should output the gem path') { FakeFS{ run_output.should match File.join(RHC::Config.home_conf_dir, 'bash_autocomplete') } }
    end
  end
end
|
#!/bin/sh
# Regenerate the Python gRPC stubs (plus mypy type stubs) from the spaCy proto
# definition, writing output next to the source (-I. / --*_out=.).
python3 -m grpc_tools.protoc -I. --python_out=. --mypy_out=. --grpc_python_out=. spacy_grpc/spacy.proto
|
# Denormalizes delicious source-tag names onto links.source_tags as a
# comma-joined string, then drops the source_tags/source_taggings tables.
class DropSourceTags < ActiveRecord::Migration
  # Models are (re)defined locally so the migration does not depend on app
  # classes that may have changed or been removed since it was written.
  class ::SourceTag < ActiveRecord::Base
    has_many :source_taggings
    has_many :links, :through => :source_taggings
  end
  class ::SourceTagging < ActiveRecord::Base
    belongs_to :source_tag
    belongs_to :link
  end
  class ::Link < ActiveRecord::Base
    has_many :source_taggings
    has_many :delicious_tags, :source => :source_tag, :through => :source_taggings
  end
  def self.up
    add_column :links, :source_tags, :text
    # Pick up the freshly added column before writing to it.
    Link.reset_column_information
    # Batched to keep memory bounded on large links tables.
    Link.find_each(:batch_size => 100) do |link|
      link.send(:write_attribute, :source_tags, link.delicious_tags.map(&:name).join(','))
      link.save
    end
    drop_table :source_tags
    drop_table :source_taggings
  end
  # NOTE(review): down restores the schema only — the dropped tag/tagging rows
  # cannot be recovered, so data loss is permanent once up has run.
  def self.down
    remove_column :links, :source_tags
    create_table "source_taggings", :force => true do |t|
      t.integer  "link_id"
      t.integer  "source_tag_id"
      t.integer  "position"
      t.datetime "created_at"
      t.datetime "updated_at"
    end
    create_table "source_tags", :force => true do |t|
      t.string   "name"
      t.datetime "created_at"
      t.datetime "updated_at"
    end
  end
end
|
<gh_stars>100-1000
import React from "react";
import { CookieConsent } from "@site/src/features/cookie-consent";
// Default implementation, that you can customize
// https://docusaurus.io/docs/using-themes#wrapper-your-site-with-root
// Docusaurus Root wrapper: renders the page content and appends the global
// cookie-consent banner on every page.
function Root({ children }) {
  return (
    <>
      {children}
      {/* NOTE: exp with HotJar feedback widget (FEEDBACK-325) */}
      {/* <Feedback /> */}
      <CookieConsent />
    </>
  );
}
export default Root;
|
require 'test_helper'
# Placeholder generated by Rails scaffolding; no helper methods under test yet.
class RestoresHelperTest < ActionView::TestCase
end
|
/* Fix: #import is a non-standard (Objective-C/GCC) directive; standard C uses
 * #include. */
#include <stdio.h>

/*
 * Print the prime numbers between 1 and 50, space-separated.
 *
 * Fix: the outer loop previously started at 1, so 1 (which is not prime by
 * definition) was printed; primality testing now starts at 2.
 */
int main()
{
    int num, i, isPrime;

    printf("Prime numbers between 1 and 50 are: \n");
    for (num = 2; num <= 50; num++)
    {
        /* Assume prime until a divisor is found. */
        isPrime = 1;
        /* Trial division: a divisor (other than 1 and num) must be <= num/2. */
        for (i = 2; i <= num / 2; i++)
        {
            if (num % i == 0)
            {
                isPrime = 0;
                break;
            }
        }
        if (isPrime == 1)
        {
            printf("%d ", num);
        }
    }
    return 0;
}
|
<filename>components/layout/MainLayout.js
import Header from './Header';
import Footer from './Footer';
const MainLayout = props => {
return (
<div className='h-100 d-flex flex-column'>
<div>
<Header lang={props.lang} small={props.smallHeader} activePage={props.activePage} otherLangLink={props.otherLangLink}/>
<div className='container my-5'>
{props.children}
</div>
</div>
<div className='mt-auto'>
<Footer lang={props.lang} year={props.year}/>
</div>
</div>
);
}
export default MainLayout;
|
#!/bin/bash
# Configuration script for libpng 1.6.37
# Library release date: 2019/04/14
# Exports the name/version/tarball/URL/checksum metadata the build framework
# uses to download, verify and install this library; FM_LIBPNG_INSTALL_CHECK
# is the file whose presence marks a completed install.
export FM_LIBPNG_NAME="libpng"
export FM_LIBPNG_VERSION="1.6.37"
export FM_LIBPNG_FULL_NAME="${FM_LIBPNG_NAME}-${FM_LIBPNG_VERSION}"
export FM_LIBPNG_TARBALL_NAME="${FM_LIBPNG_FULL_NAME}.tar.xz"
export FM_LIBPNG_TARBALL_DOWNLOAD_URL="https://download.sourceforge.net/libpng/${FM_LIBPNG_TARBALL_NAME}"
export FM_LIBPNG_INSTALL_CHECK="include/png.h"
export FM_LIBPNG_HASH="505e70834d35383537b6491e7ae8641f1a4bed1876dbfe361201fc80868d88ca"
export FM_LIBPNG_HASH_TYPE="SHA-256"
|
<reponame>Ianwanarua/McJowells-Pizza<gh_stars>0
//Declaration
// NOTE(review): these module-level bindings are shadowed by the constructor
// parameters below and are never read — candidates for deletion.
let type;
let crust;
let topping;

/**
 * One pizza order line.
 * @param {number} type    pizza type (select option value 0-5)
 * @param {number} size    size (0 = small, 1 = medium, 2 = large)
 * @param {number} crust   crust choice (0-2)
 * @param {number} topping topping choice (0-4)
 */
function McPizza(type, size, crust, topping) {
    this.type = type;
    this.size = size;
    this.crust = crust;
    this.topping = topping;
}

/**
 * Crust price (Ksh) for this order; undefined for unknown crust values,
 * matching the original if/else chain.
 */
McPizza.prototype.getCrust = function () {
    var crustPrices = [50, 100, 150];
    return crustPrices[this.crust];
};

/**
 * Topping price (Ksh) for this order.
 * Fix: removed the dead `var count = $("#topping :selected").length;` line —
 * it was never used and made this pure price lookup depend on jQuery/DOM.
 */
McPizza.prototype.getTopping = function () {
    var toppingPrices = [100, 150, 100, 150, 200];
    return toppingPrices[this.topping];
};

/**
 * Base price (Ksh) by pizza type and size; false for unknown types (as before).
 * Fixes: removed the same dead jQuery line and collapsed the 60-line if/else
 * ladder into a price table.
 */
McPizza.prototype.getSize = function () {
    // Rows: pizza type 0-5; columns: [small, medium, large].
    var priceTable = [
        [400, 450, 550],
        [700, 800, 1500],
        [800, 1000, 1800],
        [900, 1500, 2000],
        [500, 950, 2500],
        [400, 1000, 2400]
    ];
    var row = priceTable[this.type];
    if (!row) {
        return false; // unknown pizza type
    }
    if (this.size === 0) {
        return row[0];
    }
    if (this.size === 1) {
        return row[1];
    }
    return row[2];
};
//Calculating the total cost
// Sums every ".total_pizza" line item in the order table, adds a flat
// Ksh 100 delivery fee when the "yes" (deliver) radio is checked, and writes
// the result into #result / #totalCost. Pops a sweetalert with the location.
function fullBill() {
    var areaLocation = document.getElementById("myArea").value;
    var add = 0;
    // Accumulate only cells that hold a non-empty numeric value.
    $(".total_pizza").each(function () {
        var value = $(this).text();
        if (!isNaN(value) && value.length != 0) {
            add += parseFloat(value);
        }
    });
    if (document.getElementById('yes').checked) {
        var result = "Your order is Ksh. " + add + " with a delivery fee of Ksh. 100 ";
        var orderBill = add + 100;
        var total = "Total: Ksh. " + orderBill + " .00";
        $('#result').text(result);
        $('#totalCost').text(total);
        swal({
            title: "Your order will be delivered to " + areaLocation + " at kshs 100. ",//sweet alert for popping up
        })
    } else {
        var total = "Total: Ksh. " + add + " .00";
        $('#totalCost').text(total)
    }
}
//checkout button
// Confirms the order with a sweetalert, then reloads the page to reset the
// form and order table once the alert is dismissed.
function checkout() {
    swal({
        title: "Your order has been taken." + "\r\n" + "Thank You for choosing Mcjowells Pizza",
        icon: "success",
    }).then((value) => {
        location.reload();
    });
}
// Page wiring: toggles the delivery-location field, handles the Proceed
// button (validate -> price -> append order row -> total) and Checkout.
$(document).ready(function () {
    //show location if prompted to
    $('.radioBtn').change(function () {
        if (document.getElementById("yes").checked) {
            $('.location').show();
        } else {
            $('.location').hide();
        }
    });
    //proceed button
    $('#Proceed').click(function () {
        // Raw string values, read first purely for presence validation.
        var type = $('#type option:selected').val();
        var size = $('#size option:selected').val();
        var crust = $('#crust option:selected').val();
        var quantity = $('#quantity').val();
        var topping = $('#topping option:selected').val();
        //validate fields
        if (type == '' || size == '' || crust == '' || topping == '' || quantity == '') {
            alert('Fill in the form completely')
        } else if (document.getElementById("yes").checked && $('#myArea').val() == '') {
            alert('Enter location')
        } else {
            // Re-read the selections as integers for the price math.
            var selectedType = parseInt($('#type option:selected').val());
            var selectedSize = parseInt($('#size option:selected').val());
            var selectedCrust = parseInt($('#crust option:selected').val());
            var quantity = parseInt($('#quantity').val());
            var selectedTopping = parseInt($('#topping option:selected').val());
            //creates new object
            var newOrder = new McPizza(selectedType, selectedSize, selectedCrust, selectedTopping);
            //price of neworder
            var pizzaBill = (newOrder.getSize() + newOrder.getCrust() + newOrder.getTopping()) * quantity;
            //the table should show this
            $(".table tbody:last").append("<tr>" +
                "<td>" + $('#type option:selected').text() + "</td>" +
                "<td>" + $('#crust option:selected').text() + "</td>" +
                "<td>" + $('#size option:selected').text() + "</td>" +
                "<td>" + $('#topping option:selected').text() + "</td>" +
                "<td>" + $('#quantity').val() + "</td>" +
                "<td><span class='total_pizza'>" + pizzaBill + "</span></td>" +
                "</tr>");
            // Recompute the grand total from all rows now in the table.
            $(fullBill);
        }
    })
    $('#checkout').click(function () {
        checkout();
    })
})
|
<gh_stars>0
/**
 * Remote-control implementation of the Controlador interface: tracks power
 * state, volume (0-100) and play/pause state; openMenu() prints the current
 * state to stdout.
 *
 * Fixes: increaseVolume used "&lt;= 100" allowing the volume to reach 101, and
 * decreaseVolume used "&gt;= 0" allowing it to reach -1; both bounds corrected.
 */
public class ControleRemoto implements Controlador {
    // PROPRIETIES
    private Integer volume;            // current volume, range 0-100
    private Boolean isOn, playing;     // power state / playback state

    // CONSTRUCT
    public ControleRemoto(){
        this.volume = 50;
        this.isOn = false;
        this.playing = false;
    }

    // GETTERS (private: state is only exposed via openMenu's printout)
    private Integer getVolume(){
        return volume;
    }
    private Boolean getIsOn(){
        return isOn;
    }
    private Boolean getPlaying(){
        return playing;
    }

    // SETTERS
    private void setVolume(Integer volume){
        this.volume = volume;
    }
    private void setIsOn(Boolean isOn){
        this.isOn = isOn;
    }
    private void setPlaying(Boolean playing){
        this.playing = playing;
    }

    // ABSTRACTS METHODS
    @Override
    public void bind() {
        setIsOn(true);
    }
    @Override
    public void off() {
        setIsOn(false);
    }
    @Override
    public void openMenu() {
        // Print power state, numeric volume, a volume bar (one '|' per 10
        // units), then the playback state.
        print(getIsOn().toString());
        print(getVolume().toString());
        for (int i = 0; i <= getVolume(); i += 10){
            System.out.print("|");
        }
        print(getPlaying().toString());
    }
    @Override
    public void closeMenu() {
        print("Fechando o menu!");
    }
    @Override
    public void increaseVolume() {
        // Fix: was "<= 100", which allowed volume to reach 101.
        if (this.getIsOn() && this.getVolume() < 100){
            setVolume(this.getVolume() + 1);
        }
    }
    @Override
    public void decreaseVolume() {
        // Fix: was ">= 0", which allowed volume to reach -1.
        if (this.getIsOn() && this.getVolume() > 0){
            setVolume(this.getVolume() - 1);
        }
    }
    @Override
    public void callmute() {
        if (this.getIsOn() && this.getVolume() >= 0){
            this.setVolume(0);
        }
    }
    @Override
    public void mutedoff() {
        // NOTE(review): restores a fixed volume of 50 rather than the volume
        // held before callmute() — confirm this is the intended behavior.
        if (this.getIsOn() && this.getVolume() >= 0){
            this.setVolume(50);
        }
    }
    @Override
    public void play() {
        if (this.getIsOn() && !this.getPlaying()){
            this.setPlaying(true);
        }
    }
    @Override
    public void pause() {
        if (this.getIsOn() && this.getPlaying()){
            this.setPlaying(false);
        }
    }

    // Small stdout helper shared by the methods above.
    protected static void print(String text){
        System.out.println(text);
    }
}
|
#!/usr/bin/env bash
# Examples:
# export API="bootstrap=192.168.222.30:6443,master-0=192.168.222.31:6443,master-1=192.168.222.32:6443,master-3=192.168.222.33:6443"
# export API_LISTEN="127.0.0.1:6443,192.168.222.1:6443"
# export INGRESS_HTTP="master-0=192.168.222.31:80,master-1=192.168.222.32:80,master-3=192.168.222.33:80,worker-0=192.168.222.34:80,worker-1=192.168.222.35:80,worker-3=192.168.222.36:80"
# export INGRESS_HTTP_LISTEN="127.0.0.1:80,192.168.222.1:80"
# export INGRESS_HTTPS="master-0=192.168.222.31:443,master-1=192.168.222.32:443,master-3=192.168.222.33:443,worker-0=192.168.222.34:443,worker-1=192.168.222.35:443,worker-3=192.168.222.36:443"
# export INGRESS_HTTPS_LISTEN="127.0.0.1:443,192.168.222.1:443"
# export MACHINE_CONFIG_SERVER="bootstrap=192.168.222.30:22623,master-0=192.168.222.31:22623,master-1=192.168.222.32:22623,master-3=192.168.222.33:22623"
# export MACHINE_CONFIG_SERVER_LISTEN="127.0.0.1:22623,192.168.222.1:22623"
# export STATS_LISTEN="127.0.0.1:1984"
# export STATS_ADMIN_PASSWORD="aengeo4oodoidaiP"
# Render haproxy "server <name> <ip:port> check" lines from a comma-separated
# "name=ip:port,..." string passed as $1.
# Fix: all variables (especially IFS) are now local. The original set IFS=,
# globally, so at the *next* call site any unquoted ${VAR:-default} expansion
# was split on commas and only its first element reached $1, silently dropping
# the remaining members/listen addresses. It also corrects the stale
# "${i%:=}" comment (the code uses ${i%=*}).
function build_member_conf {
  local DATA=$1
  local IFS=,
  local CONFIG=""
  local i
  for i in $DATA ; do
    # i contains 'name=ip:port'
    # ${i%=*} => name
    # ${i#*=} => ip:port
    CONFIG+=" server ${i%=*} ${i#*=} check\n"
  done
  echo -e $CONFIG;
}
# Render haproxy "bind <ip:port>" lines from a comma-separated "ip:port,..."
# string passed as $1.
# Fix: all variables (especially IFS) are now local — the original leaked
# IFS=, into the global scope, corrupting word splitting at later unquoted
# call-site expansions (see build_member_conf).
function build_listen_conf {
  local DATA=$1
  local IFS=,
  local CONFIG=""
  local i
  for i in $DATA ; do
    # i contains 'ip:port'
    CONFIG+=" bind ${i}\n"
  done
  echo -e $CONFIG;
}
# Build the optional stats frontend only when both a listen address and an
# admin password are provided.
if [ ! -z "${STATS_LISTEN}" ] && [ ! -z "${STATS_ADMIN_PASSWORD}" ] ; then
    echo "Stats enabled;"
    export STATS_CFG="
frontend stats
    bind $STATS_LISTEN
    mode http
    log global
    maxconn 10
    timeout client 100s
    stats enable
    stats hide-version
    stats refresh 30s
    stats show-node
    stats auth admin:$STATS_ADMIN_PASSWORD
    stats uri /
"
else
    export STATS_CFG=""
fi
# Render the member ("server ...") and listen ("bind ...") snippets for each
# frontend/backend; *_LISTEN values fall back to wildcard defaults.
export INGRESS_HTTP_CFG=$(build_member_conf $INGRESS_HTTP)
export INGRESS_HTTP_LISTEN_CFG=$(build_listen_conf ${INGRESS_HTTP_LISTEN:-*:80})
export INGRESS_HTTPS_CFG=$(build_member_conf $INGRESS_HTTPS)
export INGRESS_HTTPS_LISTEN_CFG=$(build_listen_conf ${INGRESS_HTTPS_LISTEN:-*:443})
export API_CFG=$(build_member_conf $API)
export API_LISTEN_CFG=$(build_listen_conf ${API_LISTEN:-*:6443})
export MACHINE_CONFIG_SERVER_CFG=$(build_member_conf $MACHINE_CONFIG_SERVER)
export MACHINE_CONFIG_SERVER_LISTEN_CFG=$(build_listen_conf ${MACHINE_CONFIG_SERVER_LISTEN:-*:22623})
# Substitute all exported *_CFG variables into the template, then hand control
# to the container command (typically haproxy itself).
envsubst < haproxy-template.cfg > /haproxy.cfg
exec "$@"
|
<filename>src/main/java/br/com/alinesolutions/anotaai/model/produto/EntradaMercadoria.java
package br.com.alinesolutions.anotaai.model.produto;
import java.time.ZonedDateTime;
import java.util.List;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.DiscriminatorValue;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.OneToMany;
import javax.validation.constraints.NotNull;
import javax.xml.bind.annotation.XmlRootElement;
import org.hibernate.annotations.SQLDelete;
import org.hibernate.annotations.Where;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import br.com.alinesolutions.anotaai.model.BaseEntity;
import br.com.alinesolutions.anotaai.model.produto.EntradaMercadoria.EntradaMercadoriaConstant;
import br.com.alinesolutions.anotaai.model.usuario.Cliente;
@NamedQueries({ @NamedQuery(name = EntradaMercadoriaConstant.FIND_BY_NOME_KEY, query = EntradaMercadoriaConstant.FIND_BY_NOME_QUERY),
        @NamedQuery(name = EntradaMercadoriaConstant.FIND_BY_DATE_KEY, query = EntradaMercadoriaConstant.FIND_BY_DATE_QUERY),
        @NamedQuery(name = EntradaMercadoriaConstant.FIND_BY_DATE_COUNT, query = EntradaMercadoriaConstant.FIND_BY_DATE_QUERY_COUNT),
        @NamedQuery(name = EntradaMercadoriaConstant.FIND_BY_NOME_AND_DATE_KEY, query = EntradaMercadoriaConstant.FIND_BY_NOME_AND_DATE_QUERY),
        @NamedQuery(name = EntradaMercadoriaConstant.FIND_BY_NOME_AND_DATE_COUNT, query = EntradaMercadoriaConstant.FIND_BY_NOME_AND_DATE_QUERY_COUNT),
        @NamedQuery(name = EntradaMercadoriaConstant.LIST_ALL_KEY, query = EntradaMercadoriaConstant.LIST_ALL_QUERY),
        @NamedQuery(name = EntradaMercadoriaConstant.ITEM_ENTRADA_BY_ENTRADA_KEY, query = EntradaMercadoriaConstant.ITEM_ENTRADA_BY_ENTRADA_QUERY),
        @NamedQuery(name = EntradaMercadoriaConstant.FIND_BY_NOME_COUNT, query = EntradaMercadoriaConstant.FIND_BY_NOME_QUERY_COUNT),
        @NamedQuery(name = EntradaMercadoriaConstant.LIST_ALL_COUNT, query = EntradaMercadoriaConstant.LIST_ALL_QUERY_COUNT),
        @NamedQuery(name = EntradaMercadoriaConstant.FIND_BY_ENTRADA_MERCADORIA, query = EntradaMercadoriaConstant.FIND_BY_ENTRADA_MERCADORIA_QUERY),
        @NamedQuery(name = EntradaMercadoriaConstant.FIND_BY_ID_KEY, query = EntradaMercadoriaConstant.FIND_BY_ID_QUERY)
})
@DiscriminatorValue("ENTRADA")
@JsonIgnoreProperties(ignoreUnknown = true)
@JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id", scope = EntradaMercadoria.class)
@Entity
// Soft-delete: rows are never physically removed; SQLDelete flips the
// "ativo" flag and the @Where clause filters inactive rows from all queries.
@Where(clause = "ativo = true")
@SQLDelete(sql = "update EntradaMercadoria set ativo = false where id = ?")
@XmlRootElement
/**
 * JPA entity for a merchandise entry ("entrada de mercadoria").
 *
 * <p>An entry belongs to a {@code Cliente}, carries a business code and two
 * timestamps (entry date and registration date), and owns a list of
 * {@code ItemEntrada} items. All named queries used by the repository layer
 * are declared above and keyed by the constants in
 * {@link EntradaMercadoriaConstant}.</p>
 */
public class EntradaMercadoria extends BaseEntity<Long, EntradaMercadoria> {

    private static final long serialVersionUID = 1L;

    // Business code of the entry (not the primary key).
    @NotNull
    @Column(nullable = false)
    private Long codigo;

    // Items of this entry; cascades keep the item rows in sync with the entry.
    @OneToMany(mappedBy = "entradaMercadoria", cascade = { CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST })
    private List<ItemEntrada> itens;

    // When the merchandise actually entered stock.
    @NotNull
    private ZonedDateTime dataEntrada;

    // When this record was registered in the system.
    @NotNull
    private ZonedDateTime dataCadastro;

    // Owning client; DETACH-only cascade so client rows are never
    // persisted/removed through this association.
    @NotNull
    @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = { CascadeType.DETACH })
    private Cliente cliente;

    public List<ItemEntrada> getItens() {
        return itens;
    }

    public void setItens(List<ItemEntrada> itens) {
        this.itens = itens;
    }

    public ZonedDateTime getDataEntrada() {
        return dataEntrada;
    }

    public void setDataEntrada(ZonedDateTime dataEntrada) {
        this.dataEntrada = dataEntrada;
    }

    /** Convenience constructor used by JPQL constructor expressions (id only). */
    public EntradaMercadoria(Long id) {
        setId(id);
    }

    /** No-arg constructor required by JPA. */
    public EntradaMercadoria() {
    }

    /** Projection constructor used by the FIND_BY_* and LIST_ALL queries. */
    public EntradaMercadoria(Long id, ZonedDateTime dataEntrada, Long codigo) {
        setId(id);
        setDataEntrada(dataEntrada);
        setCodigo(codigo);
    }

    /** Projection constructor (id + entry date). */
    public EntradaMercadoria(Long id, ZonedDateTime dataEntrada) {
        setId(id);
        setDataEntrada(dataEntrada);
    }

    public Long getCodigo() {
        return codigo;
    }

    public void setCodigo(Long codigo) {
        this.codigo = codigo;
    }

    public Cliente getCliente() {
        return cliente;
    }

    public void setCliente(Cliente cliente) {
        this.cliente = cliente;
    }

    public ZonedDateTime getDataCadastro() {
        return dataCadastro;
    }

    public void setDataCadastro(ZonedDateTime dataCadastro) {
        this.dataCadastro = dataCadastro;
    }

    /**
     * Named-query keys and JPQL strings for {@link EntradaMercadoria} and its
     * items. Keys (…_KEY/…_COUNT) are referenced by repositories; queries use
     * constructor expressions so only projection columns are fetched.
     */
    public interface EntradaMercadoriaConstant {
        String ITEM_MERCADORIA = "Item(s) de mercadoria";
        String ENTRADA_MERCADORIA = "Entrada de mercadoria";
        String FIELD_NOME = "nome";
        String FIND_BY_NOME_KEY = "EntradaMercadoria.findByName";
        String FIND_BY_DATE_KEY = "EntradaMercadoria.findByDate";
        String FIND_BY_NOME_LIKE_KEY = "EntradaMercadoria.findByNameLike";
        String FIND_BY_NOME_QUERY = "select distinct new br.com.alinesolutions.anotaai.model.produto.EntradaMercadoria(e.id,e.dataEntrada,e.codigo) from EntradaMercadoria e join e.itens itens join itens.movimentacaoProduto mov join mov.produto prod where prod.descricao =:descricao order by e.dataEntrada";
        String FIND_BY_DATE_QUERY = "select distinct new br.com.alinesolutions.anotaai.model.produto.EntradaMercadoria(e.id,e.dataEntrada,e.codigo) from EntradaMercadoria e where trunc(e.dataEntrada) = trunc(:dataEntrada) order by e.dataEntrada";
        String FIND_BY_ID_KEY = "EntradaMercadoria.findById";
        String FIND_BY_ID_QUERY = "select new br.com.alinesolutions.anotaai.model.produto.EntradaMercadoria(e.id,e.dataEntrada,e.codigo) from EntradaMercadoria e where e.id = :id";
        String ITEM_ENTRADA_BY_ENTRADA_KEY = "EntradaMercadoria.itemEntradaByEntrada";
        String ITEM_ENTRADA_BY_ENTRADA_QUERY = "select new br.com.alinesolutions.anotaai.model.produto.ItemEntrada(ie.id, ie.precoCusto, mov.id, mov.quantidade, p.id, p.descricao) from ItemEntrada ie left join ie.movimentacaoProduto mov left join mov.produto p where ie.entradaMercadoria = :entradaMercadoria";
        String FIND_BY_ENTRADA_MERCADORIA = "ItemEntrada.findByName";
        String FIND_BY_ENTRADA_MERCADORIA_QUERY = "select new br.com.alinesolutions.anotaai.model.produto.ItemEntrada(i.precoCusto) from ItemEntrada i where i.entradaMercadoria.id =:idEntradaMercadoria";
        String LIST_ALL_KEY = "EntradaMercadoria.listAll";
        String LIST_ALL_QUERY = "select new br.com.alinesolutions.anotaai.model.produto.EntradaMercadoria(e.id,e.dataEntrada,e.codigo) from EntradaMercadoria e order by e.dataEntrada";
        String LIST_ALL_COUNT = "EntradaMercadoria.listAllCount";
        String FIND_BY_NOME_COUNT = "EntradaMercadoria.findByNameCount";
        String FIND_BY_DATE_COUNT = "EntradaMercadoria.findByDateCount";
        String FIND_BY_NOME_AND_DATE_KEY = "EntradaMercadoria.findByDateAndName";
        String FIND_BY_NOME_AND_DATE_COUNT = "EntradaMercadoria.findByDateAndNameCount";
        String FIND_BY_DATE_QUERY_COUNT = "select distinct count(e) from EntradaMercadoria e where trunc(e.dataEntrada) = trunc(:dataEntrada)";
        String FIND_BY_NOME_QUERY_COUNT = "select distinct count(e) from EntradaMercadoria e join e.itens itens join itens.movimentacaoProduto mov join mov.produto prod where prod.descricao =:descricao";
        String LIST_ALL_QUERY_COUNT = "select count(e) from EntradaMercadoria e";
        String FIND_BY_NOME_AND_DATE_QUERY_COUNT = "select distinct count(e) from EntradaMercadoria e join e.itens itens join itens.movimentacaoProduto mov join mov.produto prod where prod.descricao =:descricao and trunc(e.dataEntrada) = trunc(:dataEntrada)";
        String FIND_BY_NOME_AND_DATE_QUERY = "select distinct new br.com.alinesolutions.anotaai.model.produto.EntradaMercadoria(e.id,e.dataEntrada,e.codigo) from EntradaMercadoria e join e.itens itens join itens.movimentacaoProduto mov join mov.produto prod where prod.descricao =:descricao and trunc(e.dataEntrada) = trunc(:dataEntrada) order by e.dataEntrada";
    }
}
|
<reponame>OpenHosec/govici
// Copyright (C) 2019 Arroyo Networks, Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package vici
import (
"testing"
)
// TestUnmarshalBoolTrue verifies that the message value "yes" is
// unmarshalled into a bool struct field as true.
func TestUnmarshalBoolTrue(t *testing.T) {
	var target struct {
		Field bool `vici:"field"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "yes"},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling bool value: %v", err)
	}
	if !target.Field {
		t.Fatalf("Unmarshalled boolean value is invalid.\nExpected: true\nReceived: %+v", target.Field)
	}
}
// TestUnmarshalBoolFalse verifies that the message value "no" is
// unmarshalled into a bool struct field as false (field starts true so
// the unmarshal must actually flip it).
func TestUnmarshalBoolFalse(t *testing.T) {
	target := struct {
		Field bool `vici:"field"`
	}{Field: true}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "no"},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling bool value: %v", err)
	}
	if target.Field {
		t.Fatalf("Unmarshalled boolean value is invalid.\nExpected: false\nReceived: %+v", target.Field)
	}
}
// TestUnmarshalBoolInvalid verifies that a non-boolean string produces
// an unmarshalling error for a bool field.
func TestUnmarshalBoolInvalid(t *testing.T) {
	target := struct {
		Field bool `vici:"field"`
	}{Field: true}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "invalid-not-a-bool"},
	}
	if err := UnmarshalMessage(msg, &target); err == nil {
		t.Error("Expected error when unmarshalling invalid boolean value. None was returned.")
	}
}
// TestUnmarshalBoolTruePtr verifies that "yes" unmarshals into a
// *bool field, allocating the pointer and setting it to true.
func TestUnmarshalBoolTruePtr(t *testing.T) {
	var target struct {
		Field *bool `vici:"field"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "yes"},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling bool value to pointer: %v", err)
	}
	if target.Field == nil {
		t.Fatalf("Unmarshalled boolean pointer is nil.")
	}
	if !*target.Field {
		t.Fatalf("Unmarshalled boolean value is invalid.\nExpected: true\nReceived: %+v", *target.Field)
	}
}
// TestUnmarshalBoolFalsePtr verifies that "no" unmarshals into a
// *bool field, allocating the pointer and setting it to false.
func TestUnmarshalBoolFalsePtr(t *testing.T) {
	var target struct {
		Field *bool `vici:"field"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "no"},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling bool value to pointer: %v", err)
	}
	if target.Field == nil {
		t.Fatalf("Unmarshalled boolean pointer is nil.")
	}
	if *target.Field {
		t.Fatalf("Unmarshalled boolean value is invalid.\nExpected: false\nReceived: %+v", *target.Field)
	}
}
// TestUnmarshalInt verifies that a positive decimal string unmarshals
// into an int field.
func TestUnmarshalInt(t *testing.T) {
	var target struct {
		Field int `vici:"field"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "23"},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling int value: %v", err)
	}
	if target.Field != 23 {
		t.Fatalf("Unmarshalled int value is invalid.\nExpected: 23\nReceived: %+v", target.Field)
	}
}
// TestUnmarshalInt2 verifies that a negative decimal string unmarshals
// into an int field.
func TestUnmarshalInt2(t *testing.T) {
	var target struct {
		Field int `vici:"field"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "-23"},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling int value: %v", err)
	}
	if target.Field != -23 {
		t.Fatalf("Unmarshalled int value is invalid.\nExpected: -23\nReceived: %+v", target.Field)
	}
}
// TestUnmarshalInt8 verifies that a small decimal string unmarshals
// into an int8 field.
func TestUnmarshalInt8(t *testing.T) {
	var target struct {
		Field int8 `vici:"field"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "23"},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling int8 value: %v", err)
	}
	if target.Field != 23 {
		t.Fatalf("Unmarshalled int8 value is invalid.\nExpected: 23\nReceived: %+v", target.Field)
	}
}
// TestUnmarshalInt8Overflow verifies behaviour when the value ("1001")
// does not fit in an int8. The original assertion (`Field == 23`) could
// only fail if the field happened to be exactly 23, so it validated
// almost nothing; assert the value the test's own failure message
// documents as expected: 1001 truncated to int8 two's-complement is -23.
func TestUnmarshalInt8Overflow(t *testing.T) {
	intMessage := struct {
		Field int8 `vici:"field"`
	}{
		Field: 0,
	}
	m := &Message{
		[]string{"field"},
		map[string]interface{}{
			"field": "1001",
		},
	}
	err := UnmarshalMessage(m, &intMessage)
	if err != nil {
		t.Fatalf("Error unmarshalling int8 value: %v", err)
	}
	if intMessage.Field != -23 {
		t.Fatalf("Unmarshalled int8 value is invalid.\nExpected: -23 (Overflow)\nReceived: %+v", intMessage.Field)
	}
}
// TestUnmarshalUint verifies that a decimal string unmarshals into a
// uint field.
func TestUnmarshalUint(t *testing.T) {
	var target struct {
		Field uint `vici:"field"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "23"},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling uint value: %v", err)
	}
	if target.Field != 23 {
		t.Fatalf("Unmarshalled uint value is invalid.\nExpected: 23\nReceived: %+v", target.Field)
	}
}
// TestUnmarshalUintInvalid verifies that a negative string produces an
// unmarshalling error for a uint field.
func TestUnmarshalUintInvalid(t *testing.T) {
	var target struct {
		Field uint `vici:"field"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "-1"},
	}
	if err := UnmarshalMessage(msg, &target); err == nil {
		t.Error("Expected error when unmarshalling invalid uint value. None was returned.")
	}
}
// TestUnmarshalEnumType verifies that a message string unmarshals into
// a named ("enum-like") string type.
func TestUnmarshalEnumType(t *testing.T) {
	type TestType string
	const testValue TestType = "test-value"
	enumMessage := struct {
		Field TestType `vici:"field"`
	}{}
	m := &Message{
		[]string{"field"},
		map[string]interface{}{
			"field": "test-value",
		},
	}
	err := UnmarshalMessage(m, &enumMessage)
	if err != nil {
		t.Fatalf("Error unmarshalling enum type value: %v", err)
	}
	if enumMessage.Field != testValue {
		// Fixed copy/paste from the uint test: this failure message
		// previously claimed a "uint value" was invalid.
		t.Fatalf("Unmarshalled enum value is invalid.\nExpected: %+v\nReceived: %+v", testValue, enumMessage.Field)
	}
}
// TestUnmarshalEmbeddedStruct verifies that a nested sub-message
// unmarshals into an embedded struct mapped by its own vici key.
func TestUnmarshalEmbeddedStruct(t *testing.T) {
	const testValue = "unmarshalled-embedded-value"
	type Embedded struct {
		Field string `vici:"field"`
	}
	var target struct {
		Embedded `vici:"embedded"`
	}
	inner := &Message{
		[]string{"field"},
		map[string]interface{}{"field": testValue},
	}
	msg := &Message{
		[]string{"embedded"},
		map[string]interface{}{"embedded": inner},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling into embedded struct: %v", err)
	}
	if target.Field != testValue {
		t.Fatalf("Unmarshalled embedded value is invalid.\nExpected: %+v\nReceived: %+v", testValue, target.Field)
	}
}
// TestUnmarshalInline verifies that an embedded struct tagged ",inline"
// is populated from the parent message's own fields.
func TestUnmarshalInline(t *testing.T) {
	testValue := "unmarshal-inline"
	type Embedded struct {
		Field string `vici:"field"`
	}
	var target struct {
		Embedded `vici:",inline"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": testValue},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling into inlined embedded struct: %v", err)
	}
	if target.Field != testValue {
		t.Fatalf("Unmarshalled inlined embedded value is invalid.\nExpected: %+v\nReceived: %+v", testValue, target.Field)
	}
}
// TestUnmarshalInlineInvalidType verifies that ",inline" on a
// non-struct field is rejected with an error.
func TestUnmarshalInlineInvalidType(t *testing.T) {
	var target struct {
		Field string `vici:",inline"`
	}
	msg := &Message{
		[]string{"field"},
		map[string]interface{}{"field": "test-value"},
	}
	if err := UnmarshalMessage(msg, &target); err == nil {
		t.Error("Expected error when unmarshalling invalid inlined embedded type. None was returned.")
	}
}
// TestUnmarshalInlineComposite verifies that an inlined embedded struct
// and a regular sibling field are both populated from one message.
func TestUnmarshalInlineComposite(t *testing.T) {
	testValue := "unmarshal-inline-composite"
	otherValue := "other-value"
	type Embedded struct {
		Field string `vici:"field"`
	}
	var target struct {
		Embedded `vici:",inline"`
		Other    string `vici:"other"`
	}
	msg := &Message{
		[]string{"field", "other"},
		map[string]interface{}{
			"field": testValue,
			"other": otherValue,
		},
	}
	if err := UnmarshalMessage(msg, &target); err != nil {
		t.Fatalf("Error unmarshalling into inlined embedded struct: %v", err)
	}
	if target.Field != testValue {
		t.Fatalf("Unmarshalled inlined embedded value is invalid.\nExpected: %+v\nReceived: %+v", testValue, target.Field)
	}
	if target.Other != otherValue {
		t.Fatalf("Unmarshalled inlined embedded value is invalid.\nExpected: %+v\nReceived: %+v", otherValue, target.Other)
	}
}
|
#!/bin/bash
# install-gui.sh — install node.js/npm per platform and build the
# Melati blockchain GUI submodule. Must run inside the melati virtualenv
# and NOT as root.
set -e
export NODE_OPTIONS="--max-old-space-size=3000"
if [ -z "$VIRTUAL_ENV" ]; then
	echo "This requires the melati python virtual environment."
	echo "Execute '. ./activate' before running."
	exit 1
fi
if [ "$(id -u)" = 0 ]; then
	echo "The Melati Blockchain GUI can not be installed or run by the root user."
	exit 1
fi
# Allows overriding the branch or commit to build in melati-blockchain-gui
SUBMODULE_BRANCH=$1

UBUNTU=false
# Manage npm and other install requirements on an OS specific basis
if [ "$(uname)" = "Linux" ]; then
	#LINUX=1
	if type apt-get; then
		# Debian/Ubuntu
		UBUNTU=true
		sudo apt-get install -y npm nodejs libxss1
	elif type yum && [ ! -f "/etc/redhat-release" ] && [ ! -f "/etc/centos-release" ] && [ ! -f /etc/rocky-release ]; then
		# AMZN 2
		echo "Installing on Amazon Linux 2."
		curl -sL https://rpm.nodesource.com/setup_12.x | sudo bash -
		sudo yum install -y nodejs
	elif type yum && [ ! -f /etc/rocky-release ] && [ -f /etc/redhat-release ] || [ -f /etc/centos-release ]; then
		# CentOS or Redhat
		echo "Installing on CentOS/Redhat."
		curl -sL https://rpm.nodesource.com/setup_12.x | sudo bash -
		sudo yum install -y nodejs
	elif type yum && [ -f /etc/rocky-release ]; then
		# RockyLinux
		echo "Installing on RockyLinux"
		# Fix: module enable needs root and must not prompt, consistent
		# with every other package command in this script.
		sudo dnf module enable -y nodejs:12
		sudo dnf install -y nodejs
	fi
elif [ "$(uname)" = "Darwin" ] && type brew && ! npm version >/dev/null 2>&1; then
	# Install npm if not installed
	brew install npm
elif [ "$(uname)" = "OpenBSD" ]; then
	pkg_add node
elif [ "$(uname)" = "FreeBSD" ]; then
	pkg install node
fi

# Ubuntu before 20.04LTS has an ancient node.js
echo ""
UBUNTU_PRE_2004=false
if $UBUNTU; then
	UBUNTU_PRE_2004=$(python -c 'import subprocess; process = subprocess.run(["lsb_release", "-rs"], stdout=subprocess.PIPE); print(float(process.stdout) < float(20.04))')
fi

if [ "$UBUNTU_PRE_2004" = "True" ]; then
	echo "Installing on Ubuntu older than 20.04 LTS: Ugrading node.js to stable."
	UBUNTU_PRE_2004=true # Unfortunately Python returns True when shell expects true
	sudo npm install -g n
	sudo n stable
	# NOTE(review): this re-export is a no-op; presumably intended to pick
	# up the node shim installed by `n` — confirm before removing.
	export PATH="$PATH"
fi

if [ "$UBUNTU" = "true" ] && [ "$UBUNTU_PRE_2004" = "False" ]; then
	echo "Installing on Ubuntu 20.04 LTS or newer: Using installed node.js version."
fi

# For Mac and Windows, we will set up node.js on GitHub Actions and Azure
# Pipelines directly, so skip unless you are completing a source/developer install.
# Ubuntu special cases above.
if [ ! "$CI" ]; then
	echo "Running git submodule update --init --recursive."
	echo ""
	git submodule update --init --recursive
	echo "Running git submodule update."
	echo ""
	git submodule update
	cd melati-blockchain-gui

	if [ "$SUBMODULE_BRANCH" ];
	then
		git fetch
		git checkout "$SUBMODULE_BRANCH"
		git pull
		echo ""
		echo "Building the GUI with branch $SUBMODULE_BRANCH"
		echo ""
	fi

	npm install
	npm audit fix || true
	npm run build
else
	echo "Skipping node.js in install.sh on MacOS ci."
fi

echo ""
echo "Melati blockchain install-gui.sh completed."
echo ""
echo "Type 'cd melati-blockchain-gui' and then 'npm run electron &' to start the GUI."
|
require 'tmpdir'
require 'digest/md5'
module Capistrano
  module GitCopy
    # Utility stuff to avoid cluttering of deploy.cap.
    #
    # Wraps a Capistrano execution context and implements the git-copy
    # strategy: keep a local clone in a temp dir, archive it (including
    # submodules) with git-archive-all, then upload and extract the
    # tarball on the remote host.
    class Utility
      # @param context [Object] Capistrano context (provides fetch/test/
      #   execute/capture/upload!/info)
      def initialize(context)
        @context = context
      end

      # Check if repo cache exists
      #
      # @return [Boolean] indicates if repo cache exists
      def test
        test! " [ -d #{repo_path} ] "
      end

      # Check if repo is accessible
      #
      # @return void
      def check
        git :'ls-remote --heads', repo_url
      end

      # Clone repo to cache
      #
      # @return void
      def clone
        execute :mkdir, '-p', tmp_path

        git :clone, fetch(:repo_url), repo_path
      end

      # Update repo and submodules to branch
      #
      # @return void
      def update
        git :remote, :update
        # Hard reset to the resolved commit; local changes in the cache
        # are deliberately discarded.
        git :reset, '--hard', commit_hash

        # submodules
        git :submodule, :init
        git :submodule, :update
        git :submodule, :foreach, '--recursive', :git, :submodule, :update, '--init'

        # cleanup — drop untracked files in the cache and all submodules
        git :clean, '-d', '-f'
        git :submodule, :foreach, '--recursive', :git, :clean, '-d', '-f'
      end

      # Create tar archive
      #
      # @return void
      def prepare_release
        execute git_archive_all_bin, "--prefix=''", archive_path
      end

      # Upload and extract release
      #
      # @return void
      def release
        remote_archive_path = File.join(fetch(:deploy_to), File.basename(archive_path))

        upload! archive_path, remote_archive_path

        execute :mkdir, '-p', release_path
        execute :tar, '-f', remote_archive_path, '-x', '-C', release_path
        execute :rm, '-f', remote_archive_path
      end

      # Set deployed revision
      #
      # @return void
      def fetch_revision
        capture(:git, 'rev-list', '--max-count=1', '--abbrev-commit', fetch(:branch)).strip
      end

      # Cleanup repo cache
      #
      # @return void
      def cleanup
        execute :rm, '-rf', tmp_path

        info 'Local repo cache was removed'
      end

      # Temporary path for all git-copy operations
      #
      # @return [String]
      def tmp_path
        @_tmp_path ||= File.join(Dir.tmpdir, deploy_id)
      end

      # Path to repo cache
      #
      # @return [String]
      def repo_path
        @_repo_path ||= File.join(tmp_path, 'repo')
      end

      # Path to archive
      #
      # @return [String]
      def archive_path
        @_archive_path ||= File.join(tmp_path, 'archive.tar.gz')
      end

      private

      # Delegate Capistrano DSL helpers to the wrapped context.
      def fetch(*args)
        @context.fetch(*args)
      end

      def test!(*args)
        @context.test(*args)
      end

      def execute(*args)
        @context.execute(*args)
      end

      def capture(*args)
        @context.capture(*args)
      end

      def upload!(*args)
        @context.upload!(*args)
      end

      def info(*args)
        @context.info(*args)
      end

      # Run a git subcommand through the context's execute.
      def git(*args)
        args.unshift(:git)

        execute(*args)
      end

      # Bundled git-archive-all script (archives the repo including
      # submodules, which plain `git archive` does not).
      def git_archive_all_bin
        File.expand_path('../../../../vendor/git-archive-all/git-archive-all', __FILE__)
      end

      # Unique, filesystem-safe id for this app/stage/repo/workdir so
      # concurrent deploys from different checkouts do not collide.
      def deploy_id
        [
          fetch(:application),
          fetch(:stage),
          Digest::MD5.hexdigest(fetch(:repo_url))[0..7],
          Digest::MD5.hexdigest(Dir.getwd)[0..7]
        ].compact.join('_').gsub(/[^\w]/, '')
      end

      # Resolve the configured branch to a commit hash, preferring the
      # remote-tracking ref (origin/<branch>) and falling back to a local
      # rev-parse (tags, SHAs). Memoized per deploy.
      def commit_hash
        return @_commit_hash if @_commit_hash

        branch = fetch(:branch, 'master').to_s.strip

        if test! :git, 'rev-parse', "origin/#{branch}", '>/dev/null 2>/dev/null'
          @_commit_hash = capture(:git, 'rev-parse', "origin/#{branch}").strip
        else
          @_commit_hash = capture(:git, 'rev-parse', branch).strip
        end
      end
    end
  end
end
|
#!/bin/sh
#----------------------------------------------------------------------------#
# OpenBSD client for Xymon                                                   #
#                                                                            #
# Copyright (C) 2005-2010 Henrik Storner <henrik@hswn.dk>                    #
#                                                                            #
# This program is released under the GNU General Public License (GPL),       #
# version 2. See the file "COPYING" for details.                             #
#                                                                            #
#----------------------------------------------------------------------------#
#
# $Id$

# Each "[section]" header below is parsed by the Xymon server; section
# names and ordering are part of the client protocol — do not rename.

echo "[date]"
date
echo "[uname]"
uname -a
echo "[uptime]"
uptime
echo "[who]"
who
echo "[df]"
# Join wrapped df lines (long device names push the numbers onto the
# next line) back into single lines, and skip pseudo filesystems.
df -P -tnonfs,kernfs,procfs,cd9660 | sed -e '/^[^ ][^ ]*$/{
N
s/[ ]*\n[ ]*/ /
}'
echo "[mount]"
mount
echo "[meminfo]"
$XYMONHOME/bin/openbsd-meminfo
echo "[swapctl]"
/sbin/swapctl -s
echo "[ifconfig]"
ifconfig -A
echo "[route]"
netstat -rn
echo "[netstat]"
netstat -s
echo "[ifstat]"
netstat -i -b -n | egrep -v "^lo|<Link"
echo "[ports]"
# TCP listeners/connections for both IPv4 and IPv6.
(netstat -na -f inet; netstat -na -f inet6) | grep "^tcp"
echo "[ps]"
ps -ax -ww -o pid,ppid,user,start,state,pri,pcpu,cputime,pmem,rss,vsz,args

# $TOP must be set, the install utility should do that for us if it exists.
if test "$TOP" != ""
then
    if test -x "$TOP"
    then
        echo "[top]"
	$TOP -n 20
    fi
fi

# vmstat: sample over 5 minutes in the background so this run reports
# the sample collected by the PREVIOUS run (if present), then removes it.
nohup sh -c "vmstat 300 2 1>$XYMONTMP/xymon_vmstat.$MACHINEDOTS.$$ 2>&1; mv $XYMONTMP/xymon_vmstat.$MACHINEDOTS.$$ $XYMONTMP/xymon_vmstat.$MACHINEDOTS" </dev/null >/dev/null 2>&1 &
sleep 5
if test -f $XYMONTMP/xymon_vmstat.$MACHINEDOTS; then echo "[vmstat]"; cat $XYMONTMP/xymon_vmstat.$MACHINEDOTS; rm -f $XYMONTMP/xymon_vmstat.$MACHINEDOTS; fi

exit
|
#!/usr/bin/env bash
# PVE7 post-install: switch to the no-subscription repositories and
# patch the web UI to suppress the subscription nag dialog.
echo -e "\e[1;33m This script will Setup Repositories and attempt the No-Nag fix. PVE7 ONLY \e[0m"
while true; do
    read -p "Start the PVE7 Post Install Script (y/n)?" yn
    case $yn in
        [Yy]* ) break;;
        [Nn]* ) exit;;
        * ) echo "Please answer yes or no.";;
    esac
done
# Only proceed on a pve-manager 7.x host.
if [ `pveversion | grep "pve-manager/7" | wc -l` -ne 1 ]; then
        echo -e "This script requires Proxmox Virtual Environment 7.0 or greater"
        echo -e "Exiting..."
        sleep 2
        exit
fi
clear
echo -e "\e[1;33m Disable Enterprise Repository... \e[0m"
sleep 1
# Comment out every enterprise deb line rather than deleting the file.
sed -i "s/^deb/#deb/g" /etc/apt/sources.list.d/pve-enterprise.list
echo -e "\e[1;33m Setup Repositories... \e[0m"
sleep 1
cat <<EOF > /etc/apt/sources.list
deb http://ftp.debian.org/debian bullseye main contrib
deb http://ftp.debian.org/debian bullseye-updates main contrib
deb http://security.debian.org/debian-security bullseye-security main contrib
deb http://download.proxmox.com/debian/pve bullseye pve-no-subscription
# deb http://download.proxmox.com/debian/pve bullseye pvetest
EOF
echo -e "\e[1;33m Disable Subscription Nag... \e[0m"
# APT hook: after every dpkg run, re-apply the proxmoxlib.js patch if the
# widget-toolkit package restored the original file.
echo "DPkg::Post-Invoke { \"dpkg -V proxmox-widget-toolkit | grep -q '/proxmoxlib\.js$'; if [ \$? -eq 1 ]; then { echo 'Removing subscription nag from UI...'; sed -i '/data.status/{s/\!//;s/Active/NoMoreNagging/}' /usr/share/javascript/proxmox-widget-toolkit/proxmoxlib.js; }; fi\"; };" > /etc/apt/apt.conf.d/no-nag-script
# Reinstall so the hook fires immediately and patches the current file.
apt --reinstall install proxmox-widget-toolkit &>/dev/null
echo -e "\e[1;33m Finished....Please Update Proxmox \e[0m"

# bash -c "$(wget -qLO - https://raw.githubusercontent.com/tteck/Proxmox/main/misc/post_install.sh)"
|
#!/bin/bash
# Prepare a Ray cluster deployment package: copy native binaries/libraries
# and java artifacts from a Ray build tree into a target directory layout
# (ray/native/bin, ray/native/lib, ray/java/lib, ray/java/apps).

function usage() {
    echo "  -t|--target-dir <dir>   local target directory for prepare a Ray cluster deployment package"
    echo "  [-s|--source-dir] <dir> local source directory to prepare a Ray cluster deployment package"
}

while [ $# -gt 0 ];do
    key=$1
    case $key in
        -h|--help)
            usage
            exit 0
            ;;
        -s|--source-dir)
            ray_dir=$2
            shift 2
            ;;
        -t|--target-dir)
            t_dir=$2
            shift 2
            ;;
        *)
            echo "ERROR: unknown option $key"
            echo
            usage
            exit -1
            ;;
    esac
done

# Portable realpath replacement (macOS has no realpath by default).
realpath() {
    [[ $1 = /* ]] && echo "$1" || echo "$PWD/${1#./}"
}

# Default source dir: the parent of this script's directory.
if [ -z $ray_dir ];then
    scripts_path=`realpath $0`
    ray_dir=`dirname $scripts_path`
    ray_dir=`dirname $ray_dir`
fi

# echo "ray_dir = $ray_dir"

declare -a nativeBinaries=(
    "./src/common/thirdparty/redis/src/redis-server"
    "./src/plasma/plasma_store_server"
    "./src/plasma/plasma_manager"
    "./src/local_scheduler/local_scheduler"
    "./src/global_scheduler/global_scheduler"
    "./src/ray/raylet/raylet"
    "./src/ray/raylet/raylet_monitor"
)

# NOTE: these entries contain globs and are expanded unquoted in the cp
# below — that expansion is intentional.
declare -a nativeLibraries=(
    "./src/common/redis_module/libray_redis_module.so"
    "./src/local_scheduler/liblocal_scheduler_library_java.*"
    "./src/plasma/libplasma_java.*"
    "./src/ray/raylet/*lib.a"
)

declare -a javaBinaries=(
    "api"
    "common"
    "worker"
    "test"
)

function prepare_source()
{
    if [ -z $t_dir ];then
        echo "--target-dir not specified"
        usage
        exit -1
    fi
    # prepare native components under /ray/native/bin
    mkdir -p $t_dir"/ray/native/bin/"
    for i in "${!nativeBinaries[@]}"
    do
        cp  $ray_dir/build/${nativeBinaries[$i]} $t_dir/ray/native/bin/
    done

    # prepare native libraries under /ray/native/lib
    mkdir -p $t_dir"/ray/native/lib/"
    for i in "${!nativeLibraries[@]}"
    do
        cp  $ray_dir/build/${nativeLibraries[$i]} $t_dir/ray/native/lib/
    done

    # prepare java components under /ray/java/lib
    mkdir -p $t_dir"/ray/java/lib/"
    # Unpack the CLI ear, copy its jars, then drop the temporary unpack dir.
    unzip -q $ray_dir/java/cli/target/ray-cli-ear.zip -d $ray_dir/java
    cp $ray_dir/java/ray-cli/lib/* $t_dir/ray/java/lib/
    rm -rf $ray_dir/java/ray-cli
    cp -rf $ray_dir/java/ray.config.ini $t_dir/ray/

    # prepare java apps directory
    mkdir -p $t_dir"/ray/java/apps/"

    # prepare run.sh
    cp $ray_dir/java/run.sh $t_dir/
}

prepare_source
|
<filename>pd-for-ios/DispatcherSample/DispatcherSample/SampleListener.h
//
// SampleListener.h
// DispatcherSample
//
// Copyright (c) 2011 <NAME> (<EMAIL>)
//
// For information on usage and redistribution, and for a DISCLAIMER OF ALL
// WARRANTIES, see the file, "LICENSE.txt," in this distribution.
//
#import <Foundation/Foundation.h>
// Fix: UILabel lives in UIKit; the header previously compiled only if an
// including file (or PdBase.h/PdDispatcher.h) happened to pull UIKit in
// first. Import it explicitly so the header is self-contained.
#import <UIKit/UIKit.h>
#import "PdBase.h"
#import "PdDispatcher.h"

// PdListener implementation that displays received Pd values on a UILabel.
@interface SampleListener : NSObject<PdListener> {
    UILabel *label;  // label updated with incoming values (not retained here)
}

// Designated initializer: keep a reference to the label to update.
- (id)initWithLabel:(UILabel *)label;

@end
|
import React, { Component } from 'react';
import { Paper, Typography } from '@material-ui/core';
import { storage } from '../../services/element';
export class Storage extends Component {
state = {};
async componentWillMount() {
const info = await storage.ipfs.version();
this.setState({
info,
});
}
render() {
const { info } = this.state;
return (
<Paper className="Storage" style={{ padding: '8px', wordBreak: 'break-all' }}>
<Typography variant={'h5'}>Storage</Typography>
{info === undefined ? (
<Typography variant={'h6'}>Loading...</Typography>
) : (
<div>
<Typography>Version: {info.version}</Typography>
<Typography>Repo: {info.repo}</Typography>
</div>
)}
</Paper>
);
}
}
export default Storage;
|
# Generated by Powerlevel10k configuration wizard on 2021-07-25 at 14:34 IDT.
# Based on romkatv/powerlevel10k/config/p10k-lean.zsh, checksum 19275.
# Wizard options: nerdfont-complete + powerline, small icons, unicode, lean, 24h time,
# 1 line, compact, few icons, concise, instant_prompt=verbose.
# Type `p10k configure` to generate another config.
#
# Config for Powerlevel10k with lean prompt style. Type `p10k configure` to generate
# your own config based on it.
#
# Tip: Looking for a nice color? Here's a one-liner to print colormap.
#
# for i in {0..255}; do print -Pn "%K{$i} %k%F{$i}${(l:3::0:)i}%f " ${${(M)$((i%6)):#3}:+$'\n'}; done
# Temporarily change options.
'builtin' 'local' '-a' 'p10k_config_opts'
[[ ! -o 'aliases' ]] || p10k_config_opts+=('aliases')
[[ ! -o 'sh_glob' ]] || p10k_config_opts+=('sh_glob')
[[ ! -o 'no_brace_expand' ]] || p10k_config_opts+=('no_brace_expand')
'builtin' 'setopt' 'no_aliases' 'no_sh_glob' 'brace_expand'
() {
emulate -L zsh -o extended_glob
# Unset all configuration options. This allows you to apply configuration changes without
# restarting zsh. Edit ~/.p10k.zsh and type `source ~/.p10k.zsh`.
unset -m '(POWERLEVEL9K_*|DEFAULT_USER)~POWERLEVEL9K_GITSTATUS_DIR'
# Zsh >= 5.1 is required.
autoload -Uz is-at-least && is-at-least 5.1 || return
# The list of segments shown on the left. Fill it with the most important segments.
typeset -g POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(
# os_icon # os identifier
dir # current directory
vcs # git status
prompt_char # prompt symbol
)
# The list of segments shown on the right. Fill it with less important segments.
# Right prompt on the last prompt line (where you are typing your commands) gets
# automatically hidden when the input line reaches it. Right prompt above the
# last prompt line gets hidden if it would overlap with left prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(
status # exit code of the last command
command_execution_time # duration of the last command
background_jobs # presence of background jobs
direnv # direnv status (https://direnv.net/)
asdf # asdf version manager (https://github.com/asdf-vm/asdf)
virtualenv # python virtual environment (https://docs.python.org/3/library/venv.html)
anaconda # conda environment (https://conda.io/)
pyenv # python environment (https://github.com/pyenv/pyenv)
goenv # go environment (https://github.com/syndbg/goenv)
nodenv # node.js version from nodenv (https://github.com/nodenv/nodenv)
nvm # node.js version from nvm (https://github.com/nvm-sh/nvm)
nodeenv # node.js environment (https://github.com/ekalinin/nodeenv)
# node_version # node.js version
# go_version # go version (https://golang.org)
# rust_version # rustc version (https://www.rust-lang.org)
# dotnet_version # .NET version (https://dotnet.microsoft.com)
# php_version # php version (https://www.php.net/)
# laravel_version # laravel php framework version (https://laravel.com/)
# java_version # java version (https://www.java.com/)
# package # name@version from package.json (https://docs.npmjs.com/files/package.json)
rbenv # ruby version from rbenv (https://github.com/rbenv/rbenv)
rvm # ruby version from rvm (https://rvm.io)
fvm # flutter version management (https://github.com/leoafarias/fvm)
luaenv # lua version from luaenv (https://github.com/cehoffman/luaenv)
jenv # java version from jenv (https://github.com/jenv/jenv)
plenv # perl version from plenv (https://github.com/tokuhirom/plenv)
phpenv # php version from phpenv (https://github.com/phpenv/phpenv)
scalaenv # scala version from scalaenv (https://github.com/scalaenv/scalaenv)
haskell_stack # haskell version from stack (https://haskellstack.org/)
kubecontext # current kubernetes context (https://kubernetes.io/)
terraform # terraform workspace (https://www.terraform.io)
aws # aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html)
aws_eb_env # aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/)
azure # azure account name (https://docs.microsoft.com/en-us/cli/azure)
gcloud # google cloud cli account and project (https://cloud.google.com/)
google_app_cred # google application credentials (https://cloud.google.com/docs/authentication/production)
context # user@hostname
nordvpn # nordvpn connection status, linux only (https://nordvpn.com/)
ranger # ranger shell (https://github.com/ranger/ranger)
nnn # nnn shell (https://github.com/jarun/nnn)
xplr # xplr shell (https://github.com/sayanarijit/xplr)
vim_shell # vim shell indicator (:sh)
midnight_commander # midnight commander shell (https://midnight-commander.org/)
nix_shell # nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html)
# vpn_ip # virtual private network indicator
# load # CPU load
# disk_usage # disk usage
# ram # free RAM
# swap # used swap
todo # todo items (https://github.com/todotxt/todo.txt-cli)
timewarrior # timewarrior tracking status (https://timewarrior.net/)
taskwarrior # taskwarrior task count (https://taskwarrior.org/)
time # current time
# ip # ip address and bandwidth usage for a specified network interface
# public_ip # public IP address
# proxy # system-wide http/https/ftp proxy
# battery # internal battery
# wifi # wifi speed
# example # example user-defined segment (see prompt_example function below)
)
# Defines character set used by powerlevel10k. It's best to let `p10k configure` set it for you.
typeset -g POWERLEVEL9K_MODE=nerdfont-complete
# When set to `moderate`, some icons will have an extra space after them. This is meant to avoid
# icon overlap when using non-monospace fonts. When set to `none`, spaces are not added.
typeset -g POWERLEVEL9K_ICON_PADDING=none
# Basic style options that define the overall look of your prompt. You probably don't want to
# change them.
typeset -g POWERLEVEL9K_BACKGROUND= # transparent background
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_{LEFT,RIGHT}_WHITESPACE= # no surrounding whitespace
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SUBSEGMENT_SEPARATOR=' ' # separate segments with a space
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SEGMENT_SEPARATOR= # no end-of-line symbol
# When set to true, icons appear before content on both sides of the prompt. When set
# to false, icons go after content. If empty or not set, icons go before content in the left
# prompt and after content in the right prompt.
#
# You can also override it for a specific segment:
#
# POWERLEVEL9K_STATUS_ICON_BEFORE_CONTENT=false
#
# Or for a specific segment in specific state:
#
# POWERLEVEL9K_DIR_NOT_WRITABLE_ICON_BEFORE_CONTENT=false
typeset -g POWERLEVEL9K_ICON_BEFORE_CONTENT=true
# Add an empty line before each prompt.
typeset -g POWERLEVEL9K_PROMPT_ADD_NEWLINE=false
# Connect left prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX=
# Connect right prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_SUFFIX=
# The left end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
# The right end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL=
# Ruler, a.k.a. the horizontal line before each prompt. If you set it to true, you'll
# probably want to set POWERLEVEL9K_PROMPT_ADD_NEWLINE=false above and
# POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' ' below.
typeset -g POWERLEVEL9K_SHOW_RULER=false
typeset -g POWERLEVEL9K_RULER_CHAR='─' # reasonable alternative: '·'
typeset -g POWERLEVEL9K_RULER_FOREGROUND=242
# Filler between left and right prompt on the first prompt line. You can set it to '·' or '─'
# to make it easier to see the alignment between left and right prompt and to separate prompt
# from command output. It serves the same purpose as ruler (see above) without increasing
# the number of prompt lines. You'll probably want to set POWERLEVEL9K_SHOW_RULER=false
# if using this. You might also like POWERLEVEL9K_PROMPT_ADD_NEWLINE=false for more compact
# prompt.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' '
if [[ $POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR != ' ' ]]; then
# The color of the filler.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND=242
# Add a space between the end of left prompt and the filler.
typeset -g POWERLEVEL9K_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=' '
# Add a space between the filler and the start of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL=' '
# Start filler from the edge of the screen if there are no left segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_FIRST_SEGMENT_END_SYMBOL='%{%}'
# End filler on the edge of the screen if there are no right segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='%{%}'
fi
#################################[ os_icon: os identifier ]##################################
# OS identifier color.
typeset -g POWERLEVEL9K_OS_ICON_FOREGROUND=
# Custom icon.
# typeset -g POWERLEVEL9K_OS_ICON_CONTENT_EXPANSION='⭐'
################################[ prompt_char: prompt symbol ]################################
# Green prompt symbol if the last command succeeded.
typeset -g POWERLEVEL9K_PROMPT_CHAR_OK_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=76
# Red prompt symbol if the last command failed.
typeset -g POWERLEVEL9K_PROMPT_CHAR_ERROR_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=196
# Default prompt symbol.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIINS_CONTENT_EXPANSION='❯'
# Prompt symbol in command vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VICMD_CONTENT_EXPANSION='❮'
# Prompt symbol in visual vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIVIS_CONTENT_EXPANSION='V'
# Prompt symbol in overwrite vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIOWR_CONTENT_EXPANSION='▶'
typeset -g POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE=true
# No line terminator if prompt_char is the last segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=''
# No line introducer if prompt_char is the first segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
##################################[ dir: current directory ]##################################
# Default current directory color.
typeset -g POWERLEVEL9K_DIR_FOREGROUND=31
# If directory is too long, shorten some of its segments to the shortest possible unique
# prefix. The shortened directory can be tab-completed to the original.
typeset -g POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique
# Replace removed segment suffixes with this symbol.
typeset -g POWERLEVEL9K_SHORTEN_DELIMITER=
# Color of the shortened directory segments.
typeset -g POWERLEVEL9K_DIR_SHORTENED_FOREGROUND=103
# Color of the anchor directory segments. Anchor segments are never shortened. The first
# segment is always an anchor.
typeset -g POWERLEVEL9K_DIR_ANCHOR_FOREGROUND=39
# Display anchor directory segments in bold.
typeset -g POWERLEVEL9K_DIR_ANCHOR_BOLD=true
# Don't shorten directories that contain any of these files. They are anchors.
local anchor_files=(
.bzr
.citc
.git
.hg
.node-version
.python-version
.go-version
.ruby-version
.lua-version
.java-version
.perl-version
.php-version
.tool-version
.shorten_folder_marker
.svn
.terraform
CVS
Cargo.toml
composer.json
go.mod
package.json
stack.yaml
)
typeset -g POWERLEVEL9K_SHORTEN_FOLDER_MARKER="(${(j:|:)anchor_files})"
# If set to "first" ("last"), remove everything before the first (last) subdirectory that contains
# files matching $POWERLEVEL9K_SHORTEN_FOLDER_MARKER. For example, when the current directory is
# /foo/bar/git_repo/nested_git_repo/baz, prompt will display git_repo/nested_git_repo/baz (first)
# or nested_git_repo/baz (last). This assumes that git_repo and nested_git_repo contain markers
# and other directories don't.
#
# Optionally, "first" and "last" can be followed by ":<offset>" where <offset> is an integer.
# This moves the truncation point to the right (positive offset) or to the left (negative offset)
# relative to the marker. Plain "first" and "last" are equivalent to "first:0" and "last:0"
# respectively.
typeset -g POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=false
# Don't shorten this many last directory segments. They are anchors.
typeset -g POWERLEVEL9K_SHORTEN_DIR_LENGTH=1
# Shorten directory if it's longer than this even if there is space for it. The value can
# be either absolute (e.g., '80') or a percentage of terminal width (e.g, '50%'). If empty,
# directory will be shortened only when prompt doesn't fit or when other parameters demand it
# (see POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS and POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT below).
# If set to `0`, directory will always be shortened to its minimum length.
typeset -g POWERLEVEL9K_DIR_MAX_LENGTH=80
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least this
# many columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS=40
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least
# COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT=50
# If set to true, embed a hyperlink into the directory. Useful for quickly
# opening a directory in the file manager simply by clicking the link.
# Can also be handy when the directory is shortened, as it allows you to see
# the full directory that was used in previous commands.
typeset -g POWERLEVEL9K_DIR_HYPERLINK=false
# Enable special styling for non-writable and non-existent directories. See POWERLEVEL9K_LOCK_ICON
# and POWERLEVEL9K_DIR_CLASSES below.
typeset -g POWERLEVEL9K_DIR_SHOW_WRITABLE=v3
# The default icon shown next to non-writable and non-existent directories when
# POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3.
# typeset -g POWERLEVEL9K_LOCK_ICON='⭐'
# POWERLEVEL9K_DIR_CLASSES allows you to specify custom icons and colors for different
# directories. It must be an array with 3 * N elements. Each triplet consists of:
#
# 1. A pattern against which the current directory ($PWD) is matched. Matching is done with
# extended_glob option enabled.
# 2. Directory class for the purpose of styling.
# 3. An empty string.
#
# Triplets are tried in order. The first triplet whose pattern matches $PWD wins.
#
# If POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3, non-writable and non-existent directories
# acquire class suffix _NOT_WRITABLE and NON_EXISTENT respectively.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=(
# '~/work(|/*)' WORK ''
# '~(|/*)' HOME ''
# '*' DEFAULT '')
#
# Whenever the current directory is ~/work or a subdirectory of ~/work, it gets styled with one
# of the following classes depending on its writability and existence: WORK, WORK_NOT_WRITABLE or
# WORK_NON_EXISTENT.
#
# Simply assigning classes to directories doesn't have any visible effects. It merely gives you an
# option to define custom colors and icons for different directory classes.
#
# # Styling for WORK.
# typeset -g POWERLEVEL9K_DIR_WORK_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_ANCHOR_FOREGROUND=39
#
# # Styling for WORK_NOT_WRITABLE.
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_ANCHOR_FOREGROUND=39
#
# # Styling for WORK_NON_EXISTENT.
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_ANCHOR_FOREGROUND=39
#
# If a styling parameter isn't explicitly defined for some class, it falls back to the classless
# parameter. For example, if POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND is not set, it falls
# back to POWERLEVEL9K_DIR_FOREGROUND.
#
typeset -g POWERLEVEL9K_DIR_CLASSES=()
# Custom prefix.
# typeset -g POWERLEVEL9K_DIR_PREFIX='%fin '
#####################################[ vcs: git status ]######################################
# Branch icon. Set this parameter to '\uF126 ' for the popular Powerline branch icon.
typeset -g POWERLEVEL9K_VCS_BRANCH_ICON=
# Untracked files icon. It's really a question mark, your font isn't broken.
# Change the value of this parameter to show a different icon.
typeset -g POWERLEVEL9K_VCS_UNTRACKED_ICON='?'
# Formatter for Git status.
#
# Example output: master wip ⇣42⇡42 *42 merge ~42 +42 !42 ?42.
#
# You can edit the function to customize how Git status looks.
#
# VCS_STATUS_* parameters are set by gitstatus plugin. See reference:
# https://github.com/romkatv/gitstatus/blob/master/gitstatus.plugin.zsh.
# my_git_formatter: build the vcs segment text into the global my_git_format.
#
# $1 == 1 -> style for an up-to-date gitstatus result (colored output);
# $1 == 0 -> style for an incomplete/stale result (everything grey).
# Reads VCS_STATUS_* parameters exported by the gitstatus plugin (or, when
# P9K_CONTENT is non-empty, just passes that through — vcs_info fallback).
# Output: sets `typeset -g my_git_format`; returns nothing meaningful.
function my_git_formatter() {
emulate -L zsh
if [[ -n $P9K_CONTENT ]]; then
# If P9K_CONTENT is not empty, use it. It's either "loading" or from vcs_info (not from
# gitstatus plugin). VCS_STATUS_* parameters are not available in this case.
typeset -g my_git_format=$P9K_CONTENT
return
fi
if (( $1 )); then
# Styling for up-to-date Git status.
local meta='%f' # default foreground
local clean='%76F' # green foreground
local modified='%178F' # yellow foreground
local untracked='%39F' # blue foreground
local conflicted='%196F' # red foreground
else
# Styling for incomplete and stale Git status.
local meta='%244F' # grey foreground
local clean='%244F' # grey foreground
local modified='%244F' # grey foreground
local untracked='%244F' # grey foreground
local conflicted='%244F' # grey foreground
fi
local res
if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then
local branch=${(V)VCS_STATUS_LOCAL_BRANCH}
# If local branch name is at most 32 characters long, show it in full.
# Otherwise show the first 12 … the last 12.
# Tip: To always show local branch name in full without truncation, delete the next line.
(( $#branch > 32 )) && branch[13,-13]="…" # <-- this line
res+="${clean}${(g::)POWERLEVEL9K_VCS_BRANCH_ICON}${branch//\%/%%}"
fi
if [[ -n $VCS_STATUS_TAG
# Show tag only if not on a branch.
# Tip: To always show tag, delete the next line.
&& -z $VCS_STATUS_LOCAL_BRANCH # <-- this line
]]; then
local tag=${(V)VCS_STATUS_TAG}
# If tag name is at most 32 characters long, show it in full.
# Otherwise show the first 12 … the last 12.
# Tip: To always show tag name in full without truncation, delete the next line.
(( $#tag > 32 )) && tag[13,-13]="…" # <-- this line
res+="${meta}#${clean}${tag//\%/%%}"
fi
# Display the current Git commit if there is no branch and no tag.
# Tip: To always display the current Git commit, delete the next line.
[[ -z $VCS_STATUS_LOCAL_BRANCH && -z $VCS_STATUS_TAG ]] && # <-- this line
res+="${meta}@${clean}${VCS_STATUS_COMMIT[1,8]}"
# Show tracking branch name if it differs from local branch.
if [[ -n ${VCS_STATUS_REMOTE_BRANCH:#$VCS_STATUS_LOCAL_BRANCH} ]]; then
res+="${meta}:${clean}${(V)VCS_STATUS_REMOTE_BRANCH//\%/%%}"
fi
# Display "wip" if the latest commit's summary contains "wip" or "WIP".
if [[ $VCS_STATUS_COMMIT_SUMMARY == (|*[^[:alnum:]])(wip|WIP)(|[^[:alnum:]]*) ]]; then
res+=" ${modified}wip"
fi
# ⇣42 if behind the remote.
(( VCS_STATUS_COMMITS_BEHIND )) && res+=" ${clean}⇣${VCS_STATUS_COMMITS_BEHIND}"
# ⇡42 if ahead of the remote; no leading space if also behind the remote: ⇣42⇡42.
(( VCS_STATUS_COMMITS_AHEAD && !VCS_STATUS_COMMITS_BEHIND )) && res+=" "
(( VCS_STATUS_COMMITS_AHEAD )) && res+="${clean}⇡${VCS_STATUS_COMMITS_AHEAD}"
# ⇠42 if behind the push remote.
(( VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" ${clean}⇠${VCS_STATUS_PUSH_COMMITS_BEHIND}"
(( VCS_STATUS_PUSH_COMMITS_AHEAD && !VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" "
# ⇢42 if ahead of the push remote; no leading space if also behind: ⇠42⇢42.
(( VCS_STATUS_PUSH_COMMITS_AHEAD )) && res+="${clean}⇢${VCS_STATUS_PUSH_COMMITS_AHEAD}"
# *42 if have stashes.
(( VCS_STATUS_STASHES )) && res+=" ${clean}*${VCS_STATUS_STASHES}"
# 'merge' if the repo is in an unusual state.
[[ -n $VCS_STATUS_ACTION ]] && res+=" ${conflicted}${VCS_STATUS_ACTION}"
# ~42 if have merge conflicts.
(( VCS_STATUS_NUM_CONFLICTED )) && res+=" ${conflicted}~${VCS_STATUS_NUM_CONFLICTED}"
# +42 if have staged changes.
(( VCS_STATUS_NUM_STAGED )) && res+=" ${modified}+${VCS_STATUS_NUM_STAGED}"
# !42 if have unstaged changes.
(( VCS_STATUS_NUM_UNSTAGED )) && res+=" ${modified}!${VCS_STATUS_NUM_UNSTAGED}"
# ?42 if have untracked files. It's really a question mark, your font isn't broken.
# See POWERLEVEL9K_VCS_UNTRACKED_ICON above if you want to use a different icon.
# Remove the next line if you don't want to see untracked files at all.
(( VCS_STATUS_NUM_UNTRACKED )) && res+=" ${untracked}${(g::)POWERLEVEL9K_VCS_UNTRACKED_ICON}${VCS_STATUS_NUM_UNTRACKED}"
# "─" if the number of unstaged files is unknown. This can happen due to
# POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY (see below) being set to a non-negative number lower
# than the number of files in the Git index, or due to bash.showDirtyState being set to false
# in the repository config. The number of staged and untracked files may also be unknown
# in this case.
(( VCS_STATUS_HAS_UNSTAGED == -1 )) && res+=" ${modified}─"
typeset -g my_git_format=$res
}
functions -M my_git_formatter 2>/dev/null
# Don't count the number of unstaged, untracked and conflicted files in Git repositories with
# more than this many files in the index. Negative value means infinity.
#
# If you are working in Git repositories with tens of millions of files and seeing performance
# sagging, try setting POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY to a number lower than the output
# of `git ls-files | wc -l`. Alternatively, add `bash.showDirtyState = false` to the repository's
# config: `git config bash.showDirtyState false`.
typeset -g POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY=-1
# Don't show Git status in prompt for repositories whose workdir matches this pattern.
# For example, if set to '~', the Git repository at $HOME/.git will be ignored.
# Multiple patterns can be combined with '|': '~(|/foo)|/bar/baz/*'.
typeset -g POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN='~'
# Disable the default Git status formatting.
typeset -g POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING=true
# Install our own Git status formatter.
typeset -g POWERLEVEL9K_VCS_CONTENT_EXPANSION='${$((my_git_formatter(1)))+${my_git_format}}'
typeset -g POWERLEVEL9K_VCS_LOADING_CONTENT_EXPANSION='${$((my_git_formatter(0)))+${my_git_format}}'
# Enable counters for staged, unstaged, etc.
typeset -g POWERLEVEL9K_VCS_{STAGED,UNSTAGED,UNTRACKED,CONFLICTED,COMMITS_AHEAD,COMMITS_BEHIND}_MAX_NUM=-1
# Icon color.
typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_COLOR=76
typeset -g POWERLEVEL9K_VCS_LOADING_VISUAL_IDENTIFIER_COLOR=244
# Custom icon.
typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
# typeset -g POWERLEVEL9K_VCS_PREFIX='%fon '
# Show status of repositories of these types. You can add svn and/or hg if you are
# using them. If you do, your prompt may become slow even when your current directory
# isn't in an svn or hg repository.
typeset -g POWERLEVEL9K_VCS_BACKENDS=(git)
# These settings are used for repositories other than Git or when gitstatusd fails and
# Powerlevel10k has to fall back to using vcs_info.
typeset -g POWERLEVEL9K_VCS_CLEAN_FOREGROUND=76
typeset -g POWERLEVEL9K_VCS_UNTRACKED_FOREGROUND=76
typeset -g POWERLEVEL9K_VCS_MODIFIED_FOREGROUND=178
##########################[ status: exit code of the last command ]###########################
# Enable OK_PIPE, ERROR_PIPE and ERROR_SIGNAL status states to allow us to enable, disable and
# style them independently from the regular OK and ERROR state.
typeset -g POWERLEVEL9K_STATUS_EXTENDED_STATES=true
# Status on success. No content, just an icon. No need to show it if prompt_char is enabled as
# it will signify success by turning green.
typeset -g POWERLEVEL9K_STATUS_OK=false
typeset -g POWERLEVEL9K_STATUS_OK_FOREGROUND=70
typeset -g POWERLEVEL9K_STATUS_OK_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when some part of a pipe command fails but the overall exit status is zero. It may look
# like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_OK_PIPE=true
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_FOREGROUND=70
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when it's just an error code (e.g., '1'). No need to show it if prompt_char is enabled as
# it will signify error by turning red.
typeset -g POWERLEVEL9K_STATUS_ERROR=false
typeset -g POWERLEVEL9K_STATUS_ERROR_FOREGROUND=160
typeset -g POWERLEVEL9K_STATUS_ERROR_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when the last command was terminated by a signal.
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL=true
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_FOREGROUND=160
# Use terse signal names: "INT" instead of "SIGINT(2)".
typeset -g POWERLEVEL9K_STATUS_VERBOSE_SIGNAME=false
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when some part of a pipe command fails and the overall exit status is also non-zero.
# It may look like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE=true
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_FOREGROUND=160
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_VISUAL_IDENTIFIER_EXPANSION='✘'
###################[ command_execution_time: duration of the last command ]###################
# Show duration of the last command if takes at least this many seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
# Show this many fractional digits. Zero means round to seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
# Execution time color.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FOREGROUND=101
# Duration format: 1d 2h 3m 4s.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT='d h m s'
# Custom icon.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PREFIX='%ftook '
#######################[ background_jobs: presence of background jobs ]#######################
# Don't show the number of background jobs.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE=false
# Background jobs color.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ direnv: direnv status (https://direnv.net/) ]########################
# Direnv color.
typeset -g POWERLEVEL9K_DIRENV_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_DIRENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ asdf: asdf version manager (https://github.com/asdf-vm/asdf) ]###############
# Default asdf color. Only used to display tools for which there is no color override (see below).
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_FOREGROUND.
typeset -g POWERLEVEL9K_ASDF_FOREGROUND=66
# There are four parameters that can be used to hide asdf tools. Each parameter describes
# conditions under which a tool gets hidden. Parameters can hide tools but not unhide them. If at
# least one parameter decides to hide a tool, that tool gets hidden. If no parameter decides to
# hide a tool, it gets shown.
#
# Special note on the difference between POWERLEVEL9K_ASDF_SOURCES and
# POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW. Consider the effect of the following commands:
#
# asdf local python 3.8.1
# asdf global python 3.8.1
#
# After running both commands the current python version is 3.8.1 and its source is "local" as
# it takes precedence over "global". If POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW is set to false,
# it'll hide python version in this case because 3.8.1 is the same as the global version.
# POWERLEVEL9K_ASDF_SOURCES will hide python version only if the value of this parameter doesn't
# contain "local".
# Hide tool versions that don't come from one of these sources.
#
# Available sources:
#
# - shell `asdf current` says "set by ASDF_${TOOL}_VERSION environment variable"
# - local `asdf current` says "set by /some/not/home/directory/file"
# - global `asdf current` says "set by /home/username/file"
#
# Note: If this parameter is set to (shell local global), it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SOURCES.
typeset -g POWERLEVEL9K_ASDF_SOURCES=(shell local global)
# If set to false, hide tool versions that are the same as global.
#
# Note: The name of this parameter doesn't reflect its meaning at all.
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_PROMPT_ALWAYS_SHOW.
typeset -g POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW=false
# If set to false, hide tool versions that are equal to "system".
#
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_SYSTEM.
typeset -g POWERLEVEL9K_ASDF_SHOW_SYSTEM=true
# If set to non-empty value, hide tools unless there is a file matching the specified file pattern
# in the current directory, or its parent directory, or its grandparent directory, and so on.
#
# Note: If this parameter is set to empty value, it won't hide tools.
# Note: SHOW_ON_UPGLOB isn't specific to asdf. It works with all prompt segments.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_ON_UPGLOB.
#
# Example: Hide nodejs version when there is no package.json and no *.js files in the current
# directory, in `..`, in `../..` and so on.
#
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.js|package.json'
typeset -g POWERLEVEL9K_ASDF_SHOW_ON_UPGLOB=
# Ruby version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUBY_FOREGROUND=168
# typeset -g POWERLEVEL9K_ASDF_RUBY_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUBY_SHOW_ON_UPGLOB='*.foo|*.bar'
# Python version from asdf.
typeset -g POWERLEVEL9K_ASDF_PYTHON_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_PYTHON_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PYTHON_SHOW_ON_UPGLOB='*.foo|*.bar'
# Go version from asdf.
typeset -g POWERLEVEL9K_ASDF_GOLANG_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_GOLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_GOLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Node.js version from asdf.
typeset -g POWERLEVEL9K_ASDF_NODEJS_FOREGROUND=70
# typeset -g POWERLEVEL9K_ASDF_NODEJS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.foo|*.bar'
# Rust version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUST_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_RUST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUST_SHOW_ON_UPGLOB='*.foo|*.bar'
# .NET Core version from asdf.
typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_FOREGROUND=134
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_DOTNET_SHOW_ON_UPGLOB='*.foo|*.bar'
# Flutter version from asdf.
typeset -g POWERLEVEL9K_ASDF_FLUTTER_FOREGROUND=38
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_SHOW_ON_UPGLOB='*.foo|*.bar'
# Lua version from asdf.
typeset -g POWERLEVEL9K_ASDF_LUA_FOREGROUND=32
# typeset -g POWERLEVEL9K_ASDF_LUA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_LUA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Java version from asdf.
typeset -g POWERLEVEL9K_ASDF_JAVA_FOREGROUND=32
# typeset -g POWERLEVEL9K_ASDF_JAVA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JAVA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Perl version from asdf.
typeset -g POWERLEVEL9K_ASDF_PERL_FOREGROUND=67
# typeset -g POWERLEVEL9K_ASDF_PERL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PERL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Erlang version from asdf.
typeset -g POWERLEVEL9K_ASDF_ERLANG_FOREGROUND=125
# typeset -g POWERLEVEL9K_ASDF_ERLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ERLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Elixir version from asdf.
typeset -g POWERLEVEL9K_ASDF_ELIXIR_FOREGROUND=129
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_SHOW_ON_UPGLOB='*.foo|*.bar'
# Postgres version from asdf.
typeset -g POWERLEVEL9K_ASDF_POSTGRES_FOREGROUND=31
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_SHOW_ON_UPGLOB='*.foo|*.bar'
# PHP version from asdf.
typeset -g POWERLEVEL9K_ASDF_PHP_FOREGROUND=99
# typeset -g POWERLEVEL9K_ASDF_PHP_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PHP_SHOW_ON_UPGLOB='*.foo|*.bar'
# Haskell version from asdf.
typeset -g POWERLEVEL9K_ASDF_HASKELL_FOREGROUND=172
# typeset -g POWERLEVEL9K_ASDF_HASKELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_HASKELL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Julia version from asdf.
typeset -g POWERLEVEL9K_ASDF_JULIA_FOREGROUND=70
# typeset -g POWERLEVEL9K_ASDF_JULIA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JULIA_SHOW_ON_UPGLOB='*.foo|*.bar'
##########[ nordvpn: nordvpn connection status, linux only (https://nordvpn.com/) ]###########
# NordVPN connection indicator color.
typeset -g POWERLEVEL9K_NORDVPN_FOREGROUND=39
# Hide NordVPN connection indicator when not connected.
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_CONTENT_EXPANSION=
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_VISUAL_IDENTIFIER_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NORDVPN_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ ranger: ranger shell (https://github.com/ranger/ranger) ]##################
# Ranger shell color.
typeset -g POWERLEVEL9K_RANGER_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_RANGER_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################[ nnn: nnn shell (https://github.com/jarun/nnn) ]#######################
# Nnn shell color.
typeset -g POWERLEVEL9K_NNN_FOREGROUND=72
# Custom icon.
# typeset -g POWERLEVEL9K_NNN_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################[ xplr: xplr shell (https://github.com/sayanarijit/xplr) ]##################
# xplr shell color.
typeset -g POWERLEVEL9K_XPLR_FOREGROUND=72
# Custom icon.
# typeset -g POWERLEVEL9K_XPLR_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########################[ vim_shell: vim shell indicator (:sh) ]###########################
# Vim shell indicator color.
typeset -g POWERLEVEL9K_VIM_SHELL_FOREGROUND=34
# Custom icon.
# typeset -g POWERLEVEL9K_VIM_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
######[ midnight_commander: midnight commander shell (https://midnight-commander.org/) ]######
# Midnight Commander shell color.
typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ nix_shell: nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) ]##
# Nix shell color.
typeset -g POWERLEVEL9K_NIX_SHELL_FOREGROUND=74
# Tip: If you want to see just the icon without "pure" and "impure", uncomment the next line.
# typeset -g POWERLEVEL9K_NIX_SHELL_CONTENT_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NIX_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ disk_usage: disk usage ]##################################
# Colors for different levels of disk usage.
typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_FOREGROUND=35
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_FOREGROUND=220
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_FOREGROUND=160
# Thresholds for different levels of disk usage (percentage points).
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL=90
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL=95
# If set to true, hide disk usage when below $POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL percent.
typeset -g POWERLEVEL9K_DISK_USAGE_ONLY_WARNING=false
# Custom icon.
# typeset -g POWERLEVEL9K_DISK_USAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ ram: free RAM ]#######################################
# RAM color.
typeset -g POWERLEVEL9K_RAM_FOREGROUND=66
# Custom icon.
# typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################################[ swap: used swap ]######################################
# Swap color.
typeset -g POWERLEVEL9K_SWAP_FOREGROUND=96
# Custom icon.
# typeset -g POWERLEVEL9K_SWAP_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ load: CPU load ]######################################
# Show average CPU load over this many last minutes. Valid values are 1, 5 and 15.
typeset -g POWERLEVEL9K_LOAD_WHICH=5
# Load color when load is under 50%.
typeset -g POWERLEVEL9K_LOAD_NORMAL_FOREGROUND=66
# Load color when load is between 50% and 70%.
typeset -g POWERLEVEL9K_LOAD_WARNING_FOREGROUND=178
# Load color when load is over 70%.
typeset -g POWERLEVEL9K_LOAD_CRITICAL_FOREGROUND=166
# Custom icon.
# typeset -g POWERLEVEL9K_LOAD_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ todo: todo items (https://github.com/todotxt/todo.txt-cli) ]################
# Todo color.
typeset -g POWERLEVEL9K_TODO_FOREGROUND=110
# Hide todo when the total number of tasks is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL=true
# Hide todo when the number of tasks after filtering is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED=false
# Todo format. The following parameters are available within the expansion.
#
# - P9K_TODO_TOTAL_TASK_COUNT The total number of tasks.
# - P9K_TODO_FILTERED_TASK_COUNT The number of tasks after filtering.
#
# These variables correspond to the last line of the output of `todo.sh -p ls`:
#
# TODO: 24 of 42 tasks shown
#
# Here 24 is P9K_TODO_FILTERED_TASK_COUNT and 42 is P9K_TODO_TOTAL_TASK_COUNT.
#
# typeset -g POWERLEVEL9K_TODO_CONTENT_EXPANSION='$P9K_TODO_FILTERED_TASK_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TODO_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ timewarrior: timewarrior tracking status (https://timewarrior.net/) ]############
# Timewarrior color.
typeset -g POWERLEVEL9K_TIMEWARRIOR_FOREGROUND=110
# If the tracked task is longer than 24 characters, truncate and append "…".
# Tip: To always display tasks without truncation, delete the following parameter.
# Tip: To hide task names and display just the icon when time tracking is enabled, set the
# value of the following parameter to "".
typeset -g POWERLEVEL9K_TIMEWARRIOR_CONTENT_EXPANSION='${P9K_CONTENT:0:24}${${P9K_CONTENT:24}:+…}'
# Custom icon.
# typeset -g POWERLEVEL9K_TIMEWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ taskwarrior: taskwarrior task count (https://taskwarrior.org/) ]##############
# Taskwarrior color.
typeset -g POWERLEVEL9K_TASKWARRIOR_FOREGROUND=74
# Taskwarrior segment format. The following parameters are available within the expansion.
#
# - P9K_TASKWARRIOR_PENDING_COUNT The number of pending tasks: `task +PENDING count`.
# - P9K_TASKWARRIOR_OVERDUE_COUNT The number of overdue tasks: `task +OVERDUE count`.
#
# Zero values are represented as empty parameters.
#
# The default format:
#
# '${P9K_TASKWARRIOR_OVERDUE_COUNT:+"!$P9K_TASKWARRIOR_OVERDUE_COUNT/"}$P9K_TASKWARRIOR_PENDING_COUNT'
#
# typeset -g POWERLEVEL9K_TASKWARRIOR_CONTENT_EXPANSION='$P9K_TASKWARRIOR_PENDING_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TASKWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ context: user@hostname ]##################################
# Context color when running with privileges.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_FOREGROUND=178
# Context color in SSH without privileges.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_FOREGROUND=180
# Default context color (no privileges, no SSH).
typeset -g POWERLEVEL9K_CONTEXT_FOREGROUND=180
# Context format when running with privileges: bold user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_TEMPLATE='%B%n@%m'
# Context format when in SSH without privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_TEMPLATE='%n@%m'
# Default context format (no privileges, no SSH): user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_TEMPLATE='%n@%m'
# Don't show context unless running with privileges or in SSH.
# Tip: Remove the next line to always show context.
typeset -g POWERLEVEL9K_CONTEXT_{DEFAULT,SUDO}_{CONTENT,VISUAL_IDENTIFIER}_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_CONTEXT_PREFIX='%fwith '
###[ virtualenv: python virtual environment (https://docs.python.org/3/library/venv.html) ]###
# Python virtual environment color.
typeset -g POWERLEVEL9K_VIRTUALENV_FOREGROUND=37
# Don't show Python version next to the virtual environment name.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION=false
# If set to "false", won't show virtualenv if pyenv is already shown.
# If set to "if-different", won't show virtualenv if it's the same as pyenv.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV=false
# Separate environment name from Python version only with a space.
typeset -g POWERLEVEL9K_VIRTUALENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_VIRTUALENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ anaconda: conda environment (https://conda.io/) ]######################
# Anaconda environment color.
typeset -g POWERLEVEL9K_ANACONDA_FOREGROUND=37
# Anaconda segment format. The following parameters are available within the expansion.
#
# - CONDA_PREFIX Absolute path to the active Anaconda/Miniconda environment.
# - CONDA_DEFAULT_ENV Name of the active Anaconda/Miniconda environment.
# - CONDA_PROMPT_MODIFIER Configurable prompt modifier (see below).
# - P9K_ANACONDA_PYTHON_VERSION Current python version (python --version).
#
# CONDA_PROMPT_MODIFIER can be configured with the following command:
#
# conda config --set env_prompt '({default_env}) '
#
# The last argument is a Python format string that can use the following variables:
#
# - prefix The same as CONDA_PREFIX.
# - default_env The same as CONDA_DEFAULT_ENV.
# - name The last segment of CONDA_PREFIX.
# - stacked_env Comma-separated list of names in the environment stack. The first element is
# always the same as default_env.
#
# Note: '({default_env}) ' is the default value of env_prompt.
#
# The default value of POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION expands to $CONDA_PROMPT_MODIFIER
# without the surrounding parentheses, or to the last path component of CONDA_PREFIX if the former
# is empty.
typeset -g POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION='${${${${CONDA_PROMPT_MODIFIER#\(}% }%\)}:-${CONDA_PREFIX:t}}'
# Custom icon.
# typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ pyenv: python environment (https://github.com/pyenv/pyenv) ]################
# Pyenv color.
typeset -g POWERLEVEL9K_PYENV_FOREGROUND=37
# Hide python version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PYENV_SOURCES=(shell local global)
# If set to false, hide python version if it's the same as global:
# $(pyenv version-name) == $(pyenv global).
typeset -g POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide python version if it's equal to "system".
typeset -g POWERLEVEL9K_PYENV_SHOW_SYSTEM=true
# Pyenv segment format. The following parameters are available within the expansion.
#
# - P9K_CONTENT Current pyenv environment (pyenv version-name).
# - P9K_PYENV_PYTHON_VERSION Current python version (python --version).
#
# The default format has the following logic:
#
# 1. Display just "$P9K_CONTENT" if it's equal to "$P9K_PYENV_PYTHON_VERSION" or
# starts with "$P9K_PYENV_PYTHON_VERSION/".
# 2. Otherwise display "$P9K_CONTENT $P9K_PYENV_PYTHON_VERSION".
typeset -g POWERLEVEL9K_PYENV_CONTENT_EXPANSION='${P9K_CONTENT}${${P9K_CONTENT:#$P9K_PYENV_PYTHON_VERSION(|/*)}:+ $P9K_PYENV_PYTHON_VERSION}'
# Custom icon.
# typeset -g POWERLEVEL9K_PYENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ goenv: go environment (https://github.com/syndbg/goenv) ]################
# Goenv color.
typeset -g POWERLEVEL9K_GOENV_FOREGROUND=37
# Hide go version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_GOENV_SOURCES=(shell local global)
# If set to false, hide go version if it's the same as global:
# $(goenv version-name) == $(goenv global).
typeset -g POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide go version if it's equal to "system".
typeset -g POWERLEVEL9K_GOENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_GOENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ nodenv: node.js version from nodenv (https://github.com/nodenv/nodenv) ]##########
# Nodenv color.
typeset -g POWERLEVEL9K_NODENV_FOREGROUND=70
# Hide node version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_NODENV_SOURCES=(shell local global)
# If set to false, hide node version if it's the same as global:
# $(nodenv version-name) == $(nodenv global).
typeset -g POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide node version if it's equal to "system".
typeset -g POWERLEVEL9K_NODENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ nvm: node.js version from nvm (https://github.com/nvm-sh/nvm) ]###############
# Nvm color.
typeset -g POWERLEVEL9K_NVM_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_NVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ nodeenv: node.js environment (https://github.com/ekalinin/nodeenv) ]############
# Nodeenv color.
typeset -g POWERLEVEL9K_NODEENV_FOREGROUND=70
# Don't show Node version next to the environment name.
typeset -g POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION=false
# Separate environment name from Node version only with a space.
typeset -g POWERLEVEL9K_NODEENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_NODEENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############################[ node_version: node.js version ]###############################
# Node version color.
typeset -g POWERLEVEL9K_NODE_VERSION_FOREGROUND=70
# Show node version only when in a directory tree containing package.json.
typeset -g POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODE_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ go_version: go version (https://golang.org) ]########################
# Go version color.
typeset -g POWERLEVEL9K_GO_VERSION_FOREGROUND=37
# Show go version only when in a go project subdirectory.
typeset -g POWERLEVEL9K_GO_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_GO_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ rust_version: rustc version (https://www.rust-lang.org) ]##################
# Rust version color.
typeset -g POWERLEVEL9K_RUST_VERSION_FOREGROUND=37
# Show rust version only when in a rust project subdirectory.
typeset -g POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_RUST_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ dotnet_version: .NET version (https://dotnet.microsoft.com) ]################
# .NET version color.
typeset -g POWERLEVEL9K_DOTNET_VERSION_FOREGROUND=134
# Show .NET version only when in a .NET project subdirectory.
typeset -g POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_DOTNET_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ php_version: php version (https://www.php.net/) ]######################
# PHP version color.
typeset -g POWERLEVEL9K_PHP_VERSION_FOREGROUND=99
# Show PHP version only when in a PHP project subdirectory.
typeset -g POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHP_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ laravel_version: laravel php framework version (https://laravel.com/) ]###########
# Laravel version color.
typeset -g POWERLEVEL9K_LARAVEL_VERSION_FOREGROUND=161
# Custom icon.
# typeset -g POWERLEVEL9K_LARAVEL_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
####################[ java_version: java version (https://www.java.com/) ]####################
# Java version color.
typeset -g POWERLEVEL9K_JAVA_VERSION_FOREGROUND=32
# Show java version only when in a java project subdirectory.
typeset -g POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY=true
# Show brief version.
typeset -g POWERLEVEL9K_JAVA_VERSION_FULL=false
# Custom icon.
# typeset -g POWERLEVEL9K_JAVA_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###[ package: name@version from package.json (https://docs.npmjs.com/files/package.json) ]####
# Package color.
typeset -g POWERLEVEL9K_PACKAGE_FOREGROUND=117
# Package format. The following parameters are available within the expansion.
#
# - P9K_PACKAGE_NAME The value of `name` field in package.json.
# - P9K_PACKAGE_VERSION The value of `version` field in package.json.
#
# typeset -g POWERLEVEL9K_PACKAGE_CONTENT_EXPANSION='${P9K_PACKAGE_NAME//\%/%%}@${P9K_PACKAGE_VERSION//\%/%%}'
# Custom icon.
# typeset -g POWERLEVEL9K_PACKAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ rbenv: ruby version from rbenv (https://github.com/rbenv/rbenv) ]##############
# Rbenv color.
typeset -g POWERLEVEL9K_RBENV_FOREGROUND=168
# Hide ruby version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_RBENV_SOURCES=(shell local global)
# If set to false, hide ruby version if it's the same as global:
# $(rbenv version-name) == $(rbenv global).
typeset -g POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide ruby version if it's equal to "system".
typeset -g POWERLEVEL9K_RBENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_RBENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ rvm: ruby version from rvm (https://rvm.io) ]########################
# Rvm color.
typeset -g POWERLEVEL9K_RVM_FOREGROUND=168
# Don't show @gemset at the end.
typeset -g POWERLEVEL9K_RVM_SHOW_GEMSET=false
# Don't show ruby- at the front.
typeset -g POWERLEVEL9K_RVM_SHOW_PREFIX=false
# Custom icon.
# typeset -g POWERLEVEL9K_RVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ fvm: flutter version management (https://github.com/leoafarias/fvm) ]############
# Fvm color.
typeset -g POWERLEVEL9K_FVM_FOREGROUND=38
# Custom icon.
# typeset -g POWERLEVEL9K_FVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ luaenv: lua version from luaenv (https://github.com/cehoffman/luaenv) ]###########
# Lua color.
typeset -g POWERLEVEL9K_LUAENV_FOREGROUND=32
# Hide lua version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_LUAENV_SOURCES=(shell local global)
# If set to false, hide lua version if it's the same as global:
# $(luaenv version-name) == $(luaenv global).
typeset -g POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide lua version if it's equal to "system".
typeset -g POWERLEVEL9K_LUAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_LUAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ jenv: java version from jenv (https://github.com/jenv/jenv) ]################
# Java color.
typeset -g POWERLEVEL9K_JENV_FOREGROUND=32
# Hide java version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_JENV_SOURCES=(shell local global)
# If set to false, hide java version if it's the same as global:
# $(jenv version-name) == $(jenv global).
typeset -g POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide java version if it's equal to "system".
typeset -g POWERLEVEL9K_JENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_JENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ plenv: perl version from plenv (https://github.com/tokuhirom/plenv) ]############
# Perl color.
typeset -g POWERLEVEL9K_PLENV_FOREGROUND=67
# Hide perl version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PLENV_SOURCES=(shell local global)
# If set to false, hide perl version if it's the same as global:
# $(plenv version-name) == $(plenv global).
typeset -g POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide perl version if it's equal to "system".
typeset -g POWERLEVEL9K_PLENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PLENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ phpenv: php version from phpenv (https://github.com/phpenv/phpenv) ]############
# PHP color.
typeset -g POWERLEVEL9K_PHPENV_FOREGROUND=99
# Hide php version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PHPENV_SOURCES=(shell local global)
# If set to false, hide php version if it's the same as global:
# $(phpenv version-name) == $(phpenv global).
typeset -g POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide php version if it's equal to "system".
typeset -g POWERLEVEL9K_PHPENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHPENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######[ scalaenv: scala version from scalaenv (https://github.com/scalaenv/scalaenv) ]#######
# Scala color.
typeset -g POWERLEVEL9K_SCALAENV_FOREGROUND=160
# Hide scala version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_SCALAENV_SOURCES=(shell local global)
# If set to false, hide scala version if it's the same as global:
# $(scalaenv version-name) == $(scalaenv global).
typeset -g POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide scala version if it's equal to "system".
typeset -g POWERLEVEL9K_SCALAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_SCALAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ haskell_stack: haskell version from stack (https://haskellstack.org/) ]###########
# Haskell color.
typeset -g POWERLEVEL9K_HASKELL_STACK_FOREGROUND=172
# Hide haskell version if it doesn't come from one of these sources.
#
# shell: version is set by STACK_YAML
# local: version is set by stack.yaml up the directory tree
# global: version is set by the implicit global project (~/.stack/global-project/stack.yaml)
typeset -g POWERLEVEL9K_HASKELL_STACK_SOURCES=(shell local)
# If set to false, hide haskell version if it's the same as in the implicit global project.
typeset -g POWERLEVEL9K_HASKELL_STACK_ALWAYS_SHOW=true
# Custom icon.
# typeset -g POWERLEVEL9K_HASKELL_STACK_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ kubecontext: current kubernetes context (https://kubernetes.io/) ]#############
# Show kubecontext only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show kubecontext.
typeset -g POWERLEVEL9K_KUBECONTEXT_SHOW_ON_COMMAND='kubectl|helm|kubens|kubectx|oc|istioctl|kogito|k9s|helmfile|fluxctl|stern'
# Kubernetes context classes for the purpose of using different colors, icons and expansions with
# different contexts.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current kubernetes context gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_KUBECONTEXT_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_KUBECONTEXT_CLASSES defines the context class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current kubernetes context is "deathray-testing/default", its class is TEST
# because "deathray-testing/default" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_FOREGROUND=134
# typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_KUBECONTEXT_CONTENT_EXPANSION to specify the content displayed by kubecontext
# segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# Within the expansion the following parameters are always available:
#
# - P9K_CONTENT The content that would've been displayed if there was no content
# expansion defined.
# - P9K_KUBECONTEXT_NAME The current context's name. Corresponds to column NAME in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_CLUSTER The current context's cluster. Corresponds to column CLUSTER in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_NAMESPACE The current context's namespace. Corresponds to column NAMESPACE
# in the output of `kubectl config get-contexts`. If there is no
# namespace, the parameter is set to "default".
# - P9K_KUBECONTEXT_USER The current context's user. Corresponds to column AUTHINFO in the
# output of `kubectl config get-contexts`.
#
# If the context points to Google Kubernetes Engine (GKE) or Elastic Kubernetes Service (EKS),
# the following extra parameters are available:
#
# - P9K_KUBECONTEXT_CLOUD_NAME Either "gke" or "eks".
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT Account/project ID.
# - P9K_KUBECONTEXT_CLOUD_ZONE Availability zone.
# - P9K_KUBECONTEXT_CLOUD_CLUSTER Cluster.
#
# P9K_KUBECONTEXT_CLOUD_* parameters are derived from P9K_KUBECONTEXT_CLUSTER. For example,
# if P9K_KUBECONTEXT_CLUSTER is "gke_my-account_us-east1-a_my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=gke
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=my-account
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east1-a
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
#
# If P9K_KUBECONTEXT_CLUSTER is "arn:aws:eks:us-east-1:123456789012:cluster/my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=eks
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=123456789012
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east-1
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION=
# Show P9K_KUBECONTEXT_CLOUD_CLUSTER if it's not empty and fall back to P9K_KUBECONTEXT_NAME.
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${P9K_KUBECONTEXT_CLOUD_CLUSTER:-${P9K_KUBECONTEXT_NAME}}'
# Append the current context's namespace if it's not "default".
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default}'
# Custom prefix.
# typeset -g POWERLEVEL9K_KUBECONTEXT_PREFIX='%fat '
################[ terraform: terraform workspace (https://www.terraform.io) ]#################
# Don't show terraform workspace if it's literally "default".
typeset -g POWERLEVEL9K_TERRAFORM_SHOW_DEFAULT=false
# POWERLEVEL9K_TERRAFORM_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current terraform workspace gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_TERRAFORM_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_TERRAFORM_CLASSES defines the workspace class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' OTHER)
#
# If your current terraform workspace is "project_test", its class is TEST because "project_test"
# doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' OTHER)
typeset -g POWERLEVEL9K_TERRAFORM_OTHER_FOREGROUND=38
# typeset -g POWERLEVEL9K_TERRAFORM_OTHER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws: aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) ]#
# Show aws only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show aws.
typeset -g POWERLEVEL9K_AWS_SHOW_ON_COMMAND='aws|awless|terraform|pulumi|terragrunt'
# POWERLEVEL9K_AWS_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current AWS profile gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_AWS_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_AWS_CLASSES defines the profile class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current AWS profile is "company_test", its class is TEST
# because "company_test" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_AWS_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_AWS_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_AWS_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_AWS_DEFAULT_FOREGROUND=208
# typeset -g POWERLEVEL9K_AWS_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# AWS segment format. The following parameters are available within the expansion.
#
# - P9K_AWS_PROFILE The name of the current AWS profile.
# - P9K_AWS_REGION The region associated with the current AWS profile.
typeset -g POWERLEVEL9K_AWS_CONTENT_EXPANSION='${P9K_AWS_PROFILE//\%/%%}${P9K_AWS_REGION:+ ${P9K_AWS_REGION//\%/%%}}'
#[ aws_eb_env: aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) ]#
# AWS Elastic Beanstalk environment color.
typeset -g POWERLEVEL9K_AWS_EB_ENV_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_AWS_EB_ENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ azure: azure account name (https://docs.microsoft.com/en-us/cli/azure) ]##########
# Show azure only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show azure.
typeset -g POWERLEVEL9K_AZURE_SHOW_ON_COMMAND='az|terraform|pulumi|terragrunt'
# Azure account name color.
typeset -g POWERLEVEL9K_AZURE_FOREGROUND=32
# Custom icon.
# typeset -g POWERLEVEL9K_AZURE_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ gcloud: google cloud account and project (https://cloud.google.com/) ]###########
# Show gcloud only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show gcloud.
typeset -g POWERLEVEL9K_GCLOUD_SHOW_ON_COMMAND='gcloud|gcs'
# Google cloud color.
typeset -g POWERLEVEL9K_GCLOUD_FOREGROUND=32
# Google cloud format. Change the value of POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION and/or
# POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION if the default is too verbose or not informative
# enough. You can use the following parameters in the expansions. Each of them corresponds to the
# output of `gcloud` tool.
#
# Parameter | Source
# -------------------------|--------------------------------------------------------------------
# P9K_GCLOUD_CONFIGURATION | gcloud config configurations list --format='value(name)'
# P9K_GCLOUD_ACCOUNT | gcloud config get-value account
# P9K_GCLOUD_PROJECT_ID | gcloud config get-value project
# P9K_GCLOUD_PROJECT_NAME | gcloud projects describe $P9K_GCLOUD_PROJECT_ID --format='value(name)'
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced with '%%'.
#
# Obtaining project name requires sending a request to Google servers. This can take a long time
# and even fail. When project name is unknown, P9K_GCLOUD_PROJECT_NAME is not set and gcloud
# prompt segment is in state PARTIAL. When project name gets known, P9K_GCLOUD_PROJECT_NAME gets
# set and gcloud prompt segment transitions to state COMPLETE.
#
# You can customize the format, icon and colors of gcloud segment separately for states PARTIAL
# and COMPLETE. You can also hide gcloud in state PARTIAL by setting
# POWERLEVEL9K_GCLOUD_PARTIAL_VISUAL_IDENTIFIER_EXPANSION and
# POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION to empty.
typeset -g POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_ID//\%/%%}'
typeset -g POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_NAME//\%/%%}'
# Send a request to Google (by means of `gcloud projects describe ...`) to obtain project name
# this often. Negative value disables periodic polling. In this mode project name is retrieved
# only when the current configuration, account or project id changes.
typeset -g POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS=60
# Custom icon.
# typeset -g POWERLEVEL9K_GCLOUD_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ google_app_cred: google application credentials (https://cloud.google.com/docs/authentication/production) ]#
  # Show google_app_cred only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show google_app_cred.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_SHOW_ON_COMMAND='terraform|pulumi|terragrunt'
# Google application credentials classes for the purpose of using different colors, icons and
# expansions with different credentials.
#
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES is an array with even number of elements. The first
# element in each pair defines a pattern against which the current kubernetes context gets
# matched. More specifically, it's P9K_CONTENT prior to the application of context expansion
# (see below) that gets matched. If you unset all POWERLEVEL9K_GOOGLE_APP_CRED_*CONTENT_EXPANSION
# parameters, you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES defines the context class. Patterns are tried in order.
# The first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD
# '*:*test*:*' TEST
# '*' DEFAULT)
#
# If your current Google application credentials is "service_account deathray-testing x@y.com",
# its class is TEST because it doesn't match the pattern '* *prod* *' but does match '* *test* *'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_CONTENT_EXPANSION='$P9K_GOOGLE_APP_CRED_PROJECT_ID'
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD # These values are examples that are unlikely
# '*:*test*:*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_FOREGROUND=32
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_GOOGLE_APP_CRED_CONTENT_EXPANSION to specify the content displayed by
# google_app_cred segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# You can use the following parameters in the expansion. Each of them corresponds to one of the
# fields in the JSON file pointed to by GOOGLE_APPLICATION_CREDENTIALS.
#
# Parameter | JSON key file field
# ---------------------------------+---------------
# P9K_GOOGLE_APP_CRED_TYPE | type
# P9K_GOOGLE_APP_CRED_PROJECT_ID | project_id
# P9K_GOOGLE_APP_CRED_CLIENT_EMAIL | client_email
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced by '%%'.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_CONTENT_EXPANSION='${P9K_GOOGLE_APP_CRED_PROJECT_ID//\%/%%}'
###############################[ public_ip: public IP address ]###############################
# Public IP color.
typeset -g POWERLEVEL9K_PUBLIC_IP_FOREGROUND=94
# Custom icon.
# typeset -g POWERLEVEL9K_PUBLIC_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
########################[ vpn_ip: virtual private network indicator ]#########################
# VPN IP color.
typeset -g POWERLEVEL9K_VPN_IP_FOREGROUND=81
# When on VPN, show just an icon without the IP address.
# Tip: To display the private IP address when on VPN, remove the next line.
typeset -g POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION=
# Regular expression for the VPN network interface. Run `ifconfig` or `ip -4 a show` while on VPN
# to see the name of the interface.
typeset -g POWERLEVEL9K_VPN_IP_INTERFACE='(gpd|wg|(.*tun)|tailscale)[0-9]*'
# If set to true, show one segment per matching network interface. If set to false, show only
# one segment corresponding to the first matching network interface.
# Tip: If you set it to true, you'll probably want to unset POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION.
typeset -g POWERLEVEL9K_VPN_IP_SHOW_ALL=false
# Custom icon.
# typeset -g POWERLEVEL9K_VPN_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ ip: ip address and bandwidth usage for a specified network interface ]###########
# IP color.
typeset -g POWERLEVEL9K_IP_FOREGROUND=38
# The following parameters are accessible within the expansion:
#
# Parameter | Meaning
# ----------------------+-------------------------------------------
# P9K_IP_IP | IP address
# P9K_IP_INTERFACE | network interface
# P9K_IP_RX_BYTES | total number of bytes received
# P9K_IP_TX_BYTES | total number of bytes sent
# P9K_IP_RX_BYTES_DELTA | number of bytes received since last prompt
# P9K_IP_TX_BYTES_DELTA | number of bytes sent since last prompt
# P9K_IP_RX_RATE | receive rate (since last prompt)
# P9K_IP_TX_RATE | send rate (since last prompt)
typeset -g POWERLEVEL9K_IP_CONTENT_EXPANSION='$P9K_IP_IP${P9K_IP_RX_RATE:+ %70F⇣$P9K_IP_RX_RATE}${P9K_IP_TX_RATE:+ %215F⇡$P9K_IP_TX_RATE}'
# Show information for the first network interface whose name matches this regular expression.
# Run `ifconfig` or `ip -4 a show` to see the names of all network interfaces.
typeset -g POWERLEVEL9K_IP_INTERFACE='[ew].*'
# Custom icon.
# typeset -g POWERLEVEL9K_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
#########################[ proxy: system-wide http/https/ftp proxy ]##########################
# Proxy color.
typeset -g POWERLEVEL9K_PROXY_FOREGROUND=68
# Custom icon.
# typeset -g POWERLEVEL9K_PROXY_VISUAL_IDENTIFIER_EXPANSION='⭐'
################################[ battery: internal battery ]#################################
# Show battery in red when it's below this level and not connected to power supply.
typeset -g POWERLEVEL9K_BATTERY_LOW_THRESHOLD=20
typeset -g POWERLEVEL9K_BATTERY_LOW_FOREGROUND=160
# Show battery in green when it's charging or fully charged.
typeset -g POWERLEVEL9K_BATTERY_{CHARGING,CHARGED}_FOREGROUND=70
# Show battery in yellow when it's discharging.
typeset -g POWERLEVEL9K_BATTERY_DISCONNECTED_FOREGROUND=178
# Battery pictograms going from low to high level of charge.
typeset -g POWERLEVEL9K_BATTERY_STAGES='\uf58d\uf579\uf57a\uf57b\uf57c\uf57d\uf57e\uf57f\uf580\uf581\uf578'
# Don't show the remaining time to charge/discharge.
typeset -g POWERLEVEL9K_BATTERY_VERBOSE=false
#####################################[ wifi: wifi speed ]#####################################
# WiFi color.
typeset -g POWERLEVEL9K_WIFI_FOREGROUND=68
# Custom icon.
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use different colors and icons depending on signal strength ($P9K_WIFI_BARS).
#
# # Wifi colors and icons for different signal strength levels (low to high).
# typeset -g my_wifi_fg=(68 68 68 68 68) # <-- change these values
# typeset -g my_wifi_icon=('WiFi' 'WiFi' 'WiFi' 'WiFi' 'WiFi') # <-- change these values
#
# typeset -g POWERLEVEL9K_WIFI_CONTENT_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}$P9K_WIFI_LAST_TX_RATE Mbps'
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}${my_wifi_icon[P9K_WIFI_BARS+1]}'
#
# The following parameters are accessible within the expansions:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_WIFI_SSID | service set identifier, a.k.a. network name
# P9K_WIFI_LINK_AUTH | authentication protocol such as "wpa2-psk" or "none"; empty if unknown
# P9K_WIFI_LAST_TX_RATE | wireless transmit rate in megabits per second
# P9K_WIFI_RSSI | signal strength in dBm, from -120 to 0
# P9K_WIFI_NOISE | noise in dBm, from -120 to 0
# P9K_WIFI_BARS | signal strength in bars, from 0 to 4 (derived from P9K_WIFI_RSSI and P9K_WIFI_NOISE)
####################################[ time: current time ]####################################
# Current time color.
typeset -g POWERLEVEL9K_TIME_FOREGROUND=66
# Format for the current time: 09:51:02. See `man 3 strftime`.
typeset -g POWERLEVEL9K_TIME_FORMAT='%D{%H:%M:%S}'
# If set to true, time will update when you hit enter. This way prompts for the past
# commands will contain the start times of their commands as opposed to the default
# behavior where they contain the end times of their preceding commands.
typeset -g POWERLEVEL9K_TIME_UPDATE_ON_COMMAND=false
# Custom icon.
typeset -g POWERLEVEL9K_TIME_VISUAL_IDENTIFIER_EXPANSION=
# Custom prefix.
# typeset -g POWERLEVEL9K_TIME_PREFIX='%fat '
# Example of a user-defined prompt segment. Function prompt_example will be called on every
# prompt if `example` prompt segment is added to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or
# POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS. It displays an icon and orange text greeting the user.
#
# Type `p10k help segment` for documentation and a more sophisticated example.
function prompt_example() {
p10k segment -f 208 -i '⭐' -t 'hello, %n'
}
# User-defined prompt segments may optionally provide an instant_prompt_* function. Its job
# is to generate the prompt segment for display in instant prompt. See
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
#
# Powerlevel10k will call instant_prompt_* at the same time as the regular prompt_* function
# and will record all `p10k segment` calls it makes. When displaying instant prompt, Powerlevel10k
# will replay these calls without actually calling instant_prompt_*. It is imperative that
# instant_prompt_* always makes the same `p10k segment` calls regardless of environment. If this
# rule is not observed, the content of instant prompt will be incorrect.
#
# Usually, you should either not define instant_prompt_* or simply call prompt_* from it. If
# instant_prompt_* is not defined for a segment, the segment won't be shown in instant prompt.
function instant_prompt_example() {
# Since prompt_example always makes the same `p10k segment` calls, we can call it from
# instant_prompt_example. This will give us the same `example` prompt segment in the instant
# and regular prompts.
prompt_example
}
# User-defined prompt segments can be customized the same way as built-in segments.
# typeset -g POWERLEVEL9K_EXAMPLE_FOREGROUND=208
# typeset -g POWERLEVEL9K_EXAMPLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Transient prompt works similarly to the builtin transient_rprompt option. It trims down prompt
# when accepting a command line. Supported values:
#
# - off: Don't change prompt when accepting a command line.
# - always: Trim down prompt when accepting a command line.
# - same-dir: Trim down prompt when accepting a command line unless this is the first command
# typed after changing current working directory.
typeset -g POWERLEVEL9K_TRANSIENT_PROMPT=off
# Instant prompt mode.
#
# - off: Disable instant prompt. Choose this if you've tried instant prompt and found
# it incompatible with your zsh configuration files.
# - quiet: Enable instant prompt and don't print warnings when detecting console output
# during zsh initialization. Choose this if you've read and understood
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
# - verbose: Enable instant prompt and print a warning when detecting console output during
# zsh initialization. Choose this if you've never tried instant prompt, haven't
# seen the warning, or if you are unsure what this all means.
typeset -g POWERLEVEL9K_INSTANT_PROMPT=verbose
# Hot reload allows you to change POWERLEVEL9K options after Powerlevel10k has been initialized.
# For example, you can type POWERLEVEL9K_BACKGROUND=red and see your prompt turn red. Hot reload
# can slow down prompt by 1-2 milliseconds, so it's better to keep it turned off unless you
# really need it.
typeset -g POWERLEVEL9K_DISABLE_HOT_RELOAD=true
# If p10k is already loaded, reload configuration.
# This works even with POWERLEVEL9K_DISABLE_HOT_RELOAD=true.
(( ! $+functions[p10k] )) || p10k reload
}
# Tell `p10k configure` which file it should overwrite.
typeset -g POWERLEVEL9K_CONFIG_FILE=${${(%):-%x}:a}
# Restore the shell options that were saved before this config ran, then
# drop the temporary variable that held them. `'builtin' 'unset'` is quoted
# so aliases/functions named `builtin` or `unset` cannot intercept the call.
(( ${#p10k_config_opts} )) && setopt ${p10k_config_opts[@]}
'builtin' 'unset' 'p10k_config_opts'
|
#set -x
# Regression check: run the simulator into a second experiment directory,
# diff one output file against a reference run, and on user confirmation
# snapshot the `fontes` sources into a timestamped backup directory.
#
# Usage: script.sh [reference_dir] [new_dir]
#
# Authors:
#     Eduardo Garcia (bidu@lncc.br)
#     Diego Volpatto (volpatto@lncc.br)
dirPadrao="dirExp00"          # default reference experiment directory
dirNew="dirExp01"             # default new experiment directory
dirTarget="${2:-${dirNew}}"   # directory the simulator actually writes to
dirExp="$(pwd)/${1:-${dirPadrao}}"
dirExpNew="$(pwd)/${dirTarget}"
# Run the simulator into the target directory. The original passed the
# hard-coded $dirNew here even when $2 was given, so the diff below could
# compare a directory the simulator never wrote; use one variable for both.
comandoRun="$(pwd)/rodarSimulador.sh ${dirTarget}"
echo "$comandoRun"
eval "$comandoRun"
# Compare one output file between the reference and the new run.
comando="diff ${dirExp}/disp.1 ${dirExpNew}/disp.1"
echo "$comando"
eval "$comando"
read -p "Correto? s ou n: " checkVar
# Quote $checkVar so an empty answer (plain Enter) does not make `[` fail
# with a syntax error; any answer other than "s" simply exits.
if [ "$checkVar" = "s" ]; then
    echo "$checkVar"
    # Timestamped backup name, e.g. 14_30_25_12_2020fontes.
    fileName="$(date +%H_%M_%d_%m_%Yfontes)"
    echo criando o seguinte diretorio de copia "$fileName"
    cp -r fontes "$fileName"
    ls -ltr "$fileName"
else
    exit
fi
|
<reponame>mouchtaris/jleon
package gv
package isi
package io
import java.nio.channels.{ WritableByteChannel, ReadableByteChannel }
import java.nio.file.{ StandardOpenOption ⇒ opt, Files ⇒ JFiles, Path ⇒ JPath }
/** Thin helpers over `java.nio.file.Files` that open byte channels with
  * commonly needed option combinations. Extending `Any` keeps the trait
  * usable as a universal (value-class-friendly) mixin.
  */
trait File extends Any {

  /** Whether `path` exists on the default filesystem. */
  @inline
  final def exists(path: JPath): Boolean =
    JFiles.exists(path)

  /** Open a brand-new file for writing; fails if it already exists. */
  @inline
  final def create(path: JPath): WritableByteChannel =
    JFiles.newByteChannel(path, opt.CREATE_NEW, opt.WRITE)

  /** Open `path` for reading, creating an empty file when absent. */
  @inline
  final def open(path: JPath): ReadableByteChannel =
    JFiles.newByteChannel(path, opt.CREATE, opt.READ)

  /** Open `path` for writing at the end, creating it when absent. */
  @inline
  final def append(path: JPath): WritableByteChannel =
    JFiles.newByteChannel(path, opt.CREATE, opt.WRITE, opt.APPEND)

  /** Open `path` for writing from scratch, discarding prior contents. */
  @inline
  final def truncate(path: JPath): WritableByteChannel =
    JFiles.newByteChannel(path, opt.CREATE, opt.WRITE, opt.TRUNCATE_EXISTING)

  /** Delete `path`; throws if it does not exist. */
  @inline
  final def remove(path: JPath): Unit =
    JFiles.delete(path)
}

/** Companion instance so callers can use `File.open(...)` directly. */
object File extends File
|
<reponame>gavofyork/RipInPeace<gh_stars>1-10
#pragma once
#include <thread>
#include <vector>
#include <QSystemTrayIcon>
#include <QDialog>
#include <QTime>
#include "DiscInfo.h"
#include "Paranoia.h"
#include "ui_Info.h"
class QAction;
class QTableWidget;
class Settings;
struct cddb_conn_s;
struct cddb_disc_s;
class RIP;
// Small widget that paints a visual representation of the rip progress of
// its associated RIP object (rendering happens in paintEvent, defined in
// the .cpp). The RIP pointer is not owned by this widget.
class Progress: public QWidget
{
public:
    Progress(RIP* _r);
    // Accessor for the RIP instance this widget visualizes.
    RIP* rip() const { return m_r; }
private:
    void paintEvent(QPaintEvent*);
    RIP* m_r;  // borrowed, not owned
};
// System-tray application object driving the whole CD-ripping workflow:
// disc identification (CDDB), paranoia-based audio extraction on worker
// threads, tagging/moving of the resulting files, and the tray UI/popup.
class RIP: public QSystemTrayIcon
{
    Q_OBJECT

public:
    RIP();
    ~RIP();

    // --- configuration accessors (backed by std::string members) ---
    QString directory() const { return QString::fromUtf8(m_path.c_str()); }
    QString filename() const { return QString::fromUtf8(m_filename.c_str()); }
    QString device() const { return QString::fromUtf8(m_device.c_str()); }
    int paranoia() const { return m_paranoia; }
    int squeeze() const { return m_squeeze; }

    // --- progress reporting ---
    // Fraction of the rip completed; see m_progress for the raw ranges.
    float amountDone() const;
    QVector<QPair<float, float> > progressVector() const;
    std::vector<std::pair<unsigned, unsigned> > const& progress() const { return m_progress; }

public slots:
    // --- configuration setters (mirror the accessors above) ---
    void setDirectory(QString _s) { m_path = _s.toUtf8().data(); }
    void setFilename(QString _s) { m_filename = _s.toUtf8().data(); }
    void setParanoia(int _s) { m_paranoia = _s; }
    void setDevice(QString _s) { m_device = _s.toUtf8().data(); }
    void setSqueeze(int _s) { m_squeeze = _s; }
    void update();
    void onConfirm();
    void onUnconfirm();

private slots:
    void onActivated(QSystemTrayIcon::ActivationReason);
    void onAbortRip();
    void onAbout();
    void onQuit();
    void updatePreset(int);
    // Move data into / out of the disc-info UI (m_info).
    void plantInfo();
    void harvestInfo();

private:
    // --- workflow steps (implemented in the .cpp) ---
    void rip();
    void getDiscInfo();
    void eject();
    void tagAll();
    void moveAll();
    void createPopup();
    void showPopup();
    void readSettings();
    void writeSettings();
    virtual void timerEvent(QTimerEvent*);

    QFrame* m_popup;
    Settings* m_settings;
    Ui::Info m_info;
    DiscInfo m_di;
    std::vector<DiscInfo> m_dis;   // candidate disc matches
    DiscIdentity m_id;
    std::string m_path;            // output directory
    std::string m_filename;        // output filename template
    std::string m_device;          // CD device path
    int m_paranoia;                // paranoia level setting
    int m_squeeze;                 // compression setting
    Paranoia m_p;
    std::thread* m_ripper;         // extraction worker
    std::thread* m_identifier;     // disc-identification worker
    bool m_ripped;
    bool m_identified;
    bool m_justRipped;
    bool m_confirmed;
    QTime m_started;
    int m_lastPercentDone;
    bool m_startingRip;
    // (start, end) sector ranges already processed; see progress().
    std::vector<std::pair<unsigned, unsigned> > m_progress;
    std::string m_temp;            // temporary working path
    QIcon m_inactive;
    QTime m_aborting;
    Progress* m_progressPie;
    QAction* m_abortRip;
    QAction* m_unconfirm;
#if !defined(FINAL)
    QAction* m_testIcon;           // debug-only tray action
#endif
};
|
// Demonstrates synchronous fs.Stats queries on a known file.
const fs = require("fs");

// Fetch the stat record once and reuse it for both type checks.
const statInfo = fs.statSync("let03.js");
console.log(statInfo);

console.log("Is File: " + statInfo.isFile());     // Is File: true
console.log("Is Dir: " + statInfo.isDirectory()); //Is Dir: false
|
def mostFrequentElement(arr):
    """Return the element of *arr* that occurs most often.

    The original implementation only counted runs of *adjacent* equal
    elements (and, starting its scan at index 1 while comparing
    ``arr[i] == arr[i+1]``, never examined the first pair), so it gave
    wrong answers for any unsorted input -- e.g. for
    ``[2, 5, 3, 5, 9, 5, 1, 2]`` it returned 2 although 5 occurs three
    times.  Counting with a hash map is O(n) and order-independent.

    Parameters
    ----------
    arr : sequence of hashable
        Non-empty sequence to scan.

    Returns
    -------
    The most frequent element; ties are broken in favour of the element
    encountered first (Counter.most_common is insertion-ordered).

    Raises
    ------
    IndexError
        If *arr* is empty (mirrors the original's ``arr[0]`` access).
    """
    from collections import Counter
    if not arr:
        # Preserve the original's failure mode on empty input, explicitly.
        raise IndexError("mostFrequentElement() arg is an empty sequence")
    return Counter(arr).most_common(1)[0][0]
# Demo: in this (unsorted) sample, 5 occurs three times.
arr = [2, 5, 3, 5, 9, 5, 1, 2]
print("Most frequent element is %d"%(mostFrequentElement(arr)))
|
#!/bin/bash
# Bootstrap the working copy: initialize git submodules and generate local
# config files from their .in templates by substituting database settings.
#
# Usage: setup.sh [hostname] [dbname] [dbuser] [dbpass]
CUR="$(pwd)"   # remember the caller's directory; restored at the end
TOPDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
HOSTNAME=${1:-open4us.org}   # NOTE(review): captured but unused in subst below
DBNAME=${2:-wordpress}
DBUSER=${3:-dbuser}
DBPASS=${4:-}
# Quote the path and bail out if the cd fails, so the commands below never
# run in the wrong directory.
cd "${TOPDIR}" || exit 1
#
# Git submodules
#
git submodule init
git submodule update
#
# Process substitutions in .in files
#
subst="s|@env_dir@|${TOPDIR}/python_env|;s|@dbname@|${DBNAME}|;s|@dbuser@|${DBUSER}|;s|@dbpass@|${DBPASS}|"
subst_files="docroot/wp-config-local.php"
for i in ${subst_files}
do
    sed -e "${subst}" < "$i.in" > "$i"
done
cd "${CUR}"
|
#! /usr/bin/env ruby
# encoding: utf-8
# frozen-string-literal: true
require "fileutils"
require "json"
require "open-uri"
require "yaml"
# Monkey patch: wrap any single object in an Array so callers can treat
# "one element" and "many elements" JSON fields uniformly (the e-Stat API
# returns either a hash or an array of hashes for CLASS entries).
class Object
  def array_enclosed
    [self]
  end
end
# Counterpart to Object#array_enclosed: an Array is already "enclosed",
# so return it unchanged.
class Array
  def array_enclosed
    self
  end
end
# Value object for one statistics row:
# sex, age, height class, value type, fiscal year, weight.
DataInf = Struct.new(:seibetu, :nenrei, :sintyoo, :atai, :nendo, :taizyuu)

# Read config.yaml (taking the last YAML document in the stream) and
# publish the e-Stat application id through globals.
def load_config
  $config = YAML.load_stream(File.read("config.yaml")).last
  $app_id = $config["appId"]
end
# Resolve, within CLASS_INF, the human-readable "@name" of the code that
# *data_inf_value* carries for classification object *id*
# (e.g. id="cat01" pairs with key "@cat01").
def _class_name_for(class_inf, id, key, data_inf_value)
  class_obj = class_inf.find {|c| c["@id"] == id }
  cls = class_obj["CLASS"].array_enclosed.find {|c| c["@code"] == data_inf_value[key] }
  cls["@name"]
end

# Convert one raw VALUE hash of the e-Stat JSON answer into a DataInf
# record, resolving every category code to its display name. The original
# repeated the same two-step lookup five times; it is now factored into
# _class_name_for above.
def create_data_inf(class_inf, data_inf_value)
  seibetu = _class_name_for(class_inf, "cat01", "@cat01", data_inf_value)
  # Age arrives as e.g. "17歳(...)"; [0..-3] drops the two-char suffix.
  nenrei = _class_name_for(class_inf, "cat02", "@cat02", data_inf_value)[0..-3].to_i
  # Height class arrives as e.g. "150~(cm)"; "計" (total) is encoded as -1.
  sintyoo_name = _class_name_for(class_inf, "cat03", "@cat03", data_inf_value).sub("(cm)", "").sub("~", "")
  sintyoo = sintyoo_name == "計" ? -1 : sintyoo_name.to_i
  atai = _class_name_for(class_inf, "cat04", "@cat04", data_inf_value)
  # Year arrives as e.g. "2018年度"; [0..-2] drops the trailing character.
  nendo = _class_name_for(class_inf, "time", "@time", data_inf_value)[0..-2].to_i
  taizyuu = data_inf_value["$"].to_f
  DataInf.new(seibetu, nenrei, sintyoo, atai, nendo, taizyuu)
end
# Fetch one statistics table from the e-Stat REST API (cached on disk under
# $TEMP), parse it into DataInf records, then either print them ("view")
# or write the api/ JSON files via create_api.
#
# BUG FIX: the query string previously contained "§ionHeaderFlg" -- the
# literal "&sect" of "&sectionHeaderFlg" had been HTML-entity-decoded into
# "§", corrupting the URL. Restored to "&sectionHeaderFlg=2".
def main_loop(nendo, nenrei, stats_data_id, command)
  uri = URI "http://api.e-stat.go.jp/rest/3.0/app/json/getStatsData?appId=#{$app_id}&lang=J&statsDataId=#{stats_data_id}&metaGetFlg=Y&cntGetFlg=N&explanationGetFlg=N&annotationGetFlg=N&sectionHeaderFlg=2&cdCat04=0000010&cdTime=#{nendo}100000"
  filepath = "#{ENV["TEMP"]}/#{nendo}/#{nenrei}sai-zyosi.json"
  unless FileTest.exist?(filepath)
    # NOTE(review): open-uri may return a StringIO (no #path) for small
    # responses; this assumes a Tempfile is returned -- confirm.
    FileUtils.move(open(uri).path, filepath)
  end
  json = File.read(filepath)
  h = JSON.parse(json)["GET_STATS_DATA"]
  unless h["RESULT"]["STATUS"] == 0
    raise h["RESULT"]["ERROR_MSG"]
  end
  table_inf = h["STATISTICAL_DATA"]["TABLE_INF"]
  table_title = table_inf["TITLE"]["$"]
  class_inf = h["STATISTICAL_DATA"]["CLASS_INF"]["CLASS_OBJ"]
  data_inf = h["STATISTICAL_DATA"]["DATA_INF"]["VALUE"]
  # The response's own "time" class is authoritative for the fiscal year.
  class_obj_time = class_inf.find {|class_obj| class_obj["@id"] == "time" }
  nendo = class_obj_time["CLASS"]["@name"][0..-2].to_i
  data_inf = data_inf.map {|data_inf_value|
    create_data_inf(class_inf, data_inf_value)
  }
  if command == "view"
    puts "#{table_title}(#{nendo}年度)"
    p data_inf.size
    p *data_inf
  else
    create_api(nendo, nenrei, data_inf, command)
  end
end
# Write one JSON file per height class under api/<nendo>/<nenrei>sai/...,
# each holding a single record, then re-read and print them as a sanity
# check. `command` is accepted but unused here.
def create_api(nendo, nenrei, data_inf, command)
  filepath_base = "api/#{nendo}/#{nenrei}sai/%scm/zyosi.json"
  # Keep only the fields of interest, group by height class, and take the
  # first record of each group (presumably one record per class -- verify).
  sintyoo_betu_data_inf = data_inf.map {|data_inf_value|
    data_inf_value.to_h.slice(:nendo, :seibetu, :nenrei, :sintyoo, :taizyuu)
  }
  .group_by {|data_inf_value| data_inf_value[:sintyoo] }
  .transform_values {|data_inf_value| data_inf_value[0] }
  # NOTE: the block parameter `data_inf` shadows the method argument here;
  # inside the block it is the single grouped record hash.
  sintyoo_betu_data_inf.each {|sintyoo, data_inf|
    json = JSON.dump(data_inf)
    filepath = filepath_base % data_inf[:sintyoo]
    FileUtils.mkdir_p(File.dirname(filepath))
    open(filepath, "w") {|f|
      f.puts(json)
    }
  }
  # Read everything back and print it -- a cheap round-trip check.
  Dir.glob(filepath_base % "*") {|filepath|
    h = JSON.load(File.read(filepath))
    p h
  }
end
# Entry point: load config, then either refresh every api/ file from the
# e-Stat service ("update") or inspect a single hard-coded table ("view").
# The original assigned command = "view" and immediately overwrote it with
# "update"; the dead store is removed and documented instead.
def main
  load_config
  # The script relies on relative paths (config.yaml, tmp/, api/), so it
  # must be run from its own directory.
  raise unless FileUtils.pwd == File.dirname(File.expand_path(__FILE__))
  ENV["TEMP"] = "tmp"
  FileUtils.mkdir_p("#{ENV["TEMP"]}")
  # Flip to "view" to print parsed records instead of writing api/ files.
  command = "update"
  source = YAML.load_stream(File.read("source.yaml"))[-1]["source"]
  if command == "update"
    source.each {|nendo, h|
      h["statsDataId"].each {|nenrei, stats_data_id|
        FileUtils.mkdir_p("#{ENV["TEMP"]}/#{nendo}")
        main_loop(nendo, nenrei, stats_data_id, command)
        sleep 0.2  # be polite to the e-Stat API between requests
      }
    }
  else
    # Hard-coded sample used by the "view" branch.
    nendo = 2018
    nenrei = 17
    h = source.fetch(nendo)
    stats_data_id = h["statsDataId"].fetch(nenrei)
    main_loop(nendo, nenrei, stats_data_id, command)
  end
end
# Run only when executed directly (not when required as a library).
if __FILE__ == $0
  main
end
|
$ ballerina run global-variables.bal
#Prints the value of the global variable 'total'.
98
#Prints the updated value of the global variable 'content'.
This is a sample text
|
#!/bin/bash
# Runs tests for a module using all harnesses and python versions.
#
# This is only relevant for your local device when using VirtualBox VMs.
# It expects that you have created the VMs with Vagrant, waited for them
# to boot, and created Snapshots of them.
#
# Usage: run-tests.sh <module>
MODULE=$1
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && cd .. && cd .. && pwd )"

# BIG-IP harness VMs, one per tested TMOS version.
HARNESS=(
    bigip-12.0.0 bigip-12.1.0 bigip-12.1.0-hf1 bigip-12.1.0-hf2
    bigip-12.1.1 bigip-12.1.1-hf1 bigip-12.1.1-hf2 bigip-12.1.2
    bigip-12.1.2-hf1 bigip-13.0.0 bigip-13.0.0-hf1 bigip-13.0.0-hf2
)
PYTHONS=( py2.7 py3.5 py3.6 )
HOST="localhost"
PORT="10443"
SNAPSHOT="Snapshot 1"

for i in "${HARNESS[@]}"
do
    # Reset the VM to the clean snapshot and boot it headless.
    VBoxManage controlvm "${i}" poweroff
    VBoxManage snapshot "${i}" restore "${SNAPSHOT}"
    sleep 3
    VBoxManage startvm "${i}" --type headless

    # Poll the management interface until the VM answers (max ~60s).
    for x in $(seq 1 10); do
        if curl -k "https://${HOST}:${PORT}/"; then
            break
        fi
        sleep 6
    done
    echo "Ready"

    # Run the module's tests once per python version; stop on first failure.
    for k in "${PYTHONS[@]}"
    do
        docker-compose -f "${DIR}/devtools/docker-compose.yaml" run --rm "${k}" make "$MODULE"
        if [ $? -ne 0 ]; then
            VBoxManage controlvm "${i}" poweroff
            echo "FAILED: ${k} - ${i}"
            exit 1
        fi
    done
    VBoxManage controlvm "${i}" poweroff
done
echo "SUCCESS"
|
/**
 * This file declares unit tests for the exercises that are
 * mainly bodyweight exercises (e.g. pullups).
 */
import 'react-native';
import {
getSingleExerciseStrengthScore,
getOneRepMaximumForBodyWeightExercise,
} from '../src/components/strengthScore';
import {isBodyweightExercise} from 'components/content';
// Shared fixture values used by every suite below.
const bodyweight = 80; // athlete body weight in kg
const age = 30;

// ################################ Test isBodyweightExercise() ################################
describe('Checking isBodyweightExercise()', () => {
  test('Pullup is bodyweight exercise', () => {
    expect(isBodyweightExercise('pullup')).toBe(true);
  });
  // Typo fixed in the description below: "abodyweight" -> "a bodyweight".
  test('Bench press is not a bodyweight exercise', () => {
    expect(isBodyweightExercise('benchpress')).toBe(false);
  });
  // Unknown / missing exercise ids must raise rather than silently return.
  test('Unknown exercise id', () => {
    expect(() => {
      isBodyweightExercise('THISDOESNOTEXIST');
    }).toThrow();
  });
  test('Undefined exercise id', () => {
    expect(() => {
      isBodyweightExercise();
    }).toThrow();
  });
});

// ################################ Test getOneRepMaximumForBodyWeightExercise() ################################
describe('Checking getOneRepMaximumForBodyWeightExercise()', () => {
  // 80 kg body weight, no extra load, 5 reps at RPE-ish 5 -> 95 kg 1RM.
  test('One rep max without extra weight', () => {
    expect(getOneRepMaximumForBodyWeightExercise(bodyweight, 0, 5, 5)).toBe(95);
  });
  // Arguments must be numbers; a numeric string is rejected.
  test('Test with faulty input types', () => {
    expect(() => {
      getOneRepMaximumForBodyWeightExercise('103', 15, 5, 2.5);
    }).toThrow();
  });
});

// ################################ Test getSingleExerciseStrengthScore() ################################
describe('Checking getSingleExerciseStrengthScore()', () => {
  test('get pullup strength score (default male)', () => {
    expect(getSingleExerciseStrengthScore(true, age, bodyweight, 'pullup', 95)).toBe(50.2);
  });
});
|
<gh_stars>0
import { Button, TextInput } from 'evergreen-ui';
import React from 'react';
import Thread from './diamond-threads/Thread'
interface ThreadClientState {
  // Submission id extracted from the last URL the user submitted, or
  // null before the first successful submission.
  lastSubmissionId: string | null,
}

// Top-level client: a URL input plus a rendered reddit thread for the
// submission id parsed out of that URL.
export default class ThreadClient extends React.Component {
  state: ThreadClientState = {
    lastSubmissionId: null,
  }

  // Raw text of the URL field; kept outside of state because typing
  // should not trigger re-renders.
  enteredURL: string = '';

  // Parse the submission id out of the entered URL, e.g.
  // https://www.reddit.com/r/x/comments/abc123/title -> "abc123".
  submitURL() {
    const marker = '/comments/';
    const commentsIndex = this.enteredURL.indexOf(marker);
    if (commentsIndex === -1) {
      return; // not a recognizable reddit submission URL
    }
    // Submission ids are 6 characters long. String.prototype.substr is
    // deprecated, so use slice with explicit bounds; the offset is
    // derived from the marker instead of a magic 10.
    const idStart = commentsIndex + marker.length;
    const submissionId = this.enteredURL.slice(idStart, idStart + 6);
    this.setState({
      lastSubmissionId: submissionId,
    });
  }

  render(): React.ReactNode {
    return (
      <div style={{display: 'flex', flexDirection: 'column', alignItems: 'center'}}>
        <div style={{alignSelf: 'stretch', height: 150, display: 'flex', justifyContent: 'center', alignItems: 'center'}}>
          <div style={{display: 'flex', alignItems: 'center'}}>
            <TextInput
              placeholder={"Enter a reddit URL"}
              width={800}
              height={60}
              onChange={(e: any) => {
                this.enteredURL = e.target.value;
              }}
              style={{marginRight: 20}}
            />
            <Button appearance={'primary'} height={56} onClick={() => {this.submitURL()}}>Go</Button>
          </div>
        </div>
        {this.state.lastSubmissionId ? <Thread submissionId={this.state.lastSubmissionId} /> : null}
      </div>
    );
  }
}
|
<reponame>Wiskey-farketmez/cerberus_research<gh_stars>100-1000
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.FormBody;
import okhttp3.RequestBody;
import java.io.File;
import java.io.FileWriter;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Random;
import java.util.Scanner;
/**
 * Research client (from a published malware-analysis repository) that
 * speaks the Cerberus banking-trojan C2 protocol: it can register a fake
 * device, send a device-info beacon, and request the HTML injection
 * template for a given target package. All protocol payloads are
 * RC4-encrypted, hex-encoded, then base64-encoded, matching the bot's
 * own transport encoding.
 *
 * Configuration is read from config.txt, one value per line:
 * campaign name, C2 URL, RC4 communication key, device id.
 *
 * NOTE(review): `Base64` (two-arg decode/encodeToString) and
 * `_RC4Implementation` are not imported here -- they appear to come from
 * elsewhere in the project (Android-style Base64 API); confirm.
 */
public class queryCerberus{
    // --- values loaded from config.txt (see loadConfig) ---
    static String campaignName;   // botnet campaign tag
    static String C2URL;          // C2 gate URL
    static String C2CommKey;      // RC4 key for transport encryption
    static String deviceID;       // previously registered device id
    // --- values built per run ---
    static String newID;          // freshly generated device id
    static String registerDevice;     // JSON body for "new_device"
    static String requestInjection;   // JSON body for "d_attacker"
    static String requestInfo;        // JSON body for "info_device"

    /**
     * Load the config and pre-build the three JSON request bodies.
     * `testMode` is accepted but unused; `payload` is the target package
     * name used when requesting an injection template.
     */
    public queryCerberus(boolean testMode, String payload){
        loadConfig();
        System.out.println("Current ID: "+deviceID);
        // Random id in the bot's AAAA-AAAA-AAAA-AAAA format.
        newID=_randomString(4)+"-"+_randomString(4)+"-"+_randomString(4)+"-"+_randomString(4);
        registerDevice="{\"ID\":\""+newID+"\",\"AR\":\"8\",\"TT\":\""+campaignName+"\",\"CY\":\"us\",\"OP\":\"Android\",\"MD\":\"Unknown Custom\"}";
        requestInjection="{\"AK\":\""+payload+"\"}";
        requestInfo="{\"DM\":\"1\",\"AD\":\"null\",\"BL\":\"64\",\"TW\":\"48\",\"SA\":\"0\",\"SP\":\"2\",\"SS\":\"1\",\"LE\":\"en\",\"SY\":\"0\",\"SM\":\"0\",\"ID\":\""+deviceID+"\",\"IS\":\"\",\"NR\":\"\",\"GA\":\"\",\"PS\":\"0\",\"PC\":\"0\",\"PP\":\"0\",\"PO\":\"0\"}";
    }

    /** Random lowercase-alphanumeric string of length var1. */
    public static String _randomString(int var1) {
        String var2 = "qwertyuiopasdfghjklzxcvbnm1234567890";
        Random var3 = new Random();
        StringBuilder var4 = new StringBuilder();
        for(int var5 = 0; var5 < var1; ++var5) {
            var4.append(var2.charAt(var3.nextInt(var2.length())));
        }
        return var4.toString();
    }

    /**
     * Reverse of the transport encoding: base64-decode, hex-decode, then
     * RC4-decrypt with _key. Returns "" on any failure.
     */
    private static String _base64DecodeHexDecodeRC4Decrypt(String _ciphertext, String _key) {
        try {
            byte[] var4 = Base64.decode(_ciphertext, 0);
            String var2 = new String(var4, "UTF-8");
            byte[] var6 = _hexDecode(var2);
            _RC4Implementation var5 = new _RC4Implementation(_key.getBytes());
            _ciphertext = new String(var5._RC4Decrypt(var6));
            return _ciphertext;
        } catch (Exception var3) {
            return "";
        }
    }

    /**
     * Transport encoding used by the bot: RC4-encrypt with _key,
     * lowercase-hex-encode, then base64-encode.
     */
    private static String _base64EncodeHexEncodeRC4Encrypt(String _plaintext, String _key) {
        byte[] var5 = (new _RC4Implementation(_key.getBytes()))._RC4Encrypt(_plaintext.getBytes());
        StringBuffer var2 = new StringBuffer(var5.length * 2);
        int var3 = var5.length;
        for(int var4 = 0; var4 < var3; ++var4) {
            // Hex-encode one byte; pad single digits with a leading '0'.
            _plaintext = Integer.toString(var5[var4] & 255, 16);
            if (_plaintext.length() < 2) {
                var2.append('0');
            }
            var2.append(_plaintext);
        }
        return Base64.encodeToString(var2.toString().getBytes(), 0);
    }

    /** Decode a hex string into its raw bytes (two chars per byte). */
    public static byte[] _hexDecode(String var0) {
        int var1 = var0.length();
        byte[] var2 = new byte[var1 / 2];
        for(int var3 = 0; var3 < var1; var3 += 2) {
            var2[var3 / 2] = (byte)((byte)((Character.digit(var0.charAt(var3), 16) << 4) + Character.digit(var0.charAt(var3 + 1), 16)));
        }
        return var2;
    }

    /**
     * POST a command to the C2 gate: `q` is the command name, `ws` the
     * encrypted JSON body. Returns the decrypted response, or null on
     * any network/transport error.
     */
    public static String HTTPRequest(String command, String body){
        try{
            OkHttpClient client = new OkHttpClient();
            RequestBody formBody = new FormBody.Builder()
                .add("q", command)
                .add("ws", _base64EncodeHexEncodeRC4Encrypt(body, C2CommKey))
                .build();
            System.out.println(_base64EncodeHexEncodeRC4Encrypt(body, C2CommKey));
            Request request = new Request.Builder()
                .url(C2URL)
                .post(formBody)
                .build();
            Response response = client.newCall(request).execute();
            return _base64DecodeHexDecodeRC4Decrypt(response.body().string(), C2CommKey);
        }catch(Exception e){
            System.out.println("Ex: FAIL");
            return null;
        }
    }

    /** Read config.txt: campaign, C2 URL, RC4 key, device id (in order). */
    public static void loadConfig(){
        try {
            Scanner scanner = new Scanner(new File("config.txt"));
            campaignName=scanner.nextLine();
            C2URL=scanner.nextLine();
            C2CommKey=scanner.nextLine();
            deviceID=scanner.nextLine();
            scanner.close();
        } catch (FileNotFoundException e) {
            System.out.println("Error while loading config");
        }
    }

    /** Rewrite config.txt, persisting the freshly generated device id. */
    public static void saveConfig(){
        try{
            FileWriter out= new FileWriter("config.txt");
            out.write(campaignName+"\n");
            out.write(C2URL+"\n");
            out.write(C2CommKey+"\n");
            out.write(newID+"\n");
            out.close();
        }catch(Exception e){
            System.out.println("Error while saving config");
        }
    }

    /**
     * CLI entry point. args[0] selects the command:
     *   info     - send a device-info beacon for the configured id
     *   getinject- fetch the injection template for package args[1],
     *              saved as "<package>.html" when found
     *   register - register a new fake device and persist its id
     */
    public static void main(String args[]){
        queryCerberus d;
        if(!(args.length < 2))
            d=new queryCerberus(false, args[1]);
        else
            d=new queryCerberus(false, "");
        System.out.println(d.C2URL);
        String response="";
        boolean outDirection=false;  // true when the response is written to a file
        switch(args[0]){
            case "info":
                response=HTTPRequest("info_device",requestInfo);
                break;
            case "getinject":
                response=HTTPRequest("d_attacker", requestInjection);
                outDirection=true;
                break;
            case "register":
                System.out.println("New ID: "+newID);
                response=HTTPRequest("new_device", registerDevice);
                saveConfig();
                break;
            default:
                System.out.println("Unknown parameter");
                break;
        }
        if(outDirection){
            // Responses shorter than ~3 chars mean "no template available".
            if(response.length()>3){
                try{
                    FileWriter outF = new FileWriter(args[1]+".html");
                    outF.write(response);
                    outF.close();
                    System.out.println("Successfully saved to: "+args[1]+".html");
                }catch(Exception e){
                    System.out.println("Exception while trying to save");
                }
            }else{
                System.out.println("NOT FOUND: "+args[1]);
            }
        }else{
            System.out.println(response);
        }
    }
}
|
package search;
import java.io.BufferedReader;
import java.io.InputStreamReader;
/**
*
* @author exponential-e
* 백준 20125번: 쿠키의 신체 측정
*
* @see https://www.acmicpc.net/problem/20125
*
*/
// BOJ 20125 ("cookie" body measurement): on an N x N grid of '*' cells, locate
// the heart (one row below the topmost '*', the head) and measure the two
// arms, the back/waist, and the two legs.
public class Boj20125 {
private static final String SPACE = " ";
private static final String NEW_LINE = "\n";
private static final char HEAD = '*';
// Scan directions: left, right, down ({row delta, col delta} per entry).
private static final int[][] DIRECTIONS = {{0, -1}, {0, 1}, {1, 0}};
private static final int ROW = 0, COL = 1;
// Heart position (row below the first '*' found in row-major order).
private static Point start;
private static int N;
// Simple mutable grid coordinate; lengthCheck advances it in place.
private static class Point {
int row;
int col;
public Point(int row, int col) {
this.row = row;
this.col = col;
}
}
public static void main(String[] args) throws Exception {
BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
N = Integer.parseInt(br.readLine());
char[][] board = new char[N][N];
boolean flag = false;
for(int i = 0; i < N; i++) {
String input = br.readLine();
for(int j = 0; j < N; j++) {
board[i][j] = input.charAt(j);
// The first '*' in row-major order is the head; the heart sits directly below it.
if(!flag && board[i][j] == HEAD) {
flag = true;
start = new Point(i + 1, j);
}
}
}
System.out.println(analysis(board));
}
private static String analysis(char[][] arr) {
StringBuilder sb = new StringBuilder();
// Heart coordinates reported 1-indexed.
sb.append(start.row + 1).append(SPACE).append(start.col + 1).append(NEW_LINE);
// Arms are measured on throwaway copies so `start` is untouched; the heart
// cell itself is skipped by seeding count with -1.
int leftArm = lengthCheck(arr, DIRECTIONS[0], new Point(start.row, start.col), -1);
int rightArm = lengthCheck(arr, DIRECTIONS[1], new Point(start.row, start.col), -1);
// Deliberate trick: the back is measured with `start` itself, so lengthCheck
// mutates start.row to the first row past the waist — exactly where the
// legs begin. The leg measurements below depend on this mutation.
int back = lengthCheck(arr, DIRECTIONS[2], start, -1);
// Legs start at count 0 (their first cell is counted).
int leftLeg = lengthCheck(arr, DIRECTIONS[2], new Point(start.row, start.col - 1), 0);
int rightLeg = lengthCheck(arr, DIRECTIONS[2], new Point(start.row, start.col + 1), 0);
sb.append(leftArm).append(SPACE).append(rightArm).append(SPACE)
.append(back).append(SPACE).append(leftLeg).append(SPACE).append(rightLeg);
return sb.toString();
}
// Walks from s in direction D while on '*' cells, MUTATING s in place;
// returns `count` plus one per step taken (seed controls whether the
// starting cell counts: -1 skips it, 0 counts it).
private static int lengthCheck(char[][] arr, final int[] D, Point s, int count){
while(!outOfRange(s.row, s.col) && arr[s.row][s.col] == HEAD) {
s.row += D[ROW];
s.col += D[COL];
count++;
}
return count;
}
private static boolean outOfRange(int row, int col) {
return row < 0 || row >= N || col < 0 || col >= N;
}
}
|
# Build script: compiles the example Go package to JavaScript with gopherjs.
# Append the project-local Go workspace to GOPATH.
export GOPATH=$GOPATH:`pwd`/src/go
# Add every GOPATH entry's bin/ directory to PATH (replaces each ':' in
# GOPATH with '/bin:' and appends a final '/bin').
export PATH=$PATH:${GOPATH//://bin:}/bin
echo "go path? ${GOPATH}"
# Emit the compiled bundle where the iframe plugin expects it.
gopherjs build github.com/eapearson/example -o src/plugin/iframe_root/apps/example.js
|
<filename>sandbox/src/main/java/org/mammon/sandbox/objects/bank/BlindedIdentity.java
package org.mammon.sandbox.objects.bank;
import org.mammon.math.FiniteField;
import org.mammon.math.Group;
import org.mammon.messaging.FromPersistent;
import org.mammon.messaging.PersistAs;
import org.mammon.sandbox.objects.example.ExampleFiniteField;
import org.mammon.sandbox.objects.example.ExampleGroup;
import org.mammon.sandbox.objects.example.ExamplePaymentHashFunction;
import org.mammon.sandbox.objects.example.ExampleSetup;
import org.mammon.sandbox.objects.example.ExampleSignatureHashFunction;
import org.mammon.scheme.brands.BrandsSchemeSetup;
import org.mammon.scheme.brands.generic.bank.AbstractBankPrivate;
import org.mammon.scheme.brands.generic.bank.AbstractBlindedIdentity;
import org.mammon.scheme.brands.generic.bank.AbstractIssuedWitnesses;
/**
 * Concrete blinded payer identity for the example instantiation of the
 * Brands e-cash scheme (example group, finite field and hash functions).
 * All behavior lives in AbstractBlindedIdentity; this class only pins the
 * generic parameters and supplies the matching IssuedWitnesses factory.
 */
public class BlindedIdentity
extends
AbstractBlindedIdentity<ExampleGroup, ExampleFiniteField, Long, ExampleSignatureHashFunction, ExamplePaymentHashFunction> {
// Reconstruction constructor used by the persistence layer (@FromPersistent).
@FromPersistent(AbstractBlindedIdentity.class)
public BlindedIdentity(@PersistAs("setup") ExampleSetup setup, @PersistAs("bank") AbstractBankPrivate<ExampleGroup, ExampleFiniteField, Long, ExampleSignatureHashFunction, ExamplePaymentHashFunction> bank,
@PersistAs("payerIdentity") Group.Element<ExampleGroup> payerIdentity) {
super(setup, bank, payerIdentity);
}
// Factory hook: builds the example-typed IssuedWitnesses for this identity.
@Override
protected AbstractIssuedWitnesses<ExampleGroup, ExampleFiniteField, Long, ExampleSignatureHashFunction, ExamplePaymentHashFunction> newAbstractIssuedWitnesses(
BrandsSchemeSetup<ExampleGroup, ExampleFiniteField, Long, ExampleSignatureHashFunction, ExamplePaymentHashFunction> setup,
AbstractBankPrivate<ExampleGroup, ExampleFiniteField, Long, ExampleSignatureHashFunction, ExamplePaymentHashFunction> bank,
Group.Element<ExampleGroup> payerIdentity, FiniteField.Element<ExampleFiniteField> w) {
return new IssuedWitnesses((ExampleSetup) setup, (AbstractBankPrivate<ExampleGroup, ExampleFiniteField, Long, ExampleSignatureHashFunction, ExamplePaymentHashFunction>) bank, payerIdentity, w);
}
}
|
package com.littlejenny.gulimall.rabbitmq.config;
import com.littlejenny.common.constant.RabbitmqConstants;
import org.springframework.amqp.core.Binding;
import org.springframework.amqp.core.Exchange;
import org.springframework.amqp.core.Queue;
import org.springframework.amqp.core.TopicExchange;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class ObjectConfig {

    /**
     * Delayed stock-check queue: messages wait here for 1 minute, then are
     * dead-lettered to the real stock-check queue for processing.
     */
    @Bean
    public Queue delayStockCheck() {
        // Typed map instead of the raw Map/HashMap the original used.
        Map<String, Object> args = new HashMap<>();
        args.put("x-dead-letter-exchange", RabbitmqConstants.HANDLESTOCK_EXCHANGE);
        args.put("x-dead-letter-routing-key", RabbitmqConstants.HANDLESTOCK_REAL_QUEUE_KEY);
        // 1 minute TTL before the message is dead-lettered.
        args.put("x-message-ttl", 60000);
        // durable=true, exclusive=false, autoDelete=false
        return new Queue(RabbitmqConstants.HANDLESTOCK_DELAY_QUEUE, true, false, false, args);
    }

    /** Queue where expired stock-check messages are actually consumed. */
    @Bean
    public Queue realStockCheck() {
        return new Queue(RabbitmqConstants.HANDLESTOCK_REAL_QUEUE, true, false, false, null);
    }

    /** Binds the delay stock queue to the stock exchange under the delay key. */
    @Bean
    public Binding bindingDelayStockCheck() {
        return new Binding(RabbitmqConstants.HANDLESTOCK_DELAY_QUEUE, Binding.DestinationType.QUEUE, RabbitmqConstants.HANDLESTOCK_EXCHANGE, RabbitmqConstants.HANDLESTOCK_DELAY_QUEUE_KEY, null);
    }

    /** Binds the real stock queue to the stock exchange under the real key. */
    @Bean
    public Binding bindingRealStockCheck() {
        return new Binding(RabbitmqConstants.HANDLESTOCK_REAL_QUEUE, Binding.DestinationType.QUEUE, RabbitmqConstants.HANDLESTOCK_EXCHANGE, RabbitmqConstants.HANDLESTOCK_REAL_QUEUE_KEY, null);
    }

    /** Topic exchange routing both stock-check queues (durable, no auto-delete). */
    @Bean
    public Exchange exchangeStockCheck() {
        return new TopicExchange(RabbitmqConstants.HANDLESTOCK_EXCHANGE, true, false);
    }

    /**
     * Delayed order-check queue: mirrors the stock setup — 1 minute TTL,
     * then dead-lettered to the real order-check queue.
     */
    @Bean
    public Queue delayOrderCheck() {
        Map<String, Object> args = new HashMap<>();
        args.put("x-dead-letter-exchange", RabbitmqConstants.HANDLEORDER_EXCHANGE);
        args.put("x-dead-letter-routing-key", RabbitmqConstants.HANDLEORDER_REAL_QUEUE_KEY);
        // 1 minute TTL before the message is dead-lettered.
        args.put("x-message-ttl", 60000);
        return new Queue(RabbitmqConstants.HANDLEORDER_DELAY_QUEUE, true, false, false, args);
    }

    /** Queue where expired order-check messages are actually consumed. */
    @Bean
    public Queue realOrderCheck() {
        return new Queue(RabbitmqConstants.HANDLEORDER_REAL_QUEUE, true, false, false, null);
    }

    /** Binds the delay order queue to the order exchange under the delay key. */
    @Bean
    public Binding bindingDelayOrderCheck() {
        return new Binding(RabbitmqConstants.HANDLEORDER_DELAY_QUEUE, Binding.DestinationType.QUEUE, RabbitmqConstants.HANDLEORDER_EXCHANGE, RabbitmqConstants.HANDLEORDER_DELAY_QUEUE_KEY, null);
    }

    /** Binds the real order queue to the order exchange under the real key. */
    @Bean
    public Binding bindingRealOrderCheck() {
        return new Binding(RabbitmqConstants.HANDLEORDER_REAL_QUEUE, Binding.DestinationType.QUEUE, RabbitmqConstants.HANDLEORDER_EXCHANGE, RabbitmqConstants.HANDLEORDER_REAL_QUEUE_KEY, null);
    }

    /** Topic exchange routing both order-check queues (durable, no auto-delete). */
    @Bean
    public Exchange exchangeOrderCheck() {
        return new TopicExchange(RabbitmqConstants.HANDLEORDER_EXCHANGE, true, false);
    }

    /** Flash-sale (seckill) order queue. */
    @Bean
    public Queue secOrder() {
        return new Queue(RabbitmqConstants.HANDLESECORDER_QUEUE, true, false, false, null);
    }

    /**
     * Binding for the seckill order queue.
     * NOTE(review): the third Binding constructor argument is the *exchange*
     * name, but HANDLESECORDER_QUEUE_KEY is named like a routing key, unlike
     * every other binding in this class which passes an *_EXCHANGE constant
     * there — confirm this binding targets the intended exchange. Left
     * unchanged to preserve behavior.
     */
    @Bean
    public Binding bindingsecOrder() {
        return new Binding(RabbitmqConstants.HANDLESECORDER_QUEUE, Binding.DestinationType.QUEUE, RabbitmqConstants.HANDLESECORDER_QUEUE_KEY, RabbitmqConstants.HANDLEORDER_REAL_QUEUE_KEY, null);
    }
}
|
<gh_stars>1-10
export const PunctuationNovel = `year,exclam.year,quest.year,period.year,comma.year,semi.year,colon.year,total.year
1791,0.0023517012,0.0035255413,0.0422664336,0.0845731794,0.0122242124,0.0038474668,0.1522737553
1792,0.0058104946,0.0046323854,0.0433463537,0.0826068559,0.0137638736,0.001451127,0.1520516165
1793,0.0019927577,0.0020350048,0.0303960136,0.086308696,0.0168440037,0.0025124902,0.1416536156
1794,0.0045937551,0.0032412452,0.0357796071,0.0910974345,0.010906563,0.002433975,0.1485479335
1795,0.0040986195,0.0027170791,0.043380177,0.0823779746,0.0096638919,0.0032884542,0.1479180857
1796,0.0047895494,0.0042288746,0.0361409826,0.0919123949,0.0122238942,0.0038978401,0.1558802459
1797,0.006511493,0.0037669047,0.0430997015,0.0950387186,0.0150784137,0.0019243482,0.1661956036
1798,0.0032709764,0.0038039388,0.0389993805,0.0864668111,0.0136412633,0.0020693552,0.150652579
1799,0.0033381612,0.0037454267,0.0435000693,0.0838669954,0.0082417033,0.0034581066,0.146980793
1800,0.0031085442,0.0041869311,0.041035223,0.0892383442,0.0100997449,0.0029802913,0.1550433343
1801,0.0035075377,0.0033878751,0.04198477,0.0876636559,0.0110343492,0.0037445345,0.1539784003
1802,0.0039329924,0.0027288505,0.0361230834,0.080595768,0.0139712201,0.0036582762,0.1427182556
1803,0.0044220192,0.0030805204,0.0357964765,0.0825581244,0.0146251855,0.0048156793,0.1485112447
1804,0.0037665884,0.0034116892,0.0423966081,0.0902831682,0.0139356396,0.0039234731,0.1603017891
1805,0.0059443583,0.003426234,0.0335333443,0.0881500904,0.0125145259,0.0042790104,0.149009997
1806,0.0043703436,0.0025607711,0.0345421651,0.0834701429,0.0134252705,0.0044328398,0.1443020931
1807,0.0046538566,0.0031150954,0.0346119382,0.0891600221,0.0145087262,0.0045126907,0.1518559154
1808,0.0038755381,0.002694065,0.0365005165,0.0922017368,0.0135697223,0.0040769423,0.1551391823
1809,0.0030763825,0.0026654009,0.0333335552,0.089025838,0.0141485864,0.0036487815,0.1474637098
1810,0.0037453581,0.0026183025,0.0326135355,0.0890746217,0.0132900699,0.0030919971,0.1458167741
1811,0.004160417,0.0025408822,0.0331610546,0.0851466994,0.0128387785,0.0031511178,0.142942723
1812,0.0040899983,0.0035694775,0.0380929398,0.0875914019,0.015326018,0.0031435314,0.1530914759
1813,0.0043923217,0.0030249274,0.0328455667,0.088406965,0.0148707932,0.0030813875,0.1487949342
1814,0.0044938429,0.0031646131,0.0361227489,0.0919251217,0.0128873758,0.0026869568,0.1522538551
1815,0.0040964346,0.0028337315,0.03228559,0.0916901792,0.0144354362,0.0027661881,0.1491217797
1816,0.003999267,0.0043483222,0.0358985168,0.0914622345,0.0172847169,0.0028504196,0.1568696331
1817,0.0046379535,0.002777753,0.0347598088,0.0900733984,0.0133755439,0.0029468301,0.1495844027
1818,0.0040903434,0.0030181772,0.034124659,0.0870164682,0.0136357951,0.0024247389,0.1457661879
1819,0.0032114635,0.0027603354,0.0329528335,0.0927856072,0.01121348,0.0027369794,0.1472843178
1820,0.0043871375,0.0034652526,0.0309759363,0.0874631396,0.0117045649,0.0019240727,0.1413304296
1821,0.003426929,0.002791824,0.0321702241,0.0935313791,0.0137654884,0.0024658709,0.1494089856
1822,0.0045291046,0.0033135856,0.0310991864,0.0945639186,0.0126737,0.002442298,0.1498835764
1823,0.0044528056,0.0035799256,0.0340741195,0.0952295558,0.0134646572,0.0014144656,0.1531623606
1824,0.0041459154,0.0035349125,0.033675106,0.0909160376,0.0118616692,0.0018890221,0.1471191871
1825,0.0036970038,0.0029790298,0.0343877623,0.0951824869,0.0130193932,0.0026197176,0.1532438327
1826,0.0050063081,0.003148576,0.035120297,0.0915093589,0.0126281238,0.0021034344,0.1507083779
1827,0.0035269628,0.0029863088,0.033583089,0.0940894901,0.0119517572,0.0021081513,0.1498992356
1828,0.0038698239,0.0033108571,0.0353819235,0.0931258474,0.0117734584,0.001868189,0.1503981538
1829,0.0041671043,0.0030093123,0.0342268866,0.0926447282,0.0124027256,0.0023444665,0.1496291413
1830,0.0049957801,0.0031947807,0.0348611761,0.0921343028,0.0112494269,0.0016668904,0.148939379
1831,0.0055638805,0.0031869039,0.0366191172,0.0875768156,0.0112512312,0.0018849403,0.1473246626
1832,0.0049036,0.0029811274,0.032944264,0.0883495294,0.0121988257,0.0022504186,0.1445516613
1833,0.004795707,0.0034570031,0.0365767726,0.0855412167,0.0106050992,0.0016797797,0.1433115559
1834,0.0047935697,0.0032832086,0.034571261,0.0906547774,0.0113704491,0.0017173359,0.1473764038
1835,0.0055604332,0.0037665993,0.033829719,0.0941204157,0.0125485001,0.0019903287,0.1527886757
1836,0.0046571625,0.0035436212,0.0352108507,0.0901081942,0.0113628546,0.0016233765,0.1478298876
1837,0.0055985654,0.0037543219,0.0368793684,0.0897743153,0.0122574499,0.0026830377,0.1517815327
1838,0.0054226354,0.0039768136,0.0362126746,0.0894878068,0.0108936057,0.001178313,0.1480139405
1839,0.0047502495,0.0034320367,0.0364545666,0.0890354205,0.0121211775,0.0014835795,0.1480566866
1840,0.0055828953,0.0039385391,0.0361401461,0.0884793887,0.0113865694,0.0015260645,0.1479067628
1841,0.0061020422,0.0038541514,0.0383840605,0.0913261641,0.0109699448,0.0012778131,0.1524522768
1842,0.0053796421,0.0036747417,0.0376045218,0.0900216391,0.0115797657,0.0013309201,0.1505382428
1843,0.0066530174,0.003451703,0.0361436635,0.0922938512,0.0109736181,0.0013113375,0.15193972
1844,0.0066762959,0.0040122854,0.0372579032,0.0905498222,0.0106631256,0.0011457256,0.1511391743
1845,0.0068565773,0.0038242964,0.0378301685,0.0894081024,0.0103409093,0.0013593665,0.1503327307
1846,0.0060497411,0.0040271824,0.0388716938,0.0901790334,0.0093803209,0.0009872514,0.150034535
1847,0.0065305815,0.003726117,0.0368725304,0.0893925706,0.0111088596,0.001949478,0.1503187466
1848,0.0048453251,0.0039890541,0.0396501188,0.0933404031,0.010751375,0.0015879912,0.1550542921
1849,0.0048642021,0.0036741119,0.0384539334,0.0909257062,0.011327965,0.0023038989,0.1524741096
1850,0.0052234303,0.0036451728,0.0392243337,0.0909713021,0.0111104857,0.0011158186,0.1521345193
1851,0.0052771607,0.00391009,0.037171228,0.0931190663,0.0126563975,0.0019224862,0.1549081094
1852,0.0058379913,0.00361719,0.0374904902,0.0937095971,0.011556552,0.0017190621,0.1547497944
1853,0.0059341124,0.004405143,0.0423311721,0.0879906985,0.0107828386,0.0014158669,0.1539233595
1854,0.0059326969,0.0046753835,0.0434693952,0.0908308794,0.0102506926,0.0016314995,0.1575540808
1855,0.0060237617,0.004549199,0.0441955699,0.0910991421,0.0112130205,0.001557364,0.1594159742
1856,0.0057405156,0.0043863038,0.0452759056,0.0910622085,0.0103406252,0.0011662741,0.158885255
1857,0.0066452323,0.0046717251,0.0431804559,0.0877731628,0.0101165525,0.0016931789,0.1550705541
1858,0.0040798476,0.0043644041,0.0434302992,0.0851535274,0.0118199164,0.0018913683,0.1517123064
1859,0.0058517345,0.0048321616,0.0449488224,0.0883031477,0.0102067105,0.0013898798,0.1562974314
1860,0.0048915134,0.0041417809,0.0433404982,0.0891880791,0.0106658612,0.0014442004,0.1545515702
1861,0.005347598,0.0054043119,0.0464634915,0.0833196962,0.009510309,0.0017322175,0.1529324566
1862,0.0057713693,0.0054766242,0.049339857,0.0822317111,0.0094820404,0.0010147532,0.153936969
1863,0.0043536553,0.0047644896,0.0475316963,0.0837523636,0.0096170213,0.001825707,0.1527123874
1864,0.0045057844,0.0045976054,0.0456344668,0.0829664254,0.0091279507,0.0011342636,0.1488804912
1865,0.0040274449,0.0048237059,0.0446954722,0.0852650979,0.0101424686,0.0010078331,0.1509523442
1866,0.0045852539,0.0044799294,0.0444246151,0.0817371494,0.0099950815,0.0013222842,0.1475407447
1867,0.0045647274,0.0046298608,0.0469292994,0.0819414701,0.0096071123,0.0011411369,0.149579561
1868,0.0047699019,0.0047285905,0.047255797,0.0790661086,0.0085424276,0.0011490115,0.146436339
1869,0.0039082768,0.0044804458,0.044643313,0.0811833859,0.0096598711,0.0009699026,0.1458181721
1870,0.0039805412,0.0047524752,0.0486578223,0.0824795921,0.0101340754,0.0012556069,0.1523178647
1871,0.0046969588,0.0049387357,0.0467347617,0.0812708418,0.0103865663,0.0016156078,0.1505472618
1872,0.0042294316,0.0045975742,0.0463792684,0.0801711496,0.0098367429,0.0014173833,0.1477413697
1873,0.0041266739,0.0049315828,0.0501618625,0.0781382455,0.0090159531,0.0010796713,0.148229267
1874,0.0039264169,0.0047803625,0.0506366709,0.0812039667,0.0090822956,0.0011947764,0.1517664588
1875,0.0042407466,0.0051478627,0.0528431547,0.077215627,0.0084795285,0.0011013308,0.1502465431
1876,0.0043767155,0.0050522601,0.0504771212,0.0782446672,0.0096286056,0.0013227876,0.1501907925
1877,0.0046947783,0.0047701463,0.0464189548,0.0790212727,0.0095889423,0.0012642546,0.1466464586
1878,0.0041357013,0.0049768805,0.049944358,0.0811645074,0.0093952795,0.0011283865,0.1518540884
1879,0.0039912755,0.0045979921,0.0506357862,0.0770288349,0.0087364071,0.0013754554,0.1474417276
1880,0.0036762714,0.0052609831,0.0524102553,0.0767001378,0.0090778857,0.0012639807,0.1490983016
1881,0.0039276912,0.0050975073,0.0497389194,0.0787599213,0.009221707,0.0011501285,0.1488816841
1882,0.0037011362,0.00503336,0.0514093178,0.0771857379,0.0088696784,0.0011422418,0.1483097246
1883,0.0041764596,0.0052361943,0.0518334717,0.0783041329,0.0091989374,0.0012232302,0.150999488
1884,0.0038524801,0.0048982611,0.0543572822,0.0782136984,0.0093533012,0.0014149469,0.1533810135
1885,0.0048589258,0.0053141576,0.0536655172,0.0781094308,0.0089519947,0.0014029915,0.153374234
1886,0.0045392079,0.005130678,0.0539850028,0.0780569251,0.0087341705,0.0015364141,0.1531187855
1887,0.0047668774,0.0052808521,0.0552951942,0.0778425543,0.0078007176,0.0011345401,0.1529866926
1888,0.0038563056,0.0053934432,0.0542007617,0.0773894502,0.0085689561,0.0011945042,0.1514397695
1889,0.004593025,0.0052242655,0.0541482781,0.0767117798,0.0086681784,0.0012628406,0.151737039
1890,0.0043655486,0.0055789946,0.055956519,0.0762522902,0.0077008703,0.0015847087,0.1527251992
1891,0.0040603648,0.0052377945,0.0555843649,0.0780596797,0.0077133358,0.001717141,0.1535720687
1892,0.0044186585,0.0051841004,0.0609319961,0.0765950044,0.007566947,0.0015353432,0.1578690853
1893,0.004114311,0.0053938844,0.0633183947,0.0741587167,0.0078785153,0.0012672734,0.1577076232
1894,0.004495459,0.0054583208,0.0626570834,0.0754068735,0.0075759903,0.0013201887,0.1582789651
1895,0.0037466237,0.0046959792,0.0673051873,0.0747603009,0.0086283976,0.0018633004,0.1629396514
1896,0.0048939968,0.0056275653,0.0756114913,0.0749811985,0.0082830701,0.0020368575,0.1744410524
1897,0.0040537067,0.0056378842,0.0544174359,0.0735421514,0.008355509,0.0011084136,0.1476223438
1898,0.0036402914,0.0018285677,0.0251401698,0.0898132468,0.0086578162,0.0026530021,0.1347374294
1900,0.0049826101,0.005159684,0.0508063718,0.0706974221,0.0059174361,0.0015575467,0.1399399265
1901,0.0040041347,0.0044395782,0.0529343687,0.072208629,0.0064665126,0.0019288205,0.1431226086
1902,0.0041322568,0.0047857372,0.0548760741,0.0696188252,0.0038833004,0.001342971,0.1394801295
1903,0.0035397188,0.0051031901,0.0511270212,0.0691579392,0.0059559105,0.0013407968,0.1368127641
1904,0.0046705854,0.0053055271,0.0514035831,0.0711423081,0.0054912417,0.0015027864,0.1401913236
1905,0.0044899317,0.0049191322,0.0503892578,0.0709948673,0.0052736398,0.0014762739,0.138391533
1906,0.0038525183,0.0050529385,0.0512073239,0.0712774191,0.0044451126,0.0017067065,0.1383342356
1907,0.0053544171,0.0050192959,0.0523439198,0.0699508765,0.006674438,0.0020380234,0.1425722691
1908,0.0051171859,0.0055101878,0.0555779169,0.064798081,0.004881194,0.0015004947,0.1382856458
1909,0.0046768896,0.0052143578,0.0552345282,0.0695845354,0.0043708249,0.0015895384,0.1413455668
1910,0.0045909167,0.0047876597,0.0535766023,0.067921326,0.0049330096,0.0017650924,0.1383513641
1911,0.0051518229,0.0055477967,0.0539831308,0.0679924255,0.0051720361,0.001342078,0.1398179497
1912,0.0039530826,0.004706846,0.0556129856,0.0634227166,0.0049526437,0.0017462021,0.1349843293
1913,0.0042330474,0.0053710945,0.0568956168,0.0636746889,0.0039118824,0.001689041,0.1365153856
1914,0.0046825194,0.0061233328,0.0568475975,0.0657350097,0.0056173073,0.0015376514,0.1411212099
1915,0.0052715005,0.0052421452,0.0569609154,0.0662426148,0.0037940538,0.0016168422,0.1399576096
1916,0.0050357132,0.0054496756,0.0547362964,0.0674356123,0.0046850999,0.0016743918,0.1397212355
1917,0.0037837971,0.0047875668,0.0561302464,0.0625903328,0.0042901232,0.0014166389,0.1336815239
1918,0.0049429439,0.0056307994,0.0610705942,0.06523256,0.0041428356,0.001659984,0.143367803
1919,0.0043835005,0.0047970839,0.0578784252,0.0653798796,0.0041855664,0.0013845899,0.1386336305
1920,0.0042232354,0.0047689142,0.0551070677,0.0604684652,0.0044834408,0.0017620571,0.1315550565
1921,0.0042425526,0.005308356,0.0605378684,0.0620380483,0.0041196463,0.0013547923,0.1383090306
1922,0.0043158696,0.0054619405,0.0628181727,0.063131461,0.0039865169,0.0018843786,0.1422805381
1923,0.0050768211,0.0057516222,0.0644629631,0.0627137483,0.0033079408,0.0011293303,0.1430410701
1924,0.0044196343,0.0057311725,0.0591367691,0.067126912,0.0041491306,0.0014005422,0.1425952079
1925,0.0039116328,0.0060563543,0.0592198721,0.0651911028,0.0037767303,0.0016560019,0.1407079037
1926,0.0043139962,0.006339704,0.0618828068,0.0660668502,0.0037490493,0.0013964575,0.144492768
1927,0.003485563,0.006056547,0.0592428968,0.0653276596,0.0042499819,0.001605112,0.1408774341
1928,0.0044115923,0.0065221433,0.0589362854,0.0673398044,0.0040925534,0.001617767,0.144112342
1929,0.0039904874,0.0062500798,0.0656865358,0.0656173063,0.0039523871,0.001424098,0.1474525723
1930,0.0033144999,0.0061694688,0.0621639694,0.0632340833,0.0032642275,0.0013603946,0.1404542272
1931,0.0044800126,0.0060602935,0.0645143675,0.0641146013,0.0032136802,0.0016597776,0.1445933737
1932,0.0035059315,0.0065201664,0.0633506364,0.0665652099,0.0039339259,0.0015686069,0.1460914916
1933,0.0039987909,0.0074186379,0.0694092921,0.0639599316,0.0027762999,0.0020471074,0.1504101497
1934,0.0041275143,0.0073203816,0.0638245913,0.0642294003,0.0036850252,0.0023139159,0.146413757
1935,0.0037020213,0.0071765528,0.0656593078,0.062160614,0.0027217096,0.0015264403,0.1437877351
1936,0.0038885574,0.0070977816,0.0653598137,0.0601671253,0.0027300807,0.0020053798,0.1421544825
1937,0.0025599732,0.0071359009,0.0635022038,0.0632147899,0.0032766437,0.0013892044,0.1416298283
1938,0.0035053499,0.0074502276,0.0624220885,0.0674315387,0.0036053072,0.0022252853,0.1476904877
1939,0.0032070273,0.0068233704,0.0650135764,0.0622111477,0.002721332,0.0017948363,0.1426280369
1940,0.0027924093,0.0075605415,0.0709411921,0.0604239388,0.0037625765,0.0025445786,0.1487415496
1941,0.0039490793,0.0074494664,0.0687302122,0.0647645412,0.0028630371,0.0026052617,0.1509112276
1942,0.0036782537,0.0081731831,0.0720178613,0.0620697076,0.0027010993,0.0021204477,0.1514248471
1943,0.0032350312,0.0080295475,0.0697582478,0.0615199915,0.0027176635,0.0016031503,0.1473671546
1944,0.0037909722,0.0081552879,0.0715628358,0.0599362395,0.0040324155,0.0018898793,0.1498648553
1945,0.0022368055,0.0065364582,0.0663443625,0.0609265905,0.0030947037,0.002088264,0.1419053632
1946,0.0051590947,0.006864409,0.0690507574,0.0611310395,0.0030198971,0.0022832889,0.1483477987
1947,0.0038848285,0.0073130463,0.0688101529,0.0639072798,0.0030898666,0.0020607846,0.1496649323
1948,0.0033607476,0.0073047808,0.0705984317,0.0598631907,0.0024384677,0.0017775699,0.1459605283
1949,0.0039558418,0.0078004003,0.0695145131,0.0631561408,0.002790554,0.001452021,0.1492656283
1950,0.0036620116,0.0076888901,0.0659128246,0.0616152408,0.0028706378,0.0017749376,0.1442888374
1951,0.003072401,0.0075585214,0.0707175444,0.0588483437,0.0024319982,0.0012834127,0.144350556
1952,0.0023290693,0.0081787997,0.0728597077,0.0573298622,0.0022284214,0.0010975066,0.1444650808
1953,0.0033475202,0.0076844376,0.0718156022,0.0610075949,0.0022205462,0.0012302984,0.1477017125
1954,0.0026066739,0.0071021609,0.0681654028,0.0599157899,0.0031152019,0.001484729,0.1430031222
1955,0.0028023873,0.0079604927,0.0727109721,0.0620731223,0.0024322986,0.0011402729,0.1495459998
1956,0.0022463793,0.0079787081,0.0735692652,0.061743216,0.0026544315,0.0011179632,0.1497280154
1957,0.0030105851,0.0084126799,0.0719956732,0.0611711121,0.0018929465,0.0013214953,0.1486540149
1958,0.0026133875,0.0073338749,0.0718156936,0.0629910101,0.001794517,0.0011838985,0.148475747
1959,0.0031879317,0.0085405994,0.0695171715,0.0616160067,0.0022995085,0.001049098,0.1472740943
1960,0.0025586035,0.008609808,0.0697652382,0.0634064042,0.0020246667,0.0012759072,0.1483323856
1961,0.0020364707,0.0077349224,0.0681035695,0.0578080057,0.0024106202,0.0013536998,0.1405876947
1962,0.0029806011,0.008134152,0.0670361317,0.0639324334,0.0021740435,0.002239503,0.1476257909
1963,0.0026290772,0.0083538147,0.0736277902,0.0654145398,0.0022834843,0.001518063,0.1549193663
1964,0.0015773493,0.0084168975,0.071926051,0.0640959719,0.0025933361,0.0011762191,0.1505767002
1965,0.0028602961,0.0080327726,0.0681354564,0.0665732054,0.002620674,0.0012328482,0.150104746
1966,0.0021554751,0.0087541979,0.0720742847,0.0615246805,0.0026857633,0.0012566916,0.1490838556
1967,0.0023640788,0.0078334962,0.0717894512,0.0611476348,0.0024719658,0.0016548733,0.1482325089
1968,0.0025244459,0.0086071698,0.0716364895,0.061470961,0.002907747,0.0017679807,0.1498899328
1969,0.0021522942,0.0077955064,0.0708116005,0.0641448779,0.0019291923,0.0018069599,0.1498901073
1970,0.0019510531,0.0085939976,0.071956675,0.0638199681,0.002122001,0.0023167804,0.1513099137
1971,0.0025326764,0.0084690579,0.0723162908,0.0652662263,0.0023014522,0.0017805864,0.1537755186
1972,0.0032381223,0.0088681555,0.0758152231,0.0623306468,0.0020432991,0.0013452895,0.1552229118
1973,0.0020558009,0.008553706,0.0751370543,0.0626287806,0.0019288697,0.0014188017,0.1528157526
1974,0.002520462,0.008088138,0.0742618307,0.0645679773,0.0020056195,0.0011556438,0.1535857868
1975,0.0023425599,0.0087763597,0.071948631,0.0646027672,0.0024453913,0.0014872146,0.1527258924
1976,0.0021740955,0.0098503632,0.081167396,0.0592028756,0.0015715211,0.0015133678,0.1563168154
1977,0.0020948952,0.0092129165,0.0766530995,0.061710254,0.001977111,0.0015311439,0.1541973009
1978,0.0023592121,0.0085212904,0.0751420842,0.0615922057,0.0019932711,0.0009892644,0.1512681103
1979,0.0019034792,0.0075199611,0.074192394,0.0623225654,0.0014397334,0.0010324648,0.1488926334
1980,0.0020305571,0.0081700392,0.0705598328,0.0660880689,0.0019880701,0.0012963355,0.1509249374
1981,0.0023790967,0.007307902,0.0727430469,0.062991357,0.0024443753,0.0013894622,0.1498875498
1982,0.0023303046,0.0082050744,0.0745318569,0.0621251123,0.0023144559,0.001195299,0.1511861601
1983,0.002268091,0.0076306316,0.0690127839,0.063029335,0.0018686086,0.0013098817,0.1457249431
1984,0.0018620251,0.0084457744,0.0730473861,0.0645044598,0.0020792678,0.001190776,0.1521410857
1985,0.0018338154,0.0093512083,0.077929746,0.0597626026,0.0015580119,0.0008935207,0.1517821716
1986,0.0017545275,0.007986755,0.0752090648,0.0629537321,0.0016457999,0.0010095002,0.151280189
1987,0.0027772168,0.0085330517,0.0790834172,0.0595784014,0.001614119,0.0012474183,0.1534013518
1988,0.0015537206,0.0079273999,0.0728532905,0.0617072164,0.0018554054,0.0009051885,0.1475964444
1989,0.0022732766,0.0076966075,0.0709894118,0.0610874948,0.0014456543,0.0011905871,0.1451424023
1990,0.0024424813,0.0092311729,0.0758057843,0.061879177,0.0013200698,0.0012539961,0.1526446288
`
|
/* ///////////////////////// LEGAL NOTICE ///////////////////////////////
This file is part of ZScripts,
a modular script framework for Pokemon Online server scripting.
Copyright (C) 2013 <NAME>, aka "ArchZombie" / "ArchZombie0x", <<EMAIL>>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
/////////////////////// END LEGAL NOTICE /////////////////////////////// */
/** Modprobe
* @memberof script.modules
* @name modprobe
* @namespace
*/
/** @scope script.modules.modprobe */
({
    require: ["commands", "logs", "com", "theme", "less"]
    ,
    loadModule: function ()
    {
        // Register the /modprobe command with the command dispatcher.
        this.commands.registerCommand("modprobe", this);
    }
    ,
    /**
     * @type commandDescriptor
     */
    modprobe:
    {
        desc: "Manages loadable modules"
        ,
        options :
        {
            "load": "Loads modules"
            ,
            "unload": "Unloads modules"
            ,
            "reload": "Reloads modules"
        }
        ,
        perm: function (src, cmd, chan)
        {
            // Owner-level auth (3) only.
            return sys.auth(src) === 3;
        }
        ,
        /** Lists modules and their dependencies, or --load / --unload / --reload the named modules. */
        code: function (src, cmd, chan)
        {
            // Each flag has a long and a short form; the three actions are
            // mutually exclusive (the original duplicated this check in each branch).
            var load = cmd.flags.load || cmd.flags.l;
            var unload = cmd.flags.unload || cmd.flags.u;
            var reload = cmd.flags.reload || cmd.flags.r;
            if ((load ? 1 : 0) + (unload ? 1 : 0) + (reload ? 1 : 0) > 1) throw new Error("Modprobe is confused.");
            // Dispatch the chosen action over every argument.
            var action = load ? this.script.loadModule
                       : unload ? this.script.unloadModule
                       : reload ? this.script.reloadModule
                       : null;
            if (action)
            {
                for (var x in cmd.args)
                {
                    action.call(this.script, cmd.args[x]);
                }
                return;
            }
            if (cmd.args.length == 0)
            {
                // No flags, no args: list every loaded module, paged via `less`.
                this.com.message([src], "Loaded modules:", this.theme.INFO);
                var modlist = [];
                for (var x in this.script.modules)
                {
                    modlist.push(x);
                }
                this.less.less(src, modlist.join("\n"), false);
                return;
            }
            // Otherwise report dependency details for each named module.
            var str = [];
            for (var x in cmd.args)
            {
                var mod = this.script.modules[cmd.args[x]];
                str.push("<b>Module " + cmd.args[x] + ":</b>");
                if (!mod)
                {
                    str.push("Module not loaded.");
                    continue;
                }
                str.push("Requires: " + mod.require.join(", "));
                str.push("Required by: " + mod.submodules.join(", "));
                str.push("Contains: " + Object.keys(mod).join(", "));
            }
            this.less.less(src, str.join("<br/>"), true);
        }
    }
});
|
/**
* Mnemonist HashedArrayTree Typings
* ==================================
*/
import {IArrayLikeConstructor} from './utils/types';
/**
 * Options bag accepted by the HashedArrayTree constructor as an
 * alternative to passing a bare capacity number.
 */
type HashedArrayTreeOptions = {
  // Starting capacity to allocate.
  initialCapacity?: number;
  // Starting logical length of the tree.
  initialLength?: number;
  // Size of each underlying storage block.
  blockSize?: number;
}
/**
 * Typed declaration of Mnemonist's HashedArrayTree: a dynamic array
 * backed by fixed-size blocks, generic over its element type T.
 */
export default class HashedArrayTree<T> {

  // Members
  /** Size of each underlying storage block. */
  blockSize: number;
  /** Currently allocated capacity. */
  capacity: number;
  /** Number of elements stored. */
  length: number;

  // Constructor — takes either a bare capacity or an options object.
  constructor(ArrayClass: IArrayLikeConstructor, capacity: number);
  constructor(ArrayClass: IArrayLikeConstructor, options: HashedArrayTreeOptions);

  // Methods
  /** Writes value at index; returns the tree for chaining. */
  set(index: number, value: T): this;
  /** Reads the value at index, or undefined when out of bounds. */
  get(index: number): T | undefined;
  /** Grows allocated capacity to at least the given value. */
  grow(capacity: number): this;
  /** Resizes the logical length. */
  resize(length: number): this;
  /** Appends a value; returns the new length. */
  push(value: T): number;
  /** Removes and returns the last value, or undefined when empty. */
  pop(): T | undefined;
  /** Node.js custom inspection output. */
  inspect(): any;
}
|
<gh_stars>1-10
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Selenium
module WebDriver
module Support
class Color
# CSS colour string formats recognised by Color.from_string.
# All use extended mode (x) so the patterns can span lines.

# rgb(R, G, B) with integer channels.
RGB_PATTERN = %r{^\s*rgb\(\s*(\d{1,3})\s*,
\s*(\d{1,3})\s*,
\s*(\d{1,3})\s*\)\s*$}x
# rgb(R%, G%, B%) with integer or decimal percentages.
RGB_PCT_PATTERN = %r{^\s*rgb\(\s*(\d{1,3}|\d{1,2}\.\d+)%\s*,
\s*(\d{1,3}|\d{1,2}\.\d+)%\s*,
\s*(\d{1,3}|\d{1,2}\.\d+)%\s*\)\s*$}x
# rgba(R, G, B, A) with alpha 0, 1 or 0.x.
RGBA_PATTERN = %r{^\s*rgba\(\s*(\d{1,3})\s*,
\s*(\d{1,3})\s*,
\s*(\d{1,3})\s*,
\s*(0|1|0\.\d+)\s*\)\s*$}x
# rgba(R%, G%, B%, A) percentage variant.
RGBA_PCT_PATTERN = %r{^\s*rgba\(\s*(\d{1,3}|\d{1,2}\.\d+)
%\s*,\s*(\d{1,3}|\d{1,2}\.\d+)
%\s*,\s*(\d{1,3}|\d{1,2}\.\d+)
%\s*,\s*(0|1|0\.\d+)\s*\)\s*$}x
# #rrggbb six-digit hex.
HEX_PATTERN = /#(\h{2})(\h{2})(\h{2})/
# #rgb three-digit shorthand hex.
HEX3_PATTERN = /#(\h)(\h)(\h)/
# hsl(H, S%, L%).
HSL_PATTERN = %r{^\s*hsl\(\s*(\d{1,3})\s*,
\s*(\d{1,3})%\s*,
\s*(\d{1,3})%\s*\)\s*$}x
# hsla(H, S%, L%, A).
HSLA_PATTERN = %r{^\s*hsla\(\s*(\d{1,3})\s*,
\s*(\d{1,3})%\s*,
\s*(\d{1,3})%\s*,
\s*(0|1|0\.\d+)\s*\)\s*$}x
attr_reader :red, :green, :blue, :alpha
def self.from_string(str)
case str
when RGB_PATTERN
new Regexp.last_match(1), Regexp.last_match(2), Regexp.last_match(3)
when RGB_PCT_PATTERN
array = [Regexp.last_match(1), Regexp.last_match(2), Regexp.last_match(3)]
new(*array.map { |e| Float(e) / 100 * 255 })
when RGBA_PATTERN
new Regexp.last_match(1), Regexp.last_match(2), Regexp.last_match(3), Regexp.last_match(4)
when RGBA_PCT_PATTERN
array = [Regexp.last_match(1), Regexp.last_match(2), Regexp.last_match(3)]
new(*array.map { |e| Float(e) / 100 * 255 } << Regexp.last_match(4))
when HEX_PATTERN
array = [Regexp.last_match(1), Regexp.last_match(2), Regexp.last_match(3)]
new(*array.map { |e| e.to_i(16) })
when HEX3_PATTERN
array = [Regexp.last_match(1), Regexp.last_match(2), Regexp.last_match(3)]
new(*array.map { |e| (e * 2).to_i(16) })
when HSL_PATTERN, HSLA_PATTERN
from_hsl(Regexp.last_match(1), Regexp.last_match(2), Regexp.last_match(3), Regexp.last_match(4))
else
raise ArgumentError, "could not convert #{str.inspect} into color"
end
end
def self.from_hsl(h, s, l, a)
h = Float(h) / 360
s = Float(s) / 100
l = Float(l) / 100
a = Float(a || 1)
if s.zero?
r = l
g = r
b = r
else
luminocity2 = l < 0.5 ? l * (1 + s) : l + s - l * s
luminocity1 = 2 * l - luminocity2
r = hue_to_rgb(luminocity1, luminocity2, h + 1.0 / 3.0)
g = hue_to_rgb(luminocity1, luminocity2, h)
b = hue_to_rgb(luminocity1, luminocity2, h - 1.0 / 3.0)
end
new (r * 255).round, (g * 255).round, (b * 255).round, a
end
def self.hue_to_rgb(lum1, lum2, hue)
hue += 1 if hue < 0.0
hue -= 1 if hue > 1.0
if hue < 1.0 / 6.0
(lum1 + (lum2 - lum1) * 6.0 * hue)
elsif hue < 1.0 / 2.0
lum2
elsif hue < 2.0 / 3.0
lum1 + (lum2 - lum1) * ((2.0 / 3.0) - hue) * 6.0
else
lum1
end
end
def initialize(red, green, blue, alpha = 1)
@red = Integer(red)
@green = Integer(green)
@blue = Integer(blue)
@alpha = Float(alpha)
end
def ==(other)
return true if equal?(other)
return false unless other.is_a?(self.class)
[red, green, blue, alpha] == [other.red, other.green, other.blue, other.alpha]
end
alias_method :eql?, :==
def hash
[red, green, blue, alpha].hash ^ self.class.hash
end
def rgb
"rgb(#{red}, #{green}, #{blue})"
end
def rgba
a = alpha == 1 ? '1' : alpha
"rgba(#{red}, #{green}, #{blue}, #{a})"
end
def hex
format '#%02x%02x%02x', red, green, blue
end
end # Color
end # Support
end # WebDriver
end # Selenium
|
import subprocess
def install_jupyter_kernel(kernel_name, sys_prefix):
    """Install a Jupyter kernelspec.

    Args:
        kernel_name: Name of the kernelspec to install.
        sys_prefix: When truthy, pass ``--sys-prefix`` so the kernelspec is
            installed into the current Python environment's prefix.

    Raises:
        subprocess.CalledProcessError: If the ``jupyter`` command fails.
    """
    # Bug fix: the original ignored sys_prefix and always passed --sys-prefix.
    # Callers passing True (as main() does) get identical behavior.
    cmd = ['jupyter', 'kernelspec', 'install', kernel_name]
    if sys_prefix:
        cmd.append('--sys-prefix')
    subprocess.run(cmd, check=True)
def launch_jupyter_notebook(notebook_dir, token):
    """Start a Jupyter notebook server rooted at ``notebook_dir`` using ``token`` for authentication; blocks until the server exits and raises on failure."""
    subprocess.run(['jupyter', 'notebook', f'--notebook-dir={notebook_dir}', f'--NotebookApp.token={token}'], check=True)
def main():
    """Install the python-tbb and python-smp kernelspecs, then launch Jupyter."""
    # Install python-tbb kernel
    install_jupyter_kernel('python-tbb', True)
    # Install python-smp kernel
    install_jupyter_kernel('python-smp', True)
    # Launch Jupyter notebook
    # NOTE(review): 'Default' is used as the auth token — confirm this
    # placeholder is acceptable outside a local demo environment.
    launch_jupyter_notebook('.', 'Default')
if __name__ == "__main__":
    main()
|
## ARGV and ARGF
ruby -e 'puts ARGV' f[1-3].txt greeting.txt
ruby -ne 'puts "#{ARGV.size}: " + ARGV * ","' f[12].txt table.txt
ruby -ne 'puts "--- #{ARGF.filename} ---" if $. == 1;
print;
ARGF.close if ARGF.eof' greeting.txt table.txt
ruby -ne 'print if ARGF.eof' greeting.txt table.txt
ruby -ne '(print; ARGF.close) if $.==2' greeting.txt table.txt
ruby -pe 'ARGF.close if $.>=1' greeting.txt table.txt
ruby -e 'puts gets' greeting.txt
ruby -e 'puts gets, "---", ARGF.read' greeting.txt
ruby -e 'puts readlines' greeting.txt
ruby -e 'puts ARGF.readchar' greeting.txt
## STDIN
printf 'apple\nmango\n' | ruby -e 'puts readline'
printf 'apple\nmango\n' | ruby -e 'puts readline' greeting.txt
printf 'apple\nmango\n' | ruby -e 'puts STDIN.readline' greeting.txt
## Skipping remaining contents per file
ruby -ne '(puts ARGF.filename; ARGF.close) if /I/' f[1-3].txt greeting.txt
ruby -ne '$m1=true if /e\b/; $m2=true if /[bm]at/i;
(puts ARGF.filename; $m1=$m2=false; ARGF.close; next) if $m1 && $m2;
$m1=$m2=false if ARGF.eof' f[1-3].txt greeting.txt
|
package store;
/**
 * Minimal key/value store over raw byte arrays.
 */
public interface DataStore {
    /** Returns the value stored under {@code key}. NOTE(review): behavior for a missing key (null vs. exception) is left to the implementation — confirm the intended contract. */
    byte[] get(byte[] key);
    /** Stores {@code value} under {@code key}. */
    void put(byte[] key, byte[] value);
    /** Releases any resources held by the store. */
    void close();
}
|
#!/bin/bash
set -o nounset
set -o errexit
set -o pipefail
# Read a single key out of the shared cluster-config.yaml (yq v2 "r" syntax).
function read_shared_dir() {
    local key="$1"
    yq r "${SHARED_DIR}/cluster-config.yaml" "$key"
}
# Copy the installer log bundle and a redacted install log into ARTIFACT_DIR.
# Runs under `set +e` so a missing file never aborts artifact collection.
function populate_artifact_dir() {
    set +e
    echo "Copying log bundle..."
    cp "${dir}"/log-bundle-*.tar.gz "${ARTIFACT_DIR}/" 2>/dev/null
    echo "Removing REDACTED info from log..."
    # Strip passwords, auth tokens, and instance user-data before archiving.
    sed '
        s/password: .*/password: REDACTED/;
        s/X-Auth-Token.*/X-Auth-Token REDACTED/;
        s/UserData:.*,/UserData: REDACTED,/;
    ' "${dir}/.openshift_install.log" > "${ARTIFACT_DIR}/.openshift_install.log"
}
# EXIT/TERM handler: collect artifacts and stage the auth files the
# subsequent CI steps need (kubeconfig, kubeadmin password, metadata).
function prepare_next_steps() {
    set +e
    echo "Setup phase finished, prepare env for next steps"
    populate_artifact_dir
    echo "Copying required artifacts to shared dir"
    #Copy the auth artifacts to shared dir for the next steps
    cp \
        -t "${SHARED_DIR}" \
        "${dir}/auth/kubeconfig" \
        "${dir}/auth/kubeadmin-password" \
        "${dir}/metadata.json"
}
# Always stage artifacts on exit; on TERM also reap any background children.
trap 'prepare_next_steps' EXIT TERM
trap 'CHILDREN=$(jobs -p); if test -n "${CHILDREN}"; then kill ${CHILDREN} && wait; fi' TERM
if [[ -z "$OPENSHIFT_INSTALL_RELEASE_IMAGE_OVERRIDE" ]]; then
    echo "OPENSHIFT_INSTALL_RELEASE_IMAGE_OVERRIDE is an empty string, exiting"
    exit 1
fi
echo "Installing from release ${OPENSHIFT_INSTALL_RELEASE_IMAGE_OVERRIDE}"
export SSH_PRIV_KEY_PATH=${CLUSTER_PROFILE_DIR}/ssh-privatekey
export PULL_SECRET_PATH=${CLUSTER_PROFILE_DIR}/pull-secret
export OPENSHIFT_INSTALL_INVOKER=openshift-internal-ci/${JOB_NAME}/${BUILD_ID}
export HOME=/tmp
export KUBECONFIG=${HOME}/.kube/config
# Installer working directory; referenced by the trap handlers above.
dir=/tmp/installer
mkdir "${dir}/"
cp "${SHARED_DIR}/install-config.yaml" "${dir}/"
# move private key to ~/.ssh/ so that installer can use it to gather logs on
# bootstrap failure
mkdir -p ~/.ssh
cp "${SSH_PRIV_KEY_PATH}" ~/.ssh/
# Increase log verbosity and ensure it gets saved
export TF_LOG=DEBUG
export TF_LOG_PATH=${ARTIFACT_DIR}/terraform.log
echo "$(date +%s)" > "${SHARED_DIR}/TEST_TIME_INSTALL_START"
echo "Creating manifest"
mock-nss.sh openshift-install create manifests --dir=${dir}
# Drop the update channel so CVO does not try to follow a release channel.
sed -i '/^ channel:/d' ${dir}/manifests/cvo-overrides.yaml
# Bump the libvirt masters memory to 16GB
export TF_VAR_libvirt_master_memory=${MASTER_MEMORY}
ls ${dir}/openshift
# Patch every master machine manifest with the requested memory and disk.
for ((i=0; i<${MASTER_REPLICAS}; i++))
do
    yq write --inplace ${dir}/openshift/99_openshift-cluster-api_master-machines-${i}.yaml spec.providerSpec.value[domainMemory] ${MASTER_MEMORY}
    yq write --inplace ${dir}/openshift/99_openshift-cluster-api_master-machines-${i}.yaml spec.providerSpec.value.volume[volumeSize] ${MASTER_DISK}
done
# Bump the libvirt workers memory to 16GB
yq write --inplace ${dir}/openshift/99_openshift-cluster-api_worker-machineset-0.yaml spec.template.spec.providerSpec.value[domainMemory] ${WORKER_MEMORY}
# Bump the libvirt workers disk to to 30GB
yq write --inplace ${dir}/openshift/99_openshift-cluster-api_worker-machineset-0.yaml spec.template.spec.providerSpec.value.volume[volumeSize] ${WORKER_DISK}
# Inject any extra manifest_*.yml files staged by earlier steps.
while IFS= read -r -d '' item
do
    manifest="$( basename "${item}" )"
    cp "${item}" "${dir}/manifests/${manifest##manifest_}"
done < <( find "${SHARED_DIR}" -name "manifest_*.yml" -print0)
echo "Installing cluster"
date "+%F %X" > "${SHARED_DIR}/CLUSTER_INSTALL_START_TIME"
# Run the install in the background, filtering secrets out of the log stream.
mock-nss.sh openshift-install create cluster --dir="${dir}" --log-level=debug 2>&1 | grep --line-buffered -v 'password\|X-Auth-Token\|UserData:' &
openshift_install="$!"
# While openshift-install is running...
# Block for injecting DNS below release 4.8
# TO-DO Remove after 4.7 EOL
if [ "${BRANCH}" == "4.7" ] || [ "${BRANCH}" == "4.6" ]; then
    REMOTE_LIBVIRT_URI=$(read_shared_dir 'REMOTE_LIBVIRT_URI')
    CLUSTER_NAME=$(read_shared_dir 'CLUSTER_NAME')
    i=0
    # Poll until the cluster's libvirt network exists, then add worker DNS.
    while kill -0 $openshift_install 2> /dev/null; do
        sleep 60
        echo "Polling libvirt for network, attempt #$((++i))"
        LIBVIRT_NETWORK=$(mock-nss.sh virsh --connect "${REMOTE_LIBVIRT_URI}" net-list --name | grep "${CLUSTER_NAME::21}" || true)
        if [[ -n "${LIBVIRT_NETWORK}" ]]; then
            echo "Libvirt network found. Injecting worker DNS records."
            mock-nss.sh virsh --connect "${REMOTE_LIBVIRT_URI}" net-update --network "${LIBVIRT_NETWORK}" --command add-last --section dns-host --xml "$(< ${SHARED_DIR}/worker-hostrecords.xml)"
            break
        fi
    done
fi
wait "${openshift_install}"
# Add a step to wait for installation to complete, in case the cluster takes longer to create than the default time of 30 minutes.
mock-nss.sh openshift-install --dir=${dir} --log-level=debug wait-for install-complete 2>&1 | grep --line-buffered -v 'password\|X-Auth-Token\|UserData:' &
# NOTE(review): with `set -o errexit`, a failing `wait` exits the script
# before ret is assigned (the EXIT trap still runs) — confirm intended.
wait "$!"
ret="$?"
echo "$(date +%s)" > "${SHARED_DIR}/TEST_TIME_INSTALL_END"
date "+%F %X" > "${SHARED_DIR}/CLUSTER_INSTALL_END_TIME"
exit "$ret"
|
class BaseModel:
    """Base class for models.

    Subclasses override ``item_type`` and ``computed_properties``.
    """

    # Concrete type identifier; set by subclasses.
    item_type = None
    # Property names removed by strip_computed_properties().
    computed_properties = []

    def strip_computed_properties(self, data):
        """Return a shallow copy of ``data`` without any computed properties.

        The input mapping is never mutated.
        """
        stripped = data.copy()
        for name in self.computed_properties:
            # pop with a default: absent keys are simply ignored.
            stripped.pop(name, None)
        return stripped
|
def add(x, y):
    """Return the sum of the two operands."""
    total = x + y
    return total
def subtract(x, y):
    """Return x minus y."""
    difference = x - y
    return difference
def multiply(x, y):
    """Return the product of x and y."""
    product = x * y
    return product
def divide(x, y):
    """Return x / y, or an error message string when y is zero."""
    # Guard clause keeps the happy path unindented.
    if y == 0:
        return "Error! Division by zero."
    return x / y
def hello_world():
    """Print the classic greeting to stdout."""
    message = "Hello, World!"
    print(message)
def main():
    """Run a simple interactive calculator menu until the user chooses Exit."""
    while True:
        print("1. Addition")
        print("2. Subtraction")
        print("3. Multiplication")
        print("4. Division")
        print("5. Hello World")
        print("6. Exit")
        choice = input("Enter your choice: ")
        # Arithmetic options need two operands; read them before dispatching.
        # NOTE(review): non-numeric input makes float() raise ValueError,
        # which propagates uncaught — confirm that is acceptable.
        if choice in ('1', '2', '3', '4'):
            num1 = float(input("Enter first number: "))
            num2 = float(input("Enter second number: "))
            if choice == '1':
                print("Result:", add(num1, num2))
            elif choice == '2':
                print("Result:", subtract(num1, num2))
            elif choice == '3':
                print("Result:", multiply(num1, num2))
            elif choice == '4':
                print("Result:", divide(num1, num2))
        elif choice == '5':
            hello_world()
        elif choice == '6':
            break
        else:
            print("Invalid input. Please try again.")
if __name__ == "__main__":
    main()
|
# Termux package recipe for inxi, a Perl-based CLI system-information tool.
TERMUX_PKG_HOMEPAGE=https://github.com/smxi/inxi
TERMUX_PKG_DESCRIPTION="Full featured CLI system information tool"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=3.3.00-1
TERMUX_PKG_SRCURL=https://github.com/smxi/inxi/archive/${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=1180dd8dc71690322ec28f0ee7d9e013adea67bc94298742cfc5a2b4eefff6b9
TERMUX_PKG_DEPENDS="perl"
TERMUX_PKG_BUILD_IN_SRC=true
termux_step_make_install() {
    # inxi ships as a single script: install the executable and its man page.
    install -Dm700 -t $TERMUX_PREFIX/bin/ inxi
    install -Dm600 -t $TERMUX_PREFIX/share/man/man1/ inxi.1
}
|
<gh_stars>1-10
#include "volume/dsp_volume_agmu.h"
#include <QtCore/QVarLengthArray>
#include <QtCore/qmath.h>
#include "volume/dsp_helpers.h"
#include "volume/db.h"
#include "core/ts_logging_qt.h"
// Construct and attach to the Qt parent for ownership/lifetime management.
DspVolumeAGMU::DspVolumeAGMU(QObject *parent)
{
    setParent(parent);
}
// Funcs
// Process one block of interleaved samples in place: update the held peak,
// recompute the desired gain when a new peak appears, step the current gain
// toward it, and apply the gain via doProcess().
void DspVolumeAGMU::process(int16_t* samples, int32_t sample_count, int32_t channels)
{
    sample_count = sample_count * channels; // total samples across all channels
    auto peak = getPeak(samples, sample_count);
    peak = qMax(m_peak, peak); // max-hold: the tracked peak only ever rises
    if (peak != m_peak)
    {
        m_peak = peak;
        setGainDesired(computeGainDesired());
    }
    setGainCurrent(GetFadeStep(sample_count));
    doProcess(samples, sample_count);
}
// Compute gain change
float DspVolumeAGMU::GetFadeStep(int sampleCount)
{
auto current_gain = getGainCurrent();
auto desired_gain = getGainDesired();
if (current_gain != desired_gain)
{
float fade_step_down = (kRateQuieter / m_sampleRate) * sampleCount;
float fade_step_up = (kRateLouder / m_sampleRate) * sampleCount;
if (current_gain < desired_gain - fade_step_up)
current_gain += fade_step_up;
else if (current_gain > desired_gain + fade_step_down)
current_gain -= fade_step_down;
else
current_gain = desired_gain;
}
return current_gain;
}
// Accessor for the held (max-hold) peak sample value.
int16_t DspVolumeAGMU::GetPeak() const
{
    return m_peak;
}
// Overwrite the held peak (e.g. to reset or seed the tracker).
void DspVolumeAGMU::setPeak(int16_t val)
{
    m_peak = val;
}
// Desired gain in dB: amplify so the held peak would reach full scale
// (32768), minus 2 dB of headroom, capped at +12 dB.
// NOTE(review): m_peak == 0 would divide by zero here — presumably process()
// always records a nonzero peak before this is called; confirm.
float DspVolumeAGMU::computeGainDesired()
{
    return qMin((lin2db(32768.f / m_peak)) -2, 12.0f); // leave some headroom
}
|
from flask import Flask, jsonify, request
app = Flask(__name__)
@app.route("/add", methods=["GET"])
def add_numbers():
a = request.args.get("a")
b = request.args.get("b")
result = int(a) + int(b)
return jsonify(result)
|
#!/bin/bash
# create subclasses for Milli, Micro, Nano, Pico from the Femto subclass
# The Units subclass should be done manually, because it has some minor optimizations
# replacement method. It expects 3 parameters: The class prefix, the number of decimals, and the unit
# Generate a <Prefix>Units.java variant from the FemtoUnits.java template.
# $1 = class prefix, $2 = number of decimals, $3 = scale-factor literal.
createFromFemtos() {
    folder=src/main/java/de/jpaw/fixedpoint/types
    # One sed invocation instead of `cat | sed | sed | sed`; paths quoted.
    # Caveat (pre-existing): "s/15/$2/g" replaces EVERY "15" in the template,
    # not just the decimals constant.
    sed -e "s/Femto/$1/g" -e "s/15/$2/g" -e "s/1000000000000000L/$3/g" \
        "$folder/FemtoUnits.java" > "$folder/$1Units.java"
}
# Generate each fixed-point variant: prefix, decimal digits, scale factor.
createFromFemtos Milli 3 1000L
createFromFemtos Micro 6 1000000L
createFromFemtos Nano 9 1000000000L
createFromFemtos Pico 12 1000000000000L
|
# Build libmp4base for linux_amd64; abort instead of running `make` in the
# wrong directory when the cd fails.
cd make/libmp4base/linux_amd64 || exit 1
make
// QUnit tests for the per-entity Creator "backend.calls" modules.
(function(){
    // Register a module path for every <entity>.creator.backend.calls module.
    var eModules = [],
        eType = 'creator',
        entities = getEntityNames(eType);
    for(var i = 0; i < entities.length; i++){
        var m = entities[i] + "." + eType + ".backend.calls";
        eModules.push(m);
        jQuery.sap.registerModulePath(m, registerPrefix + "/pcmapps/" + entities[i] + "/" + entities[i] + "Creator/backend.calls");
    }
    sap.ui.define([], function() {
        var server; // sinon fake server, created per iteration in the test below
        module("pcmapps --> Creator backend.calls", {
            setup: function() {
                // Globals the BackendCall modules read at runtime.
                window.generalNameSpace = {
                    pathToDestination: "sap/fiori/pcmcpapps/",
                    appService: ""
                };
            },
            teardown : function() {
                // Remove globals and restore stubs so iterations stay independent.
                delete window.generalNameSpace;
                delete window.BackendCall;
                if (server.restore){
                    server.restore();
                }
                if (jQuery.ajax.restore){
                    jQuery.ajax.restore();
                }
            }
        });
        test("test service url and ajax calls", function(){
            for(var i = 0; i < eModules.length; i++){
                // Each require redefines the global BackendCall for one entity.
                delete BackendCall;
                jQuery.sap.require(eModules[i]);
                // CSRF header: GET asks to fetch a token, POST sends the cached one.
                strictEqual(BackendCall.getCsrfHeaderValue("GET"), "Fetch");
                window.csrfToken = "X" + i;
                strictEqual(BackendCall.getCsrfHeaderValue("POST"), "X" + i);
                strictEqual(BackendCall.getPrefixUrl(), window.generalNameSpace.pathToDestination);
                // Stub jQuery.ajax and verify doCall forwards its options verbatim.
                sinon.stub(jQuery, "ajax");
                BackendCall.doCall({
                    url: "something",
                    method: "GET",
                    data: "myData",
                    async: false,
                    contentType: "text"
                });
                ok(jQuery.ajax.calledWithMatch({
                    url: BackendCall.getPrefixUrl() + "something",
                    type: "GET",
                    data: "myData",
                    async: false,
                    contentType: "text"
                }));
                // Token refresh must hit the service $metadata with a Fetch header.
                BackendCall.reExecuteWithNewToken({
                    url: "something",
                    async: false
                });
                ok(jQuery.ajax.calledWithMatch({
                    url: BackendCall.getPrefixUrl() + window.generalNameSpace.appService + "/$metadata",
                    headers: {
                        "x-csrf-token": "Fetch"
                    }
                }));
                jQuery.ajax.restore();
                // Fake-server round trip: the success callback must fire exactly once.
                server = sinon.fakeServer.create();
                var callback = sinon.spy();
                server.respondWith([200, { "Content-Type": "text/html", "Content-Length": 2, "x-csrf-token": "<PASSWORD>" }, "OK"]);
                BackendCall.doCall({
                    url: "something",
                    method: "GET",
                    data: "myData",
                    successCallback: callback,
                    async: false,
                    contentType: "text"
                });
                ok(callback.calledOnce);
                server.restore();
            }
        });
    });
}());
|
Let x, y, and z represent the number of pips on each die of the three dice. Then the number of possible combinations of three dice rolls is equal to:
C(x,y,z) = (x * y * z) + (x * y * (z - 1)) + (x * (y - 1) * z) + (x * (y - 1) * (z - 1)) + ((x - 1) * y * z) + ((x - 1) * y * (z - 1)) + ((x - 1) * (y - 1) * z) + ((x - 1) * (y - 1) * (z - 1)).
|
#!/bin/bash
# Rewrite all *.json files under the current tree in place: prefix several
# reserved keys with "_" and rename "sysmlid" to "sysmlId".
# NOTE(review): `sed -i ''` is the BSD/macOS form; GNU sed needs plain -i.
find . -type f \( -iname \*.json \) -exec sed -i '' 's/\"owner\"/\"_owner\"/g' {} \;
find . -type f \( -iname \*.json \) -exec sed -i '' 's/\"modified\"/\"_modified\"/g' {} \;
find . -type f \( -iname \*.json \) -exec sed -i '' 's/\"modifier\"/\"_modifier\"/g' {} \;
find . -type f \( -iname \*.json \) -exec sed -i '' 's/\"editable\"/\"_editable\"/g' {} \;
find . -type f \( -iname \*.json \) -exec sed -i '' 's/\"created\"/\"_created\"/g' {} \;
find . -type f \( -iname \*.json \) -exec sed -i '' 's/\"creator\"/\"_creator\"/g' {} \;
find . -type f \( -iname \*.json \) -exec sed -i '' 's/\"sysmlid\"/\"sysmlId\"/g' {} \;
# Remove qualifiedName
# Delete whole lines that mention these derived identifiers.
find . -type f \( -iname \*.json \) -exec sed -i '' '/\"qualifiedName\"/d' {} \;
find . -type f \( -iname \*.json \) -exec sed -i '' '/\"qualifiedId\"/d' {} \;
find . -type f \( -iname \*.json \) -exec sed -i '' '/\"siteCharacterizationId\"/d' {} \;
|
echo "***************************"
echo "** Building jar ***********"
echo "***************************"
mvn -DskipTests clean install
|
#!/bin/bash
# FrameworkBenchmarks setup for the limonade (PHP) test: patch config for
# this host layout, then start php-fpm and nginx.
fw_depends php7 nginx composer
# Point the app at the benchmark database host and fix nginx root/paths.
sed -i 's|localhost|'"${DBHOST}"'|g' index.php
sed -i 's|root .*/FrameworkBenchmarks/limonade|root '"${TROOT}"'|g' deploy/nginx.conf
sed -i 's|/usr/local/nginx/|'"${IROOT}"'/nginx/|g' deploy/nginx.conf
php-fpm --fpm-config $FWROOT/config/php-fpm.conf -g $TROOT/deploy/php-fpm.pid
nginx -c $TROOT/deploy/nginx.conf
|
#!/bin/bash
# Installs mailcatcher using RVM. RVM allows us to install all mailcatcher
# dependencies reliably.
# Extract the version from `mailcatcher --version`; empty means not installed.
mailcatcher_version="$(/usr/bin/env mailcatcher --version 2>&1 | grep 'mailcatcher ' | cut -d " " -f 2)"
if [[ -n "${mailcatcher_version}" ]]; then
    # Already installed: print a column-aligned status line
    # (the arithmetic pads the version into a fixed-width field).
    pkg="Mailcatcher"
    space_count="$(( 20 - ${#pkg}))" #11
    pack_space_count="$(( 30 - ${#mailcatcher_version}))"
    real_space="$(( ${space_count} + ${pack_space_count} + ${#mailcatcher_version}))"
    printf " * $pkg %${real_space}.${#mailcatcher_version}s ${mailcatcher_version}\n"
else
    # Install into a dedicated RVM gemset and expose wrapper binaries.
    echo " * Mailcatcher [not installed]"
    /usr/bin/env rvm default@mailcatcher --create do gem install mailcatcher --no-rdoc --no-ri
    /usr/bin/env rvm wrapper default@mailcatcher --no-prefix mailcatcher catchmail
fi
# Install the upstart service definition once.
if [[ -f "/etc/init/mailcatcher.conf" ]]; then
    echo " *" Mailcatcher upstart already configured.
else
    rsync -rvzh "/srv/config/init/mailcatcher.conf" "/etc/init/mailcatcher.conf"
    echo " * /srv/config/init/mailcatcher.conf -> /etc/init/mailcatcher.conf"
fi
# Install the PHP module configuration once.
if [[ -f "/etc/php5/mods-available/mailcatcher.ini" ]]; then
    echo " * Mailcatcher php already configured."
else
    rsync -rvzh "/srv/config/php5-config/mailcatcher.ini" "/etc/php5/mods-available/mailcatcher.ini"
    echo " * /srv/config/php5-config/mailcatcher.ini -> /etc/php5/mods-available/mailcatcher.ini"
fi
|
def create_crawler(self, base_name, role_arn, s3_script_bucket, script_path, db_name, table_name, s3_bucket_dst):
    """Create an AWS Glue crawler targeting an S3 path.

    Args:
        base_name: Prefix for the crawler name ("<base_name>_crawler").
        role_arn: IAM role ARN the crawler assumes.
        s3_script_bucket: Bucket containing the path to crawl.
        script_path: Key/prefix within the bucket to crawl.
        db_name: Glue database the crawler writes into.
        table_name: Prefix applied to tables the crawler creates.
        s3_bucket_dst: NOTE(review): currently unused in this method —
            confirm whether it was meant to feed the crawler targets.

    Returns:
        The generated crawler name.
    """
    # Assuming the necessary AWS SDK (boto3) is imported and configured
    # NOTE(review): boto3 is not imported in the visible scope — confirm the
    # enclosing module imports it.
    # Construct the unique crawler name based on the base name
    crawler_name = f"{base_name}_crawler"
    # Create the web crawler using the AWS Glue client
    glue_client = boto3.client('glue')
    response = glue_client.create_crawler(
        Name=crawler_name,
        Role=role_arn,
        Targets={
            'S3Targets': [
                {
                    'Path': f"s3://{s3_script_bucket}/{script_path}",
                    'Exclusions': []
                }
            ]
        },
        DatabaseName=db_name,
        TablePrefix=table_name,
        SchemaChangePolicy={
            'UpdateBehavior': 'UPDATE_IN_DATABASE',
            'DeleteBehavior': 'LOG'
        }
    )
    # Log the response for debugging or monitoring purposes
    self._log("create_crawler response: ", response)
    return crawler_name
|
#include <iostream>
#include <algorithm>
#include <string>
int main() {
    // Sort a fixed set of words lexicographically and print one per line.
    std::string arr[] = {"Hello", "World", "Cats", "Dogs"};
    std::sort(arr, arr + 4);
    for (const std::string &word : arr) {
        std::cout << word << std::endl;
    }
    return 0;
}
/* Output:
Cats
Dogs
Hello
World
*/
|
<filename>src/utils/mp3.ts
import getMp3DurationBits from 'get-mp3-duration';
import { extractLast } from './funcs';
const SUPPORTED_FORMATS = ['mp3'];
// Size in bytes of the ID3v2 tag block at the start of `buffer`,
// or 0 when no "ID3" magic (or an invalid size field) is present.
const getTagsSize = (buffer: Buffer): number => {
  /* eslint-disable no-bitwise, max-len */
  // http://id3.org/d3v2.3.0
  if (buffer[0] === 0x49 && buffer[1] === 0x44 && buffer[2] === 0x33) {
    // ID3
    const id3v2Flags = buffer[5] as number;
    // Flag bit 0x10 signals a 10-byte footer after the tag body.
    const footerSize = id3v2Flags & 0x10 ? 10 : 0;
    // ID3 size encoding is crazy (7 bits in each of 4 bytes)
    const z0 = buffer[6] as number;
    const z1 = buffer[7] as number;
    const z2 = buffer[8] as number;
    const z3 = buffer[9] as number;
    // Any set high bit means the size field is invalid; treat as no tag.
    if ((z0 & 0x80) === 0 && (z1 & 0x80) === 0 && (z2 & 0x80) === 0 && (z3 & 0x80) === 0) {
      // Base-128 digits: 2097152 = 128^3, 16384 = 128^2.
      const tagSize = (z0 & 0x7f) * 2097152 + (z1 & 0x7f) * 16384 + (z2 & 0x7f) * 128 + (z3 & 0x7f);
      // 10-byte header + tag body + optional footer.
      return 10 + tagSize + footerSize;
    }
  }
  return 0;
};
/* eslint-enable no-bitwise, max-len */
// Duration of the MP3 in `buffer`, as reported by get-mp3-duration
// (presumably milliseconds — confirm against the library docs).
const getDuration = (buffer: Buffer): number => getMp3DurationBits(buffer);
// True when the text after the file name's final '.' is a supported format.
const isSupported = (file: string): boolean => {
  const format = extractLast(file, '.')[1];
  return SUPPORTED_FORMATS.includes(format);
};
export default { getDuration, getTagsSize, isSupported };
|
#include <iostream>
// Placeholder types for the blur pass; fields to be filled in.
struct Data {
  // Define the structure of the Data object
};
struct FrameGraphPassResources {
  // Define the structure of the FrameGraphPassResources object
};
// Skeleton for a frame-graph blur pass, wrapped in a debug marker and a
// Tracy GPU profiling zone.
// NOTE(review): NAMED_DEBUG_MARKER / TracyGpuZone / TracyGpuZoneEnd and
// `name` are not defined in this file; Tracy's real TracyGpuZone is
// scope-based (no explicit End call) — confirm against the profiler headers.
void BlurData(const Data &data, FrameGraphPassResources &resources, void *ctx) {
  // Apply the blur effect to the input data using the provided resources and context
  // Ensure that the blur effect is applied within the GPU zone and the named debug marker
  // Example implementation:
  // Start the named debug marker
  NAMED_DEBUG_MARKER(name);
  // Start the Tracy GPU zone for blur
  TracyGpuZone("Blur");
  // Apply the blur effect using the provided resources and context
  // Example:
  // Apply blur effect using resources and context
  // End the Tracy GPU zone for blur
  TracyGpuZoneEnd();
}
|
/**
 * Plain data holder for a user record: id, name fields, age, and email.
 */
public class User {
    private Long id;
    private String firstName;
    private String lastName;
    private Integer age;
    private String email;
    /** Constructs a fully-populated user record. */
    public User(Long id, String firstName, String lastName, Integer age, String email) {
        this.id = id;
        this.firstName = firstName;
        this.lastName = lastName;
        this.age = age;
        this.email = email;
    }
    // Standard accessors; no validation is performed on any field.
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getFirstName() {
        return firstName;
    }
    public void setFirstName(String firstName) {
        this.firstName = firstName;
    }
    public String getLastName() {
        return lastName;
    }
    public void setLastName(String lastName) {
        this.lastName = lastName;
    }
    public Integer getAge() {
        return age;
    }
    public void setAge(Integer age) {
        this.age = age;
    }
    public String getEmail() {
        return email;
    }
    public void setEmail(String email) {
        this.email = email;
    }
}
/**
 * CRUD operations for {@link User} records.
 * NOTE(review): uses {@code Optional} — confirm the file imports java.util.Optional.
 */
public interface UserService {
    /** Creates a new user from the given fields and returns it. */
    User createUser(String firstName, String lastName, Integer age, String email);
    /** Returns the user with the given id, or empty if none exists. */
    Optional<User> findUserById(Long id);
    /** Updates the user with the given id; empty when no such user exists. */
    Optional<User> updateUser(Long id, String firstName, String lastName, Integer age, String email);
    /** Deletes the user with the given id. */
    void deleteUser(Long id);
}
|
import os
def get_absolute_path(root_path: str, relative_path: str) -> str:
    """Resolve ``relative_path`` against ``root_path``.

    Returns the joined path in absolute, normalized form.
    """
    combined = os.path.join(root_path, relative_path)
    return os.path.abspath(combined)
|
<filename>use-cases/Synthetic/t37/m1.js
// Synthetic analysis fixture: read each static member of ArrayBuffer,
// discarding the values.
var _;
_ = ArrayBuffer.length;
_ = ArrayBuffer.name;
_ = ArrayBuffer.prototype;
_ = ArrayBuffer.isView;
|
#!/bin/bash
# Refresh the ~/piexperiments checkout and run the stonks install script.
# Abort if the directory is missing instead of pulling/running in the
# wrong place (the original ignored a failed pushd).
pushd ~/piexperiments || exit 1
git pull
stonks/stonkinstallscripts.sh
popd
from datetime import datetime
import pytz
def convert_timezone(dt, target_timezone):
    """
    Converts the given datetime object to the target time zone.

    Naive datetimes are interpreted as UTC (unchanged behavior). Fix: an
    already timezone-aware datetime is now converted directly — previously
    pytz's localize() raised ValueError for aware inputs.

    Args:
        dt: A datetime object representing the original date and time.
        target_timezone: A string representing the target time zone (e.g., 'America/New_York').

    Returns:
        A datetime object representing the converted date and time in the target time zone.

    Raises:
        pytz.UnknownTimeZoneError: If target_timezone is not a valid zone name.
    """
    if dt.tzinfo is None:
        # Interpret naive input as UTC, matching the original behavior.
        dt = pytz.timezone('UTC').localize(dt)
    # Convert to the target time zone
    converted_dt = dt.astimezone(pytz.timezone(target_timezone))
    return converted_dt
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.