text stringlengths 1 1.05M |
|---|
<filename>baas-tests/src/test/scala/com/ing/baker/baas/recipe/ReserveItemsInstance.scala
package com.ing.baker.baas.recipe
import cats.effect.{IO, Timer}
import cats.implicits._
import com.ing.baker.baas.recipe.CheckoutFlowEvents.ReserveItemsOutput
import com.ing.baker.baas.recipe.CheckoutFlowIngredients.{Item, OrderId, ReservedItems}
import com.ing.baker.baas.recipe.CheckoutFlowInteractions.ReserveItems
import scala.concurrent.Future
import scala.concurrent.duration._
/** Test interaction instance: simulates a slow reservation backend. */
class ReserveItemsInstance(implicit timer: Timer[IO]) extends ReserveItems {

  // Waits one second, then reports every requested item as reserved along
  // with a fixed 1000-byte payload (exercises serialization of byte arrays).
  override def apply(orderId: OrderId, items: List[Item]): Future[ReserveItemsOutput] = {
    val payload = Array.fill(1000)(Byte.MaxValue)
    val reserved: ReserveItemsOutput =
      CheckoutFlowEvents.ItemsReserved(ReservedItems(items, payload))
    (IO.sleep(1.second) *> IO.pure(reserved)).unsafeToFuture()
  }
}
/** Test interaction instance whose first invocation fails, later ones succeed. */
class FailingOnceReserveItemsInstance extends ReserveItems {

  // Invocation counter; the failure branch fires only while it equals 1.
  var times = 1

  override def apply(orderId: OrderId, items: List[Item]): Future[ReserveItemsOutput] =
    if (times != 1)
      Future.successful(CheckoutFlowEvents.ItemsReserved(ReservedItems(items, Array.fill(1000)(Byte.MaxValue))))
    else {
      times += 1
      Future.failed(new RuntimeException("oups"))
    }
}
/** Test interaction instance that always rejects the reservation. */
class FailingReserveItemsInstance extends ReserveItems {

  // Always returns a failed Future; used to exercise failure handling paths.
  override def apply(orderId: OrderId, items: List[Item]): Future[ReserveItemsOutput] =
    Future.failed(new RuntimeException("oups"))
}
|
<gh_stars>1-10
package com.vxml.tag;
import org.w3c.dom.Node;
import com.vxml.core.browser.VxmlBrowser;
import com.vxml.core.browser.VxmlScriptEngine;
/**
 * Handles the VoiceXML &lt;elseif&gt; element: its "cond" expression is
 * evaluated only when the governing &lt;if&gt; condition has not already
 * been satisfied at the current nesting level.
 */
public class ElseifTag extends AbstractTag {

    public ElseifTag(Node node) {
        super(node);
    }

    @Override
    public void startTag() {
        // No work on open; branch selection happens in execute().
    }

    @Override
    public void execute() {
        // Boolean.TRUE.equals() treats a null result from the script engine
        // as "not satisfied" instead of throwing an NPE on auto-unboxing
        // (the original `if (!isIfCondition)` unboxed without a null check).
        if (!Boolean.TRUE.equals(isIfConditionTrue())) {
            String cond = getAttribute("cond");
            Boolean elseIfCondition = (Boolean) VxmlBrowser.getContext().executeScript(cond);
            if (elseIfCondition != null && elseIfCondition) {
                setSkipExecute(false);
                // Mark the surrounding if-chain as satisfied so any following
                // <else>/<elseif> branches are skipped.
                VxmlBrowser.getContext().executeScript(VxmlScriptEngine.SCRIPT_EXECUTION_NAME_SPACE
                        + ".ifCondition=true");
            } else {
                setSkipExecute(true);
            }
        } else {
            // An earlier branch already ran; skip this one.
            setSkipExecute(true);
        }
    }

    /**
     * Queries the script context for whether the enclosing &lt;if&gt; at the
     * current nesting level has already evaluated to true.
     */
    private Boolean isIfConditionTrue() {
        return (Boolean) VxmlBrowser.getContext().executeScript(
                "_vxmlExecutionContext.ifConditionLevel_" + ifConditionLevel);
    }

    @Override
    public void endTag() {
        // Nothing to clean up.
    }
}
|
//
// TTCollectionView.h
// TT
//
// Created by 张福润 on 2017/3/22.
// Copyright © 2017年 张福润. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "TTConst.h"
@class TTCollectionView;

// Callbacks fired by TTCollectionView when the user pulls to refresh or
// reaches the bottom to load more content.
@protocol TTCollectionViewDelegate <NSObject>
- (void)collectionViewRefresh:(TTCollectionView *)collectionView;   // pull-to-refresh triggered
- (void)collectionViewLoadMore:(TTCollectionView *)collectionView;  // load-more triggered
@end

// UICollectionView subclass adding pull-to-refresh and load-more support.
@interface TTCollectionView : UICollectionView

// Receiver of the refresh/load-more callbacks. Named loadDelegate to avoid
// clashing with UICollectionView's own delegate property.
@property (nonatomic, weak) id<TTCollectionViewDelegate> loadDelegate;
// Height of the load-more footer view.
@property (nonatomic, assign) float footHeight;
// Current refresh-view state (RefreshViewState is declared in TTConst.h).
@property (nonatomic, readonly) RefreshViewState state;

// NOTE(review): method semantics below are inferred from names only —
// confirm against the implementation file.
- (void)setRefreshHeaderViewBottom:(float)bottom;
- (void)isHiddenHeaderRefreshView:(BOOL)isHidden;
- (void)isDisplayMoreView:(BOOL)isDisplay;
- (void)didFinishedLoading;
- (void)startRefresh;
@end
|
//-----------------------------------------------------------------------------
// File: DxInstall.cpp
//
// Desc: Example code showing how to use DirectXSetup.
//
// This file contains code that will handle all messages sent to the
// DirectXSetupCallbackFunction, with the filtering level set at what the
// user wants. This way you can test to see which messages you want to
// handle automatically or pass on to the user.
//
// Call Tree:
// DirectXInstallWndProc See WINCODE.CPP
// DirectXInstall Set up the callback and handle return codes
// GetReply See WINCODE.CPP
// DirectXGetVersion Display the results of DirectXSetupGetVersion()
// DirectXSetupCallbackFunction Called from DirectXSetup
// GetReply See WINCODE.CPP
// SetButtons See WINCODE.CPP
// ShowButton See WINCODE.CPP
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
#include <windows.h>
#pragma warning( disable : 4996 ) // disable deprecated warning
#include <strsafe.h>
#pragma warning( default : 4996 )
#include <stdio.h>
#include <string.h>
#include <dsetup.h>
#include "resource.h"
#include "DXInstall.h"
//-----------------------------------------------------------------------------
// Externs for global variables
//-----------------------------------------------------------------------------
extern DWORD g_dwStatus; // Filter setting for messages from DirectXSetup
extern HINSTANCE g_hInstance; // Global instance handle
extern HWND g_hDlg; // Window handle to dialog proc
extern WCHAR g_strAppTitle[256]; // Application title
extern INT g_iReply; // Global value for dialog return
//--------------------------------------------------------------------------------------
// DirectSetup dynamic linking support -- calls top-level dsetup.dll APIs with graceful
// failure if APIs are not present.
//--------------------------------------------------------------------------------------
typedef INT ( WINAPI* LPDIRECTXSETUPSETCALLBACK )( DSETUP_CALLBACK Callback );
typedef INT ( WINAPI* LPDIRECTXSETUPGETVERSION )( DWORD* lpdwVersion, DWORD* lpdwMinorVersion );
#ifdef UNICODE
typedef INT (WINAPI * LPDIRECTXSETUP)( HWND hWnd, LPWSTR lpszRootPath, DWORD dwFlags );
#else
typedef INT ( WINAPI* LPDIRECTXSETUP )( HWND hWnd, LPSTR lpszRootPath, DWORD dwFlags );
#endif
// Module and function pointers
static HMODULE s_hModDSetup = NULL;
static LPDIRECTXSETUPSETCALLBACK s_DirectXSetupSetCallback = NULL;
static LPDIRECTXSETUPGETVERSION s_DirectXSetupGetVersion = NULL;
static LPDIRECTXSETUP s_DirectXSetup = NULL;
//-----------------------------------------------------------------------------
// Ensure function pointers are initialized
//-----------------------------------------------------------------------------
bool InitDirectSetupAPIs()
{
// If module is non-NULL, this function has already been called. Note
// that this doesn't guarantee that all D3D9 procaddresses were found.
if( s_hModDSetup != NULL )
return true;
// This may fail if DirectX 9 isn't installed
s_hModDSetup = LoadLibrary( TEXT( "dsetup.dll" ) );
if( s_hModDSetup == NULL )
return false;
s_DirectXSetupSetCallback = ( LPDIRECTXSETUPSETCALLBACK )GetProcAddress( s_hModDSetup, "DirectXSetupSetCallback" );
s_DirectXSetupGetVersion = ( LPDIRECTXSETUPGETVERSION )GetProcAddress( s_hModDSetup, "DirectXSetupGetVersion" );
#ifdef UNICODE
s_DirectXSetup = (LPDIRECTXSETUP)GetProcAddress( s_hModDSetup, "DirectXSetupW" );
#else
s_DirectXSetup = ( LPDIRECTXSETUP )GetProcAddress( s_hModDSetup, "DirectXSetupA" );
#endif
if( s_DirectXSetupGetVersion == NULL ||
s_DirectXSetupSetCallback == NULL ||
s_DirectXSetup == NULL )
return false;
return true;
}
//-----------------------------------------------------------------------------
// Name: GetReply()
// Desc: Waits for the user to click on a button on our simulated message box
// See DlgProc for the code that sets g_wReply
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Name: GetReply()
// Desc: Waits for the user to click on a button on our simulated message box.
//       See DlgProc for the code that sets g_iReply. Returns the MessageBox-
//       style button ID (IDOK, IDCANCEL, ...) matching the dwMsgType layout.
//-----------------------------------------------------------------------------
DWORD GetReply( DWORD dwMsgType )
{
    DWORD dwDefaultButton = 0;

    // Pump messages until DlgProc() records a button click in g_iReply.
    // NOTE(review): when the queue is empty this loop spins without blocking
    // (no WaitMessage), so it burns CPU while waiting — confirm acceptable.
    while( g_iReply == -1 )
    {
        MSG msg;
        // Forward my messages...
        while( PeekMessage( &msg, NULL, 0, 0, PM_REMOVE ) )
        {
            // Shutdown-type messages must reach the main window procedure,
            // not be swallowed by this local pump.
            if( msg.message == WM_QUIT ||
                msg.message == WM_CLOSE ||
                msg.message == WM_SYSCOMMAND ||
                msg.message == WM_DESTROY )
            {
                // Put the message back on the queue and get out of here.
                PostMessage( msg.hwnd, msg.message, msg.wParam, msg.lParam );
                break;
            }
            if( !IsDialogMessage( msg.hwnd, &msg ) )
            {
                TranslateMessage( &msg );
                DispatchMessage( &msg );
            }
        }
    }

    // Return the proper ID value for the button the user clicked on.
    // This code simulates what MessageBox() would return; the low nibble of
    // dwMsgType encodes the MB_* button layout.
    switch( dwMsgType & 0x0000000F )
    {
        case MB_OKCANCEL:
            dwDefaultButton = ( g_iReply == IDBUT1 ) ? IDOK : IDCANCEL;
            break;
        case MB_OK:
            dwDefaultButton = IDOK;
            break;
        case MB_RETRYCANCEL:
            dwDefaultButton = ( g_iReply == IDBUT1 ) ? IDRETRY : IDCANCEL;
            break;
        case MB_ABORTRETRYIGNORE:
            if( g_iReply == IDBUT1 )
                dwDefaultButton = IDABORT;
            else if( g_iReply == IDBUT2 )
                dwDefaultButton = IDRETRY;
            else
                dwDefaultButton = IDIGNORE;
            break;
        case MB_YESNOCANCEL:
            if( g_iReply == IDBUT1 )
                dwDefaultButton = IDYES;
            else if( g_iReply == IDBUT2 )
                dwDefaultButton = IDNO;
            else
                dwDefaultButton = IDCANCEL;
            break;
        case MB_YESNO:
            dwDefaultButton = ( g_iReply == IDBUT1 ) ? IDYES : IDNO;
            break;
        default:
            dwDefaultButton = IDOK;
    }

    // Re-arm for the next simulated message box.
    g_iReply = -1;
    return dwDefaultButton;
}
//-----------------------------------------------------------------------------
// Name: DirectXSetupCallbackFunction()
// Desc: Handle each reason for why the callback was called, filtering each
// message by what the current state of g_fStatus is.
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Name: DirectXSetupCallbackFunction()
// Desc: Handle each reason for why the callback was called, filtering each
//       message by what the current state of g_dwStatus is. Called by
//       DirectXSetup during installation.
//-----------------------------------------------------------------------------
DWORD WINAPI DirectXSetupCallbackFunction( DWORD dwReason, DWORD dwMsgType,
                                           LPWSTR strMessage, LPWSTR strName,
                                           VOID* pInfo )
{
    // Nothing to display — just accept and continue.
    if( strMessage == NULL && strName == NULL )
        return IDOK;

    if( dwReason == DSETUP_CB_MSG_PROGRESS )
    {
        // Add code to update progress meter if there was one
        return IDOK;
    }

    // Hide all reply buttons until we know which ones the message needs.
    SetButtons( g_hDlg, 0xffffffff );

    if( g_dwStatus == SHOW_ALL )
    {
        // Show all messages from DirectSetup
        SetWindowText( GetDlgItem( g_hDlg, ID_MYMESSAGE ), strMessage );

        // When dwMsgType is equal to zero we can display status information but
        // should not wait for input from the user.
        if( dwMsgType == 0 )
        {
            Sleep( 1000 );
            return IDOK;
        }
        SetButtons( g_hDlg, dwMsgType );
    }
    else if( g_dwStatus == SHOW_UPGRADES )
    {
        // Show only upgrade messages
        switch( dwReason )
        {
            case DSETUP_CB_MSG_INTERNAL_ERROR:
                SetWindowText( GetDlgItem( g_hDlg, ID_MYMESSAGE ), strMessage );
                SetButtons( g_hDlg, dwMsgType );
                break;
            default:
                // Everything else is auto-accepted in this mode.
                return IDOK;
        }
    }
    else if( g_dwStatus == SHOW_PROBLEMS )
    {
        // Show only problem messages
        switch( dwReason )
        {
            case DSETUP_CB_MSG_INTERNAL_ERROR:
                SetWindowText( GetDlgItem( g_hDlg, ID_MYMESSAGE ), strMessage );
                SetButtons( g_hDlg, dwMsgType );
                break;
            default:
                return IDOK;
        }
    }
    else if( g_dwStatus == SHOW_NONE )
    {
        // Don't show any messages
        return IDOK;
    }

    // Block until the user answers, returning the MessageBox-style button ID.
    return GetReply( dwMsgType );
}
//-----------------------------------------------------------------------------
// Name: DirectXGetVersion( HWND hWnd )
// Desc: Shows the results of a call to DirectXSetupGetVersion()
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Name: DirectXGetVersion( HWND hWnd )
// Desc: Shows the results of a call to DirectXSetupGetVersion() in a
//       message box.
//-----------------------------------------------------------------------------
VOID DirectXGetVersion( HWND hWnd )
{
    DWORD dwVersion;
    DWORD dwRevision;

    // Make sure dsetup.dll is loaded and its entry points are resolved.
    if( InitDirectSetupAPIs() == false )
    {
        // (Typo fixed in the user-facing message: "documenation".)
        MessageBox( hWnd,
                    L"dsetup.dll could not be found. In order to use the DirectSetup API, copy the entire contents of the \\Redist folder from the DirectX SDK into the same folder as this sample. See the documentation and source for further details.", L"Error", MB_OK | MB_ICONINFORMATION );
        return;
    }

    s_DirectXSetupGetVersion( &dwVersion, &dwRevision );

    // Use HIWORD(dwVersion); to get the DirectX major version
    // Use LOWORD(dwVersion); to get the DirectX minor version
    // For example: for DirectX 5 dwVersion == 0x00040005
    WCHAR strBuf[128];
    StringCchPrintf( strBuf, 128, L"Version 0x%08lX\nRevision %ld", dwVersion, dwRevision );
    MessageBox( hWnd, strBuf, L"Results:", MB_OK | MB_ICONINFORMATION );
}
//-----------------------------------------------------------------------------
// Name: EnableReboot()
// Desc: Enables reboot functionality on Win2k+
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Name: EnableReboot()
// Desc: Acquires the SE_SHUTDOWN_NAME privilege for the current process so a
//       later InitiateSystemShutdownEx call is permitted (Win2k+).
//-----------------------------------------------------------------------------
VOID EnableReboot( VOID )
{
    HANDLE hProcess = GetCurrentProcess();
    HANDLE hToken;

    if( OpenProcessToken( hProcess, TOKEN_ALL_ACCESS, &hToken ) )
    {
        LUID luidShutDown;
        if( LookupPrivilegeValue( NULL, SE_SHUTDOWN_NAME, &luidShutDown ) )
        {
            TOKEN_PRIVILEGES Privilege;
            Privilege.PrivilegeCount = 1;
            Privilege.Privileges[0].Luid = luidShutDown;
            Privilege.Privileges[0].Attributes = SE_PRIVILEGE_ENABLED;

            // NOTE(review): the result of AdjustTokenPrivileges is not
            // checked; on failure reboot silently stays disabled.
            AdjustTokenPrivileges( hToken, FALSE, &Privilege, 0, NULL, NULL );
        }
        CloseHandle( hToken );
    }
}
//-----------------------------------------------------------------------------
// Name: DirectXInstall()
// Desc: Set up the callback function for DirectXSetup and handle the return
// results. This function starts a modeless version of MessageBox() so
// that the user can see the progress of the DirectX installation.
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// Name: DirectXInstall()
// Desc: Set up the callback function for DirectXSetup and handle the return
//       results. This function starts a modeless version of MessageBox() so
//       that the user can see the progress of the DirectX installation.
//       Returns FALSE only when the progress dialog cannot be created.
//-----------------------------------------------------------------------------
BOOL DirectXInstall( HWND hWnd )
{
    WCHAR strSource[MAX_PATH] = {0};
    WCHAR* strLastSlash = NULL;
    WCHAR string[256];
    INT iRetCode;
    DWORD dwFlags;

    // Resolve the DirectSetup entry points; bail out if dsetup.dll is absent.
    if( InitDirectSetupAPIs() == false )
    {
        // (Typo fixed in the user-facing message: "documenation".)
        MessageBox( hWnd,
                    L"dsetup.dll could not be found. In order to use the DirectSetup API, copy the entire contents of the \\Redist folder from the DirectX SDK into the same folder as this sample. See the documentation and source for further details.", L"Error", MB_OK | MB_ICONINFORMATION );
        return false;
    }

    // The DSETUP DLLs should be at the current path, along with the DirectX
    // redist directory so that it can be found and set up. Get the exe
    // name, and exe path
    GetModuleFileName( NULL, strSource, MAX_PATH );
    strSource[MAX_PATH - 1] = 0;
    strLastSlash = wcsrchr( strSource, L'\\' );
    if( strLastSlash ) // Chop the exe name from the exe path
        *strLastSlash = 0;

    // If the user wants any messages, bring up the simulated MessageBox
    // dialog
    if( g_dwStatus != SHOW_NONE )
    {
        // Create a modeless dialog box so we can show messages that don't
        // need user input
        g_hDlg = CreateDialog( g_hInstance, L"INSTDX", hWnd, ( DLGPROC )DlgProc );
        if( g_hDlg == NULL )
        {
            WCHAR buf[200];
            LoadString( g_hInstance, STR_NODIALOG, buf, 200 );
            // Fixed: show the text just loaded into buf; the original passed
            // the uninitialized local `string` here.
            MessageBox( hWnd, buf, g_strAppTitle, MB_ICONSTOP | MB_OK );
            return FALSE;
        }
        // Hide the reply buttons until a message actually needs them.
        ShowWindow( GetDlgItem( g_hDlg, IDBUT1 ), SW_HIDE );
        ShowWindow( GetDlgItem( g_hDlg, IDBUT2 ), SW_HIDE );
        ShowWindow( GetDlgItem( g_hDlg, IDBUT3 ), SW_HIDE );
        ShowWindow( g_hDlg, SW_NORMAL );
    }

    // Set the callback function up before calling DirectXSetup
    s_DirectXSetupSetCallback( ( DSETUP_CALLBACK )DirectXSetupCallbackFunction );

    if( g_dwStatus != SHOW_NONE )
    {
        LoadString( g_hInstance, STR_STARTSETUP, string, 256 );
        SetWindowText( GetDlgItem( g_hDlg, ID_MYMESSAGE ), string );
        SetButtons( g_hDlg, 0xffffffff );
        Sleep( 1000 );
    }

    dwFlags = DSETUP_DIRECTX;

    //-------------------------------------------------------------------------
    // Notes:
    //
    // 1) Test Install
    //      If you just want to test the install process without
    //      actually installing then add the DSETUP_TESTINSTALL flag like so:
    //      dwFlags |= DSETUP_TESTINSTALL;
    //
    // 2) Managed DirectX
    //      To install Managed DirectX you must add the DSETUP_MANAGEDDX flag like so:
    //      dwFlags |= DSETUP_MANAGEDDX
    //
    //      Managed DirectX requires the .NET Framework to be installed before
    //      installing DirectX. DirectX does NOT install the .NET Framework.
    //-------------------------------------------------------------------------
    iRetCode = s_DirectXSetup( hWnd, strSource, dwFlags );

    // If the user didn't want any message, we now need to bring up the dialog
    // to reflect the return message from DirectXSetup
    if( g_dwStatus == SHOW_NONE )
    {
        g_hDlg = CreateDialog( g_hInstance, L"INSTDX", hWnd, ( DLGPROC )DlgProc );
        if( g_hDlg == NULL )
        {
            WCHAR buf[200];
            LoadString( g_hInstance, STR_NODIALOG, buf, 200 );
            // Fixed: pass buf (the loaded text), not the unrelated `string`.
            MessageBox( hWnd, buf, g_strAppTitle, MB_ICONSTOP | MB_OK );
            return FALSE;
        }
        ShowWindow( GetDlgItem( g_hDlg, IDBUT1 ), SW_HIDE );
        ShowWindow( GetDlgItem( g_hDlg, IDBUT2 ), SW_HIDE );
        ShowWindow( GetDlgItem( g_hDlg, IDBUT3 ), SW_HIDE );
        ShowWindow( g_hDlg, SW_NORMAL );
    }

    switch( iRetCode )
    {
        // Since our MessageBox dialog is still up, display the results in it
        case DSETUPERR_NEWERVERSION:
            MessageBox( hWnd, L"Installation is newer than one being installed",
                        L"DirectX Setup", MB_OK );
            break;

        case DSETUPERR_SUCCESS_RESTART:
            // Success, but a reboot is required; ask the user.
            LoadString( g_hInstance, STR_RESTART, string, 256 );
            SetWindowText( GetDlgItem( g_hDlg, ID_MYMESSAGE ), string );
            SetButtons( g_hDlg, 0xffffffff );
            SetButtons( g_hDlg, MB_YESNO );
            if( GetReply( MB_YESNO ) == IDYES )
            {
                // Restart Windows
                EnableReboot();
                InitiateSystemShutdownEx( NULL,
                                          NULL,
                                          0,
                                          FALSE,
                                          TRUE,
                                          SHTDN_REASON_MAJOR_OPERATINGSYSTEM |
                                          SHTDN_REASON_MINOR_UPGRADE |
                                          SHTDN_REASON_FLAG_PLANNED );
            }
            break;

        case DSETUPERR_SUCCESS:
            LoadString( g_hInstance, STR_SUCCESS, string, 256 );
            SetWindowText( GetDlgItem( g_hDlg, ID_MYMESSAGE ), string );
            SetButtons( g_hDlg, 0xffffffff );
            SetButtons( g_hDlg, MB_OK );
            GetReply( MB_OK );
            break;

        case DSETUPERR_BADWINDOWSVERSION:
        case DSETUPERR_SOURCEFILENOTFOUND:
        case DSETUPERR_NOCOPY:
        case DSETUPERR_OUTOFDISKSPACE:
        case DSETUPERR_CANTFINDINF:
        case DSETUPERR_CANTFINDDIR:
        case DSETUPERR_INTERNAL:
        case DSETUPERR_UNKNOWNOS:
            // Map the negative DSETUPERR_* code onto the string table entry.
            LoadString( g_hInstance, STR_ERRORRETURN + ( iRetCode * -1 ) - 1, string, 256 );
            SetWindowText( GetDlgItem( g_hDlg, ID_MYMESSAGE ), string );
            SetButtons( g_hDlg, 0xffffffff );
            SetButtons( g_hDlg, MB_OK );
            GetReply( MB_OK );
            break;
    }

    DestroyWindow( g_hDlg );
    g_hDlg = NULL;
    return TRUE;
}
|
def largest_connected_component_size(grid):
    """Return the size of the largest orthogonally-connected component of
    equal positive labels in ``grid`` (a list of equal-length rows of ints).

    Matches the original behavior: the grid is modified in place (visited
    cells are overwritten with -1), and only cells with label > 0 seed a
    component. Returns 0 for an empty grid.
    """
    if not grid or not grid[0]:
        return 0

    rows, cols = len(grid), len(grid[0])

    def _component_size(start_row, start_col, label):
        # Iterative DFS with an explicit stack: the original recursive
        # version raised RecursionError on components larger than the
        # interpreter recursion limit (~1000 cells).
        stack = [(start_row, start_col)]
        size = 0
        while stack:
            row, col = stack.pop()
            if row < 0 or col < 0 or row >= rows or col >= cols:
                continue
            if grid[row][col] != label:
                continue
            grid[row][col] = -1  # mark cell as visited
            size += 1
            stack.extend(((row + 1, col), (row - 1, col),
                          (row, col + 1), (row, col - 1)))
        return size

    max_size = 0
    for row in range(rows):
        for col in range(cols):
            if grid[row][col] > 0:
                max_size = max(max_size, _component_size(row, col, grid[row][col]))
    return max_size
package com.digirati.taxman.rest.server;
import com.digirati.taxman.common.rdf.RdfModelFactory;
import com.digirati.taxman.common.rdf.RdfModelFormat;
import com.digirati.taxman.common.rdf.io.RdfModelReader;
import com.digirati.taxman.common.rdf.io.RdfModelWriter;
import com.digirati.taxman.common.taxonomy.ConceptModel;
import com.digirati.taxman.common.taxonomy.ConceptSchemeModel;
import com.digirati.taxman.rest.server.infrastructure.media.writer.TypedRdfModelMessageBodyWriter;
import com.github.jsonldjava.core.JsonLdOptions;
import com.google.common.io.Resources;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.riot.JsonLDWriteContext;
import org.apache.jena.sparql.util.Context;
import org.apache.jena.vocabulary.SKOS;
import org.json.JSONObject;
import java.io.FileOutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.List;
/**
 * One-off command-line utility: reads a SKOS RDF/XML export ("skos.xml" in
 * the working directory) and, for each concept whose URI appears in
 * SOURCE_LIST, writes a framed JSON-LD serialization to orphan-&lt;index&gt;.json.
 */
public class PreferredLabelExtractor {

    // Concept URIs to extract (PoolParty taxonomy identifiers).
    private static List<String> SOURCE_LIST = List.of(
            "https://crutaxonomy.poolparty.biz/CRUvocabularies/b4248b52-46f4-44fc-8e6e-a039f65eb8b4",
            "https://crutaxonomy.poolparty.biz/CRUvocabularies/20215cff-2f17-4a23-b75a-82dd807eef8f",
            "https://crutaxonomy.poolparty.biz/CRUvocabularies/e165d861-3ba8-4980-807b-75037e4b5f13",
            "https://crutaxonomy.poolparty.biz/CRUvocabularies/772c721d-265b-49c2-ad88-cf74d2d51220"
    );

    /**
     * Entry point. Expects "skos.xml" in the working directory and writes
     * orphan-&lt;i&gt;.json files next to it.
     *
     * @throws Exception on any I/O or RDF parsing failure (tool-style, no recovery)
     */
    public static void main(String[] argv) throws Exception {
        // NOTE(review): `model` is parsed but never used afterwards — the
        // concepts are re-read below via RdfModelReader. Confirm whether this
        // first parse can be removed.
        Model model = ModelFactory.createDefaultModel();
        model.read(Files.newInputStream(Paths.get("skos.xml")), null, "RDFXML");

        RdfModelFactory modelFactory = new RdfModelFactory();
        RdfModelReader reader = new RdfModelReader(modelFactory);
        RdfModelWriter writer = new RdfModelWriter();

        // Parse every concept in the export, then filter by SOURCE_LIST.
        List<ConceptModel> concepts = reader.readAll(ConceptModel.class, RdfModelFormat.RDFXML, Files.newInputStream(Paths.get("skos.xml")));
        for (int i = 0; i < concepts.size(); i++) {
            ConceptModel concept = concepts.get(i);
            if (SOURCE_LIST.contains(concept.getUri().toString())) {
                // Load the server's standard JSON-LD frame and pin it to this
                // concept's @id so only this concept is framed.
                var frameUrl = TypedRdfModelMessageBodyWriter.class.getClassLoader().getResource("jsonld/framing/concept.json");
                var frameString = Resources.toString(frameUrl, StandardCharsets.UTF_8);
                JSONObject frameObject = new JSONObject(frameString);
                frameObject.put("@id", concept.getUri().toASCIIString());

                var jsonLdOptions = new JsonLdOptions();
                jsonLdOptions.setCompactArrays(true);
                jsonLdOptions.setOmitGraph(true);
                jsonLdOptions.setOmitDefault(false);
                jsonLdOptions.setUseNativeTypes(true);
                jsonLdOptions.useNamespaces = true;

                var jsonLdContext = new JsonLDWriteContext();
                jsonLdContext.setFrame(frameObject.toString());
                jsonLdContext.setOptions(jsonLdOptions);

                // One output file per matched concept, indexed by position in
                // the full concept list.
                try (var fos = new FileOutputStream("orphan-" + i + ".json")) {
                    writer.write(concept, RdfModelFormat.JSON_LD_FRAMED, fos, jsonLdContext);
                }
            }
        }
    }
}
|
#!/bin/bash -e
# Point the local-universe build at a custom package host: replaces the
# default host (universe.service.consul) with $1 in the Makefile and the
# build script, and normalizes the output tarball name.
#
# Usage: ./this-script.sh <source-host>

SOURCE=$1

# Fail fast with a usage message instead of silently rewriting both files
# with an empty replacement string.
if [ -z "$SOURCE" ]; then
    echo "Usage: $0 <source-host>" >&2
    exit 1
fi

# modify docker/local-universe/Makefile
sed -e "s/universe.service.consul/$SOURCE/g" -e "s/local-consul-universe.tar/local-universe.tar/g" -i docker/local-universe/Makefile

# modify scripts/local-universe.py
sed "s/universe.service.consul/$SOURCE/g" -i scripts/local-universe.py
|
<reponame>froala/svelte-froalacharts
// Sample-source strings for the svelte-froalacharts demo gallery: `code` is
// the <script> block shown to the user, `html` the markup, `data` the data
// module. These are display strings only — they are rendered, not executed.

// Svelte <script> source: registers FroalaCharts, builds the chart config,
// and wires the theme-change handler.
const code =
`<script>
  import FroalaCharts from 'froalacharts';
  import SvelteFC, { fcRoot } from 'svelte-froalacharts';
  import { sampleNames } from '../utils/constants.js';
  import dataSource from './data.js';

  fcRoot(FroalaCharts);

  let chartObj,
    chartConfig = {
      id: 'column-chart',
      type: 'pie',
      width: '600',
      height: '400',
      renderAt: 'chart-container',
      dataSource
    };

  const updateDataHandler = (arg, val) => {
    chartObj.setChartAttribute(arg, val);
  };
</script>`,

  // Markup source: the chart container plus six radio buttons, one per theme.
  html =
`<div id="chart-container" >
  <SvelteFC {...chartConfig} bind:chart={chartObj} />
</div>
<div style="display: flex; position: absolute; bottom: 15px;">
  <div id="select-text">Choose a theme:</div>
  <div class="change-type">
    <div id="radio1">
      <input
        name="theme-selecter"
        id="radioButton1"
        type="radio"
        on:change={() => {
          updateDataHandler('theme', 'froala');
        }}
        checked="checked"
      >
      <label for="radioButton1">Froala</label>
    </div>
    <div id="radio2">
      <input
        name="theme-selecter"
        id="radioButton2"
        type="radio"
        on:change={() => {
          updateDataHandler('theme', 'gammel');
        }}
      >
      <label for="radioButton2">Gammel</label>
    </div>
    <div id="radio3">
      <input
        name="theme-selecter"
        id="radioButton3"
        type="radio"
        on:change={() => {
          updateDataHandler('theme', 'candy');
        }}
      >
      <label for="radioButton3">Candy</label>
    </div>
    <div id="radio4">
      <input
        name="theme-selecter"
        id="radioButton4"
        type="radio"
        on:change={() => {
          updateDataHandler('theme', 'zune');
        }}
      >
      <label for="radioButton4">Zune</label>
    </div>
    <div id="radio5">
      <input
        name="theme-selecter"
        id="radioButton5"
        type="radio"
        on:change={() => {
          updateDataHandler('theme', 'ocean');
        }}
      >
      <label for="radioButton5">Ocean</label>
    </div>
    <div id="radio6">
      <input
        name="theme-selecter"
        id="radioButton6"
        type="radio"
        on:change={() => {
          updateDataHandler('theme', 'carbon');
        }}
      >
      <label for="radioButton6">Carbon</label>
    </div>
  </div>
</div>`,

  // Data-module source: oil-reserves sample dataset used by the chart.
  data =
`export default {
  "chart": {
    "caption": "Countries With Most Oil Reserves [2017-18]",
    "subCaption": "In MMbbl = One Million barrels",
    "xAxisName": "Country",
    "yAxisName": "Reserves (MMbbl)",
    "numberSuffix": "K",
    "theme": "froala",
    "updateAnimduration": "0.4"
  },
  "data": [{
    "label": "Venezuela",
    "value": "290"
  }, {
    "label": "Saudi",
    "value": "260"
  }, {
    "label": "Canada",
    "value": "180"
  }, {
    "label": "Iran",
    "value": "140"
  }, {
    "label": "Russia",
    "value": "115"
  }, {
    "label": "UAE",
    "value": "100"
  }, {
    "label": "US",
    "value": "30"
  }, {
    "label": "China",
    "value": "30"
  }]
}`;

export default {
  code,
  html,
  data
};
|
#!/usr/bin/env bash
# Builds the mongodb tools (mongodump, mongorestore, ...) from source at a
# given release tag and installs the binaries into /usr/bin.
# based on https://github.com/mongodb/mongo-tools

# the current directory
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# if an error exit right away, don't continue the build
set -e

# some info
echo
echo "Works like command: sudo ./scripts/build-tools.sh r4.3.2"
echo

# check if we are root (needed for apt and for installing into /usr/bin)
if [[ $EUID -ne 0 ]]; then
    echo "This script must be ran via root 'sudo' command or using in 'sudo -i'."
    exit 1
fi

# require mongo release tag as the first argument
if [ -z "${1}" ]; then
    echo "The first argument must be the MONGODB_RELEASE for example 'r4.3.2'"
    exit 1
fi
MONGODB_RELEASE="${1}"

## delete all mongo other programs, we self compile
##apt remove --purge mongo*
## the required packages for debian
##apt -y install gcc python scons git glibc-source libssl-dev python-pip
apt -y install golang libpcap-dev
export GOROOT=$(go env GOROOT)

# generate build directory variable (GOPATH-style layout expected by the tools build)
BUILD=$DIR/../build/src/github.com/mongodb/
# delete previous build directory
rm -rf $BUILD/mongo-tools
# generate new build directory
mkdir -p $BUILD

# find out how many cores we have and we use that many
# NOTE(review): CORES is computed but not used below — confirm intent.
CORES=$(grep -c ^processor /proc/cpuinfo)

# go to the build directory
pushd $BUILD
# clone the mongo by branch
git clone https://github.com/mongodb/mongo-tools
# the mongo directory is a variables
MONGO_TOOLS=$BUILD/mongo-tools
# go to the mongo directory
pushd $MONGO_TOOLS
# checkout the mongo release
git checkout tags/${MONGODB_RELEASE}
bash ./build.sh

# restrict access to the built binaries, then install them system-wide
chown root:adm -R ./bin
chmod o-rwx -R ./bin
chmod ug+rx ./bin/*
cp -r ./bin/. /usr/bin

# for PROGRAM in bsondump mongodump mongoexport mongofiles mongoimport mongoreplay mongorestore mongostat mongotop
# do
#     go build -o bin/${PROGRAM} -tags "ssl sasl" ${PROGRAM}/main/${PROGRAM}.go
# done

# exit of the mongo directory
popd
# exit the build directory
popd
# delete current build directory
rm -rf $BUILD/mongo-tools
|
<reponame>NetX-lab/RepNet-Experiments
#!/usr/bin/env python
"""Plot the tail distribution (reverse CDF, log-scaled tail percentiles) of
sorting completion times for three schemes — TCP, RepFlow, RepSYN — read
from time-*.dat files, and save the figure to sorting-CDF.pdf."""

# Numpy is a library for handling arrays (like data points)
import numpy as np
import math  # NOTE(review): imported but unused — confirm before removing

# Pyplot is a module within the matplotlib library for plotting
import matplotlib.pylab as plt
import matplotlib

# Use a larger sans-serif font for all figure text.
font = {'family' : 'sans',
        #'weight' : 'normal',
        'size' : 16}
matplotlib.rc('font', **font)


def getpt(data, percentile):
    """Return the given percentile of `data` (thin wrapper over np.percentile)."""
    return np.percentile(data, percentile)


# Input files: one measured completion time (ms) per line — assumed format,
# TODO confirm against the data-generation scripts.
synfile = "time-syn.dat"
repfile = "time-rep.dat"
sinfile = "time-sin.dat"

sin = np.loadtxt(sinfile)
rep = np.loadtxt(repfile)
syn = np.loadtxt(synfile)

# NOTE(review): these lengths are computed but never used below.
len1 = len(sin)
len2 = len(rep)
len3 = len(syn)

# Log-spaced tail fractions from 0.1% up to ~100%.
x = np.logspace(-1, 1.999, num=50)
p1 = []
p2 = []
p3 = []
xlab = []
for i in x:
    # The (100 - i)-th percentile: completion time below which all but i%
    # of the samples fall.
    p1.append(getpt(sin, 100-i))
    p2.append(getpt(rep, 100-i))
    p3.append(getpt(syn, 100-i))
    xlab.append(i)

plt.figure(figsize=(6,4))
plt.plot(p1, xlab, 'bx-', label="TCP", linewidth=2, markersize=3)
plt.plot(p2, xlab, 'y*-', label="RepFlow", linewidth=2, markersize=3)
plt.plot(p3, xlab, 'ro-', label="RepSYN", linewidth=2, markersize=3)
plt.legend(loc='lower right', fontsize='medium')

# Log y-axis of tail fractions, relabeled as cumulative percentages.
plt.yscale('log')
#plt.xscale('log')
plt.ylabel('(%)')
plt.xlabel('Sorting Time (ms)')
plt.yticks([0.1, 0.2, 0.5, 1, 2, 4, 8, 16, 32], [99.9, 99.8, 99.5, 99, 98, 96, 92, 84, 68])
plt.xticks([100, 300, 500, 700, 900, 1100, 1300], [100, 300, 500, 700, 900, 1100, 1300])
plt.xlim([94, 1300])
# plt.axis('tight')
# Inverted limits put the high-percentile (small-tail) end at the top.
plt.ylim([64, 0.1])
plt.tight_layout(rect=(0,0,1,1))
plt.grid()
plt.savefig('./sorting-CDF.pdf', format='pdf')
plt.show()
|
<gh_stars>1-10
/*
* Licensed to Cloudera, Inc. under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Cloudera, Inc. licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cloudera.hue.livy
import java.util.concurrent.ConcurrentHashMap
import scala.collection.JavaConverters._
object LivyConf {
  val SESSION_FACTORY_KEY = "livy.server.session.factory"
  val SPARK_SUBMIT_KEY = "livy.server.spark-submit"

  // The supported backends for launching sessions.
  sealed trait SessionKind
  case class Process() extends SessionKind
  case class Yarn() extends SessionKind
}

/**
 * Livy server configuration, backed by a thread-safe key/value map.
 *
 * @param loadDefaults whether to also load values from the Java system properties
 */
class LivyConf(loadDefaults: Boolean) {

  import LivyConf._

  /**
   * Create a LivyConf that loads defaults from the system properties and the classpath.
   * @return
   */
  def this() = this(true)

  private val settings = new ConcurrentHashMap[String, String]

  if (loadDefaults) {
    for ((k, v) <- System.getProperties.asScala if k.startsWith("livy.")) {
      settings.put(k, v)
    }
  }

  /** Set a configuration variable */
  def set(key: String, value: String): LivyConf = {
    if (key == null) {
      throw new NullPointerException("null key")
    }
    if (value == null) {
      // Fixed: the original reported "null key" for a null value too.
      throw new NullPointerException("null value")
    }
    settings.put(key, value)
    this
  }

  /** Set if a parameter is not already configured */
  def setIfMissing(key: String, value: String): LivyConf = {
    // Fixed: the original used ConcurrentHashMap.contains(x), which tests
    // *values*, not keys, so an existing key could be overwritten.
    // putIfAbsent is both correct and atomic.
    settings.putIfAbsent(key, value)
    this
  }

  /** Get a configuration variable; throws NoSuchElementException if unset */
  def get(key: String): String = getOption(key).getOrElse(throw new NoSuchElementException(key))

  /** Get a configuration variable, falling back to `default` */
  def get(key: String, default: String): String = getOption(key).getOrElse(default)

  /** Get a parameter as an Option */
  def getOption(key: String): Option[String] = Option(settings.get(key))

  /** Get a parameter as an Int, falling back to `default` */
  def getInt(key: String, default: Int) = getOption(key).map(_.toInt).getOrElse(default)

  /** Return if the configuration includes this setting */
  def contains(key: String): Boolean = settings.containsKey(key)

  /** Path of the spark-submit executable (default: "spark-submit" on PATH). */
  def sparkSubmit(): String = getOption(SPARK_SUBMIT_KEY).getOrElse("spark-submit")

  /** Session backend selected by SESSION_FACTORY_KEY ("process" or "yarn"). */
  def sessionKind(): SessionKind = getOption(SESSION_FACTORY_KEY).getOrElse("process") match {
    case "process" => Process()
    case "yarn" => Yarn()
    case kind => throw new IllegalStateException(f"unknown kind $kind")
  }

  /** Return the filesystem root. Defaults to the local filesystem. */
  def filesystemRoot(): String = sessionKind() match {
    case Process() => "file://"
    case Yarn() => "hdfs://"
  }
}
|
<reponame>neuling/fso-livetest-chrome-extension
// Builds a fresh global matcher for %KEY% placeholders (uppercase letters,
// digits, underscore, hyphen). A new RegExp is returned on every call so the
// stateful `lastIndex` of the 'g' flag never leaks between uses.
export const configurableKeyMatcher = () => new RegExp('%([A-Z0-9_-]+)%', 'g');

// Replaces every %KEY% placeholder in `rule` with configuration[KEY].
// Placeholders with no (truthy) configured value are left untouched.
export const interpolateConfiguration = (rule, configuration) => {
  const substitute = (match) => {
    const name = match.replace(/%/g, '');
    return configuration[name] || match;
  };
  return rule.replace(configurableKeyMatcher(), substitute);
};
|
export * from './tooGoodToGo';
|
import { Client, ClientUser, Message } from "discord.js";
// Flags controlling which messages a Monitor is invoked for.
export type MonitorOptions = {
  ignoreBots: boolean; // skip messages authored by bot accounts
  ignoreSelf: boolean; // skip messages authored by this client
  ignoreOthers: boolean; // skip messages authored by anyone but this client
  ignoreWebhooks: boolean; // skip webhook-delivered messages
  ignoreEdits: boolean; // skip messages that have been edited
};

// A message monitor: init() is called once with the client, run() for each
// message that passes the option filters above.
export interface Monitor {
  options: Readonly<MonitorOptions>;
  run(message: Message): Promise<unknown>;
  init(client: Client): void;
}
// Convenience base class: stores the filter options and provides a no-op
// init() so subclasses only have to implement run().
export abstract class MonitorBase implements Monitor {
  constructor(public readonly options: Readonly<MonitorOptions>) {}

  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  init(client: Client): void {
    // do nothing
  }

  abstract run(message: Message): Promise<unknown>;
}
// Dispatches created/edited messages to every registered monitor, applying
// each monitor's ignore-options first. Monitor errors are logged, never thrown.
export class MonitorRunner {
  constructor(private readonly monitors: Set<Monitor>) {}

  // Handle an edited message.
  edit(newMsg: Message): void {
    this.dispatch(newMsg);
  }

  // Handle a newly created message.
  create(newMsg: Message): void {
    this.dispatch(newMsg);
  }

  // Give every monitor a chance to initialize with the client; one monitor's
  // failure must not prevent the others from initializing.
  init(client: Client): void {
    this.monitors.forEach((m) => {
      try {
        m.init(client);
      } catch (e) {
        console.log(e);
      }
    });
  }

  // Shared body of edit()/create(): requires a ready client user, then fans
  // out to all monitors.
  private dispatch(msg: Message): void {
    const self = msg.client.user;
    if (!self) {
      return;
    }
    this.monitors.forEach((m) => this.mayRunMonitor(msg, m, self));
  }

  private mayRunMonitor(msg: Message, monitor: Monitor, self: ClientUser) {
    const o = monitor.options;
    if (o.ignoreBots && msg.author.bot) return;
    // Compare snowflake ids rather than object identity: message.author is a
    // User while client.user is a ClientUser, and depending on the discord.js
    // version/caching they need not be the same object, which made the
    // original `===` checks unreliable.
    if (o.ignoreSelf && msg.author.id === self.id) return;
    if (o.ignoreOthers && msg.author.id !== self.id) return;
    if (o.ignoreWebhooks && msg.webhookID != null) return;
    if (o.ignoreEdits && (msg.editedTimestamp || msg.editedAt)) return;
    try {
      // run() is async; surface rejections in the log instead of crashing.
      monitor.run(msg).catch(console.log);
    } catch (e) {
      console.log(e);
    }
  }
}
|
class Thermostat:
    """A thermostat with a validated temperature (50-90) and an operating
    status ("OFF", "HEATING", "COOLING") that follows the temperature."""

    def __init__(self):
        self._temperature = 70  # default temperature
        self._status = "OFF"    # default operating mode

    @property
    def status(self):
        """Current operating mode: "OFF", "HEATING" or "COOLING"."""
        return self._status

    @status.setter
    def status(self, val):
        # Reject anything outside the three known modes.
        if val not in ("OFF", "HEATING", "COOLING"):
            raise ValueError("Invalid status value")
        self._status = val

    @property
    def temperature(self):
        """Current set-point temperature."""
        return self._temperature

    @temperature.setter
    def temperature(self, val):
        # Guard clause: only 50-90 inclusive is accepted.
        if not (50 <= val <= 90):
            raise ValueError("Temperature out of range (50-90)")
        self._temperature = val
        # Derive the operating mode from the new set-point.
        if val < 60:
            self._status = "HEATING"
        elif val > 80:
            self._status = "COOLING"
        else:
            self._status = "OFF"
# Example usage: demonstrates the default state and how assigning a
# temperature drives the status property (see Thermostat above).
thermostat = Thermostat()
print(thermostat.status)  # Output: OFF
print(thermostat.temperature)  # Output: 70
thermostat.temperature = 55
print(thermostat.status)  # Output: HEATING
thermostat.temperature = 85
print(thermostat.status)  # Output: COOLING
<gh_stars>1-10
package future
import (
"context"
"errors"
"testing"
"time"
"github.com/stretchr/testify/assert"
)
func TestPromise(t *testing.T) {
type Input struct {
V interface{}
Err error
}
type Expect struct {
V interface{}
Err error
}
type Test struct {
Input Input
Expect Expect
}
tests := map[string]Test{
"success": {
Input: Input{
1,
nil,
},
Expect: Expect{
1,
nil,
},
},
"failure": {
Input: Input{
nil,
errors.New("error"),
},
Expect: Expect{
nil,
errors.New("error"),
},
},
"both": {
Input: Input{
1,
errors.New("error"),
},
Expect: Expect{
1,
errors.New("error"),
},
},
}
for title, test := range tests {
t.Run(title, func(t *testing.T) {
assert := assert.New(t)
p := NewPromise()
f := p.Future()
err := p.Complete(test.Input.V, test.Input.Err)
assert.NoError(err)
v, err := f.Wait(context.Background())
assert.Equal(test.Expect.V, v)
assert.Equal(test.Expect.Err, err)
})
}
t.Run("complete twice", func(t *testing.T) {
assert := assert.New(t)
p := NewPromise()
err := p.Complete(1, errors.New("error"))
assert.NoError(err)
err = p.Complete(2, nil)
assert.Equal(ErrAlreadyDone, err)
f := p.Future()
v, err := f.Wait(context.Background())
assert.Equal(1, v)
assert.Equal(errors.New("error"), err)
})
}
func TestGo(t *testing.T) {
type Input struct {
F func() (interface{}, error)
}
type Expect struct {
V interface{}
Err error
}
type Test struct {
Input Input
Expect Expect
}
tests := map[string]Test{
"success": {
Input: Input{func() (interface{}, error) {
return 1, nil
}},
Expect: Expect{
1,
nil,
},
},
"failure": {
Input: Input{func() (interface{}, error) {
return nil, errors.New("error")
}},
Expect: Expect{
nil,
errors.New("error"),
},
},
"both": {
Input: Input{func() (interface{}, error) {
return 1, errors.New("error")
}},
Expect: Expect{
1,
errors.New("error"),
},
},
}
for title, test := range tests {
t.Run(title, func(t *testing.T) {
assert := assert.New(t)
f := Go(test.Input.F)
v, err := f.Wait(context.Background())
assert.Equal(test.Expect.V, v)
assert.Equal(test.Expect.Err, err)
})
}
}
// TestAny verifies that Any prefers the first successful future over any
// failures, fails only when all futures fail, and — in the timing-based
// case — returns the fastest success. Kept byte-identical: the sleeps make
// the ordering deliberate and fragile to restructure.
func TestAny(t *testing.T) {
	type Input struct {
		Futures []Future
	}
	type Expect struct {
		V   interface{}
		Err error
	}
	type Test struct {
		Input  Input
		Expect Expect
	}
	err := errors.New("error")
	tests := map[string]Test{
		// A single success among failures wins.
		"success": {
			Input: Input{[]Future{
				Failure(err),
				Failure(err),
				Success(1),
				Failure(err),
			}},
			Expect: Expect{
				1,
				nil,
			},
		},
		// All futures fail: the error is surfaced.
		"failure": {
			Input: Input{[]Future{
				Failure(err),
				Failure(err),
				Failure(err),
				Failure(err),
			}},
			Expect: Expect{
				nil,
				err,
			},
		},
		// A (value, error) pair counts as a failure; the clean success wins.
		"select no error": {
			Input: Input{[]Future{
				Go(func() (interface{}, error) {
					return 1, err
				}),
				Failure(err),
				Go(func() (interface{}, error) {
					return 2, nil
				}),
				Failure(err),
			}},
			Expect: Expect{
				2,
				nil,
			},
		},
		// The sleeps stagger completion so the 1ms future finishes first.
		"fastest one": {
			Input: Input{[]Future{
				Go(func() (interface{}, error) {
					time.Sleep(3 * time.Millisecond)
					return 3, nil
				}),
				Failure(err),
				Go(func() (interface{}, error) {
					time.Sleep(1 * time.Millisecond)
					return 1, nil
				}),
				Go(func() (interface{}, error) {
					time.Sleep(2 * time.Millisecond)
					return 2, nil
				}),
			}},
			Expect: Expect{
				1,
				nil,
			},
		},
	}
	for title, test := range tests {
		t.Run(title, func(t *testing.T) {
			assert := assert.New(t)
			f := Any(test.Input.Futures...)
			v, err := f.Wait(context.Background())
			assert.Equal(test.Expect.V, v)
			assert.Equal(test.Expect.Err, err)
		})
	}
}
|
<gh_stars>1-10
var ctx = new AudioContext();
// Fetch the sound file and decode it into an AudioBuffer.
fetch('drum.mp3').then(function(res) {
  return res.arrayBuffer();
}).then(function(arr) {
  ctx.decodeAudioData(arr, function(buf) {
    ready(buf);
  });
});
// Play the decoded sound file in a loop, wired both to the analysis
// processor and to the speakers. `sensor` is declared below; by the time
// this async callback runs the assignment has already executed.
function ready(audiobuf) {
  var osc = ctx.createBufferSource();
  osc.buffer = audiobuf;
  osc.loop = true;
  osc.connect(sensor);
  osc.connect(ctx.destination);
  osc.start(0);
}
// Volume-reactive processing. NOTE(review): ScriptProcessorNode is
// deprecated in favor of AudioWorklet — confirm target browsers.
var sensor = ctx.createScriptProcessor(1024, 1, 1);
sensor.onaudioprocess = function(event) {
  var sin = event.inputBuffer.getChannelData(0);
  var level = 0;
  for (var i = 0; i < sin.length; i++) {
    // Sum the absolute sample values over each 1024-sample window.
    level += Math.abs(sin[i]);
  }
  showChar(level);
};
sensor.connect(ctx.destination);
// Text shake effect: alternate between the #left and #right elements.
var left = true;
$('#left' ).jrumble({x:10,y:10,rotation:4});
$('#right').jrumble({x:10,y:10,rotation:4});
function showChar(level) {
  // Threshold of 100 on the summed window decides whether to rumble.
  if (level > 100) {
    var elem;
    if (left) {
      elem = $('#left');
    } else {
      elem = $('#right');
    }
    elem.trigger('startRumble');
    setTimeout(function() {
      elem.trigger('stopRumble');
      left = !left;
    }, 300);
  }
}
|
package com.java.study.answer.zuo.emiddle.class06;
public class Code03_BestTimetoBuyandSellStockFollow {
	/**
	 * Max profit from at most K buy/sell transactions over {@code prices}.
	 * Returns 0 for null/empty input.
	 *
	 * When K >= N/2 the transaction limit can never bind (at most N/2
	 * profitable trades exist), so the unlimited-trades answer is used.
	 * Otherwise a rolling 1-D DP is run once per allowed transaction:
	 * `dp[index]` appears to be the best profit through day `index` with the
	 * current transaction budget, `pre` preserves the previous round's
	 * dp[index] before it is overwritten, and `best` tracks
	 * max(prevRound[j] - prices[j]) over j <= index — TODO confirm against
	 * the standard "Best Time to Buy and Sell Stock IV" derivation.
	 */
	public static int maxProfit(int K, int[] prices) {
		if (prices == null || prices.length == 0) {
			return 0;
		}
		int N = prices.length;
		if (K >= N / 2) {
			return allTrans(prices);
		}
		int[] dp = new int[N];
		int ans = 0;
		for (int tran = 1; tran <= K; tran++) {
			int pre = dp[0];
			int best = pre - prices[0];
			for (int index = 1; index < N; index++) {
				pre = dp[index];
				dp[index] = Math.max(dp[index - 1], prices[index] + best);
				best = Math.max(best, pre - prices[index]);
				ans = Math.max(dp[index], ans);
			}
		}
		return ans;
	}
	/**
	 * Max profit with unlimited transactions: accumulates the gain of every
	 * ascending run (tracked via min/max of the current run).
	 */
	public static int allTrans(int[] prices) {
		if (prices == null || prices.length == 0) {
			return 0;
		}
		int ans = 0;
		int min = prices[0];
		int max = prices[0];
		for (int i = 1; i < prices.length; i++) {
			if (prices[i] >= prices[i - 1]) {
				max = prices[i];
			} else {
				// Run ended: bank its profit and restart at the current price.
				ans += max - min;
				min = prices[i];
				max = prices[i];
			}
		}
		return ans + max - min;
	}
	/**
	 * Equivalent unlimited-transactions formulation over the first N days:
	 * sums every positive day-over-day difference. Overload kept for
	 * reference; not called by maxProfit above.
	 */
	public static int allTrans(int[] prices, int N) {
		int ans = 0;
		for (int i = 1; i < N; i++) {
			if (prices[i] > prices[i - 1]) {
				ans += prices[i] - prices[i - 1];
			}
		}
		return ans;
	}
}
|
import './bindHNError.less'
import './bindHNError.html'
|
<filename>src/components/Card/ProjectCard.js
import PropTypes from 'prop-types';
import React, { useRef, useState } from 'react';
import { useTheme } from '../../providers/ThemeProvider';
import LinksContainer from '../LinksContainer';
import SVGIcon, { SVGIconsContainer } from '../SVGIcon';
import PageText from '../Text';
import { ProjectCardContainer, ProjectCardHovered } from './styled';
function ProjectCard({ options, ...styles }) {
const [hovered, setHovered] = useState(false);
const [animating, setAnimating] = useState(false);
const { themeState } = useTheme();
const handleAnimations = (type) => {
if (type === 'mouseout') {
setAnimating(false);
setTimeout(() => {
setHovered(false);
}, 800);
} else {
setAnimating(true);
setHovered(true);
}
};
return (
<ProjectCardContainer
themePreference={themeState.themePreference}
toggled={themeState.toggled}
onMouseEnter={() => handleAnimations()}
onMouseLeave={() => handleAnimations('mouseout')}
{...styles}
>
{!hovered ? (
<img
style={{
height: 'inherit',
width: '100%',
padding: '0',
backgroundColor: 'black',
borderRadius: '0.5rem',
objectPosition: 'center center',
objectFit: 'cover'
}}
alt={options.project.imageAltText}
src={options.project.image.childImageSharp.fluid.src}
/>
) : (
<ProjectCardHovered
animating={animating}
themePreference={themeState.themePreference}
toggled={themeState.toggled}
style={{
color: themeState.toggled
? `var(--color-secondary-${themeState.themePreference})`
: 'var(--initial-color-secondary)'
}}
>
<PageText
fontSize="1.2rem"
fontWeight="bolder"
marginTop="0.8rem"
marginLeft="0.2rem"
>
{options.project.name}
</PageText>
<PageText
height="9rem"
fontSize="0.8rem"
marginTop="0.5rem"
marginLeft="0.2rem"
>
{options.project.description}
</PageText>
<SVGIconsContainer>
{options.project.technologies.map((tech, index) => {
const key = index;
return (
<div key={key} style={{ marginRight: '0.8rem' }}>
<SVGIcon name={tech} />
</div>
);
})}
</SVGIconsContainer>
<hr
style={{
color: '#e5ecf4',
backgroundColor: '#e5ecf4',
height: '0.1rem',
border: 'none',
margin: '0.5rem 0 0 0',
width: '100%'
}}
/>
<LinksContainer links={options.project.links} />
</ProjectCardHovered>
)}
</ProjectCardContainer>
);
}
// Runtime prop validation — shape of `options.project` is not enforced
// beyond "object"; see the component body for the fields it reads.
ProjectCard.propTypes = {
  options: PropTypes.objectOf(PropTypes.any)
};
// Default to an empty object when no options are provided.
ProjectCard.defaultProps = {
  options: {}
};
export default ProjectCard;
|
#!/usr/bin/env bash
# Verify the working tree is clean, run the formatter, and fail if it would
# change anything. Restores the tree (git reset --hard) before failing.
clean=$(git status -s -uno | wc -l) #Short ignore untracked
# Quote expansions in tests: wc output can carry leading whitespace and an
# unquoted empty value would break the numeric comparison.
if [ "$clean" -ne 0 ]; then
    echo "Current working tree was not clean! This tool only works on clean checkouts"
    exit 2
else
    echo "Code Formatting Check"
    echo "====================="
    make format-py > /dev/null 2>&1
    valid_format=$(git diff | wc -l)
    if [ "$valid_format" -ne 0 ]; then
        echo "FAILED"
        echo ""
        echo "You *must* make the following changes to match the formatting style"
        echo "-------------------------------------------------------------------"
        echo ""
        git diff
        echo ""
        echo "Run 'make format-py' to apply these changes"
        # Undo the formatter's edits so the checkout stays clean for the caller.
        git reset --hard > /dev/null
        exit 1
    else
        echo "OK"
    fi
fi
exit 0
|
<reponame>fleupold/solidity-create2-example
const Factory = artifacts.require('Factory')
// Truffle test suite for the Factory contract.
// TODO: proper tests
contract('Factory', (accounts) => {
  let instance
  // Deploy a fresh Factory before each case so tests stay isolated.
  beforeEach('setup', async () => {
    instance = await Factory.new()
  })
  describe('test deploy', () => {
    it('should deploy', async () => {
      // Placeholder: deployment success is implied by beforeEach not
      // throwing. TODO: assert on the deployed instance's state.
      assert.ok(true)
    })
  })
})
|
#!/bin/bash
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created on Feb 16, 2013
#
# @author: alfoa
#
# DESCRIPTION:
#  This script is in charge of instantiating ray servers on remote nodes
# USES:
#  --remote-node-address - Remote node address (ssh into)
#  --address - Head node address
#  --redis-password - Specify the password for redis (head node password)
#  --num-cpus - Number of cpus available/to use in this node
#  --num-gpus - Number of gpus available/to use in this node
#  --remote-bash-profile - The bash profile to source before executing the tunneling commands
#  --python-path - The PYTHONPATH environment variable
#  --working-dir - The working directory
#  --help - Displays the info above and exits
ECE_SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Print the usage/help text for this script (echoed verbatim).
function display_usage()
{
	echo ''
	echo ' ------------------------------------------'
	echo ' Default usage:'
	echo ' start_remote_servers.sh'
	echo ''
	echo ' Description:'
	echo ' This script is in charge for instanciating ray servers in remote nodes'
	echo ' ------------------------------------------'
	echo ''
	echo ' Options:'
	echo ' --help'
	echo ' Displays this text and exits'
	echo ''
	echo ' --remote-node-address'
	echo ' Remote node address (ssh into)'
	echo ''
	echo ' --address'
	echo ' Head node address'
	echo ''
	echo ' --redis-password'
	echo ' Specify the password for redis (head node password)'
	echo ''
	echo ' --num-cpus'
	echo ' Number of cpus available/to use in this node'
	echo ''
	echo ' --num-gpus'
	echo ' Number of gpus available/to use in this node'
	echo ''
	echo ' --remote-bash-profile'
	echo ' The bash profile to source before executing the tunneling commands'
	echo ''
	echo ' --python-path'
	echo ' The PYTHONPATH enviroment variable'
	echo ''
	echo ' --working-dir'
	echo ' The workind directory'
	echo ''
}
# main
# control variables, filled from the command line below
REMOTE_ADDRESS=""
HEAD_ADDRESS=""
REDIS_PASS=""
PYTHONPATH=""
WORKINGDIR=""
# set defaults
NUM_CPUS=1
NUM_GPUS=-1
REMOTE_BASH=""
# parse command-line arguments
while test $# -gt 0
do
  case "$1" in
    --help)
      display_usage
      # BUGFIX: 'return' is only legal inside a function or a sourced
      # script; when run directly it errored and execution fell through.
      exit 0
      ;;
    --remote-node-address)
      shift
      REMOTE_ADDRESS=$1
      ;;
    --address)
      shift
      HEAD_ADDRESS=$1
      ;;
    --redis-password)
      shift
      REDIS_PASS=$1
      ;;
    --num-cpus)
      shift
      NUM_CPUS=$1
      ;;
    --num-gpus)
      shift
      NUM_GPUS=$1
      ;;
    --remote-bash-profile)
      shift
      REMOTE_BASH=$1
      ;;
    --python-path)
      shift
      PYTHONPATH=$1
      ;;
    --working-dir)
      shift
      WORKINGDIR=$1
      ;;
  esac
  shift
done
echo $REMOTE_ADDRESS
# Validate required arguments. Exit with a non-zero status so callers can
# detect the failure (a bare 'exit' propagates echo's status, i.e. success).
if [[ "$REMOTE_ADDRESS" == "" ]];
then
  echo ... ERROR: --remote-node-address argument must be inputted !
  exit 1
fi
if [[ "$HEAD_ADDRESS" == "" ]];
then
  echo ... ERROR: --address argument must be inputted !
  exit 1
fi
if [[ "$REDIS_PASS" == "" ]];
then
  echo ... ERROR: --redis-password argument must be inputted !
  exit 1
fi
if [[ "$PYTHONPATH" == "" ]];
then
  echo ... ERROR: --python-path argument must be inputted !
  exit 1
fi
if [[ "$WORKINGDIR" == "" ]];
then
  echo ... ERROR: --working-dir argument must be inputted !
  exit 1
fi
# start the script:
# ssh into the remote node and launch the ray servers there, logging to a
# per-node debug file in the current directory
CWD=`pwd`
OUTPUT=$CWD/server_debug_$REMOTE_ADDRESS
if [[ "$REMOTE_BASH" == "" ]];
then
  ssh $REMOTE_ADDRESS $ECE_SCRIPT_DIR/server_start.py ${WORKINGDIR} ${OUTPUT} ${PYTHONPATH} "${ECE_SCRIPT_DIR}/start_ray.sh $OUTPUT $HEAD_ADDRESS $REDIS_PASS $NUM_CPUS"
else
  ssh $REMOTE_ADDRESS $ECE_SCRIPT_DIR/server_start.py ${WORKINGDIR} ${OUTPUT} ${PYTHONPATH} "${ECE_SCRIPT_DIR}/start_ray.sh $OUTPUT $HEAD_ADDRESS $REDIS_PASS $NUM_CPUS $REMOTE_BASH"
fi
|
// Lookup key for the RabbitMQ service registration.
// NOTE(review): presumably used as a DI provider token — confirm at usage sites.
export const RABBITMQ_SERVICE = 'rabbit_mq_service';
|
#!/bin/bash
# Resolve our own location and the project data directory; abort if the
# helper script fails.
script_directory=`dirname "$0"`; script_name=`basename "$0"`
project_directory_data="$($script_directory/../get_directory.sh data)"; if [ ! $? -eq 0 ]; then echo $project_directory_data; exit 1; fi
data_directory="$project_directory_data/generate_fasta"
prefix=$1
gene_type=$2
if [[ $# -lt 2 ]]; then
    echo 'not enough arguments'
    # BUGFIX: use double quotes so ${script_name} actually expands; the
    # single-quoted originals printed the literal text '${script_name}'.
    echo "call: ./${script_name} \$prefix \$gene_type"
    echo "example: ./${script_name} default cdna"
    echo "example: ./${script_name} default gene_exon"
    echo 'gene_type: 5utr 3utr gene_exon cdna coding'
    echo 'for transcript use option: -t, --transcripts'
    exit 1
fi
# Flag parsing: positional prefix/gene_type were captured above; any
# remaining options are consumed here (unknown args are skipped).
transcript="false"
new_prefix="$prefix"
while [[ $# -gt 0 ]]; do key="$1"; value="$2"; case ${key} in
    -r|--rename) new_prefix="$2"; shift; shift; ;;
    -t|--transcript|--transcripts) transcript="true"; shift; ;;
    -f|--force) echo "not implemented yet!"; shift; ;;
    *) shift; ;;
esac; done
trap 'echo oh, I am slain; exit' INT
out_dir=$data_directory/fasta/$new_prefix
mkdir -p $out_dir
# filter by one of these: ensembl_gene_id, ensembl_gene_id_version, ensembl_transcript_id, ensembl_transcript_id_version
if [ "$transcript" = "true" ]; then
    FILTER_NAME="ensembl_transcript_id"
    regions_dir="$data_directory/counted_transcripts/$prefix"
else
    FILTER_NAME="ensembl_gene_id"
    regions_dir="$data_directory/filtered_genes/$prefix"
fi
if [ ! -d $regions_dir ]; then echo "no genes data: $regions_dir"; exit 1; fi
# Build a script of biomart download commands, then run it in the container.
echo "#!/bin/bash" > ${out_dir}/_call_biomart.sh
for run in $regions_dir/*.txt
do
    # only process region files (names containing two underscores)
    if [[ ${run##*/} != *"_"*"_"* ]]; then continue; fi
    run_filename=$(basename $run)
    echo "fetch $run_filename"
    FILTER_VALUE="$(cat $run | paste -s -d "," -)"
    GENE_TYPE=$gene_type
    outfile_fasta=$(echo "${run_filename/.txt/.fasta}" | sed -e "s/regions_//")
    outfile_query=${outfile_fasta/.fasta/.query.xml}
    cat $script_directory/query.xml | sed -e "s/\${FILTER_NAME}/$FILTER_NAME/" | sed -e "s/\${FILTER_VALUE}/$FILTER_VALUE/" | sed -e "s/\${GENE_TYPE}/$GENE_TYPE/" > ${out_dir}/${outfile_query}
    if [ -f $out_dir/$outfile_fasta ];
    then
        echo "file for $run_filename already exists (skipping)"
        continue
    fi
    echo "echo \"download ${outfile_query}\"" >> ${out_dir}/_call_biomart.sh
    echo "perl /biomart-perl/scripts/webExample.pl /output/${outfile_query} > /output/${outfile_fasta}.tmp" >> ${out_dir}/_call_biomart.sh
    # BUGFIX: escape \$? so the exit status is evaluated when the generated
    # script runs, not at generation time (unescaped it expanded to a
    # constant 0, making the mv unconditional even on download failure;
    # compare the correctly escaped \$? used for _call_fix.sh below).
    echo "if [ \$? -eq 0 ]; then mv /output/${outfile_fasta}.tmp /output/${outfile_fasta}; else echo \"filter short error\"; exit 1; fi" >> ${out_dir}/_call_biomart.sh
    # BUGFIX: the former '[ -s f ] || echo ... || rm f' never removed empty
    # files because echo succeeds; use an explicit if instead.
    echo "if [ ! -s /output/$outfile_fasta ]; then echo \"error: file is empty (removing)\"; rm /output/$outfile_fasta; fi" >> ${out_dir}/_call_biomart.sh
done
docker_switches="--rm --init --entrypoint /bin/bash"
docker_volumes="-v $(realpath $out_dir):/output"
chmod +x ${out_dir}/_call_biomart.sh
$script_directory/../run_daemon.sh "$docker_switches" "" "$docker_volumes" "biomart" "/output/_call_biomart.sh" "/dev/null"
echo "#!/bin/bash" > ${out_dir}/_call_fix.sh
for run in $out_dir/*.fasta
do
# check fasta filename (have to contain two underscores)
if [[ ${run##*/} != *"_"*"_"* ]]; then continue; fi
status_file=${run%.*}.info
echo "fix file: $run"
echo "${script_name} $new_prefix $gene_type" >> $status_file
if ! grep -q "empty_sequence_removed" "$status_file"; then
# sometime sequence missing in biomart
$script_directory/fix_no_data_error.sh $run
echo "empty_sequence_removed" >> $status_file
else
echo "empty_sequence_removed already done"
fi
if ! grep -q "duplicate_header_fixed" "$status_file"; then
# remove duplicate headers
cat $run | $script_directory/fix_duplicate_header.awk > $run.temp
cat $run.temp | tac | tail -n +2 | tac > $run.temp2
mv $run.temp2 $run
echo "duplicate_header_fixed" >> $status_file
else
echo "duplicate_header_fixed already done"
fi
if ! grep -q "short_sequence_removed" "$status_file"; then
file=$(basename $run)
echo "cat /output/${file} | seqtk seq -L 8 - > /output/${file}.temp" >> ${out_dir}/_call_fix.sh
echo "if [ \$? -eq 0 ]; then mv /output/${file}.temp /output/${file}; else echo \"filter short error\"; exit 1; fi" >> ${out_dir}/_call_fix.sh
echo "echo \"short_sequence_removed\" >> /output/$(basename ${status_file})" >> ${out_dir}/_call_fix.sh
else
echo "short_sequence_removed already done"
fi
done
docker_switches="--rm --init"
docker_volumes="-v $(realpath $out_dir):/output"
chmod +x ${out_dir}/_call_fix.sh
$script_directory/../run_daemon.sh "$docker_switches" "" "$docker_volumes" "seqtk" "/output/_call_fix.sh" "/dev/null"
# TODO create fasta file list
# query files are great for debugging but thez can be removed in production
echo "done!"
|
<reponame>ch1huizong/learning
# Python 2 demo (note the print statements): exercises the Singleton base
# class defined earlier in this file (not visible in this chunk).
if __name__ == '__main__':
    class SingleSpam(Singleton):
        def __init__(self, s): self.s = s
        def __str__(self): return self.s
    s1 = SingleSpam('spam')
    # NOTE(review): SingleSpam defines no spam() method; s1.spam() presumably
    # resolves on Singleton, otherwise this raises AttributeError — confirm.
    print id(s1), s1.spam()
    s2 = SingleSpam('eggs')
    print id(s2), s2.spam()
|
#!/bin/sh
#
# Vivado(TM)
# runme.sh: a Vivado-generated Runs Script for UNIX
# Copyright 1986-2019 Xilinx, Inc. All Rights Reserved.
#
# NOTE: generated for Windows-style paths (';' separators below); the guard
# exit keeps it from running until PATH/LD_LIBRARY_PATH are fixed for this OS.
echo "This script was generated under a different operating system."
echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script"
exit
if [ -z "$PATH" ]; then
  PATH=C:/Xilinx/SDK/2019.1/bin;C:/Xilinx/Vivado/2019.1/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2019.1/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2019.1/bin
else
  PATH=C:/Xilinx/SDK/2019.1/bin;C:/Xilinx/Vivado/2019.1/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2019.1/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2019.1/bin:$PATH
fi
export PATH
if [ -z "$LD_LIBRARY_PATH" ]; then
  LD_LIBRARY_PATH=
else
  LD_LIBRARY_PATH=:$LD_LIBRARY_PATH
fi
export LD_LIBRARY_PATH
# Run directory and log file for this implementation run.
HD_PWD='H:/VivadoProj/COUNTER_IP/COUNTER_IP.runs/impl_1'
cd "$HD_PWD"
HD_LOG=runme.log
/bin/touch $HD_LOG
ISEStep="./ISEWrap.sh"
# EAStep: run one tool step via ISEWrap.sh, appending output to the log and
# aborting the whole script if the step fails.
EAStep()
{
     $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1
     if [ $? -ne 0 ]
     then
         exit
     fi
}
# pre-commands:
/bin/touch .init_design.begin.rst
EAStep vivado -log COUNTER_wrapper.vdi -applog -m64 -product Vivado -messageDb vivado.pb -mode batch -source COUNTER_wrapper.tcl -notrace
|
import UIKit
/// A view that draws an image and, when flagged as a button, pushes a new
/// view controller on touch release.
class CustomImageView: UIView {
    public var offsetY: CGFloat = 0 // Offset from Top Y
    public var offsetBottom: CGFloat = 0 // Offset from Bottom, like margin (NOTE(review): unused in this class — confirm)
    public var image: UIImage? // Normal image
    public var selectedImage: UIImage? // Image drawn while isButton is true
    public var viewController: UIViewController? // Host used for navigation on tap
    public var isButton = false // If it is a button, set to true
    override func draw(_ rect: CGRect) {
        super.draw(rect)
        // Draw the image or selected image based on the state and handle user interactions if it's a button.
        // NOTE(review): selectedImage is drawn whenever isButton is true,
        // regardless of touch/highlight state — confirm this is intended.
        if let image = isButton ? selectedImage : self.image {
            image.draw(in: rect.insetBy(dx: 0, dy: offsetY))
        }
    }
    override func touchesEnded(_ touches: Set<UITouch>, with event: UIEvent?) {
        super.touchesEnded(touches, with: event)
        if isButton, let viewController = viewController {
            // Handle button tap action, for example, navigate to a new view controller
            let newViewController = UIViewController() // Replace with the desired view controller
            viewController.navigationController?.pushViewController(newViewController, animated: true)
        }
    }
}
#include <iostream>
#include <numeric>
#include <vector>
// Returns the sum of all elements in `values`; 0 for an empty vector.
// Takes a const reference: the input is never modified (accepting const
// also lets callers pass temporaries).
int sum_of_numbers(const std::vector<int> &values) {
    return std::accumulate(values.begin(), values.end(), 0);
}
int main(){
    // Sample data; expected sum is 2 + 6 + 4 - 5 + 8 = 15.
    std::vector<int> values = {2, 6, 4, -5, 8};
    int sum = sum_of_numbers(values);
    // Print the result to stdout.
    std::cout << sum << std::endl;
    return 0;
}
import * as AssertionError from "assertion-error";
import * as ts from "ts-morph";
/**
 * Assertion wrapper around a ts-morph parameter declaration. Each check
 * either returns `this` (enabling chaining) or throws an AssertionError.
 * NOTE(review): the third AssertionError argument appears to be the
 * "start stack function" used to trim stack traces — confirm against the
 * assertion-error package docs.
 */
export class ParameterDeclaration {
  constructor(private _node: ts.ParameterDeclaration) {}
  /** Assert the parameter is an object type. If true, return the parameter. */
  isObject(msg = "Expected parameter accept an object type."): this {
    if (!this._node.getType().isObject()) {
      throw new AssertionError(
        msg,
        {
          actual: this._node.getType().getText(this._node),
          expected: "object",
          showDiff: false,
        },
        this.isObject
      );
    }
    return this;
  }
  /** Assert the parameter is optional. If true, return the parameter. */
  isOptional(msg = "Expected parameter to be optional."): this {
    if (!this._node.isOptional()) {
      throw new AssertionError(
        msg,
        {
          actual: false,
          expected: true,
          showDiff: false,
        },
        this.isOptional
      );
    }
    return this;
  }
  /** Assert the parameter is not optional. If true, return the parameter. */
  isNotOptional(msg = "Expected parameter to not be optional."): this {
    if (this._node.isOptional()) {
      throw new AssertionError(
        msg,
        {
          actual: true,
          expected: false,
          showDiff: false,
        },
        this.isNotOptional
      );
    }
    return this;
  }
}
|
const { Command } = require('discord-akairo')

/** Mute command: assigns (creating if needed) a "Muet" role that denies SEND_MESSAGES. */
class muteCommand extends Command {
    constructor() {
        super('mute', {
            aliases: ['mute'],
            args: [{
                id: 'user',
                type: 'member',
                default: null,
                prompt: {
                    start: "Merci, d'indiqué la personne à mute",
                    retry: "Merci, d'indiqué la personne à mute"
                }
            }],
            description: {
                usage: 'mute [user] ',
                examples: ['mute', 'mute scricker'],
                description: 'rend muet une personne'
            },
            cooldown: 4000,
            ratelimit: 3,
            userPermissions: ['MANAGE_ROLES'],
            clientPermissions: ['MANAGE_CHANNELS', 'MANAGE_ROLES']
        })
    }
    async exec(message, { user }) {
        // Already muted: bail out early.
        if (user.roles.cache.find(x => x.name === "Muet")) return message.util.send(`${user.user.username} est deja muet`)
        let role = message.guild.roles.cache.find(x => x.name === "Muet")
        if (!role) {
            role = await message.guild.roles.create({ data: { name: "Muet", color: require("../../Util/Colors").defaultColor } })
            // BUGFIX: awaiting a bare array of promises resolves immediately;
            // wrap in Promise.all so every channel override is actually
            // created before the role is applied.
            await Promise.all(message.guild.channels.cache.map(x => x.createOverwrite(role.id, { SEND_MESSAGES: false })))
        }
        // BUGFIX: await the role assignment so failures surface to the handler.
        await user.roles.add(role.id)
        message.util.send(`${user.user.username} n'a plus le droit à la parole`)
    }
}
module.exports = muteCommand
<filename>input_reader/tests/test_keywords.py
from __future__ import unicode_literals
from input_reader import InputReader, ReaderError, SUPPRESS
from pytest import raises, fixture
from re import search
from textwrap import dedent
@fixture
def setup():
    """Provide a fresh InputReader plus two canned inputs.

    s1 lists 'blue' and 'red' once each; s2 additionally repeats 'blue'.
    """
    r = InputReader()
    s1 = dedent("""\
                blue
                red
                """).split('\n')
    s2 = dedent("""\
                blue
                red
                blue
                """).split('\n')
    return r, s1, s2
def test_suppress_at_class_level():
    # A reader-wide default of SUPPRESS must be inherited by new keys.
    ir = InputReader(default=SUPPRESS)
    assert ir._default is SUPPRESS
    b = ir.add_boolean_key('RED')
    assert b._default is SUPPRESS
def test_string_default_at_class_level():
    # An ordinary (string) reader-wide default is inherited as well.
    ir = InputReader(default='roses')
    assert ir._default == 'roses'
    b = ir.add_boolean_key('RED')
    assert b._default == 'roses'
def test_case_sensitivity_at_class_level():
    # case=True propagates to keys; a non-boolean value is rejected.
    ir = InputReader(case=True)
    assert ir._case
    l = ir.add_line_key('RED')
    assert l._case
    with raises(ValueError):
        ir2 = InputReader(case='True')
def test_use_default_values(setup):
    # Omitting every option yields the documented per-key defaults.
    r, s1, s2 = setup
    b = r.add_boolean_key('red')
    assert not b._required
    assert b._default is None
    assert b._dest is None
    assert b._depends is None
    assert not b._repeat
def test_dont_use_default_values(setup):
    # Passing the defaults explicitly is equivalent to omitting them.
    r, s1, s2 = setup
    b = r.add_boolean_key('red', True,
                          required=False,
                          default=None,
                          dest=None,
                          depends=None,
                          repeat=False)
    assert not b._required
    assert b._default is None
    assert b._dest is None
    assert b._depends is None
    assert not b._repeat
def test_custom_values(setup):
    # Each option value is stored verbatim on the key object.
    r, s1, s2 = setup
    b = r.add_boolean_key('red', True,
                          required=True,
                          default='BANANA',
                          dest='fruit',
                          depends='something',
                          repeat=True)
    assert b._required
    assert b._default == 'BANANA'
    assert b._dest == 'fruit'
    assert b._depends == 'something'
    assert b._repeat
def test_custom_values_str(setup):
    # Same as above but forcing native str values (py2/py3 unicode check).
    r, s1, s2 = setup
    b = r.add_boolean_key('red', True,
                          required=True,
                          default=str('BANANA'),
                          dest=str('fruit'),
                          depends=str('something'),
                          repeat=True)
    assert b._required
    assert b._default == 'BANANA'
    assert b._dest == 'fruit'
    assert b._depends == 'something'
    assert b._repeat
def test_incorrect_options(setup):
    # The keyword 'wrong' doesn't exist
    r, s1, s2 = setup
    with raises(TypeError):
        r.add_boolean_key('RED', wrong=True)
    # Dest requires a string
    with raises(ValueError):
        r.add_boolean_key('RED', dest=14)
    # Required requires a boolean
    with raises(ValueError):
        r.add_boolean_key('RED', required=None)
    # Repeat requires a boolean
    with raises(ValueError):
        r.add_boolean_key('RED', repeat=None)
def test_read_custom_default_values(setup):
    # Test when individual keys define their own default
    r, s1, s2 = setup
    r.add_boolean_key('blue')
    r.add_boolean_key('red')
    r.add_boolean_key('green')
    r.add_boolean_key('yellow', default=False)
    r.add_boolean_key('white', default=SUPPRESS)
    inp = r.read_input(s1)
    assert inp.blue
    assert inp.red
    assert inp.green is None
    assert not inp.yellow
    # SUPPRESS means the key is absent from the result entirely.
    assert 'white' not in inp
def test_read_required_works_correctly(setup):
    # A keyword that is not required does not appear... OK
    r, s1, s2 = setup
    r.add_boolean_key('blue', required=False)
    r.add_boolean_key('red', required=True)
    r.add_boolean_key('green', required=False)
    inp = r.read_input(s1)
    assert inp.blue
    assert inp.red
    assert inp.green is None
def test_read_required_fails_when_incorrect(setup):
    # A keyword that is required does not appear... NOT OK
    r, s1, s2 = setup
    r.add_boolean_key('blue', required=True)
    r.add_boolean_key('red', required=False)
    r.add_boolean_key('green', required=True)
    with raises(ReaderError):
        inp = r.read_input(s1)
def test_read_keywords_cannot_repeat(setup):
    # A keyword appears twice that shouldn't repeat... NOT OK
    r, s1, s2 = setup
    r.add_boolean_key('blue', repeat=False)
    r.add_boolean_key('red', repeat=False)
    with raises(ReaderError) as e:
        inp = r.read_input(s2)
    assert 'appears twice' in str(e.value)
def test_read_keywords_can_repeat(setup):
    # A keyword appears twice that can repeat... OK
    r, s1, s2 = setup
    r.add_boolean_key('blue', repeat=True)
    r.add_boolean_key('red', repeat=True)
    inp = r.read_input(s2)
    # Repeating keys collect their values into tuples.
    assert inp.red == (True,)
    assert inp.blue == (True, True)
def test_read_destination_different_from_given_name(setup):
    # Keys are sent into an alternate destination
    r, s1, s2 = setup
    r.add_boolean_key('blue', dest='berries')
    r.add_boolean_key('red', dest='apples')
    inp = r.read_input(s1)
    assert inp.berries
    assert inp.apples
def test_read_destination_is_same_and_done_incorrectly(setup):
    # Keys are sent into the same alternate destination
    r, s1, s2 = setup
    r.add_boolean_key('blue', action='blue', dest='colors')
    r.add_boolean_key('red', action='red', dest='colors')
    # They are both sent to other dest, repeat is needed
    with raises(ReaderError) as e:
        inp = r.read_input(s1)
    assert 'appears twice' in str(e.value)
def test_read_destination_is_same_and_done_correctly(setup):
    # Try the above with repeat=True
    r, s1, s2 = setup
    r.add_boolean_key('blue', action='blue', dest='colors',
                      repeat=True)
    r.add_boolean_key('red', action='red', dest='colors',
                      repeat=True)
    inp = r.read_input(s1)
    # Set is used below so that order doesn't matter
    assert set(inp.colors) == set(('blue', 'red'))
def test_read_destination_required(setup):
    # Required keys routed to alternate dests must still be reported as
    # missing with the standard error message.
    r, s1, s2 = setup
    r.add_boolean_key('blue', action='blue', dest='rcolor', required=True)
    r.add_boolean_key('red', action='red', dest='bcolor', required=True)
    r.add_boolean_key('green', action='green', dest='gcolor', required=True)
    with raises(ReaderError) as e:
        r.read_input(s1)
    # Raw string: '\w' in a plain literal is an invalid escape sequence
    # (SyntaxWarning on modern Python); matches the style used by the
    # depends tests in this module.
    assert search(r'The key "\w+" is required but not found', str(e.value))
def test_read_depends_dependee_present(setup):
    # Red depends on blue, and blue is present: reading succeeds.
    r, s1, s2 = setup
    r.add_boolean_key('blue')
    r.add_boolean_key('red', depends='blue')
    inp = r.read_input(s1)
    assert inp.blue
    assert inp.red
def test_read_depends_dependee_missing(setup):
    # Red depends on green, but green isn't present: reading fails.
    r, s1, s2 = setup
    r.add_boolean_key('blue')
    r.add_boolean_key('red', depends='green')
    with raises(ReaderError) as e:
        inp = r.read_input(s1)
    regex = r'The key "\w+" requires that "\w+" is also present'
    assert search(regex, str(e.value))
|
/* Companion class: its private state is only reachable from the companion object. */
class Demo {
  private val hidden = 10
}
/* Companion object: can read Demo's private members because it is the companion. */
object Demo {
  // Accessor exposing the companion class's private field.
  def getHidden(self: Demo) = self.hidden
}
/* driver singleton: object Main */
object Main {
  def main(args: Array[String]): Unit = {
    val obj = new Demo
    println(Demo.getHidden(obj)) // 10
  }
}
<filename>PythonApplication2/PythonApplication2.py<gh_stars>0
import requests
import json
import pyodbc
url = "https://www.meistertask.com/api/projects"
headers = {
'Accept': "*/*",
'Authorization': "Bearer 8f9e4e59272232742edb40c5416b4012e4730cd00964d4fa6d896e010d32b4f4",
'accept-encoding': "gzip, deflate"
}
response = requests.request("GET", url, headers=headers)
input = response.text
print(type(input))
y = json.loads(input)
#print('parameter count:', len(y))
#tp = y[0]
for tp in y:
for x, y in tp.items():
print(x, y)
def create(conn):
    """Insert one demo row into the dummy table via a parameterized query."""
    # Announce the operation (mirrors the original debugging output).
    print("Create")
    cur = conn.cursor()
    params = ('cat', 3232)
    cur.execute('insert into dummy(a,b) values(?,?);', params)
    conn.commit()
# Connect to the local SQL Server Express instance using Windows
# authentication, run the demo insert, and close the connection.
conn = pyodbc.connect(
    "Driver={SQL Server Native Client 11.0};"
    "Server=BB\SQLEXPRESS;"
    "Database=PyTest;"
    "Trusted_Connection=yes;"
)
create(conn)
conn.close()
|
<reponame>zaidmukaddam/linkto<gh_stars>10-100
/**
 * Clean a class-name template literal: collapse whitespace to single spaces,
 * drop stray "false"/"true"/"undefined" tokens produced by conditional
 * expressions, and trim the result.
 */
export function ctl(template: any) {
  const tokens = template.replace(/\s+/gm, " ").split(" ");
  const kept: string[] = [];
  for (const token of tokens) {
    if (token !== "false" && token !== "true" && token !== "undefined") {
      kept.push(token);
    }
  }
  return kept.join(" ").trim();
}
|
#ifndef __PROBE
#define __PROBE
/* Observer hook: invoked whenever a write of `value` to `address` occurs. */
void on_write(long long address, int value);
/* Perform a write of `value` to `address`.
 * NOTE(review): the name `write` collides with the POSIX write(2) symbol
 * when linked alongside libc — confirm this shadowing is intended. */
void write(long long address, int value);
#endif
|
# Launch the conntracker binary from the build output directory in the
# background, redirecting all output to /tmp/kube-conntracker.log.
OUTPUT_DIR=${OUTPUT_DIR:-"_output"} # overridable via environment
./${OUTPUT_DIR}/conntracker \
  --v=3 \
  --master="http://172.17.0.1:8080" \
  --enable-flow-collector="false" \
  --conntrack-port="2223" > "/tmp/kube-conntracker.log" 2>&1 &
|
def generate_download_link(os_type: str, architecture: str) -> str:
    """Return the Sublime Text download URL for an OS label.

    Matches either the exact label (e.g. ``'Windows (64-bit)'``) or an OS
    family plus architecture (e.g. ``'Windows'`` + ``'64-bit'``). Returns
    an error string when nothing matches.
    """
    download_links = [
        ('MacOS', 'https://download.sublimetext.com/Sublime%20Text%20Build%203126.dmg', 'sublime/sublime.dmg'),
        ('Windows (32-bit)', 'https://download.sublimetext.com/Sublime%20Text%20Build%203126%20Setup.exe', 'sublime/sublime-x86.exe'),
        ('Windows (64-bit)', 'https://download.sublimetext.com/Sublime%20Text%20Build%203126%20x64%20Setup.exe', 'sublime/sublime-amd64.exe'),
        ('Ubuntu (32-bit)', 'https://download.sublimetext.com/sublime-text_build-3126_i386.deb', 'sublime/sublime-x86.deb'),
        ('Ubuntu (64-bit)', 'https://download.sublimetext.com/sublime-text_build-3126_amd64.deb', 'sublime/sublime-amd64.deb'),
    ]
    for label, link, _ in download_links:
        # BUGFIX: the original tested `os_type in link and architecture in
        # link`, i.e. matched the human-readable label against the URL —
        # which never succeeds. Match against the label instead.
        if label == os_type or (os_type in label and architecture in label):
            return link
    return "Invalid OS type or architecture"
return "Invalid OS type or architecture" |
class Node:
    """A binary-tree node holding a value and two child links."""

    def __init__(self, val):
        self.value = val
        self.leftChild = None   # root of the left subtree, or None
        self.rightChild = None  # root of the right subtree, or None


def binary_tree(arr):
    """Build a height-balanced binary tree from the sequence ``arr``.

    The middle element becomes the root and each half is built recursively.
    Returns None for an empty sequence.
    """
    if not arr:
        return None
    mid = len(arr) // 2
    node = Node(arr[mid])
    node.leftChild = binary_tree(arr[:mid])
    node.rightChild = binary_tree(arr[mid + 1:])
    return node
import java.util.Arrays;
/**
 * Demonstrates splitting a sentence on runs of whitespace.
 */
public class ParseString {
    public static void main(String[] args) {
        // "\\s+" treats consecutive whitespace as a single delimiter.
        final String input = "This is a sample string";
        final String[] tokens = input.split("\\s+");
        System.out.println(Arrays.toString(tokens));
    }
}
<filename>catboost/cuda/cuda_lib/cuda_events_provider.h<gh_stars>0
#pragma once
#include "cuda_base.h"
#include <util/system/spinlock.h>
#include <util/generic/vector.h>
namespace NCudaLib {
    /**
     * Thread-local pool of cudaEvent_t handles.
     *
     * Events are recycled through the FreeHandles / FreeHandlesWithoutTiming
     * lists instead of being destroyed on every use; both lists are guarded
     * by a spin lock.
     */
    class TCudaEventsProvider {
    private:
        TVector<cudaEvent_t> FreeHandles;
        TVector<cudaEvent_t> FreeHandlesWithoutTiming;
        // Number of events allocated at once when a pool runs dry.
        static const ui64 RequestHandlesBatchSize = 16;
        TSpinLock Lock;

        // Allocates a batch of events of the requested flavour (in .cpp).
        void RequestHandle(bool disableTimming = true);

    public:
        /**
         * Handle to a pooled CUDA event.  On destruction the underlying
         * cudaEvent_t is returned to the owning provider's free list rather
         * than destroyed.
         */
        class TCudaEvent: private TNonCopyable {
        private:
            mutable cudaEvent_t Event;
            bool IsWithoutTiming;
            TCudaEventsProvider* Owner;

        public:
            TCudaEvent(cudaEvent_t event,
                       bool isWithoutTiming,
                       TCudaEventsProvider* owner)
                : Event(event)
                , IsWithoutTiming(isWithoutTiming)
                , Owner(owner)
            {
            }

            ~TCudaEvent() {
                // Recycle the handle into the matching pool.
                TGuard<TSpinLock> lock(Owner->Lock);
                if (IsWithoutTiming) {
                    Owner->FreeHandlesWithoutTiming.push_back(Event);
                } else {
                    Owner->FreeHandles.push_back(Event);
                }
            }

            // Records the event in the given stream.
            void Record(const TCudaStream& stream) const {
                CUDA_SAFE_CALL(cudaEventRecord(Event, stream.GetStream()));
            }

            // Makes `stream` wait until the event has fired.
            void StreamWait(const TCudaStream& stream) const {
                CUDA_SAFE_CALL(cudaStreamWaitEvent(stream.GetStream(), Event, 0));
            }

            // Blocks the calling host thread until the event has fired.
            void WaitComplete() const {
                CUDA_SAFE_CALL(cudaEventSynchronize(Event));
            }

            // Non-blocking completion check; throws for any CUDA error other
            // than cudaErrorNotReady.
            bool IsComplete() const {
                cudaError_t errorCode = cudaEventQuery(Event);
                if (errorCode == cudaSuccess) {
                    return true;
                }
                if (errorCode != cudaErrorNotReady) {
                    ythrow TCatboostException() << "CUDA error: " << cudaGetErrorString(errorCode) << " " << (int)errorCode;
                }
                return false;
            }
        };

    public:
        using TCudaEventPtr = THolder<TCudaEvent>;

        // Destroys all pooled events.
        // Fix: the dynamic exception specification `throw (TCatboostException)`
        // was removed -- it is deprecated since C++11 and ill-formed in C++17,
        // and destructors should not advertise throwing at all.
        ~TCudaEventsProvider() {
            for (auto event : FreeHandles) {
                CUDA_SAFE_CALL(cudaEventDestroy(event));
            }
            for (auto event : FreeHandlesWithoutTiming) {
                CUDA_SAFE_CALL(cudaEventDestroy(event));
            }
        }

        // Hands out a pooled event, refilling the pool if necessary (in .cpp).
        TCudaEventPtr Create(bool disableTimming = true);
    };

    using TCudaEvent = TCudaEventsProvider::TCudaEvent;
    using TCudaEventPtr = TCudaEventsProvider::TCudaEventPtr;

    // Thread-local singleton accessor for the provider.
    inline static TCudaEventsProvider& CudaEventProvider() {
        return *FastTlsSingleton<TCudaEventsProvider>();
    }

    inline TCudaEventPtr CreateCudaEvent(bool disableTimming = true) {
        return CudaEventProvider().Create(disableTimming);
    }
}
|
<filename>lib/update/update.js
const updateNotifier = require('update-notifier')
const chalk = require('chalk')
const pkg = require('../../package.json')
// Check for a newer published version at most once per updateCheckInterval
// milliseconds.
const notifier = updateNotifier({ pkg, updateCheckInterval: 1000 })

/**
 * Prints an upgrade prompt when a newer version is available, otherwise
 * reports that the package is up to date.
 */
function updateVer() {
  const update = notifier.update
  if (!update) {
    console.log('There is no updatable version')
    return
  }
  console.log(`New version ${chalk.green(update.latest)} found, please update`)
  notifier.notify()
}

module.exports = updateVer
#!/bin/bash
# Builds the fifa contract ABI, restarts the local chain, deploys the
# contract to the `eosio` account and seeds five funded test accounts.
# WARNING(security): the private key below is committed to source control --
# treat it as compromised and use it only on a throwaway development chain.
cd /Users/sam/Public/eos/contracts/fifa
eosiocpp -g fifa.abi fifa.cpp
cd /Users/sam/Public/eos
#./eosio_build.sh
cd /Users/sam/Public/Eoasis
./run.sh
cd /Users/sam/Public/eos/build/contracts
cleos --wallet-url "http://127.0.0.1:8888" set contract eosio ./fifa -p eosio@active
cd /Users/sam/Public/Eoasis
cleos --wallet-url "http://127.0.0.1:8888" wallet import 5KRwwqFRdZ1v5UNcXPk72Mq3t4ucs7kMmqKx9HLpUnnk74iKWen
# All test accounts share the same public key; `sleep 1` spaces the account
# creations out across blocks.
cleos --wallet-url "http://127.0.0.1:8888" create account eosio user1 EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA
cleos --wallet-url "http://127.0.0.1:8888" create account eosio user2 EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA
sleep 1
cleos --wallet-url "http://127.0.0.1:8888" create account eosio user3 EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA
sleep 1
cleos --wallet-url "http://127.0.0.1:8888" create account eosio user4 EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA
sleep 1
cleos --wallet-url "http://127.0.0.1:8888" create account eosio user5 EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA EOS7sBeJzasK9vDCybdg2jmqJeTf42iohxideWTeWtWzNYaaRKMgA
# Fund each account with 2000 SYS from eosio.token.
cleos --wallet-url "http://127.0.0.1:8888" transfer eosio.token user1 "2000.0000 SYS"
cleos --wallet-url "http://127.0.0.1:8888" transfer eosio.token user2 "2000.0000 SYS"
cleos --wallet-url "http://127.0.0.1:8888" transfer eosio.token user3 "2000.0000 SYS"
cleos --wallet-url "http://127.0.0.1:8888" transfer eosio.token user4 "2000.0000 SYS"
cleos --wallet-url "http://127.0.0.1:8888" transfer eosio.token user5 "2000.0000 SYS"
cd /Users/sam/Public/Eoasis/fifatest
./run.sh
|
#!/bin/bash
# Fix: this script uses bash-only features (the `function` keyword, `[[ ]]`,
# arrays and `set -o pipefail` below), so the original `#!/bin/sh` shebang
# breaks on systems whose /bin/sh is a strict POSIX shell such as dash.
set -e
set -u
set -o pipefail

# Report the script path and failing line number for any command that errors.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework into the app's Frameworks folder,
# re-signing it and (pre-Xcode 7) embedding the Swift runtime dylibs.
install_framework()
{
  # Resolve the framework source: full path under BUILT_PRODUCTS_DIR first,
  # then its basename there, then the argument taken literally.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the executable inside the copied bundle (or the bare binary),
  # following a symlink if necessary.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM, then moves it (or a placeholder when
# stripping was skipped) into DWARF_DSYM_FOLDER_PATH.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 means the binary
    # was processed and can be shipped, 0 means no matching architectures.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"

    # Fix: the inner quotes of the logged command are now escaped so the
    # echoed line matches the command actually executed, consistent with
    # install_framework / install_dsym above.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Only sign when an identity is available and signing is both required
  # and allowed by the build settings.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Background the job so multiple frameworks sign in parallel; the
      # trailer of this script `wait`s for the backgrounded jobs.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Removes every architecture not in $ARCHS from the binary in-place.
# Sets STRIP_BINARY_RETVAL to 1 on success, or 0 (with a warning) when the
# binary shares no architecture with the current build.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# DMSGopenpgp is embedded for both the Debug and Release configurations.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/DMSGopenpgp/DMSGopenpgp.framework"
fi
# When code signing was parallelised, wait for the backgrounded codesign jobs.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
import { readdirSync } from "fs";

/**
 * Recursively collects script files under `dir`.
 *
 * @param dir - Directory to scan (no trailing slash).
 * @returns Array of `[fullPath, baseNameLowerCase]` tuples for every `.js`
 *          or `.ts` file, excluding TypeScript declaration files (`.d.ts`).
 */
function getFiles(dir: string) {
  const files = readdirSync(dir, {
    withFileTypes: true,
  });
  // Array with the files.
  let allFiles: [string, string][] = [];
  for (const file of files) {
    if (file.isDirectory()) {
      allFiles = [...allFiles, ...getFiles(`${dir}/${file.name}`)];
    } else if (
      // Bug fix: the original condition `a || b || !c` accepted *every* file
      // that was not a declaration file (e.g. `.txt`, `.json`).  The intent
      // is: keep .js/.ts, drop .d.ts.
      (file.name.endsWith(".js") || file.name.endsWith(".ts")) &&
      !file.name.endsWith(".d.ts")
    ) {
      // Getting the file name (basename without extension, lower-cased).
      let fileName: string | string[] = file.name
        .replace(/\\/g, "/")
        .split("/");
      fileName = fileName[fileName.length - 1];
      fileName = fileName.split(".")[0].toLowerCase();
      allFiles.push([`${dir}/${file.name}`, fileName]);
    }
  }
  return allFiles;
}

export = getFiles;
|
<reponame>ckwhen/thsr-app
// Seat-status and ticket-type predicates for THSR (Taiwan High Speed Rail)
// timetable rows.

// "Limited" and "Available" both mean at least one seat can still be booked.
export const hasAvailableSeat = (seatStatus = '') => {
  return /^(Limited|Available)$/i.test(seatStatus);
};

// "Full" means the train is sold out.
export const noAvailableSeat = (seatStatus = '') => {
  return /^Full$/i.test(seatStatus);
};

// Ticket-type labels are the Chinese strings used by the data source.
export const isRegularTicket = (ticketType = '') => {
  return ticketType === '標準';
};

export const isBusinessTicket = (ticketType = '') => {
  return ticketType === '商務';
};

export default {
  hasAvailableSeat,
  noAvailableSeat,
  isRegularTicket,
  isBusinessTicket,
};
|
package poller
import (
"github.com/eddieowens/ranvier/server/app/configuration"
"github.com/eddieowens/ranvier/server/app/model"
"github.com/eddieowens/ranvier/server/app/service"
log "github.com/sirupsen/logrus"
"gopkg.in/src-d/go-git.v4"
"io"
"os"
"path"
"path/filepath"
"regexp"
"time"
)
// GitPollerKey is the dependency-injection key for the GitPoller component.
const GitPollerKey = "GitPoller"

// OnUpdateFunction is invoked for every changed file detected on a poll.
type OnUpdateFunction func(eventType model.EventType, filepath string)

// OnStartFunc is invoked once per existing file when polling starts.
type OnStartFunc func(filepath string)

// GitPoller periodically pulls a git remote and reports file changes that
// pass the supplied regexp filters.
type GitPoller interface {
	Start(onUpdate OnUpdateFunction, onStart OnStartFunc, filters ...regexp.Regexp) error
	Stop()
}

// gitPollerImpl implements GitPoller against a local clone of the remote.
type gitPollerImpl struct {
	Config     configuration.Config `inject:"Config"`
	GitService service.GitService   `inject:"GitService"`
	// quitChannel is closed by Stop() to terminate the polling goroutine.
	quitChannel chan bool
	repo        *git.Repository
	branchName  string
	filters     []regexp.Regexp
}
// Stop terminates the polling goroutine by closing its quit channel.
// NOTE(review): closing an already-closed channel panics, so Stop must not
// be called twice -- confirm callers guarantee this.
func (g *gitPollerImpl) Stop() {
	close(g.quitChannel)
}
// Start clones the configured remote, replays the existing files through
// onStart, and launches a goroutine that polls the remote every
// PollingInterval seconds, calling onUpdate for each filtered change.
// It returns an error if the initial clone or directory walk fails.
func (g *gitPollerImpl) Start(onUpdate OnUpdateFunction, onStart OnStartFunc, filters ...regexp.Regexp) error {
	repo, err := g.GitService.Clone(g.Config.Git.Remote, g.Config.Git.Branch, g.Config.Git.Directory)
	if err != nil {
		return err
	}
	g.repo = repo
	g.branchName = g.Config.Git.Branch
	g.filters = filters
	// Deliver the already-checked-out files to the caller before polling.
	err = g.initializeConfig(onStart)
	if err != nil {
		return err
	}
	ticker := time.NewTicker(time.Duration(g.Config.Git.PollingInterval) * time.Second)
	g.quitChannel = make(chan bool)
	go func() {
		for {
			select {
			case <-ticker.C:
				changes, err := g.GitService.DiffRemote(g.repo, g.branchName)
				if err != nil {
					// NoErrAlreadyUpToDate is the "nothing changed" signal,
					// not a real failure.
					if err == git.NoErrAlreadyUpToDate {
						log.Debug("No changes detected in git")
					} else {
						log.WithError(err).Error()
					}
					continue
				}
				log.WithField("changes", changes).
					WithField("repo", g.Config.Git.Remote).
					Debug("Detected changes in git")
				changes = g.filter(changes)
				if len(changes) > 0 {
					for _, c := range changes {
						// Report paths rooted at the local checkout directory.
						fp := path.Join(g.Config.Git.Directory, c.Filename)
						onUpdate(c.EventType, fp)
					}
				}
			case <-g.quitChannel:
				ticker.Stop()
				return
			}
		}
	}()
	return nil
}
// isDirEmpty reports whether the directory at dir contains no entries.
// Paths that cannot be opened are reported as non-empty.
func (g *gitPollerImpl) isDirEmpty(dir string) bool {
	handle, err := os.Open(dir)
	if err != nil {
		return false
	}
	defer handle.Close()
	// Asking for a single name succeeds for non-empty directories and
	// yields io.EOF for empty ones.
	if _, err = handle.Readdirnames(1); err == io.EOF {
		return true
	}
	return false
}
// filterFile reports whether file matches every configured filter regexp
// (an empty filter list accepts everything).
func (g *gitPollerImpl) filterFile(file string) bool {
	for i := range g.filters {
		if !g.filters[i].Match([]byte(file)) {
			return false
		}
	}
	return true
}
// filter returns only the git changes whose filename passes filterFile.
func (g *gitPollerImpl) filter(gitChanges []model.GitChange) []model.GitChange {
	filtered := make([]model.GitChange, 0)
	for _, change := range gitChanges {
		if !g.filterFile(change.Filename) {
			continue
		}
		filtered = append(filtered, change)
	}
	return filtered
}
// initializeConfig walks the freshly cloned directory and invokes onStart
// for every path that passes the filters.
// NOTE(review): walk errors are converted to filepath.SkipDir (silently
// skipping the subtree) and matching *directories* are also passed to
// onStart -- confirm both behaviours are intended.
func (g *gitPollerImpl) initializeConfig(onStart OnStartFunc) error {
	return filepath.Walk(g.Config.Git.Directory, func(path string, _ os.FileInfo, err error) error {
		if err != nil {
			return filepath.SkipDir
		}
		if g.filterFile(path) {
			onStart(path)
		}
		return nil
	})
}
|
<gh_stars>0
package straightWithoutBdd.tests.api;
import io.restassured.response.Response;
import org.testng.Assert;
import org.testng.annotations.BeforeTest;
import org.testng.annotations.Test;
import straightWithoutBdd.api.services.AuthService;
import straightWithoutBdd.api.services.GetQuizzesApi;
import java.io.FileNotFoundException;
import java.util.HashMap;
/**
 * REST-assured API tests exercising the auth and quizzes endpoints.
 * NOTE(review): the credentials map is duplicated in every test and
 * validateQuizzes/validateQuizzes2 differ only in logging -- consider a
 * shared fixture.
 */
public class BasicEncapsulation {
    // Bearer token returned by the login call.
    String token;

    /** Logs in, fetches the quiz list and expects HTTP 200. */
    @Test(description = "Validate Quizzes")
    public void validateQuizzes() throws FileNotFoundException {
        HashMap<String, String> credentials = new HashMap<>();
        credentials.put("email", "<EMAIL>");
        credentials.put("password", "<PASSWORD>");
        token = new AuthService().login(credentials);
        Response response = new GetQuizzesApi().getQuizzes(token);
        // Assertions
        Assert.assertEquals(response.statusCode(), 200);
        long id = Thread.currentThread().getId();
        System.out.println("Sample test-method 1 " + ". Thread id is: " + id);
    }

    /** Verifies that login succeeds with the test credentials. */
    @Test(description = "Validate Authorization")
    public void validateAuth() throws FileNotFoundException {
        HashMap<String, String> credentials = new HashMap<>();
        credentials.put("email", "<EMAIL>");
        credentials.put("password", "<PASSWORD>");
        long id = Thread.currentThread().getId();
        System.out.println("Sample test-method 1 " + ". Thread id is: " + id);
        new AuthService().login(credentials);
    }

    /** Duplicate of validateQuizzes without the thread-id logging. */
    @Test(description = "Validate Quizzes")
    public void validateQuizzes2() throws FileNotFoundException {
        HashMap<String, String> credentials = new HashMap<>();
        credentials.put("email", "<EMAIL>");
        credentials.put("password", "<PASSWORD>");
        token = new AuthService().login(credentials);
        Response response = new GetQuizzesApi().getQuizzes(token);
        // Assertions
        Assert.assertEquals(response.statusCode(), 200);
    }
}
<gh_stars>0
package com.pratik.appmodel;
import android.content.Context;
/**
 * Plain data holder for a user entry: image URLs in three sizes plus basic
 * profile fields. Fields are public but conventional getters/setters are
 * also provided.
 */
public class AppModel {
    public String largeImageUrl;
    public String mediumImageUrl;
    public String smallImageUrl;
    public String userName;
    public String fullName;
    public String memberSince;
    public String hashTags;
    public String user_url;

    public AppModel(){
        super();
    }

    public String getUser_url() {
        return user_url;
    }

    public void setUser_url(String user_url) {
        this.user_url = user_url;
    }

    public String getLargeImageUrl() {
        return largeImageUrl;
    }

    public void setLargeImageUrl(String largeImageUrl) {
        this.largeImageUrl = largeImageUrl;
    }

    public String getMediumImageUrl() {
        return mediumImageUrl;
    }

    public void setMediumImageUrl(String mediumImageUrl) {
        this.mediumImageUrl = mediumImageUrl;
    }

    public String getSmallImageUrl() {
        return smallImageUrl;
    }

    public void setSmallImageUrl(String smallImageUrl) {
        this.smallImageUrl = smallImageUrl;
    }

    public String getUserName() {
        return userName;
    }

    public void setUserName(String userName) {
        this.userName = userName;
    }

    public String getFullName() {
        return fullName;
    }

    public void setFullName(String fullName) {
        this.fullName = fullName;
    }

    public String getMemberSince() {
        return memberSince;
    }

    public void setMemberSince(String memberSince) {
        this.memberSince = memberSince;
    }

    public String getHashTags() {
        return hashTags;
    }

    public void setHashTags(String hashTags) {
        this.hashTags = hashTags;
    }
}
|
# Train the classifier on the COLLAB dataset (3 classes, fold 0) for 150
# epochs on GPU 0; results are written under ./results as collab_lr001.
python train.py \
  --n-epochs 150 \
  --lr 0.001 \
  --iters-per-epoch 71 \
  --output-folder results \
  --dataset COLLAB \
  --n-classes 3 \
  --gpu 0 \
  --batch-size 128 \
  --test-batch-size 1 \
  --fold-idx 0 \
  --output-file collab_lr001 \
  --avgnodenum 75
|
def calculate_health_percentage(health: int, health_max: int, map_type: str, unit_type: str) -> float:
    """Return a unit's health as a (possibly halved) fraction of its maximum.

    Medivacs on the MMM/GMMM maps use the plain ratio; every other
    map/unit combination returns the ratio scaled into ``[0, 0.5]``.

    Args:
        health: Current health points.
        health_max: Maximum health points for the unit type.
        map_type: Map identifier, e.g. ``"MMM"`` or ``"GMMM"``.
        unit_type: Unit identifier, e.g. ``"medivac"``.

    Returns:
        The health fraction, or ``0.0`` when ``health_max`` is zero
        (robustness fix: previously raised ``ZeroDivisionError``).
    """
    if health_max == 0:
        # Guard against division by zero for degenerate unit definitions.
        return 0.0
    if map_type in ("MMM", "GMMM") and unit_type == "medivac":
        return health / health_max
    return health / (2 * health_max)
package com.tuya.iot.suite.web.connector;
import com.alibaba.fastjson.JSON;
import com.tuya.iot.suite.ability.idaas.connector.PermissionConnector;
import com.tuya.iot.suite.ability.idaas.model.PermissionCreateReq;
import com.tuya.iot.suite.web.BaseTest;
import lombok.TextBlock;
import lombok.TextBlocks;
import lombok.extern.slf4j.Slf4j;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
/**
 * @author benguan
 */
@Slf4j
public class PermissionConnectorTest extends BaseTest {

    /** IDaaS permission space the test permission is created in. */
    @Value("${project.permission-space-id}")
    String spaceId;

    @Autowired
    private PermissionConnector permissionConnector;

    /**
     * Creates a permission in the configured space and logs the result.
     */
    @Test
    public void testApplySpace() {
        /* Sample payload shape (permission code / type / remark placeholders):
        {
            "permissionCode": "权限标识",
            "type": "BUTTON",
            "remark": "备注"
        }
        */
        @TextBlock String json = TextBlocks.lazyInit();
        PermissionCreateReq request = JSON.parseObject(json, PermissionCreateReq.class);
        Boolean res = permissionConnector.createPermission(spaceId, request);
        // Bug fix: SLF4J requires a "{}" placeholder; the original
        // log.info("<===", res) silently dropped `res` from the log line.
        log.info("<=== {}", res);
    }
}
|
<gh_stars>1-10
package gostart
// Include all packages for build
import (
_ "github.com/ungerik/go-start/config"
_ "github.com/ungerik/go-start/debug"
_ "github.com/ungerik/go-start/errs"
_ "github.com/ungerik/go-start/i18n"
_ "github.com/ungerik/go-start/media"
_ "github.com/ungerik/go-start/model"
_ "github.com/ungerik/go-start/modelext"
_ "github.com/ungerik/go-start/mongo"
_ "github.com/ungerik/go-start/mongoadmin"
_ "github.com/ungerik/go-start/mongomedia"
_ "github.com/ungerik/go-start/reflection"
_ "github.com/ungerik/go-start/states"
_ "github.com/ungerik/go-start/templatesystem"
_ "github.com/ungerik/go-start/user"
_ "github.com/ungerik/go-start/utils"
_ "github.com/ungerik/go-start/view"
)
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# exit on any error
set -e

# Setup hosts file to support ping by hostname to master
if [ ! "$(cat /etc/hosts | grep $MASTER_NAME)" ]; then
  echo "Adding $MASTER_NAME to hosts file"
  echo "$MASTER_IP $MASTER_NAME" >> /etc/hosts
fi

# Setup hosts file to support ping by hostname to each minion in the cluster
for (( i=0; i<${#MINION_NAMES[@]}; i++)); do
  minion=${MINION_NAMES[$i]}
  ip=${MINION_IPS[$i]}
  if [ ! "$(cat /etc/hosts | grep $minion)" ]; then
    echo "Adding $minion to hosts file"
    echo "$ip $minion" >> /etc/hosts
  fi
done

# Let the minion know who its master is
# Recover the salt-minion if the salt-master network changes
## auth_timeout - how long we want to wait for a time out
## auth_tries - how many times we will retry before restarting salt-minion
## auth_safemode - if our cert is rejected, we will restart salt minion
## ping_interval - restart the minion if we cannot ping the master after 1 minute
## random_reauth_delay - wait 0-3 seconds when reauthenticating
## recon_default - how long to wait before reconnecting
## recon_max - how long you will wait upper bound
## state_aggregrate - try to do a single yum command to install all referenced packages where possible at once, should improve startup times
##
# Write the salt-minion config fragments (heredoc contents are the literal
# YAML written to disk).
mkdir -p /etc/salt/minion.d
cat <<EOF >/etc/salt/minion.d/master.conf
master: '$(echo "$MASTER_NAME" | sed -e "s/'/''/g")'
auth_timeout: 10
auth_tries: 2
auth_safemode: True
ping_interval: 1
random_reauth_delay: 3
state_aggregrate:
  - pkg
EOF

cat <<EOF >/etc/salt/minion.d/log-level-debug.conf
log_level: debug
log_level_logfile: debug
EOF

# Our minions will have a pool role to distinguish them from the master.
cat <<EOF >/etc/salt/minion.d/grains.conf
grains:
  cloud: vagrant
  network_mode: openvswitch
  node_ip: '$(echo "$MINION_IP" | sed -e "s/'/''/g")'
  api_servers: '$(echo "$MASTER_IP" | sed -e "s/'/''/g")'
  networkInterfaceName: eth1
  roles:
    - kubernetes-pool
  cbr-cidr: '$(echo "$CONTAINER_SUBNET" | sed -e "s/'/''/g")'
  hostname_override: '$(echo "$MINION_IP" | sed -e "s/'/''/g")'
EOF

# we will run provision to update code each time we test, so we do not want to do salt install each time
if ! which salt-minion >/dev/null 2>&1; then
  # Install Salt
  curl -sS -L --connect-timeout 20 --retry 6 --retry-delay 10 https://bootstrap.saltstack.com | sh -s
else
  # Sometimes the minion gets wedged when it comes up along with the master.
  # Restarting it here un-wedges it.
  systemctl restart salt-minion.service
fi
|
/**
 * Interface fixture; the names suggest implementations forward {@code o}
 * to a sink depending on the boolean condition, but the behaviour is
 * defined entirely by implementing classes (not visible here).
 */
public interface InterfaceA {
    /** Presumably passes {@code o} to a sink when {@code cond} is true. */
    public void callSinkIfTrue(Object o, boolean cond);
    /** Presumably passes {@code o} to a sink when {@code cond} is false. */
    public void callSinkIfFalse(Object o, boolean cond);
    /** Presumably exercises per-call-site sensitivity of an analysis. */
    public void localCallSensitivity(Object o, boolean c);
}
|
<filename>src/api/manage.js
import request from '@/utils/request'
import env from '@/config/env'
// GET /user -- user list; `parameter` is passed through as the query string.
export function getUserList (parameter) {
  return request({
    url: env.url + '/user',
    method: 'get',
    params: parameter
  })
}
// GET /role -- role list; `parameter` is passed through as the query string.
export function getRoleList (parameter) {
  return request({
    url: env.url + '/role',
    method: 'get',
    params: parameter
  })
}
// GET /service -- service list; `parameter` is passed through as the query string.
export function getServiceList (parameter) {
  return request({
    url: env.url + '/service',
    method: 'get',
    params: parameter
  })
}
// GET /permission/no-pager -- unpaginated permission list.
export function getPermissions (parameter) {
  return request({
    url: env.url + '/permission/no-pager',
    method: 'get',
    params: parameter
  })
}
// GET /org/tree -- organisation hierarchy.
export function getOrgTree (parameter) {
  return request({
    url: env.url + '/org/tree',
    method: 'get',
    params: parameter
  })
}
// Create (id === 0 -> POST) or update (id !== 0 -> PUT) a service record.
export function saveService (parameter) {
  return request({
    url: env.url + '/service',
    method: parameter.id === 0 ? 'post' : 'put',
    data: parameter
  })
}
// Create (id === 0 -> POST) or update (PUT) a subscription.
// Consistency fix: prefix the path with env.url like every other endpoint
// in this module -- the bare '/sub' bypassed the configured API host.
export function saveSub (sub) {
  return request({
    url: env.url + '/sub',
    method: sub.id === 0 ? 'post' : 'put',
    data: sub
  })
}
|
import { TokenDto, GetTokenDto } from './../../models/apiModels/user.dto';
import {
CreateGoogleUserDto,
CreateUserDto,
PinDto,
} from '../../models/apiModels/user.dto';
import { LoginDto } from '../../models/apiModels/login.dto';
import { Public } from './jwt-auth.guard';
import { LocalAuthGuard } from './local-auth.guard';
import {
Controller,
Request,
Post,
UseGuards,
Get,
Body,
Patch,
Param,
} from '@nestjs/common';
import {
ApiBearerAuth,
ApiBody,
ApiOkResponse,
ApiParam,
ApiTags,
} from '@nestjs/swagger/';
import { AuthService } from './auth.service';
@Controller('auth')
@ApiTags('Authentication')
export class AuthController {
  constructor(private authService: AuthService) {}

  /** Password login; returns a token for subsequent requests. */
  @UseGuards(LocalAuthGuard)
  @Post('login')
  @ApiBody({ type: LoginDto })
  @ApiOkResponse({ description: 'result Token' })
  @Public()
  async login(@Request() req) {
    return await this.authService.login(req.user);
  }

  /** Registers a new user account. */
  @Post('register')
  @ApiBody({ type: CreateUserDto })
  @ApiOkResponse({ description: 'Created User' })
  @Public()
  async register(@Body() createUserDto: CreateUserDto) {
    return this.authService.registerUser(createUserDto);
  }

  /** Login/registration via a Google identity payload. */
  @Post('google')
  @ApiBody({ type: CreateGoogleUserDto })
  @ApiOkResponse({ description: 'Created User' })
  @Public()
  async googleLogin(@Body() createGoogleUserDto: CreateGoogleUserDto) {
    return this.authService.googleLogin(createGoogleUserDto);
  }

  /** Returns the user object the auth guard attached to the request. */
  @Get('profile')
  @ApiBearerAuth('Bearer')
  getProfile(@Request() req) {
    return req.user;
  }

  /** Verifies the caller's PIN. */
  @Post('pin')
  @ApiBody({ type: PinDto })
  @ApiBearerAuth('Bearer')
  verifyPin(@Request() req, @Body() pinData: PinDto) {
    pinData.userId = req.user.id;
    return this.authService.getPin(pinData);
  }

  /** Sets/updates the caller's PIN. */
  @Patch('pin')
  @ApiBody({ type: PinDto })
  // Consistency fix: every other authenticated route declares its bearer
  // requirement for Swagger; this annotation was missing here (docs-only,
  // no runtime behavior change).
  @ApiBearerAuth('Bearer')
  setPin(@Request() req, @Body() pinData: PinDto) {
    pinData.userId = req.user.id;
    return this.authService.setPin(pinData);
  }

  /** Reports whether the caller has a PIN configured. */
  @Get('pin')
  @ApiBearerAuth('Bearer')
  checkPinSet(@Request() req) {
    return this.authService.checkPinSet(req.user.id);
  }

  /** Stores a push-notification token for the caller. */
  @Patch('token')
  @ApiBearerAuth('Bearer')
  addNotificationToken(@Request() req, @Body() data: TokenDto) {
    return this.authService.addToken(req.user.id, data.token);
  }

  /** Public lookup of a user's stored notification tokens. */
  @Get(':userId/tokens')
  @Public()
  @ApiParam({ name: 'userId', type: 'string' })
  getUserTokens(@Param() param: GetTokenDto) {
    return this.authService.getUserTokens(param.userId);
  }
}
|
$(document).ready(function(){
  // Clicking a thumbnail jumps the featured carousel to that slide.
  $('.small-slides').click(function(e){
    // Bug fix: preventDefault was referenced but never invoked
    // (`e.preventDefault;` is a no-op property access).
    e.preventDefault();
    var carousel = $('.owl-carousel-featured');
    var index = $(this).data('index');
    // Animate to the selected slide over 500 ms.
    carousel.trigger("to.owl.carousel", [index, 500, true]);
  })
})
#!/bin/bash
set -o errexit
set -o pipefail

# Absolute directory containing this script (resolving symlinks).
readonly BASEDIR=$(dirname $(readlink -f $0))

# Prints usage information for the build wrapper.
usage () {
cat << EOF
Description: Build lorhammer binaries.
Usage: resources/scripts/buildAllEnv.sh [COMMAND]
Commands:
-light         Only compile linux amd64 version.
-full          Compile linux, window and mac for 386, amd64 and arm.
-h | -help     Display this help.
EOF
}

# A command argument is mandatory.
if [[ -z $1 ]]; then
    echo "Error : command empty"
    usage
    exit 1
fi

if [[ "$1" == "-help" || "$1" == "-h" ]]; then
    usage
    exit 0
fi

# Delegate the build to the goreleaser docker image; extra arguments are
# forwarded. The -full variant also removes intermediate dist directories.
if [[ "$1" == "-light" ]]; then
    docker run --rm -v ${BASEDIR}/../..:/go/src/lorhammer registry.gitlab.com/itk.fr/lorhammer/goreleaser --config docker/goreleaser/goreleaser-light.yml "${@:2}";
else
    docker run --rm -v ${BASEDIR}/../..:/go/src/lorhammer registry.gitlab.com/itk.fr/lorhammer/goreleaser --config docker/goreleaser/goreleaser-full.yml "${@:2}"
    find ${BASEDIR}/../../dist -maxdepth 1 -mindepth 1 -type d -exec rm -rf '{}' \;
fi
package s3blob
import (
"context"
"flag"
_ "github.com/aaronland/go-cloud-s3blob"
"gocloud.dev/blob"
"io"
"testing"
)
// uri selects the bucket to exercise, e.g. -uri "s3blob://bucket-name".
var uri = flag.String("uri", "", "A valid s3blob:// URI.")

// TestOpenBucket opens the bucket named by -uri and lists every key,
// failing on any error other than end-of-iteration (io.EOF).
func TestOpenBucket(t *testing.T) {
	ctx := context.Background()
	bucket, err := blob.OpenBucket(ctx, *uri)
	if err != nil {
		t.Fatal(err)
	}
	defer bucket.Close()
	// A nil ListOptions lists the entire bucket.
	iter := bucket.List(nil)
	for {
		obj, err := iter.Next(ctx)
		if err == io.EOF {
			break
		}
		if err != nil {
			t.Fatal(err)
		}
		t.Log(obj.Key)
	}
}
|
<gh_stars>0
'use strict';

// Machine-compiled (pre-rendered) Vant "cell-group" component: renders its
// default slot inside a .van-cell-group container, optionally adding the
// hairline top/bottom border class. Do not hand-edit the render function.
exports.__esModule = true;

var _createBasic = require('../utils/create-basic');

var _createBasic2 = _interopRequireDefault(_createBasic);

function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }

exports.default = (0, _createBasic2.default)({
  // Compiled template render function (generated by vue-template-compiler).
  render: function render() {
    var _vm = this;var _h = _vm.$createElement;var _c = _vm._self._c || _h;return _c('div', { staticClass: "van-cell-group", class: { 'van-hairline--top-bottom': _vm.border } }, [_vm._t("default")], 2);
  },
  name: 'cell-group',
  props: {
    // Whether to draw the hairline top/bottom border (defaults to true).
    border: {
      type: Boolean,
      default: true
    }
  }
});
import numpy as np
def instant_parameters(signal, fs=None):
    '''
    Instant parameters estimation:
        analytic_signal = hilbert(signal)
        envelope = np.abs(analytic_signal)
        phase = np.unwrap(np.angle(analytic_signal))
        frequency = np.diff(phase)

    Parameters
    ----------
    signal : array_like
        Input signal.
    fs : float, optional
        Sampling frequency of the input signal. If provided, the frequency
        will be in Hz, otherwise in radians/sample.

    Returns
    -------
    analytic_signal : ndarray
        The analytic signal of the input.
    envelope : ndarray
        The envelope (instantaneous amplitude) of the input signal.
    phase : ndarray
        The unwrapped instantaneous phase of the input signal.
    frequency : ndarray
        The instantaneous frequency; one element shorter than the input
        because it is a first difference.
    '''
    # Bug fix: the Hilbert transform lives in scipy.signal, not numpy.fft --
    # `np.fft.hilbert` does not exist and raised AttributeError.
    from scipy.signal import hilbert
    analytic_signal = hilbert(signal)
    envelope = np.abs(analytic_signal)
    phase = np.unwrap(np.angle(analytic_signal))
    if fs is not None:
        # Convert rad/sample -> Hz: f = (dphi/dn) / (2*pi) * fs
        frequency = (np.diff(phase) / (2 * np.pi)) * fs
    else:
        frequency = np.diff(phase)
    return analytic_signal, envelope, phase, frequency
#!/bin/bash
# Run a single dieharder test: -d selects the test id (207), -g the
# generator id (21), and -S fixes the RNG seed for reproducibility.
# NOTE(review): numeric test/generator ids vary across dieharder versions --
# confirm with `dieharder -l` / `dieharder -g -1` on the target system.
dieharder -d 207 -g 21 -S 92949953
|
'use strict'

let db = require('../')

// Bookshelf model for the `articles` table with soft-delete enabled.
module.exports = db.bookshelf.model('Article', {
  tableName: 'articles',
  softDelete: true,
  // Owning user.
  // NOTE(review): `belongsToOne` is not a stock Bookshelf relation (stock
  // is `belongsTo`); confirm a registered plugin provides it.
  user: function () {
    return this.belongsToOne('User')
  },
  // Comments attached to this article.
  comments: function () {
    return this.hasMany('Comment')
  },
  // Tags via the ArticleTag join model.
  tags: function () {
    return this.belongsToMany('Tag').through('ArticleTag')
  },
  // Polymorphic association keyed by `source`.
  articlesOrTags: function () {
    return this.morphMany('ArticleOrTag', 'source')
  }
})
|
#!/bin/bash -xe
# Recreate the system D-Bus socket (required by tpm2-abrmd below).
sudo rm -rf /var/run/dbus
sudo mkdir /var/run/dbus
sudo dbus-daemon --system

sudo chown -Rv vscode:vscode /var/lib/tpmstate

# Provision fresh software-TPM 2.0 state with an EK (signing + decryption)
# and EK/platform certificates.
swtpm_setup --tpm2 \
  --tpmstate /var/lib/tpmstate \
  --createek --allow-signing --decryption --create-ek-cert \
  --create-platform-cert \
  --display

# Serve the emulated TPM over TCP (control port 2322, data port 2321).
swtpm socket --tpm2 \
  --tpmstate dir=/var/lib/tpmstate \
  --flags startup-clear \
  --ctrl type=tcp,port=2322 \
  --server type=tcp,port=2321 \
  --daemon

# Start the TPM2 access broker / resource manager against the emulator.
tpm2-abrmd \
  --logger=stdout \
  --tcti=swtpm: \
  --flush-all &
|
<reponame>smagill/opensphere-desktop<gh_stars>10-100
/** Classes related to the launcher. */
package io.opensphere.core.launch;
|
import React, { Component } from "react";
import "./resources/styles.css";
import Header from "./components/header_footer/Header";
import Footer from "./components/header_footer/Footer";
import Featured from "./components/featured/Featured";
import Info from "./components/info/Info";
import Highlights from "./components/highlights/Highlights";
import PriceCard from "./components/pricing/PriceCard";
import Map from "./components/map/Map";
import { Element } from "react-scroll";
// Single-page landing layout. Each react-scroll <Element> is a named scroll
// anchor. NOTE(review): the names ("Featured", "Info", ...) presumably match
// Link targets rendered by Header — confirm before renaming.
class App extends Component {
  render() {
    return (
      <div
        className="App"
        style={{ height: "1500px", background: 'url(images/slide_one.jpg) center center / cover' }}
      >
        <Header />
        {/* Scroll-anchor sections, in page order. */}
        <Element name="Featured">
          <Featured />
        </Element>
        <Element name="Info">
          <Info />
        </Element>
        <Element name="Highlights">
          <Highlights />
        </Element>
        <Element name="PriceCard">
          <PriceCard />
        </Element>
        <Element name="Map">
          <Map />
        </Element>
        <Footer />
      </div>
    );
  }
}
export default App;
|
<filename>test/utils/TFCSchemeConfigSpec.scala
/*
* Copyright 2018 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package utils
import org.joda.time.LocalDate
import org.joda.time.format.DateTimeFormat
class TFCSchemeConfigSpec extends FakeCCCalculatorApplication {

  /** Shared "dd-MM-yyyy" formatter used by every test below. */
  private val formatter = DateTimeFormat.forPattern("dd-MM-yyyy")

  /** Parse a "dd-MM-yyyy" string into a LocalDate. */
  private def date(value: String): LocalDate = LocalDate.parse(value, formatter)

  /**
   * Every tax year currently configured carries identical TFC values, so a
   * single expected instance is shared by all the getConfig tests below.
   */
  private val expectedTaxYearConfig = TFCTaxYearConfig(
    topUpPercent = 20,
    maxEligibleChildcareAmount = 2500,
    maxEligibleChildcareAmountForDisabled = 5000,
    maxGovtContribution = 500,
    maxGovtContributionForDisabled = 1000
  )

  "TFC SchemeConfig" should {

    "populate end day of the tax year from config file" in {
      TFCConfig.taxYearEndDay shouldBe 6
    }

    "populate end month of the tax year from config file" in {
      TFCConfig.taxYearEndMonth shouldBe 4
    }

    "return max no of children from config file" in {
      TFCConfig.maxNoOfChildren shouldBe 25
    }

    "return max name length from config file" in {
      TFCConfig.maxNameLength shouldBe 25
    }

    "(following year) return the end date of tax year" in {
      TFCConfig.taxYearEndDate(date("23-06-2016"), "tfc") shouldBe date("06-04-2017")
    }

    "(current year) return the end date of tax year" in {
      TFCConfig.taxYearEndDate(date("23-02-2016"), "tfc") shouldBe date("06-04-2016")
    }

    // (expected tax-year label, date fed to getConfig). The label only feeds
    // the test name; all years share the same config values (see above).
    val getConfigCases = Seq(
      ("2016/2017", "24-07-2016"),
      ("2017/2018", "24-07-2017"),
      ("2017/2018", "24-07-2018"),
      ("default", "24-07-2015"),
      ("default", "05-04-2016"),
      ("2016/2017", "06-04-2016"),
      ("2016/2017", "05-04-2017"),
      ("2017/2018", "06-04-2017"),
      ("2017/2018", "05-04-2018"),
      ("2017/2018", "06-04-2018")
    )

    getConfigCases.foreach { case (label, fromDate) =>
      s"return $label TFC taxYear Config for a date $fromDate" in {
        TFCConfig.getConfig(date(fromDate)) shouldBe expectedTaxYearConfig
      }
    }
  }
}
|
package agent
import (
"net/http"
"net/http/httptest"
"testing"
"github.com/hashicorp/nomad/acl"
"github.com/hashicorp/nomad/nomad/mock"
"github.com/hashicorp/nomad/nomad/structs"
"github.com/stretchr/testify/assert"
)
// TestClientStatsRequest verifies the client stats endpoint responds without
// error when ACLs are disabled (query parameters such as `since` are ignored).
func TestClientStatsRequest(t *testing.T) {
	t.Parallel()
	httpTest(t, nil, func(s *TestAgent) {
		req, reqErr := http.NewRequest("GET", "/v1/client/stats/?since=foo", nil)
		if reqErr != nil {
			t.Fatalf("err: %v", reqErr)
		}
		recorder := httptest.NewRecorder()
		if _, statsErr := s.Server.ClientStatsRequest(recorder, req); statsErr != nil {
			t.Fatalf("unexpected err: %v", statsErr)
		}
	})
}
// TestClientStatsRequest_ACL exercises the client stats endpoint under ACLs:
// missing and node-deny tokens are rejected, while node-read and management
// tokens succeed.
func TestClientStatsRequest_ACL(t *testing.T) {
	t.Parallel()
	a := assert.New(t)
	httpACLTest(t, nil, func(s *TestAgent) {
		state := s.Agent.server.State()
		req, err := http.NewRequest("GET", "/v1/client/stats/", nil)
		a.Nil(err)

		// Anonymous request: permission denied.
		{
			rec := httptest.NewRecorder()
			_, reqErr := s.Server.ClientStatsRequest(rec, req)
			a.NotNil(reqErr)
			a.Equal(reqErr.Error(), structs.ErrPermissionDenied.Error())
		}

		// Token whose policy denies node access: permission denied.
		{
			rec := httptest.NewRecorder()
			token := mock.CreatePolicyAndToken(t, state, 1005, "invalid", mock.NodePolicy(acl.PolicyDeny))
			setToken(req, token)
			_, reqErr := s.Server.ClientStatsRequest(rec, req)
			a.NotNil(reqErr)
			a.Equal(reqErr.Error(), structs.ErrPermissionDenied.Error())
		}

		// Token with node read capability: request succeeds.
		{
			rec := httptest.NewRecorder()
			token := mock.CreatePolicyAndToken(t, state, 1007, "valid", mock.NodePolicy(acl.PolicyRead))
			setToken(req, token)
			_, reqErr := s.Server.ClientStatsRequest(rec, req)
			a.Nil(reqErr)
			a.Equal(http.StatusOK, rec.Code)
		}

		// Management (root) token: request succeeds.
		{
			rec := httptest.NewRecorder()
			setToken(req, s.RootToken)
			_, reqErr := s.Server.ClientStatsRequest(rec, req)
			a.Nil(reqErr)
			a.Equal(http.StatusOK, rec.Code)
		}
	})
}
|
// Math-evaluation modes for expression parsing.
// NOTE(review): names match the Less.js `math` option constants — confirm the
// consumer before changing values.
module.exports = {
    ALWAYS: 0,
    PARENS_DIVISION: 1,
    PARENS: 2,
    STRICT_LEGACY: 3
};
class Token:
    """A lexical token: a ``type`` tag paired with its literal ``value``."""

    def __init__(self, type, value):
        self.type = type
        self.value = value

    def matches(self, type_, value):
        """Return True when both the type and the value equal the given pair."""
        return (self.type, self.value) == (type_, value)

    def __repr__(self):
        return "Token({0}, {1})".format(self.type, self.value)
#!/usr/bin/env bash

# bump_version.sh (show|major|minor|patch|prerelease|build|finalize)
#
# Bumps (or shows/finalizes) the semver version stored in $VERSION_FILE,
# commits the change, and pushes it.

set -o nounset
set -o errexit
set -o pipefail

VERSION_FILE=src/findcdn/_version.py
HELP_INFORMATION="bump_version.sh (show|major|minor|patch|prerelease|build|finalize)"

old_version=$(sed -n "s/^__version__ = \"\(.*\)\"$/\1/p" $VERSION_FILE)

# Rewrite the version file with $1, then commit and push the change.
# (Previously duplicated verbatim in the bump and finalize branches.)
update_version() {
  local new_version=$1
  echo Changing version from "$old_version" to "$new_version"
  # A temp file is used to provide compatability with macOS development
  # as a result of macOS using the BSD version of sed
  local tmp_file=/tmp/version.$$
  sed "s/$old_version/$new_version/" $VERSION_FILE > $tmp_file
  mv $tmp_file $VERSION_FILE
  git add $VERSION_FILE
  git commit -m"Bump version from $old_version to $new_version"
  git push
}

if [ $# -ne 1 ]
then
  echo "$HELP_INFORMATION"
else
  case $1 in
    major|minor|patch|prerelease|build)
      update_version "$(python -c "import semver; print(semver.bump_$1('$old_version'))")"
      ;;
    finalize)
      update_version "$(python -c "import semver; print(semver.finalize_version('$old_version'))")"
      ;;
    show)
      echo "$old_version"
      ;;
    *)
      echo "$HELP_INFORMATION"
      ;;
  esac
fi
|
/*
* The MIT License
*
* Copyright (c) 2010, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.util;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import hudson.Functions;
import hudson.Launcher.LocalLauncher;
import hudson.Launcher.RemoteLauncher;
import hudson.Proc;
import hudson.model.Slave;
import org.apache.tools.ant.util.JavaEnvUtils;
import org.junit.Rule;
import org.junit.Test;
import org.jvnet.hudson.test.Email;
import org.jvnet.hudson.test.JenkinsRule;
import com.google.common.base.Joiner;
import java.io.ByteArrayOutputStream;
import java.io.StringWriter;
import java.net.URL;
/**
* @author <NAME>
*/
public class ArgumentListBuilder2Test {
    @Rule
    public JenkinsRule j = new JenkinsRule();
    /**
     * Makes sure {@link RemoteLauncher} properly masks arguments.
     */
    @Test
    @Email("http://n4.nabble.com/Password-masking-when-running-commands-on-a-slave-tp1753033p1753033.html")
    public void slaveMask() throws Exception {
        ArgumentListBuilder args = new ArgumentListBuilder();
        args.add("java");
        // Masked arguments must be rendered as asterisks in the build log.
        args.addMasked("-version");
        // Run on a freshly-connected agent so the RemoteLauncher code path is used.
        Slave s = j.createSlave();
        s.toComputer().connect(false).get();
        StringWriter out = new StringWriter();
        assertEquals(0,s.createLauncher(new StreamTaskListener(out)).launch().cmds(args).join());
        System.out.println(out);
        // The logged command line shows the masked argument as "********".
        assertTrue(out.toString().contains("$ java ********"));
    }
    /**
     * Checks that special characters survive the cmd.exe round-trip
     * produced by {@code ArgumentListBuilder.toWindowsCommand()} unmodified.
     */
    @Test
    public void ensureArgumentsArePassedViaCmdExeUnmodified() throws Exception {
        // Windows-only: cmd.exe quoting is what is under test.
        assumeTrue(Functions.isWindows());
        String[] specials = new String[] {
            "~", "!", "@", "#", "$", "%", "^", "&", "*", "(", ")",
            "_", "+", "{", "}", "[", "]", ":", ";", "\"", "'", "\\", "|",
            "<", ">", ",", ".", "/", "?", " "
        };
        String out = echoArgs(specials);
        String expected = String.format("%n%s", Joiner.on(" ").join(specials));
        assertThat(out, containsString(expected));
    }
    /**
     * Launches hudson.util.EchoCommand in a child JVM via
     * {@code toWindowsCommand()} and returns its captured standard output.
     *
     * @param arguments arguments the child process should echo back
     * @return everything the child wrote to stdout
     */
    public String echoArgs(String... arguments) throws Exception {
        // Locate the jar containing the EchoCommand test harness class.
        String testHarnessJar = Class.forName("hudson.util.EchoCommand")
                .getProtectionDomain()
                .getCodeSource()
                .getLocation()
                .getFile()
                .replaceAll("^/", "");
        // Strip surrounding quotes from the JRE path before building the command.
        ArgumentListBuilder args = new ArgumentListBuilder(
                JavaEnvUtils.getJreExecutable("java").replaceAll("^\"|\"$", ""),
                "-cp", testHarnessJar, "hudson.util.EchoCommand")
                .add(arguments)
                .toWindowsCommand();
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        final StreamTaskListener listener = new StreamTaskListener(out);
        Proc p = new LocalLauncher(listener)
                .launch()
                .stderr(System.err)
                .stdout(out)
                .cmds(args)
                .start()
        ;
        int code = p.join();
        listener.close();
        // The child must exit cleanly before we trust its output.
        assertThat(code, equalTo(0));
        return out.toString();
    }
}
|
<filename>experiments/testing_rewards.py
import numpy as np
import math

# Scratch experiment: per-lane sample data captured from a traffic-light
# simulation run, plus a visual check of the Weibull-based car generation
# schedule. Runs top-to-bottom as a script and shows a matplotlib window.

# Earlier captured samples, kept for reference:
# waiting_time_per_lane = [1.0, 0.0, 0.0, 0.0, 1304.0, 1194.0, 1056.0, 0.0, 7.0, 6.0, 0.0, 0.0, 1298.0, 1118.0, 1036.0, 0.0]
# out_lanes = ['TL2N_3', 'TL2E_3', 'TL2S_0', 'TL2W_2', 'TL2W_0', 'TL2N_1', 'TL2S_1', 'TL2N_0', 'TL2E_1', 'TL2W_3', 'TL2E_2', 'TL2E_0', 'TL2S_2', 'TL2W_1', 'TL2N_2', 'TL2S_3']
# lanes = ['N2TL_0', 'N2TL_1', 'N2TL_2', 'N2TL_3', 'E2TL_0', 'E2TL_1', 'E2TL_2', 'E2TL_3', 'S2TL_0', 'S2TL_1', 'S2TL_2', 'S2TL_3', 'W2TL_0', 'W2TL_1', 'W2TL_2', 'W2TL_3']
# getlaststepvehiclenumber = lanes[8, 1, 1, 2, 14, 14, 14, 2, 6, 6, 2, 1, 20, 17, 15, 1]
# getlaststepvehiclenumber = out_lanes[0, 0, 9, 1, 3, 3, 6, 8, 2, 0, 1, 4, 3, 4, 2, 2]
# waiting_time_per_lane = [35.0, 0.0, 0.0, 156.0, 7.0, 2.0, 0.0, 1.0, 26.0, 22.0, 8.0, 166.0, 0.0, 0.0, 0.0, 41.0]

getlaststepvehiclenumber = [8, 1, 1, 2, 14, 14, 14, 2, 6, 6, 2, 1, 20, 17, 15, 1]

# First sample is immediately overwritten by the second one below; both are
# kept exactly as in the original experiment.
waiting_time_per_lane = [1.0, 0.0, 0.0, 0.0, 1304.0, 1194.0, 1056.0, 0.0, 7.0, 6.0, 0.0, 0.0, 1298.0, 1118.0, 1036.0, 0.0]
waiting_time_per_lane = [0.0, 0.0, 0.0, 7.0, 117.0, 72.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 195.0, 55.0, 33.0, 0.0]

vehicle_number_per_lane = [0, 0, 0, 1, 4, 1, 1, 1, 0, 2, 0, 1, 6, 3, 2, 0]
average_sped = [
    0,
    0,
    0,
    0.0,
    23.60310026706957,
    10.0,
    0,
    27.248231909322854,
    14.79342361570752,
    0,
    27.456183982196258,
    0,
    15.816712767147465,
    26.666716817818703,
    0.0,
    0,
]

print("tru")


def _rescale_sorted(values, new_min, new_max):
    """Linearly map the sorted array `values` onto [new_min, new_max].

    The old interval is [floor(min), ceil(max)] of the samples; results are
    rounded to whole simulation steps.
    """
    # Bug fix: the original used values[1] (second element) as the minimum.
    old_min = math.floor(values[0])
    old_max = math.ceil(values[-1])
    scale = (new_max - new_min) / (old_max - old_min)
    # Vectorised; the original grew the array with np.append one element at a
    # time, which is O(n^2).
    return np.rint(scale * (values - old_max) + new_max)


# Draw 10k Weibull(shape=2) samples and stretch them over 0..40000 steps.
timings = np.sort(np.random.weibull(2, 10000))
car_gen_steps = _rescale_sorted(timings, 0, 40000)

import matplotlib.pyplot as plt

# Plot generation step vs. cumulative car index (the schedule's CDF shape).
plt.plot(car_gen_steps, list(range(len(car_gen_steps))))
plt.show()
print("tru")
|
#!/usr/bin/env bash
set -eu +x
# Pin the Nix release so installs are reproducible.
NIX_VERSION="2.0.4"
# Install Nix from the upstream installer script.
curl https://nixos.org/releases/nix/nix-$NIX_VERSION/install | sh
# no code to install stack2nix yet
# NOTE(review): stack2nix is invoked below but never installed above — this
# fails unless stack2nix is already on PATH.
stack2nix --cabal2nix-args="--no-hpack" unpack-dir > snapshot.nix
# NOTE(review): snapshot.nix is generated but snapshot-fixed.nix is built —
# confirm the manual fix-up step that produces snapshot-fixed.nix.
nix build -f snapshot-fixed.nix
|
require 'date'
require 'time'
module ActiveSupport #:nodoc:
  module CoreExtensions #:nodoc:
    module Time #:nodoc:
      # Getting times in different convenient string representations and other objects
      module Conversions
        # Named strftime patterns accepted by +to_formatted_s+.
        DATE_FORMATS = {
          :db => "%Y-%m-%d %H:%M:%S",
          :short => "%d %b %H:%M",
          :long => "%B %d, %Y %H:%M",
          :rfc822 => "%a, %d %b %Y %H:%M:%S %z"
        }
        # Mix-in hook: preserves the original +to_s+ as +to_default_s+, then
        # routes +to_s+ through +to_formatted_s+. The aliases must run in this
        # order or the original implementation would be lost.
        def self.included(klass)
          klass.send(:alias_method, :to_default_s, :to_s)
          klass.send(:alias_method, :to_s, :to_formatted_s)
        end
        # Format using a named DATE_FORMATS pattern; any unknown format
        # (including the :default placeholder) falls back to the original to_s.
        def to_formatted_s(format = :default)
          DATE_FORMATS[format] ? strftime(DATE_FORMATS[format]).strip : to_default_s
        end
        # Build a Date from this time's year/month/day.
        def to_date
          ::Date.new(year, month, day)
        end
        # To be able to keep Dates and Times interchangeable on conversions
        def to_time
          self
        end
      end
    end
  end
end
|
#!/bin/bash
# Convert every SVG in ./svg to a 128px-wide PNG in ./png, skipping files that
# already exist — inkscape crashes occasionally, so the script is designed to
# be re-run until every file has been created.

# Ensure the output directory exists (quoted: -e was unquoted before, which
# breaks on paths containing spaces; -d is the precise check for a directory).
if test -d "$PWD/png"; then
    echo "png folder exists."
else
    echo "creating png folder "
    mkdir "$PWD"/png
fi

for file in "$PWD"/svg/*.svg
do
    # Strip only the .svg suffix. The previous `cut -d. -f1` truncated any
    # basename containing a dot (e.g. "a.b.svg" became "a").
    filename=$(basename "$file" .svg)
    if test -f "$PWD/png/$filename.png"; then
        echo "$filename exists."
    else
        echo "creating $filename "
        inkscape "$file" --export-type=png --export-filename="$PWD/png/$filename.png" -w 128
    fi
done
echo 'all done'
<gh_stars>1-10
# frozen_string_literal: true
require 'rails_helper'
# Component spec for SidebarItem: renders the component inline and asserts on
# the generated markup.
RSpec.describe SidebarItem::SidebarItem, type: :component do
  subject { render_inline(described_class.new(**params)) }
  # Default: no constructor arguments.
  let(:params) { {} }
  it { should have_css('article.SidebarItem') }
  context 'if active' do
    let(:params) { { active: true } }
    # The active state adds a BEM-style modifier class.
    it { should have_css('article.SidebarItem--active') }
  end
end
|
<reponame>aliemteam/wp-graphql<filename>src/models/schema.ts<gh_stars>10-100
import { GraphQLObjectType, GraphQLSchema } from 'graphql';

import mutations from './mutations';
import queries from './queries';

// Root query type: exposes every field defined in ./queries. Fields are a
// thunk so module circular references resolve lazily.
const query = new GraphQLObjectType({
    name: 'Query',
    description: 'The root query.',
    fields: () => queries,
});

// Root mutation type: exposes every field defined in ./mutations.
const mutation = new GraphQLObjectType({
    name: 'Mutation',
    description: 'The root mutation.',
    fields: () => mutations,
});

// The executable schema served by the GraphQL endpoint.
export default new GraphQLSchema({ query, mutation });
|
package root

// rootOptions holds the flag values for the root command.
// NOTE(review): field semantics are inferred from their names — confirm where
// the flags are registered.
type rootOptions struct {
	// address is presumably the bind/listen address.
	address string
	// port is presumably the TCP listen port.
	port int
	// debug presumably enables verbose output.
	debug bool
}
|
# Evaluate a language model checkpoint on the WikiText-103 validation set with
# a 1536-token tokenizer config, shuffle-sentence augmentation applied to the
# first two thirds of each example, and loss computed on the last sixth.
# NOTE(review): --augmented/--augmentation_function/--eval_function are not
# stock HuggingFace flags — confirm the local fork of the example script.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-shuffled-N/13-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-shuffled-N/13-1024+0+512-SS-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_sentences_first_two_thirds_sixth --eval_function last_sixth_eval
#!/bin/bash
# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# libFuzzer flags: bound total runtime, store crash artifacts under
# fuzzer_output/, cap input size at 128 bytes, and flag inputs slower than
# 120s. NOTE(review): $runtime, $jobs and $config are not set in this file —
# they are presumably exported by the calling CI harness; confirm.
flags="-max_total_time=$runtime -artifact_prefix=fuzzer_output/ -max_len=128 -timeout=120"
# Parallelise when more than one job was requested.
if [ "$jobs" != "1" ]
then
  flags="-jobs=$jobs -workers=$jobs $flags"
fi
# Enable value-profile tracing for the cmp-instrumented sanitizer build.
if [ "$config" == "asan-trace-cmp" ]
then
  flags="-use_traces=1 $flags"
fi
# Run the serverlist fuzzer against its seed corpus.
bins/$config/nanopb_fuzzer_serverlist_test $flags fuzzer_output test/core/nanopb/corpus_serverlist
|
// Vuex store module for the authenticated user.
// NOTE(review): `axios` is used but not imported here — presumably attached
// globally (e.g. window.axios); confirm.
const state = {
  // Currently authenticated user; empty object until fetched.
  user: {}
};

const getters = {};

const actions = {
  // Fetch the current user from the API and commit it into state.
  getUser({ commit }) {
    axios.get("/api/v1/user/current").then(res => {
      commit("setUser", res.data);
    });
  },
  // Log in, persist the access token, then redirect to the home page.
  loginUser({}, user) {
    axios
      .post("/api/v1/user/login", {
        email: user.email,
        // Bug fix: a redacted placeholder left the credential out entirely.
        password: user.password
      })
      .then(res => {
        if (res.data.access_token) {
          // Bug fix: Web Storage has no `set` method — `localStorage.set`
          // throws a TypeError; the API method is `setItem`.
          localStorage.setItem("blog_token", res.data.access_token);
        }
        window.location.replace("/home");
      });
  }
};

const mutations = {
  setUser(state, data) {
    state.user = data;
  }
};

export default {
  // NOTE(review): Vuex's option is spelled `namespaced`; `namespaces` is
  // silently ignored, so this module is effectively NOT namespaced. Left
  // as-is because fixing it would change every dispatch/commit path —
  // confirm callers before renaming.
  namespaces: true,
  state,
  getters,
  actions,
  mutations
};
|
# Generated by Powerlevel10k configuration wizard on 2021-10-13 at 12:18 CEST.
# Based on romkatv/powerlevel10k/config/p10k-lean.zsh, checksum 54401.
# Wizard options: nerdfont-complete + powerline, small icons, unicode, lean, 24h time,
# 1 line, compact, many icons, concise, transient_prompt, instant_prompt=verbose.
# Type `p10k configure` to generate another config.
#
# Config for Powerlevel10k with lean prompt style. Type `p10k configure` to generate
# your own config based on it.
#
# Tip: Looking for a nice color? Here's a one-liner to print colormap.
#
# for i in {0..255}; do print -Pn "%K{$i} %k%F{$i}${(l:3::0:)i}%f " ${${(M)$((i%6)):#3}:+$'\n'}; done
# Temporarily change options.
'builtin' 'local' '-a' 'p10k_config_opts'
[[ ! -o 'aliases' ]] || p10k_config_opts+=('aliases')
[[ ! -o 'sh_glob' ]] || p10k_config_opts+=('sh_glob')
[[ ! -o 'no_brace_expand' ]] || p10k_config_opts+=('no_brace_expand')
'builtin' 'setopt' 'no_aliases' 'no_sh_glob' 'brace_expand'
() {
emulate -L zsh -o extended_glob
# Unset all configuration options. This allows you to apply configuration changes without
# restarting zsh. Edit ~/.p10k.zsh and type `source ~/.p10k.zsh`.
unset -m '(POWERLEVEL9K_*|DEFAULT_USER)~POWERLEVEL9K_GITSTATUS_DIR'
# Zsh >= 5.1 is required.
autoload -Uz is-at-least && is-at-least 5.1 || return
# The list of segments shown on the left. Fill it with the most important segments.
typeset -g POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(
os_icon # os identifier
dir # current directory
vcs # git status
prompt_char # prompt symbol
)
# The list of segments shown on the right. Fill it with less important segments.
# Right prompt on the last prompt line (where you are typing your commands) gets
# automatically hidden when the input line reaches it. Right prompt above the
# last prompt line gets hidden if it would overlap with left prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(
status # exit code of the last command
command_execution_time # duration of the last command
background_jobs # presence of background jobs
direnv # direnv status (https://direnv.net/)
asdf # asdf version manager (https://github.com/asdf-vm/asdf)
virtualenv # python virtual environment (https://docs.python.org/3/library/venv.html)
anaconda # conda environment (https://conda.io/)
pyenv # python environment (https://github.com/pyenv/pyenv)
goenv # go environment (https://github.com/syndbg/goenv)
nodenv # node.js version from nodenv (https://github.com/nodenv/nodenv)
nvm # node.js version from nvm (https://github.com/nvm-sh/nvm)
nodeenv # node.js environment (https://github.com/ekalinin/nodeenv)
# node_version # node.js version
# go_version # go version (https://golang.org)
# rust_version # rustc version (https://www.rust-lang.org)
# dotnet_version # .NET version (https://dotnet.microsoft.com)
# php_version # php version (https://www.php.net/)
# laravel_version # laravel php framework version (https://laravel.com/)
# java_version # java version (https://www.java.com/)
# package # name@version from package.json (https://docs.npmjs.com/files/package.json)
rbenv # ruby version from rbenv (https://github.com/rbenv/rbenv)
rvm # ruby version from rvm (https://rvm.io)
fvm # flutter version management (https://github.com/leoafarias/fvm)
luaenv # lua version from luaenv (https://github.com/cehoffman/luaenv)
jenv # java version from jenv (https://github.com/jenv/jenv)
plenv # perl version from plenv (https://github.com/tokuhirom/plenv)
phpenv # php version from phpenv (https://github.com/phpenv/phpenv)
scalaenv # scala version from scalaenv (https://github.com/scalaenv/scalaenv)
haskell_stack # haskell version from stack (https://haskellstack.org/)
kubecontext # current kubernetes context (https://kubernetes.io/)
terraform # terraform workspace (https://www.terraform.io)
# terraform_version # terraform version (https://www.terraform.io)
aws # aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html)
aws_eb_env # aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/)
azure # azure account name (https://docs.microsoft.com/en-us/cli/azure)
gcloud # google cloud cli account and project (https://cloud.google.com/)
google_app_cred # google application credentials (https://cloud.google.com/docs/authentication/production)
toolbox # toolbox name (https://github.com/containers/toolbox)
context # user@hostname
nordvpn # nordvpn connection status, linux only (https://nordvpn.com/)
ranger # ranger shell (https://github.com/ranger/ranger)
nnn # nnn shell (https://github.com/jarun/nnn)
xplr # xplr shell (https://github.com/sayanarijit/xplr)
vim_shell # vim shell indicator (:sh)
midnight_commander # midnight commander shell (https://midnight-commander.org/)
nix_shell # nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html)
# vpn_ip # virtual private network indicator
# load # CPU load
# disk_usage # disk usage
# ram # free RAM
# swap # used swap
todo # todo items (https://github.com/todotxt/todo.txt-cli)
timewarrior # timewarrior tracking status (https://timewarrior.net/)
taskwarrior # taskwarrior task count (https://taskwarrior.org/)
time # current time
# ip # ip address and bandwidth usage for a specified network interface
# public_ip # public IP address
# proxy # system-wide http/https/ftp proxy
# battery # internal battery
# wifi # wifi speed
# example # example user-defined segment (see prompt_example function below)
)
# Defines character set used by powerlevel10k. It's best to let `p10k configure` set it for you.
typeset -g POWERLEVEL9K_MODE=nerdfont-complete
# When set to `moderate`, some icons will have an extra space after them. This is meant to avoid
# icon overlap when using non-monospace fonts. When set to `none`, spaces are not added.
typeset -g POWERLEVEL9K_ICON_PADDING=none
# Basic style options that define the overall look of your prompt. You probably don't want to
# change them.
typeset -g POWERLEVEL9K_BACKGROUND= # transparent background
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_{LEFT,RIGHT}_WHITESPACE= # no surrounding whitespace
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SUBSEGMENT_SEPARATOR=' ' # separate segments with a space
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SEGMENT_SEPARATOR= # no end-of-line symbol
# When set to true, icons appear before content on both sides of the prompt. When set
# to false, icons go after content. If empty or not set, icons go before content in the left
# prompt and after content in the right prompt.
#
# You can also override it for a specific segment:
#
# POWERLEVEL9K_STATUS_ICON_BEFORE_CONTENT=false
#
# Or for a specific segment in specific state:
#
# POWERLEVEL9K_DIR_NOT_WRITABLE_ICON_BEFORE_CONTENT=false
typeset -g POWERLEVEL9K_ICON_BEFORE_CONTENT=true
# Add an empty line before each prompt.
typeset -g POWERLEVEL9K_PROMPT_ADD_NEWLINE=false
# Connect left prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX=
# Connect right prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_SUFFIX=
# The left end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
# The right end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL=
# Ruler, a.k.a. the horizontal line before each prompt. If you set it to true, you'll
# probably want to set POWERLEVEL9K_PROMPT_ADD_NEWLINE=false above and
# POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' ' below.
typeset -g POWERLEVEL9K_SHOW_RULER=false
typeset -g POWERLEVEL9K_RULER_CHAR='─' # reasonable alternative: '·'
typeset -g POWERLEVEL9K_RULER_FOREGROUND=242
# Filler between left and right prompt on the first prompt line. You can set it to '·' or '─'
# to make it easier to see the alignment between left and right prompt and to separate prompt
# from command output. It serves the same purpose as ruler (see above) without increasing
# the number of prompt lines. You'll probably want to set POWERLEVEL9K_SHOW_RULER=false
# if using this. You might also like POWERLEVEL9K_PROMPT_ADD_NEWLINE=false for more compact
# prompt.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' '
if [[ $POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR != ' ' ]]; then
# The color of the filler.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND=242
# Add a space between the end of left prompt and the filler.
typeset -g POWERLEVEL9K_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=' '
# Add a space between the filler and the start of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL=' '
# Start filler from the edge of the screen if there are no left segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_FIRST_SEGMENT_END_SYMBOL='%{%}'
# End filler on the edge of the screen if there are no right segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='%{%}'
fi
#################################[ os_icon: os identifier ]##################################
# OS identifier color.
typeset -g POWERLEVEL9K_OS_ICON_FOREGROUND=
# Custom icon.
# typeset -g POWERLEVEL9K_OS_ICON_CONTENT_EXPANSION='⭐'
################################[ prompt_char: prompt symbol ]################################
# Green prompt symbol if the last command succeeded.
typeset -g POWERLEVEL9K_PROMPT_CHAR_OK_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=76
# Red prompt symbol if the last command failed.
typeset -g POWERLEVEL9K_PROMPT_CHAR_ERROR_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=196
# Default prompt symbol.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIINS_CONTENT_EXPANSION='❯'
# Prompt symbol in command vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VICMD_CONTENT_EXPANSION='❮'
# Prompt symbol in visual vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIVIS_CONTENT_EXPANSION='V'
# Prompt symbol in overwrite vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIOWR_CONTENT_EXPANSION='▶'
# Enable the distinct overwrite (VIOWR) state used by the expansions above.
typeset -g POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE=true
# No line terminator if prompt_char is the last segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=''
# No line introducer if prompt_char is the first segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
##################################[ dir: current directory ]##################################
# Default current directory color.
typeset -g POWERLEVEL9K_DIR_FOREGROUND=31
# If directory is too long, shorten some of its segments to the shortest possible unique
# prefix. The shortened directory can be tab-completed to the original.
typeset -g POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique
# Replace removed segment suffixes with this symbol.
typeset -g POWERLEVEL9K_SHORTEN_DELIMITER=
# Color of the shortened directory segments.
typeset -g POWERLEVEL9K_DIR_SHORTENED_FOREGROUND=103
# Color of the anchor directory segments. Anchor segments are never shortened. The first
# segment is always an anchor.
typeset -g POWERLEVEL9K_DIR_ANCHOR_FOREGROUND=39
# Display anchor directory segments in bold.
typeset -g POWERLEVEL9K_DIR_ANCHOR_BOLD=true
# Don't shorten directories that contain any of these files. They are anchors.
local anchor_files=(
  .bzr
  .citc
  .git
  .hg
  .node-version
  .python-version
  .go-version
  .ruby-version
  .lua-version
  .java-version
  .perl-version
  .php-version
  .tool-version
  .shorten_folder_marker
  .svn
  .terraform
  CVS
  Cargo.toml
  composer.json
  go.mod
  package.json
  stack.yaml
)
# Join the anchor file names into a single '|'-separated glob pattern.
typeset -g POWERLEVEL9K_SHORTEN_FOLDER_MARKER="(${(j:|:)anchor_files})"
# If set to "first" ("last"), remove everything before the first (last) subdirectory that contains
# files matching $POWERLEVEL9K_SHORTEN_FOLDER_MARKER. For example, when the current directory is
# /foo/bar/git_repo/nested_git_repo/baz, prompt will display git_repo/nested_git_repo/baz (first)
# or nested_git_repo/baz (last). This assumes that git_repo and nested_git_repo contain markers
# and other directories don't.
#
# Optionally, "first" and "last" can be followed by ":<offset>" where <offset> is an integer.
# This moves the truncation point to the right (positive offset) or to the left (negative offset)
# relative to the marker. Plain "first" and "last" are equivalent to "first:0" and "last:0"
# respectively.
typeset -g POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=false
# Don't shorten this many last directory segments. They are anchors.
typeset -g POWERLEVEL9K_SHORTEN_DIR_LENGTH=1
# Shorten directory if it's longer than this even if there is space for it. The value can
# be either absolute (e.g., '80') or a percentage of terminal width (e.g., '50%'). If empty,
# directory will be shortened only when prompt doesn't fit or when other parameters demand it
# (see POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS and POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT below).
# If set to `0`, directory will always be shortened to its minimum length.
typeset -g POWERLEVEL9K_DIR_MAX_LENGTH=80
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least this
# many columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS=40
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least
# COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT=50
# If set to true, embed a hyperlink into the directory. Useful for quickly
# opening a directory in the file manager simply by clicking the link.
# Can also be handy when the directory is shortened, as it allows you to see
# the full directory that was used in previous commands.
typeset -g POWERLEVEL9K_DIR_HYPERLINK=false
# Enable special styling for non-writable and non-existent directories. See POWERLEVEL9K_LOCK_ICON
# and POWERLEVEL9K_DIR_CLASSES below.
typeset -g POWERLEVEL9K_DIR_SHOW_WRITABLE=v3
# The default icon shown next to non-writable and non-existent directories when
# POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3.
# typeset -g POWERLEVEL9K_LOCK_ICON='⭐'
# POWERLEVEL9K_DIR_CLASSES allows you to specify custom icons and colors for different
# directories. It must be an array with 3 * N elements. Each triplet consists of:
#
# 1. A pattern against which the current directory ($PWD) is matched. Matching is done with
# extended_glob option enabled.
# 2. Directory class for the purpose of styling.
# 3. An empty string.
#
# Triplets are tried in order. The first triplet whose pattern matches $PWD wins.
#
# If POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3, non-writable and non-existent directories
# acquire class suffix _NOT_WRITABLE and _NON_EXISTENT respectively.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=(
# '~/work(|/*)' WORK ''
# '~(|/*)' HOME ''
# '*' DEFAULT '')
#
# Whenever the current directory is ~/work or a subdirectory of ~/work, it gets styled with one
# of the following classes depending on its writability and existence: WORK, WORK_NOT_WRITABLE or
# WORK_NON_EXISTENT.
#
# Simply assigning classes to directories doesn't have any visible effects. It merely gives you an
# option to define custom colors and icons for different directory classes.
#
# # Styling for WORK.
# typeset -g POWERLEVEL9K_DIR_WORK_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_ANCHOR_FOREGROUND=39
#
# # Styling for WORK_NOT_WRITABLE.
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_ANCHOR_FOREGROUND=39
#
# # Styling for WORK_NON_EXISTENT.
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_ANCHOR_FOREGROUND=39
#
# If a styling parameter isn't explicitly defined for some class, it falls back to the classless
# parameter. For example, if POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND is not set, it falls
# back to POWERLEVEL9K_DIR_FOREGROUND.
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=()
# Custom prefix.
# typeset -g POWERLEVEL9K_DIR_PREFIX='%fin '
#####################################[ vcs: git status ]######################################
# Branch icon. Set this parameter to '\uF126 ' for the popular Powerline branch icon.
typeset -g POWERLEVEL9K_VCS_BRANCH_ICON='\uF126 '
# Untracked files icon. It's really a question mark, your font isn't broken.
# Change the value of this parameter to show a different icon.
typeset -g POWERLEVEL9K_VCS_UNTRACKED_ICON='?'
# Formatter for Git status.
#
# Example output: master wip ⇣42⇡42 *42 merge ~42 +42 !42 ?42.
#
# You can edit the function to customize how Git status looks.
#
# VCS_STATUS_* parameters are set by gitstatus plugin. See reference:
# https://github.com/romkatv/gitstatus/blob/master/gitstatus.plugin.zsh.
#
# Usage: my_git_formatter 1|0. $1 is 1 when Git status is up to date and 0 when it is
# incomplete or stale (still loading). The formatted result is stored in $my_git_format.
function my_git_formatter() {
  emulate -L zsh
  if [[ -n $P9K_CONTENT ]]; then
    # If P9K_CONTENT is not empty, use it. It's either "loading" or from vcs_info (not from
    # gitstatus plugin). VCS_STATUS_* parameters are not available in this case.
    typeset -g my_git_format=$P9K_CONTENT
    return
  fi
  if (( $1 )); then
    # Styling for up-to-date Git status.
    local meta='%f' # default foreground
    local clean='%76F' # green foreground
    local modified='%178F' # yellow foreground
    local untracked='%39F' # blue foreground
    local conflicted='%196F' # red foreground
  else
    # Styling for incomplete and stale Git status.
    local meta='%244F' # grey foreground
    local clean='%244F' # grey foreground
    local modified='%244F' # grey foreground
    local untracked='%244F' # grey foreground
    local conflicted='%244F' # grey foreground
  fi
  local res
  if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then
    # ${(V)...} makes special characters in the branch name printable.
    local branch=${(V)VCS_STATUS_LOCAL_BRANCH}
    # If local branch name is at most 32 characters long, show it in full.
    # Otherwise show the first 12 … the last 12.
    # Tip: To always show local branch name in full without truncation, delete the next line.
    (( $#branch > 32 )) && branch[13,-13]="…" # <-- this line
    res+="${clean}${(g::)POWERLEVEL9K_VCS_BRANCH_ICON}${branch//\%/%%}"
  fi
  if [[ -n $VCS_STATUS_TAG
        # Show tag only if not on a branch.
        # Tip: To always show tag, delete the next line.
        && -z $VCS_STATUS_LOCAL_BRANCH # <-- this line
      ]]; then
    local tag=${(V)VCS_STATUS_TAG}
    # If tag name is at most 32 characters long, show it in full.
    # Otherwise show the first 12 … the last 12.
    # Tip: To always show tag name in full without truncation, delete the next line.
    (( $#tag > 32 )) && tag[13,-13]="…" # <-- this line
    res+="${meta}#${clean}${tag//\%/%%}"
  fi
  # Display the current Git commit if there is no branch and no tag.
  # Tip: To always display the current Git commit, delete the next line.
  [[ -z $VCS_STATUS_LOCAL_BRANCH && -z $VCS_STATUS_TAG ]] && # <-- this line
    res+="${meta}@${clean}${VCS_STATUS_COMMIT[1,8]}"
  # Show tracking branch name if it differs from local branch.
  if [[ -n ${VCS_STATUS_REMOTE_BRANCH:#$VCS_STATUS_LOCAL_BRANCH} ]]; then
    res+="${meta}:${clean}${(V)VCS_STATUS_REMOTE_BRANCH//\%/%%}"
  fi
  # Display "wip" if the latest commit's summary contains "wip" or "WIP".
  if [[ $VCS_STATUS_COMMIT_SUMMARY == (|*[^[:alnum:]])(wip|WIP)(|[^[:alnum:]]*) ]]; then
    res+=" ${modified}wip"
  fi
  # ⇣42 if behind the remote.
  (( VCS_STATUS_COMMITS_BEHIND )) && res+=" ${clean}⇣${VCS_STATUS_COMMITS_BEHIND}"
  # ⇡42 if ahead of the remote; no leading space if also behind the remote: ⇣42⇡42.
  (( VCS_STATUS_COMMITS_AHEAD && !VCS_STATUS_COMMITS_BEHIND )) && res+=" "
  (( VCS_STATUS_COMMITS_AHEAD )) && res+="${clean}⇡${VCS_STATUS_COMMITS_AHEAD}"
  # ⇠42 if behind the push remote.
  (( VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" ${clean}⇠${VCS_STATUS_PUSH_COMMITS_BEHIND}"
  (( VCS_STATUS_PUSH_COMMITS_AHEAD && !VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" "
  # ⇢42 if ahead of the push remote; no leading space if also behind: ⇠42⇢42.
  (( VCS_STATUS_PUSH_COMMITS_AHEAD )) && res+="${clean}⇢${VCS_STATUS_PUSH_COMMITS_AHEAD}"
  # *42 if have stashes.
  (( VCS_STATUS_STASHES )) && res+=" ${clean}*${VCS_STATUS_STASHES}"
  # 'merge' if the repo is in an unusual state.
  [[ -n $VCS_STATUS_ACTION ]] && res+=" ${conflicted}${VCS_STATUS_ACTION}"
  # ~42 if have merge conflicts.
  (( VCS_STATUS_NUM_CONFLICTED )) && res+=" ${conflicted}~${VCS_STATUS_NUM_CONFLICTED}"
  # +42 if have staged changes.
  (( VCS_STATUS_NUM_STAGED )) && res+=" ${modified}+${VCS_STATUS_NUM_STAGED}"
  # !42 if have unstaged changes.
  (( VCS_STATUS_NUM_UNSTAGED )) && res+=" ${modified}!${VCS_STATUS_NUM_UNSTAGED}"
  # ?42 if have untracked files. It's really a question mark, your font isn't broken.
  # See POWERLEVEL9K_VCS_UNTRACKED_ICON above if you want to use a different icon.
  # Remove the next line if you don't want to see untracked files at all.
  (( VCS_STATUS_NUM_UNTRACKED )) && res+=" ${untracked}${(g::)POWERLEVEL9K_VCS_UNTRACKED_ICON}${VCS_STATUS_NUM_UNTRACKED}"
  # "─" if the number of unstaged files is unknown. This can happen due to
  # POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY (see below) being set to a non-negative number lower
  # than the number of files in the Git index, or due to bash.showDirtyState being set to false
  # in the repository config. The number of staged and untracked files may also be unknown
  # in this case.
  (( VCS_STATUS_HAS_UNSTAGED == -1 )) && res+=" ${modified}─"
  typeset -g my_git_format=$res
}
# Register my_git_formatter as a zsh math function so it can be invoked from within the
# arithmetic expansions in the CONTENT_EXPANSION parameters below.
functions -M my_git_formatter 2>/dev/null
# Don't count the number of unstaged, untracked and conflicted files in Git repositories with
# more than this many files in the index. Negative value means infinity.
#
# If you are working in Git repositories with tens of millions of files and seeing performance
# sagging, try setting POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY to a number lower than the output
# of `git ls-files | wc -l`. Alternatively, add `bash.showDirtyState = false` to the repository's
# config: `git config bash.showDirtyState false`.
typeset -g POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY=-1
# Don't show Git status in prompt for repositories whose workdir matches this pattern.
# For example, if set to '~', the Git repository at $HOME/.git will be ignored.
# Multiple patterns can be combined with '|': '~(|/foo)|/bar/baz/*'.
typeset -g POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN='~'
# Disable the default Git status formatting.
typeset -g POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING=true
# Install our own Git status formatter.
typeset -g POWERLEVEL9K_VCS_CONTENT_EXPANSION='${$((my_git_formatter(1)))+${my_git_format}}'
typeset -g POWERLEVEL9K_VCS_LOADING_CONTENT_EXPANSION='${$((my_git_formatter(0)))+${my_git_format}}'
# Enable counters for staged, unstaged, etc.
typeset -g POWERLEVEL9K_VCS_{STAGED,UNSTAGED,UNTRACKED,CONFLICTED,COMMITS_AHEAD,COMMITS_BEHIND}_MAX_NUM=-1
# Icon color.
typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_COLOR=76
typeset -g POWERLEVEL9K_VCS_LOADING_VISUAL_IDENTIFIER_COLOR=244
# Custom icon.
# typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_VCS_PREFIX='%fon '
# Show status of repositories of these types. You can add svn and/or hg if you are
# using them. If you do, your prompt may become slow even when your current directory
# isn't in an svn or hg repository.
typeset -g POWERLEVEL9K_VCS_BACKENDS=(git)
# These settings are used for repositories other than Git or when gitstatusd fails and
# Powerlevel10k has to fall back to using vcs_info.
typeset -g POWERLEVEL9K_VCS_CLEAN_FOREGROUND=76
typeset -g POWERLEVEL9K_VCS_UNTRACKED_FOREGROUND=76
typeset -g POWERLEVEL9K_VCS_MODIFIED_FOREGROUND=178
##########################[ status: exit code of the last command ]###########################
# Enable OK_PIPE, ERROR_PIPE and ERROR_SIGNAL status states to allow us to enable, disable and
# style them independently from the regular OK and ERROR state.
typeset -g POWERLEVEL9K_STATUS_EXTENDED_STATES=true
# Status on success. No content, just an icon. No need to show it if prompt_char is enabled as
# it will signify success by turning green.
typeset -g POWERLEVEL9K_STATUS_OK=false
typeset -g POWERLEVEL9K_STATUS_OK_FOREGROUND=70
typeset -g POWERLEVEL9K_STATUS_OK_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when some part of a pipe command fails but the overall exit status is zero. It may look
# like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_OK_PIPE=true
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_FOREGROUND=70
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when it's just an error code (e.g., '1'). No need to show it if prompt_char is enabled as
# it will signify error by turning red.
typeset -g POWERLEVEL9K_STATUS_ERROR=false
typeset -g POWERLEVEL9K_STATUS_ERROR_FOREGROUND=160
typeset -g POWERLEVEL9K_STATUS_ERROR_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when the last command was terminated by a signal.
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL=true
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_FOREGROUND=160
# Use terse signal names: "INT" instead of "SIGINT(2)".
typeset -g POWERLEVEL9K_STATUS_VERBOSE_SIGNAME=false
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when some part of a pipe command fails and the overall exit status is also non-zero.
# It may look like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE=true
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_FOREGROUND=160
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_VISUAL_IDENTIFIER_EXPANSION='✘'
###################[ command_execution_time: duration of the last command ]###################
# Show duration of the last command if it takes at least this many seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
# Show this many fractional digits. Zero means round to seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
# Execution time color.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FOREGROUND=101
# Duration format: 1d 2h 3m 4s.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT='d h m s'
# Custom icon.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PREFIX='%ftook '
#######################[ background_jobs: presence of background jobs ]#######################
# Don't show the number of background jobs.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE=false
# Background jobs color.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ direnv: direnv status (https://direnv.net/) ]########################
# Direnv color.
typeset -g POWERLEVEL9K_DIRENV_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_DIRENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ asdf: asdf version manager (https://github.com/asdf-vm/asdf) ]###############
# Default asdf color. Only used to display tools for which there is no color override (see below).
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_FOREGROUND.
typeset -g POWERLEVEL9K_ASDF_FOREGROUND=66
# There are four parameters that can be used to hide asdf tools. Each parameter describes
# conditions under which a tool gets hidden. Parameters can hide tools but not unhide them. If at
# least one parameter decides to hide a tool, that tool gets hidden. If no parameter decides to
# hide a tool, it gets shown.
#
# Special note on the difference between POWERLEVEL9K_ASDF_SOURCES and
# POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW. Consider the effect of the following commands:
#
# asdf local python 3.8.1
# asdf global python 3.8.1
#
# After running both commands the current python version is 3.8.1 and its source is "local" as
# it takes precedence over "global". If POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW is set to false,
# it'll hide python version in this case because 3.8.1 is the same as the global version.
# POWERLEVEL9K_ASDF_SOURCES will hide python version only if the value of this parameter doesn't
# contain "local".
# Hide tool versions that don't come from one of these sources.
#
# Available sources:
#
# - shell `asdf current` says "set by ASDF_${TOOL}_VERSION environment variable"
# - local `asdf current` says "set by /some/not/home/directory/file"
# - global `asdf current` says "set by /home/username/file"
#
# Note: If this parameter is set to (shell local global), it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SOURCES.
typeset -g POWERLEVEL9K_ASDF_SOURCES=(shell local global)
# If set to false, hide tool versions that are the same as global.
#
# Note: The name of this parameter doesn't reflect its meaning at all.
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_PROMPT_ALWAYS_SHOW.
typeset -g POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW=false
# If set to false, hide tool versions that are equal to "system".
#
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_SYSTEM.
typeset -g POWERLEVEL9K_ASDF_SHOW_SYSTEM=true
# If set to non-empty value, hide tools unless there is a file matching the specified file pattern
# in the current directory, or its parent directory, or its grandparent directory, and so on.
#
# Note: If this parameter is set to empty value, it won't hide tools.
# Note: SHOW_ON_UPGLOB isn't specific to asdf. It works with all prompt segments.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_ON_UPGLOB.
#
# Example: Hide nodejs version when there is no package.json and no *.js files in the current
# directory, in `..`, in `../..` and so on.
#
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.js|package.json'
typeset -g POWERLEVEL9K_ASDF_SHOW_ON_UPGLOB=
# Ruby version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUBY_FOREGROUND=168
# typeset -g POWERLEVEL9K_ASDF_RUBY_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUBY_SHOW_ON_UPGLOB='*.foo|*.bar'
# Python version from asdf.
typeset -g POWERLEVEL9K_ASDF_PYTHON_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_PYTHON_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PYTHON_SHOW_ON_UPGLOB='*.foo|*.bar'
# Go version from asdf.
typeset -g POWERLEVEL9K_ASDF_GOLANG_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_GOLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_GOLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Node.js version from asdf.
typeset -g POWERLEVEL9K_ASDF_NODEJS_FOREGROUND=70
# typeset -g POWERLEVEL9K_ASDF_NODEJS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.foo|*.bar'
# Rust version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUST_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_RUST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUST_SHOW_ON_UPGLOB='*.foo|*.bar'
# .NET Core version from asdf.
typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_FOREGROUND=134
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_SHOW_ON_UPGLOB='*.foo|*.bar'
# Flutter version from asdf.
typeset -g POWERLEVEL9K_ASDF_FLUTTER_FOREGROUND=38
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_SHOW_ON_UPGLOB='*.foo|*.bar'
# Lua version from asdf.
typeset -g POWERLEVEL9K_ASDF_LUA_FOREGROUND=32
# typeset -g POWERLEVEL9K_ASDF_LUA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_LUA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Java version from asdf.
typeset -g POWERLEVEL9K_ASDF_JAVA_FOREGROUND=32
# typeset -g POWERLEVEL9K_ASDF_JAVA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JAVA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Perl version from asdf.
typeset -g POWERLEVEL9K_ASDF_PERL_FOREGROUND=67
# typeset -g POWERLEVEL9K_ASDF_PERL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PERL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Erlang version from asdf.
typeset -g POWERLEVEL9K_ASDF_ERLANG_FOREGROUND=125
# typeset -g POWERLEVEL9K_ASDF_ERLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ERLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Elixir version from asdf.
typeset -g POWERLEVEL9K_ASDF_ELIXIR_FOREGROUND=129
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_SHOW_ON_UPGLOB='*.foo|*.bar'
# Postgres version from asdf.
typeset -g POWERLEVEL9K_ASDF_POSTGRES_FOREGROUND=31
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_SHOW_ON_UPGLOB='*.foo|*.bar'
# PHP version from asdf.
typeset -g POWERLEVEL9K_ASDF_PHP_FOREGROUND=99
# typeset -g POWERLEVEL9K_ASDF_PHP_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PHP_SHOW_ON_UPGLOB='*.foo|*.bar'
# Haskell version from asdf.
typeset -g POWERLEVEL9K_ASDF_HASKELL_FOREGROUND=172
# typeset -g POWERLEVEL9K_ASDF_HASKELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_HASKELL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Julia version from asdf.
typeset -g POWERLEVEL9K_ASDF_JULIA_FOREGROUND=70
# typeset -g POWERLEVEL9K_ASDF_JULIA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JULIA_SHOW_ON_UPGLOB='*.foo|*.bar'
##########[ nordvpn: nordvpn connection status, linux only (https://nordvpn.com/) ]###########
# NordVPN connection indicator color.
typeset -g POWERLEVEL9K_NORDVPN_FOREGROUND=39
# Hide NordVPN connection indicator when not connected.
# (Empty content and icon expansions make the segment invisible in these states.)
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_CONTENT_EXPANSION=
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_VISUAL_IDENTIFIER_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NORDVPN_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ ranger: ranger shell (https://github.com/ranger/ranger) ]##################
# Ranger shell color.
typeset -g POWERLEVEL9K_RANGER_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_RANGER_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################[ nnn: nnn shell (https://github.com/jarun/nnn) ]#######################
# Nnn shell color.
typeset -g POWERLEVEL9K_NNN_FOREGROUND=72
# Custom icon.
# typeset -g POWERLEVEL9K_NNN_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################[ xplr: xplr shell (https://github.com/sayanarijit/xplr) ]##################
# xplr shell color.
typeset -g POWERLEVEL9K_XPLR_FOREGROUND=72
# Custom icon.
# typeset -g POWERLEVEL9K_XPLR_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########################[ vim_shell: vim shell indicator (:sh) ]###########################
# Vim shell indicator color.
typeset -g POWERLEVEL9K_VIM_SHELL_FOREGROUND=34
# Custom icon.
# typeset -g POWERLEVEL9K_VIM_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
######[ midnight_commander: midnight commander shell (https://midnight-commander.org/) ]######
# Midnight Commander shell color.
typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ nix_shell: nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) ]##
# Nix shell color.
typeset -g POWERLEVEL9K_NIX_SHELL_FOREGROUND=74
# Tip: If you want to see just the icon without "pure" and "impure", uncomment the next line.
# typeset -g POWERLEVEL9K_NIX_SHELL_CONTENT_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NIX_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ disk_usage: disk usage ]##################################
# Colors for different levels of disk usage.
typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_FOREGROUND=35
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_FOREGROUND=220
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_FOREGROUND=160
# Thresholds for different levels of disk usage (percentage points).
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL=90
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL=95
# If set to true, hide disk usage when below $POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL percent.
typeset -g POWERLEVEL9K_DISK_USAGE_ONLY_WARNING=false
# Custom icon.
# typeset -g POWERLEVEL9K_DISK_USAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ ram: free RAM ]#######################################
# RAM color.
typeset -g POWERLEVEL9K_RAM_FOREGROUND=66
# Custom icon.
# typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################################[ swap: used swap ]######################################
# Swap color.
typeset -g POWERLEVEL9K_SWAP_FOREGROUND=96
# Custom icon.
# typeset -g POWERLEVEL9K_SWAP_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ load: CPU load ]######################################
# Show average CPU load over this many last minutes. Valid values are 1, 5 and 15.
typeset -g POWERLEVEL9K_LOAD_WHICH=5
# NOTE(review): the percentages below are presumably relative to the number of CPU cores —
# confirm against the Powerlevel10k documentation.
# Load color when load is under 50%.
typeset -g POWERLEVEL9K_LOAD_NORMAL_FOREGROUND=66
# Load color when load is between 50% and 70%.
typeset -g POWERLEVEL9K_LOAD_WARNING_FOREGROUND=178
# Load color when load is over 70%.
typeset -g POWERLEVEL9K_LOAD_CRITICAL_FOREGROUND=166
# Custom icon.
# typeset -g POWERLEVEL9K_LOAD_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ todo: todo items (https://github.com/todotxt/todo.txt-cli) ]################
# Todo color.
typeset -g POWERLEVEL9K_TODO_FOREGROUND=110
# Hide todo when the total number of tasks is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL=true
# Hide todo when the number of tasks after filtering is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED=false
# Todo format. The following parameters are available within the expansion.
#
# - P9K_TODO_TOTAL_TASK_COUNT The total number of tasks.
# - P9K_TODO_FILTERED_TASK_COUNT The number of tasks after filtering.
#
# These variables correspond to the last line of the output of `todo.sh -p ls`:
#
# TODO: 24 of 42 tasks shown
#
# Here 24 is P9K_TODO_FILTERED_TASK_COUNT and 42 is P9K_TODO_TOTAL_TASK_COUNT.
#
# typeset -g POWERLEVEL9K_TODO_CONTENT_EXPANSION='$P9K_TODO_FILTERED_TASK_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TODO_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ timewarrior: timewarrior tracking status (https://timewarrior.net/) ]############
# Timewarrior color.
typeset -g POWERLEVEL9K_TIMEWARRIOR_FOREGROUND=110
# If the tracked task is longer than 24 characters, truncate and append "…".
# Tip: To always display tasks without truncation, delete the following parameter.
# Tip: To hide task names and display just the icon when time tracking is enabled, set the
# value of the following parameter to "".
# (${P9K_CONTENT:0:24} keeps the first 24 characters; the second expansion appends "…"
# only when anything was cut off.)
typeset -g POWERLEVEL9K_TIMEWARRIOR_CONTENT_EXPANSION='${P9K_CONTENT:0:24}${${P9K_CONTENT:24}:+…}'
# Custom icon.
# typeset -g POWERLEVEL9K_TIMEWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ taskwarrior: taskwarrior task count (https://taskwarrior.org/) ]##############
# Taskwarrior color.
typeset -g POWERLEVEL9K_TASKWARRIOR_FOREGROUND=74
# Taskwarrior segment format. The following parameters are available within the expansion.
#
# - P9K_TASKWARRIOR_PENDING_COUNT The number of pending tasks: `task +PENDING count`.
# - P9K_TASKWARRIOR_OVERDUE_COUNT The number of overdue tasks: `task +OVERDUE count`.
#
# Zero values are represented as empty parameters.
#
# The default format:
#
# '${P9K_TASKWARRIOR_OVERDUE_COUNT:+"!$P9K_TASKWARRIOR_OVERDUE_COUNT/"}$P9K_TASKWARRIOR_PENDING_COUNT'
#
# typeset -g POWERLEVEL9K_TASKWARRIOR_CONTENT_EXPANSION='$P9K_TASKWARRIOR_PENDING_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TASKWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ context: user@hostname ]##################################
# Context color when running with privileges.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_FOREGROUND=178
# Context color in SSH without privileges.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_FOREGROUND=180
# Default context color (no privileges, no SSH).
typeset -g POWERLEVEL9K_CONTEXT_FOREGROUND=180
# Context format when running with privileges: bold user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_TEMPLATE='%B%n@%m'
# Context format when in SSH without privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_TEMPLATE='%n@%m'
# Default context format (no privileges, no SSH): user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_TEMPLATE='%n@%m'
# Don't show context unless running with privileges or in SSH.
# Tip: Remove the next line to always show context.
typeset -g POWERLEVEL9K_CONTEXT_{DEFAULT,SUDO}_{CONTENT,VISUAL_IDENTIFIER}_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_CONTEXT_PREFIX='%fwith '
###[ virtualenv: python virtual environment (https://docs.python.org/3/library/venv.html) ]###
# Python virtual environment color.
typeset -g POWERLEVEL9K_VIRTUALENV_FOREGROUND=37
# Don't show Python version next to the virtual environment name.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION=false
# If set to "false", won't show virtualenv if pyenv is already shown.
# If set to "if-different", won't show virtualenv if it's the same as pyenv.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV=false
# Separate environment name from Python version only with a space.
typeset -g POWERLEVEL9K_VIRTUALENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_VIRTUALENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ anaconda: conda environment (https://conda.io/) ]######################
# Anaconda environment color.
typeset -g POWERLEVEL9K_ANACONDA_FOREGROUND=37
# Anaconda segment format. The following parameters are available within the expansion.
#
# - CONDA_PREFIX Absolute path to the active Anaconda/Miniconda environment.
# - CONDA_DEFAULT_ENV Name of the active Anaconda/Miniconda environment.
# - CONDA_PROMPT_MODIFIER Configurable prompt modifier (see below).
# - P9K_ANACONDA_PYTHON_VERSION Current python version (python --version).
#
# CONDA_PROMPT_MODIFIER can be configured with the following command:
#
# conda config --set env_prompt '({default_env}) '
#
# The last argument is a Python format string that can use the following variables:
#
# - prefix The same as CONDA_PREFIX.
# - default_env The same as CONDA_DEFAULT_ENV.
# - name The last segment of CONDA_PREFIX.
# - stacked_env Comma-separated list of names in the environment stack. The first element is
# always the same as default_env.
#
# Note: '({default_env}) ' is the default value of env_prompt.
#
# The default value of POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION expands to $CONDA_PROMPT_MODIFIER
# without the surrounding parentheses, or to the last path component of CONDA_PREFIX if the former
# is empty.
# (The nested expansions strip a leading '(', a trailing ' ' and a trailing ')' in turn;
# ':t' is the zsh modifier that takes the tail, i.e. the basename, of $CONDA_PREFIX.)
typeset -g POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION='${${${${CONDA_PROMPT_MODIFIER#\(}% }%\)}:-${CONDA_PREFIX:t}}'
# Custom icon.
# typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ pyenv: python environment (https://github.com/pyenv/pyenv) ]################
# Pyenv color.
typeset -g POWERLEVEL9K_PYENV_FOREGROUND=37
# Hide python version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PYENV_SOURCES=(shell local global)
# If set to false, hide python version if it's the same as global:
# $(pyenv version-name) == $(pyenv global).
typeset -g POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide python version if it's equal to "system".
typeset -g POWERLEVEL9K_PYENV_SHOW_SYSTEM=true
# Pyenv segment format. The following parameters are available within the expansion.
#
# - P9K_CONTENT Current pyenv environment (pyenv version-name).
# - P9K_PYENV_PYTHON_VERSION Current python version (python --version).
#
# The default format has the following logic:
#
# 1. Display just "$P9K_CONTENT" if it's equal to "$P9K_PYENV_PYTHON_VERSION" or
# starts with "$P9K_PYENV_PYTHON_VERSION/".
# 2. Otherwise display "$P9K_CONTENT $P9K_PYENV_PYTHON_VERSION".
typeset -g POWERLEVEL9K_PYENV_CONTENT_EXPANSION='${P9K_CONTENT}${${P9K_CONTENT:#$P9K_PYENV_PYTHON_VERSION(|/*)}:+ $P9K_PYENV_PYTHON_VERSION}'
# Custom icon.
# typeset -g POWERLEVEL9K_PYENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ goenv: go environment (https://github.com/syndbg/goenv) ]################
# Goenv color.
typeset -g POWERLEVEL9K_GOENV_FOREGROUND=37
# Hide go version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_GOENV_SOURCES=(shell local global)
# If set to false, hide go version if it's the same as global:
# $(goenv version-name) == $(goenv global).
typeset -g POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide go version if it's equal to "system".
typeset -g POWERLEVEL9K_GOENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_GOENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ nodenv: node.js version from nodenv (https://github.com/nodenv/nodenv) ]##########
# Nodenv color.
typeset -g POWERLEVEL9K_NODENV_FOREGROUND=70
# Hide node version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_NODENV_SOURCES=(shell local global)
# If set to false, hide node version if it's the same as global:
# $(nodenv version-name) == $(nodenv global).
typeset -g POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide node version if it's equal to "system".
typeset -g POWERLEVEL9K_NODENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ nvm: node.js version from nvm (https://github.com/nvm-sh/nvm) ]###############
# Nvm color.
typeset -g POWERLEVEL9K_NVM_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_NVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ nodeenv: node.js environment (https://github.com/ekalinin/nodeenv) ]############
# Nodeenv color.
typeset -g POWERLEVEL9K_NODEENV_FOREGROUND=70
# Don't show Node version next to the environment name.
typeset -g POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION=false
# Separate environment name from Node version only with a space.
typeset -g POWERLEVEL9K_NODEENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_NODEENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############################[ node_version: node.js version ]###############################
# Node version color.
typeset -g POWERLEVEL9K_NODE_VERSION_FOREGROUND=70
# Show node version only when in a directory tree containing package.json.
typeset -g POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODE_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ go_version: go version (https://golang.org) ]########################
# Go version color.
typeset -g POWERLEVEL9K_GO_VERSION_FOREGROUND=37
# Show go version only when in a go project subdirectory.
typeset -g POWERLEVEL9K_GO_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_GO_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ rust_version: rustc version (https://www.rust-lang.org) ]##################
# Rust version color.
typeset -g POWERLEVEL9K_RUST_VERSION_FOREGROUND=37
# Show rust version only when in a rust project subdirectory.
typeset -g POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_RUST_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ dotnet_version: .NET version (https://dotnet.microsoft.com) ]################
# .NET version color.
typeset -g POWERLEVEL9K_DOTNET_VERSION_FOREGROUND=134
# Show .NET version only when in a .NET project subdirectory.
typeset -g POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_DOTNET_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ php_version: php version (https://www.php.net/) ]######################
# PHP version color.
typeset -g POWERLEVEL9K_PHP_VERSION_FOREGROUND=99
# Show PHP version only when in a PHP project subdirectory.
typeset -g POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHP_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ laravel_version: laravel php framework version (https://laravel.com/) ]###########
# Laravel version color.
typeset -g POWERLEVEL9K_LARAVEL_VERSION_FOREGROUND=161
# Custom icon.
# typeset -g POWERLEVEL9K_LARAVEL_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
####################[ java_version: java version (https://www.java.com/) ]####################
# Java version color.
typeset -g POWERLEVEL9K_JAVA_VERSION_FOREGROUND=32
# Show java version only when in a java project subdirectory.
typeset -g POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY=true
# Show brief version.
typeset -g POWERLEVEL9K_JAVA_VERSION_FULL=false
# Custom icon.
# typeset -g POWERLEVEL9K_JAVA_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###[ package: name@version from package.json (https://docs.npmjs.com/files/package.json) ]####
# Package color.
typeset -g POWERLEVEL9K_PACKAGE_FOREGROUND=117
# Package format. The following parameters are available within the expansion.
#
# - P9K_PACKAGE_NAME The value of `name` field in package.json.
# - P9K_PACKAGE_VERSION The value of `version` field in package.json.
#
# typeset -g POWERLEVEL9K_PACKAGE_CONTENT_EXPANSION='${P9K_PACKAGE_NAME//\%/%%}@${P9K_PACKAGE_VERSION//\%/%%}'
# Custom icon.
# typeset -g POWERLEVEL9K_PACKAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ rbenv: ruby version from rbenv (https://github.com/rbenv/rbenv) ]##############
# Rbenv color.
typeset -g POWERLEVEL9K_RBENV_FOREGROUND=168
# Hide ruby version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_RBENV_SOURCES=(shell local global)
# If set to false, hide ruby version if it's the same as global:
# $(rbenv version-name) == $(rbenv global).
typeset -g POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide ruby version if it's equal to "system".
typeset -g POWERLEVEL9K_RBENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_RBENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ rvm: ruby version from rvm (https://rvm.io) ]########################
# Rvm color.
typeset -g POWERLEVEL9K_RVM_FOREGROUND=168
# Don't show @gemset at the end.
typeset -g POWERLEVEL9K_RVM_SHOW_GEMSET=false
# Don't show ruby- at the front.
typeset -g POWERLEVEL9K_RVM_SHOW_PREFIX=false
# Custom icon.
# typeset -g POWERLEVEL9K_RVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ fvm: flutter version management (https://github.com/leoafarias/fvm) ]############
# Fvm color.
typeset -g POWERLEVEL9K_FVM_FOREGROUND=38
# Custom icon.
# typeset -g POWERLEVEL9K_FVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ luaenv: lua version from luaenv (https://github.com/cehoffman/luaenv) ]###########
# Lua color.
typeset -g POWERLEVEL9K_LUAENV_FOREGROUND=32
# Hide lua version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_LUAENV_SOURCES=(shell local global)
# If set to false, hide lua version if it's the same as global:
# $(luaenv version-name) == $(luaenv global).
typeset -g POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide lua version if it's equal to "system".
typeset -g POWERLEVEL9K_LUAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_LUAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ jenv: java version from jenv (https://github.com/jenv/jenv) ]################
# Java color.
typeset -g POWERLEVEL9K_JENV_FOREGROUND=32
# Hide java version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_JENV_SOURCES=(shell local global)
# If set to false, hide java version if it's the same as global:
# $(jenv version-name) == $(jenv global).
typeset -g POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide java version if it's equal to "system".
typeset -g POWERLEVEL9K_JENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_JENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ plenv: perl version from plenv (https://github.com/tokuhirom/plenv) ]############
# Perl color.
typeset -g POWERLEVEL9K_PLENV_FOREGROUND=67
# Hide perl version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PLENV_SOURCES=(shell local global)
# If set to false, hide perl version if it's the same as global:
# $(plenv version-name) == $(plenv global).
typeset -g POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide perl version if it's equal to "system".
typeset -g POWERLEVEL9K_PLENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PLENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ phpenv: php version from phpenv (https://github.com/phpenv/phpenv) ]############
# PHP color.
typeset -g POWERLEVEL9K_PHPENV_FOREGROUND=99
# Hide php version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PHPENV_SOURCES=(shell local global)
# If set to false, hide php version if it's the same as global:
# $(phpenv version-name) == $(phpenv global).
typeset -g POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide php version if it's equal to "system".
typeset -g POWERLEVEL9K_PHPENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHPENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######[ scalaenv: scala version from scalaenv (https://github.com/scalaenv/scalaenv) ]#######
# Scala color.
typeset -g POWERLEVEL9K_SCALAENV_FOREGROUND=160
# Hide scala version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_SCALAENV_SOURCES=(shell local global)
# If set to false, hide scala version if it's the same as global:
# $(scalaenv version-name) == $(scalaenv global).
typeset -g POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide scala version if it's equal to "system".
typeset -g POWERLEVEL9K_SCALAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_SCALAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ haskell_stack: haskell version from stack (https://haskellstack.org/) ]###########
# Haskell color.
typeset -g POWERLEVEL9K_HASKELL_STACK_FOREGROUND=172
# Hide haskell version if it doesn't come from one of these sources.
#
# shell: version is set by STACK_YAML
# local: version is set by stack.yaml up the directory tree
# global: version is set by the implicit global project (~/.stack/global-project/stack.yaml)
typeset -g POWERLEVEL9K_HASKELL_STACK_SOURCES=(shell local)
# If set to false, hide haskell version if it's the same as in the implicit global project.
typeset -g POWERLEVEL9K_HASKELL_STACK_ALWAYS_SHOW=true
# Custom icon.
# typeset -g POWERLEVEL9K_HASKELL_STACK_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ kubecontext: current kubernetes context (https://kubernetes.io/) ]#############
# Show kubecontext only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show kubecontext.
typeset -g POWERLEVEL9K_KUBECONTEXT_SHOW_ON_COMMAND='kubectl|helm|kubens|kubectx|oc|istioctl|kogito|k9s|helmfile|flux|fluxctl|stern'
# Kubernetes context classes for the purpose of using different colors, icons and expansions with
# different contexts.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current kubernetes context gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_KUBECONTEXT_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_KUBECONTEXT_CLASSES defines the context class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current kubernetes context is "deathray-testing/default", its class is TEST
# because "deathray-testing/default" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_FOREGROUND=134
# typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_KUBECONTEXT_CONTENT_EXPANSION to specify the content displayed by kubecontext
# segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# Within the expansion the following parameters are always available:
#
# - P9K_CONTENT The content that would've been displayed if there was no content
# expansion defined.
# - P9K_KUBECONTEXT_NAME The current context's name. Corresponds to column NAME in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_CLUSTER The current context's cluster. Corresponds to column CLUSTER in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_NAMESPACE The current context's namespace. Corresponds to column NAMESPACE
# in the output of `kubectl config get-contexts`. If there is no
# namespace, the parameter is set to "default".
# - P9K_KUBECONTEXT_USER The current context's user. Corresponds to column AUTHINFO in the
# output of `kubectl config get-contexts`.
#
# If the context points to Google Kubernetes Engine (GKE) or Elastic Kubernetes Service (EKS),
# the following extra parameters are available:
#
# - P9K_KUBECONTEXT_CLOUD_NAME Either "gke" or "eks".
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT Account/project ID.
# - P9K_KUBECONTEXT_CLOUD_ZONE Availability zone.
# - P9K_KUBECONTEXT_CLOUD_CLUSTER Cluster.
#
# P9K_KUBECONTEXT_CLOUD_* parameters are derived from P9K_KUBECONTEXT_CLUSTER. For example,
# if P9K_KUBECONTEXT_CLUSTER is "gke_my-account_us-east1-a_my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=gke
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=my-account
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east1-a
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
#
# If P9K_KUBECONTEXT_CLUSTER is "arn:aws:eks:us-east-1:123456789012:cluster/my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=eks
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=123456789012
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east-1
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION=
# Show P9K_KUBECONTEXT_CLOUD_CLUSTER if it's not empty and fall back to P9K_KUBECONTEXT_NAME.
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${P9K_KUBECONTEXT_CLOUD_CLUSTER:-${P9K_KUBECONTEXT_NAME}}'
# Append the current context's namespace if it's not "default".
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default}'
# Custom prefix.
# typeset -g POWERLEVEL9K_KUBECONTEXT_PREFIX='%fat '
################[ terraform: terraform workspace (https://www.terraform.io) ]#################
# Don't show terraform workspace if it's literally "default".
typeset -g POWERLEVEL9K_TERRAFORM_SHOW_DEFAULT=false
# POWERLEVEL9K_TERRAFORM_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current terraform workspace gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_TERRAFORM_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_TERRAFORM_CLASSES defines the workspace class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' OTHER)
#
# If your current terraform workspace is "project_test", its class is TEST because "project_test"
# doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' OTHER)
typeset -g POWERLEVEL9K_TERRAFORM_OTHER_FOREGROUND=38
# typeset -g POWERLEVEL9K_TERRAFORM_OTHER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ terraform_version: terraform version (https://www.terraform.io) ]##############
# Terraform version color.
typeset -g POWERLEVEL9K_TERRAFORM_VERSION_FOREGROUND=38
# Custom icon.
# typeset -g POWERLEVEL9K_TERRAFORM_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws: aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) ]#
# Show aws only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show aws.
typeset -g POWERLEVEL9K_AWS_SHOW_ON_COMMAND='aws|awless|terraform|pulumi|terragrunt'
# POWERLEVEL9K_AWS_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current AWS profile gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_AWS_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_AWS_CLASSES defines the profile class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current AWS profile is "company_test", its class is TEST
# because "company_test" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_AWS_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_AWS_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_AWS_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_AWS_DEFAULT_FOREGROUND=208
# typeset -g POWERLEVEL9K_AWS_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# AWS segment format. The following parameters are available within the expansion.
#
# - P9K_AWS_PROFILE The name of the current AWS profile.
# - P9K_AWS_REGION The region associated with the current AWS profile.
typeset -g POWERLEVEL9K_AWS_CONTENT_EXPANSION='${P9K_AWS_PROFILE//\%/%%}${P9K_AWS_REGION:+ ${P9K_AWS_REGION//\%/%%}}'
#[ aws_eb_env: aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) ]#
# AWS Elastic Beanstalk environment color.
typeset -g POWERLEVEL9K_AWS_EB_ENV_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_AWS_EB_ENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ azure: azure account name (https://docs.microsoft.com/en-us/cli/azure) ]##########
# Show azure only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show azure.
typeset -g POWERLEVEL9K_AZURE_SHOW_ON_COMMAND='az|terraform|pulumi|terragrunt'
# Azure account name color.
typeset -g POWERLEVEL9K_AZURE_FOREGROUND=32
# Custom icon.
# typeset -g POWERLEVEL9K_AZURE_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ gcloud: google cloud account and project (https://cloud.google.com/) ]###########
# Show gcloud only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show gcloud.
typeset -g POWERLEVEL9K_GCLOUD_SHOW_ON_COMMAND='gcloud|gcs'
# Google cloud color.
typeset -g POWERLEVEL9K_GCLOUD_FOREGROUND=32
# Google cloud format. Change the value of POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION and/or
# POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION if the default is too verbose or not informative
# enough. You can use the following parameters in the expansions. Each of them corresponds to the
# output of `gcloud` tool.
#
# Parameter | Source
# -------------------------|--------------------------------------------------------------------
# P9K_GCLOUD_CONFIGURATION | gcloud config configurations list --format='value(name)'
# P9K_GCLOUD_ACCOUNT | gcloud config get-value account
# P9K_GCLOUD_PROJECT_ID | gcloud config get-value project
# P9K_GCLOUD_PROJECT_NAME | gcloud projects describe $P9K_GCLOUD_PROJECT_ID --format='value(name)'
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced with '%%'.
#
# Obtaining project name requires sending a request to Google servers. This can take a long time
# and even fail. When project name is unknown, P9K_GCLOUD_PROJECT_NAME is not set and gcloud
# prompt segment is in state PARTIAL. When project name gets known, P9K_GCLOUD_PROJECT_NAME gets
# set and gcloud prompt segment transitions to state COMPLETE.
#
# You can customize the format, icon and colors of gcloud segment separately for states PARTIAL
# and COMPLETE. You can also hide gcloud in state PARTIAL by setting
# POWERLEVEL9K_GCLOUD_PARTIAL_VISUAL_IDENTIFIER_EXPANSION and
# POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION to empty.
typeset -g POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_ID//\%/%%}'
typeset -g POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_NAME//\%/%%}'
# Send a request to Google (by means of `gcloud projects describe ...`) to obtain project name
# this often. Negative value disables periodic polling. In this mode project name is retrieved
# only when the current configuration, account or project id changes.
typeset -g POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS=60
# Custom icon.
# typeset -g POWERLEVEL9K_GCLOUD_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ google_app_cred: google application credentials (https://cloud.google.com/docs/authentication/production) ]#
# Show google_app_cred only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show google_app_cred.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_SHOW_ON_COMMAND='terraform|pulumi|terragrunt'
# Google application credentials classes for the purpose of using different colors, icons and
# expansions with different credentials.
#
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES is an array with even number of elements. The first
# element in each pair defines a pattern against which the current kubernetes context gets
# matched. More specifically, it's P9K_CONTENT prior to the application of context expansion
# (see below) that gets matched. If you unset all POWERLEVEL9K_GOOGLE_APP_CRED_*CONTENT_EXPANSION
# parameters, you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES defines the context class. Patterns are tried in order.
# The first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD
# '*:*test*:*' TEST
# '*' DEFAULT)
#
# If your current Google application credentials is "service_account:deathray-testing:x@y.com",
# its class is TEST because it doesn't match the pattern '*:*prod*:*' but does match '*:*test*:*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_CONTENT_EXPANSION='$P9K_GOOGLE_APP_CRED_PROJECT_ID'
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD # These values are examples that are unlikely
# '*:*test*:*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_FOREGROUND=32
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_GOOGLE_APP_CRED_CONTENT_EXPANSION to specify the content displayed by
# google_app_cred segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# You can use the following parameters in the expansion. Each of them corresponds to one of the
# fields in the JSON file pointed to by GOOGLE_APPLICATION_CREDENTIALS.
#
# Parameter | JSON key file field
# ---------------------------------+---------------
# P9K_GOOGLE_APP_CRED_TYPE | type
# P9K_GOOGLE_APP_CRED_PROJECT_ID | project_id
# P9K_GOOGLE_APP_CRED_CLIENT_EMAIL | client_email
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced by '%%'.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_CONTENT_EXPANSION='${P9K_GOOGLE_APP_CRED_PROJECT_ID//\%/%%}'
##############[ toolbox: toolbox name (https://github.com/containers/toolbox) ]###############
# Toolbox color.
typeset -g POWERLEVEL9K_TOOLBOX_FOREGROUND=178
# Don't display the name of the toolbox if it matches fedora-toolbox-*.
typeset -g POWERLEVEL9K_TOOLBOX_CONTENT_EXPANSION='${P9K_TOOLBOX_NAME:#fedora-toolbox-*}'
# Custom icon.
# typeset -g POWERLEVEL9K_TOOLBOX_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_TOOLBOX_PREFIX='%fin '
###############################[ public_ip: public IP address ]###############################
# Public IP color.
typeset -g POWERLEVEL9K_PUBLIC_IP_FOREGROUND=94
# Custom icon.
# typeset -g POWERLEVEL9K_PUBLIC_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
########################[ vpn_ip: virtual private network indicator ]#########################
# VPN IP color.
typeset -g POWERLEVEL9K_VPN_IP_FOREGROUND=81
# When on VPN, show just an icon without the IP address.
# Tip: To display the private IP address when on VPN, remove the next line.
typeset -g POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION=
# Regular expression for the VPN network interface. Run `ifconfig` or `ip -4 a show` while on VPN
# to see the name of the interface.
typeset -g POWERLEVEL9K_VPN_IP_INTERFACE='(gpd|wg|(.*tun)|tailscale)[0-9]*'
# If set to true, show one segment per matching network interface. If set to false, show only
# one segment corresponding to the first matching network interface.
# Tip: If you set it to true, you'll probably want to unset POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION.
typeset -g POWERLEVEL9K_VPN_IP_SHOW_ALL=false
# Custom icon.
# typeset -g POWERLEVEL9K_VPN_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ ip: ip address and bandwidth usage for a specified network interface ]###########
# IP color.
typeset -g POWERLEVEL9K_IP_FOREGROUND=38
# The following parameters are accessible within the expansion:
#
# Parameter | Meaning
# ----------------------+-------------------------------------------
# P9K_IP_IP | IP address
# P9K_IP_INTERFACE | network interface
# P9K_IP_RX_BYTES | total number of bytes received
# P9K_IP_TX_BYTES | total number of bytes sent
# P9K_IP_RX_BYTES_DELTA | number of bytes received since last prompt
# P9K_IP_TX_BYTES_DELTA | number of bytes sent since last prompt
# P9K_IP_RX_RATE | receive rate (since last prompt)
# P9K_IP_TX_RATE | send rate (since last prompt)
typeset -g POWERLEVEL9K_IP_CONTENT_EXPANSION='$P9K_IP_IP${P9K_IP_RX_RATE:+ %70F⇣$P9K_IP_RX_RATE}${P9K_IP_TX_RATE:+ %215F⇡$P9K_IP_TX_RATE}'
# Show information for the first network interface whose name matches this regular expression.
# Run `ifconfig` or `ip -4 a show` to see the names of all network interfaces.
typeset -g POWERLEVEL9K_IP_INTERFACE='[ew].*'
# Custom icon.
# typeset -g POWERLEVEL9K_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
#########################[ proxy: system-wide http/https/ftp proxy ]##########################
# Proxy color.
typeset -g POWERLEVEL9K_PROXY_FOREGROUND=68
# Custom icon.
# typeset -g POWERLEVEL9K_PROXY_VISUAL_IDENTIFIER_EXPANSION='⭐'
################################[ battery: internal battery ]#################################
# Show battery in red when it's below this level and not connected to power supply.
typeset -g POWERLEVEL9K_BATTERY_LOW_THRESHOLD=20
typeset -g POWERLEVEL9K_BATTERY_LOW_FOREGROUND=160
# Show battery in green when it's charging or fully charged.
typeset -g POWERLEVEL9K_BATTERY_{CHARGING,CHARGED}_FOREGROUND=70
# Show battery in yellow when it's discharging.
typeset -g POWERLEVEL9K_BATTERY_DISCONNECTED_FOREGROUND=178
# Battery pictograms going from low to high level of charge.
typeset -g POWERLEVEL9K_BATTERY_STAGES='\uf58d\uf579\uf57a\uf57b\uf57c\uf57d\uf57e\uf57f\uf580\uf581\uf578'
# Don't show the remaining time to charge/discharge.
typeset -g POWERLEVEL9K_BATTERY_VERBOSE=false
#####################################[ wifi: wifi speed ]#####################################
# WiFi color.
typeset -g POWERLEVEL9K_WIFI_FOREGROUND=68
# Custom icon.
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use different colors and icons depending on signal strength ($P9K_WIFI_BARS).
#
# # Wifi colors and icons for different signal strength levels (low to high).
# typeset -g my_wifi_fg=(68 68 68 68 68) # <-- change these values
# typeset -g my_wifi_icon=('WiFi' 'WiFi' 'WiFi' 'WiFi' 'WiFi') # <-- change these values
#
# typeset -g POWERLEVEL9K_WIFI_CONTENT_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}$P9K_WIFI_LAST_TX_RATE Mbps'
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}${my_wifi_icon[P9K_WIFI_BARS+1]}'
#
# The following parameters are accessible within the expansions:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_WIFI_SSID | service set identifier, a.k.a. network name
# P9K_WIFI_LINK_AUTH | authentication protocol such as "wpa2-psk" or "none"; empty if unknown
# P9K_WIFI_LAST_TX_RATE | wireless transmit rate in megabits per second
# P9K_WIFI_RSSI | signal strength in dBm, from -120 to 0
# P9K_WIFI_NOISE | noise in dBm, from -120 to 0
# P9K_WIFI_BARS | signal strength in bars, from 0 to 4 (derived from P9K_WIFI_RSSI and P9K_WIFI_NOISE)
####################################[ time: current time ]####################################
# Current time color.
typeset -g POWERLEVEL9K_TIME_FOREGROUND=66
# Format for the current time: 09:51:02. See `man 3 strftime`.
typeset -g POWERLEVEL9K_TIME_FORMAT='%D{%H:%M:%S}'
# If set to true, time will update when you hit enter. This way prompts for the past
# commands will contain the start times of their commands as opposed to the default
# behavior where they contain the end times of their preceding commands.
typeset -g POWERLEVEL9K_TIME_UPDATE_ON_COMMAND=false
# Custom icon.
# typeset -g POWERLEVEL9K_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_TIME_PREFIX='%fat '
# Example of a user-defined prompt segment. Function prompt_example will be called on every
# prompt if `example` prompt segment is added to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or
# POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS. It displays an icon and orange text greeting the user.
#
# Type `p10k help segment` for documentation and a more sophisticated example.
# Example user-defined prompt segment: greets the current user with a star
# icon in orange (color 208). Shown when `example` is added to
# POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS.
function prompt_example() {
  local -i color=208
  p10k segment -t 'hello, %n' -i '⭐' -f $color
}
# User-defined prompt segments may optionally provide an instant_prompt_* function. Its job
# is to generate the prompt segment for display in instant prompt. See
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
#
# Powerlevel10k will call instant_prompt_* at the same time as the regular prompt_* function
# and will record all `p10k segment` calls it makes. When displaying instant prompt, Powerlevel10k
# will replay these calls without actually calling instant_prompt_*. It is imperative that
# instant_prompt_* always makes the same `p10k segment` calls regardless of environment. If this
# rule is not observed, the content of instant prompt will be incorrect.
#
# Usually, you should either not define instant_prompt_* or simply call prompt_* from it. If
# instant_prompt_* is not defined for a segment, the segment won't be shown in instant prompt.
# Instant-prompt counterpart of prompt_example. Powerlevel10k records the
# `p10k segment` calls made here and replays them for instant prompt; since
# prompt_example always emits the same segment, delegating to it is safe.
function instant_prompt_example() {
  prompt_example
}
# User-defined prompt segments can be customized the same way as built-in segments.
# typeset -g POWERLEVEL9K_EXAMPLE_FOREGROUND=208
# typeset -g POWERLEVEL9K_EXAMPLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Transient prompt works similarly to the builtin transient_rprompt option. It trims down prompt
# when accepting a command line. Supported values:
#
# - off: Don't change prompt when accepting a command line.
# - always: Trim down prompt when accepting a command line.
# - same-dir: Trim down prompt when accepting a command line unless this is the first command
# typed after changing current working directory.
typeset -g POWERLEVEL9K_TRANSIENT_PROMPT=always
# Instant prompt mode.
#
# - off: Disable instant prompt. Choose this if you've tried instant prompt and found
# it incompatible with your zsh configuration files.
# - quiet: Enable instant prompt and don't print warnings when detecting console output
# during zsh initialization. Choose this if you've read and understood
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
# - verbose: Enable instant prompt and print a warning when detecting console output during
# zsh initialization. Choose this if you've never tried instant prompt, haven't
# seen the warning, or if you are unsure what this all means.
typeset -g POWERLEVEL9K_INSTANT_PROMPT=verbose
# Hot reload allows you to change POWERLEVEL9K options after Powerlevel10k has been initialized.
# For example, you can type POWERLEVEL9K_BACKGROUND=red and see your prompt turn red. Hot reload
# can slow down prompt by 1-2 milliseconds, so it's better to keep it turned off unless you
# really need it.
typeset -g POWERLEVEL9K_DISABLE_HOT_RELOAD=true
# If p10k is already loaded, reload configuration.
# This works even with POWERLEVEL9K_DISABLE_HOT_RELOAD=true.
(( ! $+functions[p10k] )) || p10k reload
}
# Tell `p10k configure` which file it should overwrite.
typeset -g POWERLEVEL9K_CONFIG_FILE=${${(%):-%x}:a}
(( ${#p10k_config_opts} )) && setopt ${p10k_config_opts[@]}
'builtin' 'unset' 'p10k_config_opts'
|
# Fit an RBF support-vector regressor, tuning C and epsilon with a
# 3-fold grid search scored by negative mean squared error.
import numpy as np
from sklearn.svm import SVR
from sklearn.model_selection import GridSearchCV

# Training data: one feature column and its regression targets.
X = np.array([[1000], [700], [500], [200]])
y = np.array([1, 0.75, 0.5, 0.25])

# Base estimator with an RBF kernel; 'scale' gamma adapts to feature variance.
svr = SVR(kernel='rbf', gamma='scale')

# Hyperparameter grid to explore.
param_grid = {
    'C': [0.1, 1, 10, 100],
    'epsilon': [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0],
}

# Exhaustive cross-validated search over the grid.
search = GridSearchCV(svr, param_grid=param_grid, cv=3, scoring='neg_mean_squared_error')
search.fit(X, y)

# Report the winning configuration and its cross-validation score.
print("Best hyperparameters:", search.best_params_)
print("Best score:", search.best_score_)
#!/usr/bin/env bash
# Build and publish the PLATFORM sub-build: gradle artifacts to CloudRepo
# and docker images via docker-compose. Requires CLOUDREPO_USER,
# CLOUDREPO_PASSWORD and DOCKER_PASSWORD in the environment.
set -e

. tools/lib/lib.sh

if [[ -z "${CLOUDREPO_USER}" ]]; then
  echo 'CLOUDREPO_USER env var not set. Please retrieve the user email from the CloudRepo lastpass secret and run export CLOUDREPO_USER=<user_from_secret>.';
  exit 1;
fi

if [[ -z "${CLOUDREPO_PASSWORD}" ]]; then
  # Fixed copy/paste bug: this message previously asked for the user email.
  echo 'CLOUDREPO_PASSWORD env var not set. Please retrieve the password from the CloudRepo lastpass secret and run export CLOUDREPO_PASSWORD=<password_from_secret>.';
  exit 1;
fi

if [[ -z "${DOCKER_PASSWORD}" ]]; then
  echo 'DOCKER_PASSWORD for airbytebot not set.';
  exit 1;
fi

# --password-stdin keeps the password out of `ps` output and shell history
# (docker warns that -p is insecure).
echo "${DOCKER_PASSWORD}" | docker login -u airbytebot --password-stdin

source ./tools/bin/bump_version.sh

echo "Building and publishing PLATFORM version $NEW_VERSION for git revision $GIT_REVISION..."
VERSION=$NEW_VERSION SUB_BUILD=PLATFORM ./gradlew clean build
SUB_BUILD=PLATFORM ./gradlew publish
VERSION=$NEW_VERSION GIT_REVISION=$GIT_REVISION docker-compose -f docker-compose.build.yaml push
echo "Completed building and publishing PLATFORM..."
|
<reponame>trunksbomb/Cyclic<filename>src/main/java/com/lothrazar/cyclicmagic/block/anvilmagma/BlockAnvilMagma.java
/*******************************************************************************
* The MIT License (MIT)
*
* Copyright (C) 2014-2018 <NAME> (aka Lothrazar)
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
******************************************************************************/
package com.lothrazar.cyclicmagic.block.anvilmagma;
import com.lothrazar.cyclicmagic.IContent;
import com.lothrazar.cyclicmagic.block.anvil.BlockAnvilAuto;
import com.lothrazar.cyclicmagic.block.core.BlockBaseHasTile;
import com.lothrazar.cyclicmagic.data.IHasRecipe;
import com.lothrazar.cyclicmagic.gui.ForgeGuiHandler;
import com.lothrazar.cyclicmagic.guide.GuideCategory;
import com.lothrazar.cyclicmagic.registry.BlockRegistry;
import com.lothrazar.cyclicmagic.registry.RecipeRegistry;
import com.lothrazar.cyclicmagic.util.Const;
import com.lothrazar.cyclicmagic.util.UtilChat;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.init.Blocks;
import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.IRecipe;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.EnumHand;
import net.minecraft.util.math.AxisAlignedBB;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.IBlockAccess;
import net.minecraft.world.World;
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.fluids.FluidUtil;
import net.minecraftforge.fml.common.registry.GameRegistry;
/**
 * Anvil-variant block backed by a tile entity ({@link TileEntityAnvilMagma})
 * that stores lava; the config comment describes the stored fluid as a
 * "Lava cost per damage unit". Registers its own TE and opens a GUI via
 * the id set in the constructor.
 */
public class BlockAnvilMagma extends BlockBaseHasTile implements IContent, IHasRecipe {

  //block rotation in json http://www.minecraftforge.net/forum/index.php?topic=32753.0
  /** Anvil material/sound, GUI id, hardness/resistance and translucent render. */
  public BlockAnvilMagma() {
    super(Material.ANVIL);
    this.setSoundType(SoundType.ANVIL);
    super.setGuiId(ForgeGuiHandler.GUI_INDEX_ANVILMAGMA);
    this.setHardness(3.0F).setResistance(3.0F);
    this.setTranslucent();
  }

  /** Reuses the auto-anvil's Z-axis bounding box so the shape matches it. */
  @Override
  public AxisAlignedBB getBoundingBox(IBlockState state, IBlockAccess source, BlockPos pos) {
    return BlockAnvilAuto.Z_AXIS_AABB;
  }

  /** Not a full cube: affects rendering/light like a partial block. */
  @Override
  public boolean isFullCube(IBlockState state) {
    return false;
  }

  @Override
  public TileEntity createTileEntity(World worldIn, IBlockState state) {
    return new TileEntityAnvilMagma();
  }

  /** Registers the block under the machine guide category and its tile entity. */
  @Override
  public void register() {
    BlockRegistry.registerBlock(this, "block_anvil_magma", GuideCategory.BLOCKMACHINE);
    GameRegistry.registerTileEntity(TileEntityAnvilMagma.class, Const.MODID + "block_anvil_magma_te");
  }

  // Enable flag read from mod configuration in syncConfig().
  private boolean enabled;

  @Override
  public boolean enabled() {
    return enabled;
  }

  /** Reads the enable flag and the lava cost per damage unit from config. */
  @Override
  public void syncConfig(Configuration config) {
    enabled = config.getBoolean("block_anvil_magma", Const.ConfigCategory.content, true, Const.ConfigCategory.contentDefaultText);
    TileEntityAnvilMagma.FLUID_COST = config.getInt("block_anvil_magma_lava", Const.ConfigCategory.fuelCost, 100, 1, 10000, "Lava cost per damage unit");
  }

  /** Shaped recipe: magma blocks on top, gold-anvil-gold middle, cobblestone base. */
  @Override
  public IRecipe addRecipe() {
    return RecipeRegistry.addShapedRecipe(new ItemStack(this),
        "ddd",
        "geg",
        "iii",
        'g', "blockGold",
        'i', "cobblestone",
        'e', Blocks.ANVIL,
        'd', Blocks.MAGMA);
  }

  /**
   * Right-click: tries to transfer fluid between the held container and the
   * tile, then (server side only) reports the stored amount in chat.
   * Returns true whenever the held item is a fluid handler, which prevents
   * the item from also being placed as a block.
   */
  @Override
  public boolean onBlockActivated(World world, BlockPos pos, IBlockState state, EntityPlayer player, EnumHand hand, EnumFacing side, float hitX, float hitY, float hitZ) {
    // check the TE
    TileEntityAnvilMagma te = (TileEntityAnvilMagma) world.getTileEntity(pos);
    boolean success = FluidUtil.interactWithFluidHandler(player, hand, world, pos, side);
    if (te != null) {
      if (!world.isRemote) {
        int currentFluid = te.getCurrentFluidStackAmount();
        UtilChat.sendStatusMessage(player, UtilChat.lang("cyclic.fluid.amount") + currentFluid);
      }
    }
    // otherwise return true if it is a fluid handler to prevent in world placement
    return success || FluidUtil.getFluidHandler(player.getHeldItem(hand)) != null || super.onBlockActivated(world, pos, state, player, hand, side, hitX, hitY, hitZ);
  }
}
|
#!/bin/bash
# Run the ResNet training benchmark under the DRAGON (nvmgpu) input layer.
#
# Alternative data staging, currently disabled:
#rsync -r --progress /opt/nvme/pak/ilsvrc12_train_lmdb_full /opt/fio/pak/ilsvrc12/ilsvrc12_train_lmdb_full/
#rsync -r --progress /opt/nvme/pak/ilsvrc12_val_lmdb_full /opt/fio/pak/ilsvrc12/ilsvrc12_val_lmdb_full/

# Reload the kernel module, then return to this directory.
cd ../../../../kernel/ && ./reinsert-mod.sh
cd -

# Sweep the benchmark matrix; extra values are listed in trailing comments.
for enable_dragon in "false"; do       # "true"
  for readahead_type in "norm"; do     # "aggr"
    for num in 18; do                  # 32 50 101 152
      echo "===> Running ResNet-${num} nvmgpu ${enable_dragon} ${readahead_type}"
      # Instantiate the prototxt template with the DRAGON-backed input layer.
      sed "s/{{ INPUT_LAYER_TYPE }}/DragonData/g" resnet_${num}/resnet_${num}.prototxt.template > resnet_${num}/resnet_${num}.prototxt
      # Clear the kernel ring buffer and drop page caches for a cold-cache run.
      dmesg -C
      ../../../../experiments/drop-caches.sh
      time DRAGON_READAHEAD_TYPE=${readahead_type} ../../build/tools/caffe train --solver=resnet_${num}/resnet_${num}_solver.prototxt -enable_dragon=${enable_dragon} -dragon_tmp_folder=/mnt/nvme/pak/tmp/
      # Show kernel messages produced during the run.
      dmesg
    done
  done
done
|
#!/bin/bash
# Clean, rebuild, and launch the particle filter.
#
# Written by Tiffany Huang, 12/14/2016

./clean.sh          # remove previous build artifacts
./build.sh          # compile the project into ./build
cd ./build          # run from the build output directory
./particle_filter   # start the particle filter
|
import React, { useState } from 'react';
const Counter = () => {
const [count, setCount] = useState(0);
const handleIncrement = () => {
setCount(count + 1);
};
const handleDecrement = () => {
setCount(Math.max(0, count - 1));
};
return (
<div>
<h1>Count: {count}</h1>
<button onClick={handleIncrement}>Increment</button>
<button onClick={handleDecrement}>Decrement</button>
</div>
);
};
export default Counter; |
package manager
import (
"github.com/Jeffail/benthos/v3/lib/stream"
yaml "gopkg.in/yaml.v3"
)
//------------------------------------------------------------------------------
// ConfigSet is a map of stream configurations mapped by ID, which can be YAML
// parsed without losing default values inside the stream configs.
type ConfigSet map[string]stream.Config
// UnmarshalYAML ensures that when parsing configs that are in a map or slice
// the default values are still applied.
func (c ConfigSet) UnmarshalYAML(unmarshal func(interface{}) error) error {
	// Decode into a generic map first so each entry can be re-parsed onto a
	// fully defaulted stream.Config.
	var raw map[string]interface{}
	if err := unmarshal(&raw); err != nil {
		return err
	}
	for id, node := range raw {
		// Round-trip every entry through YAML on top of stream.NewConfig(),
		// so fields absent from the input keep their default values.
		streamConf := stream.NewConfig()
		marshalled, err := yaml.Marshal(node)
		if err != nil {
			return err
		}
		if err := yaml.Unmarshal(marshalled, &streamConf); err != nil {
			return err
		}
		c[id] = streamConf
	}
	return nil
}
//------------------------------------------------------------------------------
|
<filename>src/icons/legacy/Bluetooth.tsx<gh_stars>1-10
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import BluetoothSvg from '@rsuite/icon-font/lib/legacy/Bluetooth';
// Legacy Bluetooth icon wrapper: binds the raw SVG glyph from
// @rsuite/icon-font to the shared createSvgIcon factory.
const Bluetooth = createSvgIcon({
  as: BluetoothSvg,
  ariaLabel: 'bluetooth',
  category: 'legacy',
  displayName: 'Bluetooth'
});

export default Bluetooth;
|
import { Injectable } from '@angular/core';
import { noop } from 'rxjs';
import { CardViewHeaderService } from './card-view-header.service';
/**
 * Test double for CardViewHeaderService: provides a no-op setHeader so
 * components under test can inject the service without side effects.
 */
@Injectable({ providedIn: 'root' })
export class StubCardViewHeaderService implements Partial<CardViewHeaderService> {
  // No-op replacement for the real header setter (rxjs `noop`).
  setHeader = noop;
}

// DI provider that swaps the real service for the stub in test configurations.
export const stubCardViewHeaderServiceProvider = {
  provide: CardViewHeaderService,
  useClass: StubCardViewHeaderService,
};
|
#!/bin/sh
# Reset JBoss Fuse runtime state and purge locally cached project artifacts.

# Default the Fuse install location when the caller has not set it.
if [ -z "$FUSE_HOME" ]; then
  export FUSE_HOME=/opt/fuse;
fi

# Remove runtime data from both the configured and the versioned install paths.
# All variable expansions are quoted so paths containing spaces stay intact.
sudo rm -rf "$FUSE_HOME/data"
sudo rm -rf /opt/jboss-fuse-6.2.0.redhat-133/data
[ -f "$FUSE_HOME/lock" ] && sudo rm "$FUSE_HOME/lock"

# Temporary files and JVM performance-data directories.
sudo rm -rf /tmp/fakerepo
sudo rm -rf /tmp/hsperfdata_root
sudo rm -rf "/tmp/hsperfdata_$USER"

# Project deployment repositories.
sudo rm -rf /opt/tetra/fs-repo/hs/assessment/dev
sudo rm -rf /opt/tetra/fs-repo/water/assessment/dev

# Per-user Karaf state and cached Maven artifacts for this organisation.
sudo rm -rf "$HOME/.karaf"
sudo rm -rf "$HOME/.m2/repository/com/strikersoft"
sudo rm -rf /root/.m2/repository/com/strikersoft

# Verify the cleanup actually removed the Fuse data directory.
if [ -d "$FUSE_HOME/data" ] ; then
  echo "Clean problem"
else #if needed #also: elif [new condition]
  echo "JBoss Fuse & Maven repo is cleaned"
fi
|
<reponame>jamacanbacn/splits-io<filename>app/controllers/api/v3/users/runs_controller.rb
# API v3 endpoint listing a user's runs; the user is addressable by name or id.
class Api::V3::Users::RunsController < Api::V3::ApplicationController
  before_action :set_user
  before_action :set_runs, only: [:index]

  # GET /api/v3/users/:user_id/runs
  # Renders the user's runs as paginated JSON under a `runs` root key.
  def index
    runs = paginate @runs
    render json: Api::V3::RunBlueprint.render(runs, root: :runs)
  end

  private

  # Looks the user up by name first, then falls back to primary key.
  # Rendering the 404 payload here halts the filter chain, so set_runs
  # and the action never run for an unknown user.
  def set_user
    @user = User.find_by(name: params[:user_id]) || User.find(params[:user_id])
  rescue ActiveRecord::RecordNotFound
    render status: :not_found, json: {status: 404, message: "User with name or id '#{params[:user_id]}' not found."}
  end

  # Preloads game and category to avoid N+1 queries during serialization.
  def set_runs
    @runs = @user.runs.includes(:game, :category)
  end
end
|
<gh_stars>1-10
# Package entry point: re-export the connection pool implementation.
from .pool import ConnPool

# Public API of this package.
__all__ = ["ConnPool"]
<reponame>ministryofjustice/mtp-api<gh_stars>1-10
import collections
import datetime
import logging
import re
from django import forms
from django.conf import settings
from django.contrib.admin.widgets import AdminFileWidget
from django.core.exceptions import ValidationError
from django.db import transaction
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from django.utils.functional import cached_property
from django.utils.translation import gettext_lazy as _
from core.excel import ExcelWorkbook, ExcelWorksheet
logger = logging.getLogger('mtp')
class DigitalTakeupUploadForm(forms.Form):
    """Admin form for uploading the daily "digital take-up" Excel report.

    The spreadsheet lists, per prison, credits received by post vs. digital
    means with their totals. Parsing fills ``self.date`` and
    ``self.credits_by_prison``; ``save()`` persists them as DigitalTakeup rows.
    """

    # Uploaded spreadsheet (.xls or .xlsx)
    excel_file = forms.FileField(label=_('Excel file'), widget=AdminFileWidget)
    error_messages = {
        'cannot_read': _('Please upload a Microsoft Excel .xls or .xlsx file'),
        'invalid': _('The spreadsheet does not contain the expected structure'),
        'invalid_date': _('The report data should be for one day only'),
        'unknown_prison': _('Cannot look up prison ‘%(prison_name)s’'),
    }
    # Maps the report's credit-type code to the (count field, amount field)
    # it populates in each credits_by_prison entry.
    credit_types = {
        'POST': ('credits_by_post', 'amount_by_post'),
        'MTDS': ('credits_by_mtp', 'amount_by_mtp'),
        'MRPR': ('credits_by_mtp', 'amount_by_mtp'),  # to allow for legacy report uploading
    }

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Date the report covers; set by parse_workbook
        self.date = None
        # {nomis_id: {credits_by_post, credits_by_mtp, amount_by_post, amount_by_mtp}}
        self.credits_by_prison = {}
        from prison.models import Prison
        self.re_whitespace = re.compile(r'\s+')
        # Patterns stripped from spreadsheet prison names before lookup
        self.re_prison_converters = (
            Prison.re_prefixes,  # standard prefixes
            re.compile(r'(\(.*?\))?$'),  # any parenthesised suffixes
            re.compile(r'HMP/YOI$'),  # another variation
            re.compile(r'IMMIGRATION REMOVAL CENTRE$', flags=re.IGNORECASE),  # another variation
        )

    @cached_property
    def prison_name_map(self):
        # Normalised (shortened, upper-cased, whitespace-free) name -> NOMIS id
        from prison.models import Prison
        prisons = list(Prison.objects.values('nomis_id', 'name'))
        prison_name_map = {
            self.re_whitespace.sub('', Prison.shorten_name(prison['name'])).upper(): prison['nomis_id']
            for prison in prisons
        }
        if len(prison_name_map) != len(prisons):
            # two prisons normalised to the same key; fail lookups rather than guess
            logger.error('Generated prison name map does not have expected number of prisons')
            return {}
        return prison_name_map

    def parse_prison(self, prison_str):
        """Convert a prison name as it appears in the report into a NOMIS id.

        Raises KeyError when the normalised name is not in prison_name_map.
        """
        prison_str = prison_str.strip()
        for converter in self.re_prison_converters:
            prison_str = converter.sub('', prison_str)
        prison_str = self.re_whitespace.sub('', prison_str).upper()
        return self.prison_name_map[prison_str]

    @classmethod
    def find_worksheet_start(cls, sheet: ExcelWorksheet):
        """Return (row, col) of the 'Parameters' header cell; None if not found."""
        # find beginning of spreadsheet as blank rows/columns has appeared over the years
        for row in range(3):
            for col in range(3):
                if (sheet.cell_value(row, col) or '').strip() == 'Parameters':
                    return row, col

    def parse_workbook(self, workbook: ExcelWorkbook):
        """Extract the report date and per-prison credit figures from sheet 0.

        Raises ValidationError for multi-day reports and unknown prisons;
        ValueError/IndexError for malformed content (handled by clean_excel_file).
        """
        sheet = workbook.get_sheet(0)
        start_row, start_col = self.find_worksheet_start(sheet)
        date_formats = ['%d/%m/%Y'] + list(settings.DATE_INPUT_FORMATS)

        def parse_date(date_str, alt_date_str):
            # the date may follow a colon in the same cell or sit in the next cell
            date_str = date_str.split(':', 1)[1].strip().lstrip('0') or alt_date_str.strip().lstrip('0')
            for date_format in date_formats:
                try:
                    return datetime.datetime.strptime(date_str, date_format).date()
                except ValueError:
                    continue
            raise ValueError('Cannot parse date header %s' % date_str)

        start_date = parse_date(
            sheet.cell_value(start_row + 1, start_col),
            sheet.cell_value(start_row + 1, start_col + 1),
        )
        end_date = parse_date(
            sheet.cell_value(start_row + 2, start_col),
            sheet.cell_value(start_row + 2, start_col + 1),
        )
        if start_date != end_date:
            # the report must cover exactly one day
            raise ValidationError(self.error_messages['invalid_date'], code='invalid_date')
        self.date = start_date

        # data rows start a fixed offset below the 'Parameters' header
        row = start_row + 5
        while row < sheet.row_count:
            prison_name = sheet.cell_value(row, start_col)
            if not prison_name:
                # a blank prison name marks the end of the data
                break
            try:
                nomis_id = self.parse_prison(prison_name)
            except KeyError:
                raise ValidationError(self.error_messages['unknown_prison'], code='unknown_prison',
                                      params={'prison_name': prison_name})
            if nomis_id not in self.credits_by_prison:
                self.credits_by_prison[nomis_id] = {
                    'credits_by_post': 0,
                    'credits_by_mtp': 0,
                    'amount_by_post': 0,
                    'amount_by_mtp': 0,
                }
            # the type/count/amount columns shift by one in some report versions
            if sheet.cell_value(row, start_col + 1):
                credit_type = sheet.cell_value(row, start_col + 1).upper()
                count = sheet.cell_value(row, start_col + 2)
                amount = sheet.cell_value(row, start_col + 4)
            else:
                credit_type = sheet.cell_value(row, start_col + 2).upper()
                count = sheet.cell_value(row, start_col + 3)
                amount = sheet.cell_value(row, start_col + 5)
            if credit_type == 'CHEQ':
                # cheque credits are skipped entirely
                row += 1
                continue
            if credit_type not in self.credit_types:
                raise ValueError('Cannot parse credit type %s in row %d' % (credit_type, row))
            credits_key, amount_key = self.credit_types[credit_type]
            self.credits_by_prison[nomis_id][credits_key] = int(count)
            # amount is stored ×100 as an integer (presumably pounds → pence)
            self.credits_by_prison[nomis_id][amount_key] = int(amount * 100)
            row += 1

    def clean_excel_file(self):
        """Validate and parse the upload; results land on self, not cleaned_data."""
        excel_file = self.cleaned_data.get('excel_file')
        if excel_file:
            try:
                with ExcelWorkbook.open_workbook(excel_file) as workbook:
                    self.parse_workbook(workbook)
            except TypeError:
                # raised when file cannot be read
                raise ValidationError(self.error_messages['cannot_read'], code='cannot_read')
            except (ValueError, IndexError):
                # raised when file can be read but has unexpected structure/contents
                logger.warning('Cannot parse spreadsheet', exc_info=True)
                raise ValidationError(self.error_messages['invalid'], code='invalid')
        return None

    @transaction.atomic
    def save(self):
        """Persist one DigitalTakeup record per prison for the report date."""
        from performance.models import DigitalTakeup
        for nomis_id, credit_by_prison in self.credits_by_prison.items():
            DigitalTakeup.objects.update_or_create(
                defaults=credit_by_prison,
                date=self.date,
                prison_id=nomis_id,
            )
class UserSatisfactionUploadForm(forms.Form):
    """Admin form for uploading a user-satisfaction feedback CSV export.

    Parses rows of type 'aggregated-service-feedback' into per-date rating
    counts and saves one UserSatisfaction record per date, recording the
    date range seen in ``date_min``/``date_max``.
    """

    csv_file = forms.FileField(label=_('CSV file'), widget=AdminFileWidget)
    error_messages = {
        'cannot_read': _('Please upload a .csv file'),
        'invalid': _('The CSV file does not contain the expected structure'),
    }
    # Feedback cells look like "Rating of 4: 17"
    _rating_prefix = 'Rating of '

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # {date: {rating (1-5): count}}
        self.records = collections.defaultdict(lambda: collections.defaultdict(int))
        # record date range in uploaded file (populated by save())
        self.date_min = None
        self.date_max = None

    def parse_record(self, record):
        """Parse one CSV row; return (date, rating, count) or None to skip it.

        Raises ValidationError('invalid') for rows with the wrong structure.
        """
        try:
            # filter out only aggregated daily ratings
            if record['type'] != 'aggregated-service-feedback':
                return None
            # get local date
            date = parse_datetime(record['creation date'])
            if not date:
                raise ValueError('Cannot parse creation date')
            date = timezone.make_aware(date).date()
            # get rating and count
            feedback = record['feedback']
            assert feedback.startswith(self._rating_prefix)
            feedback = feedback[len(self._rating_prefix):]
            rating, count = feedback.split(':', 1)
            rating = int(rating.strip())
            assert rating in range(1, 6)
            count = int(count.strip())
            assert count >= 0
        except (KeyError, IndexError, ValueError, AssertionError):
            raise ValidationError(self.error_messages['invalid'], 'invalid')
        return date, rating, count

    def clean_csv_file(self):
        """Read the uploaded CSV into self.records; results live on self."""
        csv_file = self.cleaned_data.get('csv_file')
        if csv_file:
            import csv
            import io
            reader = csv.DictReader(io.TextIOWrapper(csv_file))
            try:
                for record in reader:
                    parsed = self.parse_record(record)
                    if not parsed:
                        continue
                    date, rating, count = parsed
                    self.records[date][rating] = count
            except ValueError:
                raise ValidationError(self.error_messages['cannot_read'], code='cannot_read')
        return None

    @transaction.atomic
    def save(self):
        """Persist one UserSatisfaction row per date and track the date range."""
        from performance.models import UserSatisfaction
        # A structurally valid CSV may contain no aggregated-service-feedback
        # rows at all; previously `list(self.records)[0]` raised IndexError
        # in that case. Nothing to save, so leave date_min/date_max as None.
        if not self.records:
            return
        self.date_min = list(self.records)[0]
        self.date_max = self.date_min
        for date, record in self.records.items():
            UserSatisfaction.objects.update_or_create(
                defaults={
                    f'rated_{rating}': record[rating]
                    for rating in range(1, 6)
                },
                date=date,
            )
            # Keep track of records date range
            if date < self.date_min:
                self.date_min = date
            if date > self.date_max:
                self.date_max = date
|
#!/bin/bash
# Launch openpilot and the simulator bridge in a shared tmux session.

# Detached session (named "htop") whose first window runs openpilot.
tmux new -d -s htop
tmux send-keys "./launch_openpilot.sh" ENTER
# Second window: run the bridge, forwarding this script's arguments to it.
tmux neww
tmux send-keys "./bridge.py $*" ENTER
# Attach so the user can watch both windows.
tmux a
|
<filename>ffi/bolt/auto_releasable.rb
# frozen_string_literal: true
module Bolt
  # Mixin for FFI modules: lets `attach_function` declare a return type of
  # :auto_pointer, which wraps the returned native pointer in an
  # FFI::AutoPointer so the underlying resource is released automatically.
  module AutoReleasable
    # Intercepts :auto_pointer declarations; any other return type passes
    # straight through to FFI's attach_function.
    def attach_function(name, func, args, returns = nil, options = nil)
      return super unless returns == :auto_pointer
      super(name, func, args, :pointer, options)
      # Prepend a wrapper so subsequent calls to `name` return an
      # auto-releasing pointer bound to the declared :releaser option
      # (or, by default, this module's destroy method).
      singleton_class.prepend with_auto_releaser(name, options&.dig(:releaser))
    end

    private

    # Builds an anonymous module that overrides `method`, wrapping the raw
    # pointer returned by super in an FFI::AutoPointer with the releaser.
    def with_auto_releaser(method, releaser)
      Module.new do
        define_method(method) do |*args|
          FFI::AutoPointer.new(super(*args), releaser || self.method(:destroy))
        end
      end
    end
  end
end
|
# Get twilio-ruby from twilio.com/docs/ruby/install
require 'twilio-ruby'

# Get your Account SID and Auth Token from twilio.com/console
# To set up environmental variables, see http://twil.io/secure
account_sid = ENV['TWILIO_ACCOUNT_SID']
auth_token = ENV['TWILIO_AUTH_TOKEN']

workspace_sid = 'WSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
taskqueue_sid = 'WQXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'

# Build a TaskRouter capability token scoped to this workspace/task queue.
# Fixed a syntax error here: the credentials were wrapped in a stray
# parenthesised list `(account_sid, auth_token),` which is invalid Ruby —
# the constructor takes four plain positional arguments.
capability = Twilio::JWT::TaskRouterCapability.new(
  account_sid, auth_token,
  workspace_sid, taskqueue_sid
)

# Allow fetching (GET) all task queues and their subresources.
allow_fetch_subresources = Twilio::JWT::TaskRouterCapability::Policy.new(
  Twilio::JWT::TaskRouterCapability::TaskRouterUtils
    .all_task_queues(workspace_sid), 'GET', true
)
capability.add_policy(allow_fetch_subresources)

# Allow updates (POST) to all task queues.
allow_updates = Twilio::JWT::TaskRouterCapability::Policy.new(
  Twilio::JWT::TaskRouterCapability::TaskRouterUtils
    .all_task_queues(workspace_sid), 'POST', true
)
capability.add_policy(allow_updates)

# Print the resulting JWT.
puts capability.to_s
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.