text stringlengths 1 1.05M |
|---|
# coding: utf-8
# ## Test out UGRID-0.9 compliant unstructured grid model datasets with PYUGRID
# NOTE(review): exported from an IPython notebook; uses Python 2 print
# statements, so this runs only under Python 2.
# In[12]:
import datetime as dt
import netCDF4
import pyugrid
import matplotlib.tri as tri
import matplotlib.pyplot as plt
import numpy as np
get_ipython().magic(u'matplotlib inline')
# In[13]:
# Candidate UGRID datasets; the last uncommented `url`/`zvar` pair wins
# (here: the ADCIRC-based ESTOFS forecast with elevation variable 'zeta').
#FVCOM
#url = 'http://comt.sura.org/thredds/dodsC/data/comt_1_archive/inundation_tropical/UND_ADCIRC/Hurricane_Ike_3D_final_run_with_waves
url = 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM3_FORECAST.nc'
#url = 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM2_FORECAST.nc'
zvar = 'zeta'
#ADCIRC
url = 'http://geoport-dev.whoi.edu/thredds/dodsC/estofs/atlantic/nc4'
#url = 'http://comt.sura.org/thredds/dodsC/data/comt_1_archive/inundation_tropical/UND_ADCIRC/Hurricane_Ike_3D_final_run_with_waves'
zvar = 'zeta'
# SELFE
#url = 'http://comt.sura.org/thredds/dodsC/data/comt_1_archive/inundation_tropical/VIMS_SELFE/Hurricane_Ike_2D_final_run_with_waves'
#url='http://amb6400b.stccmop.org:8080/thredds/dodsC/model_data/forecast.nc'
#zvar = 'elev'
# In[14]:
# Desired time for snapshot
# ....right now (or some number of hours from now) ...
start = dt.datetime.utcnow() + dt.timedelta(hours=6)
# ... or specific time (UTC)
#start = dt.datetime(2013,3,2,15,0,0)
print start
# In[15]:
# Read the unstructured-grid topology (nodes/faces) via pyugrid.
ug = pyugrid.UGrid.from_ncfile(url)
# What's in there?
print "There are %i nodes"%ug.nodes.shape[0]
#print "There are %i edges"%ug.edges.shape[0]
print "There are %i faces"%ug.faces.shape[0]
# In[ ]:
# nodes is (n, 2): column 0 = longitude, column 1 = latitude.
lon = ug.nodes[:,0]
lat = ug.nodes[:,1]
nv = ug.faces[:]
# In[ ]:
# Build a matplotlib triangulation from the node coordinates + connectivity.
triang = tri.Triangulation(lon,lat,triangles=nv)
# In[ ]:
nc = netCDF4.Dataset(url)
ncv = nc.variables
# Get desired time step
time_var = ncv['time']
print 'number of time steps:',len(time_var)
itime = netCDF4.date2index(start,time_var,select='nearest')
start_time = netCDF4.num2date(time_var[0],time_var.units)
stop_time = netCDF4.num2date(time_var[-1],time_var.units)
print 'start time:',start_time.strftime('%Y-%b-%d %H:%M')
print 'stop time:',stop_time.strftime('%Y-%b-%d %H:%M')
dtime = netCDF4.num2date(time_var[itime],time_var.units)
daystr = dtime.strftime('%Y-%b-%d %H:%M')
print 'time selected:', daystr
# In[ ]:
# Elevation field at the selected time step, one value per node.
z = ncv[zvar][itime,:]
# In[ ]:
fig = plt.figure(figsize=(12,12))
levs = np.arange(-1,5,.2)
# Approximate equal-distance aspect ratio at this latitude.
plt.gca().set_aspect(1./np.cos(lat.mean()*np.pi/180))
plt.tricontourf(triang, z,levels=levs)
plt.colorbar()
plt.tricontour(triang, z, colors='k',levels=levs)
plt.title('%s: Elevation (m): %s' % (nc.title,daystr));
# In[ ]:
# In[ ]:
|
package resolver
import (
"github.com/pkg/errors"
"github.com/smartcontractkit/chainlink/core/chains/evm/types"
)
// ChainType is the GraphQL enum of supported EVM chain variants.
type ChainType string

const (
	ChainTypeArbitrum ChainType = "ARBITRUM"
	ChainTypeExChain  ChainType = "EXCHAIN"
	ChainTypeOptimism ChainType = "OPTIMISM"
	ChainTypeXDAI     ChainType = "XDAI"
)
// ToChainType converts a lower-case chain-type string into its GraphQL
// enum value. Unrecognized input yields an "invalid chain type" error.
func ToChainType(s string) (ChainType, error) {
	lookup := map[string]ChainType{
		"arbitrum": ChainTypeArbitrum,
		"exchain":  ChainTypeExChain,
		"optimism": ChainTypeOptimism,
		"xdai":     ChainTypeXDAI,
	}
	if ct, ok := lookup[s]; ok {
		return ct, nil
	}
	return "", errors.New("invalid chain type")
}
// GasEstimatorMode is the GraphQL enum of supported gas-estimation strategies.
type GasEstimatorMode string

const (
	GasEstimatorModeBlockHistory GasEstimatorMode = "BLOCK_HISTORY"
	GasEstimatorModeFixedPrice   GasEstimatorMode = "FIXED_PRICE"
	GasEstimatorModeOptimism     GasEstimatorMode = "OPTIMISM"
	GasEstimatorModeOptimism2    GasEstimatorMode = "OPTIMISM2"
)
// ToGasEstimatorMode converts a PascalCase gas-estimator-mode string into
// its GraphQL enum value. Unrecognized input yields an error.
func ToGasEstimatorMode(s string) (GasEstimatorMode, error) {
	lookup := map[string]GasEstimatorMode{
		"BlockHistory": GasEstimatorModeBlockHistory,
		"FixedPrice":   GasEstimatorModeFixedPrice,
		"Optimism":     GasEstimatorModeOptimism,
		"Optimism2":    GasEstimatorModeOptimism2,
	}
	if mode, ok := lookup[s]; ok {
		return mode, nil
	}
	return "", errors.New("invalid gas estimator mode")
}
// ChainConfigResolver resolves a chain's configuration for the GraphQL API.
// Each field resolver returns nil when the underlying value is unset.
type ChainConfigResolver struct {
	cfg types.ChainCfg
}

// NewChainConfig wraps a chain config in a resolver.
func NewChainConfig(cfg types.ChainCfg) *ChainConfigResolver {
	return &ChainConfigResolver{cfg}
}

// KeySpecificChainConfigResolver resolves a per-address (key-specific)
// chain config override; it embeds ChainConfigResolver for the field set.
type KeySpecificChainConfigResolver struct {
	address string
	ChainConfigResolver
}

// NewKeySpecificChainConfig wraps an address plus its config override.
func NewKeySpecificChainConfig(address string, cfg types.ChainCfg) *KeySpecificChainConfigResolver {
	return &KeySpecificChainConfigResolver{
		address:             address,
		ChainConfigResolver: ChainConfigResolver{cfg: cfg},
	}
}
// BlockHistoryEstimatorBlockDelay returns the configured delay as *int32,
// or nil when unset. The Int64 -> int32 narrowing is assumed safe for the
// value ranges stored in config (pattern repeated by all *int32 getters).
func (r *ChainConfigResolver) BlockHistoryEstimatorBlockDelay() *int32 {
	if r.cfg.BlockHistoryEstimatorBlockDelay.Valid {
		val := r.cfg.BlockHistoryEstimatorBlockDelay.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// BlockHistoryEstimatorBlockHistorySize returns the history size, or nil when unset.
func (r *ChainConfigResolver) BlockHistoryEstimatorBlockHistorySize() *int32 {
	if r.cfg.BlockHistoryEstimatorBlockHistorySize.Valid {
		val := r.cfg.BlockHistoryEstimatorBlockHistorySize.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// EthTxReaperThreshold returns the reaper threshold as a duration string
// (e.g. "1h0m0s"), or nil when unset.
func (r *ChainConfigResolver) EthTxReaperThreshold() *string {
	if r.cfg.EthTxReaperThreshold != nil {
		threshold := r.cfg.EthTxReaperThreshold.Duration().String()
		return &threshold
	}
	return nil
}

// EthTxResendAfterThreshold returns the resend threshold as a duration
// string, or nil when unset.
func (r *ChainConfigResolver) EthTxResendAfterThreshold() *string {
	if r.cfg.EthTxResendAfterThreshold != nil {
		threshold := r.cfg.EthTxResendAfterThreshold.Duration().String()
		return &threshold
	}
	return nil
}

// EvmEIP1559DynamicFees returns whether EIP-1559 fees are enabled, or nil when unset.
func (r *ChainConfigResolver) EvmEIP1559DynamicFees() *bool {
	if r.cfg.EvmEIP1559DynamicFees.Valid {
		return r.cfg.EvmEIP1559DynamicFees.Ptr()
	}
	return nil
}

// EvmFinalityDepth returns the finality depth, or nil when unset.
func (r *ChainConfigResolver) EvmFinalityDepth() *int32 {
	if r.cfg.EvmFinalityDepth.Valid {
		val := r.cfg.EvmFinalityDepth.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// EvmGasBumpPercent returns the gas bump percentage, or nil when unset.
func (r *ChainConfigResolver) EvmGasBumpPercent() *int32 {
	if r.cfg.EvmGasBumpPercent.Valid {
		val := r.cfg.EvmGasBumpPercent.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// EvmGasBumpTxDepth returns the gas bump tx depth, or nil when unset.
func (r *ChainConfigResolver) EvmGasBumpTxDepth() *int32 {
	if r.cfg.EvmGasBumpTxDepth.Valid {
		val := r.cfg.EvmGasBumpTxDepth.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// EvmGasBumpWei returns the gas bump amount in wei as a decimal string,
// or nil when unset.
func (r *ChainConfigResolver) EvmGasBumpWei() *string {
	if r.cfg.EvmGasBumpWei != nil {
		value := r.cfg.EvmGasBumpWei.String()
		return &value
	}
	return nil
}

// EvmGasLimitDefault returns the default gas limit, or nil when unset.
func (r *ChainConfigResolver) EvmGasLimitDefault() *int32 {
	if r.cfg.EvmGasLimitDefault.Valid {
		val := r.cfg.EvmGasLimitDefault.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// EvmGasLimitMultiplier returns the gas limit multiplier, or nil when unset.
func (r *ChainConfigResolver) EvmGasLimitMultiplier() *float64 {
	if r.cfg.EvmGasLimitMultiplier.Valid {
		return r.cfg.EvmGasLimitMultiplier.Ptr()
	}
	return nil
}

// EvmGasPriceDefault returns the default gas price as a decimal string,
// or nil when unset.
func (r *ChainConfigResolver) EvmGasPriceDefault() *string {
	if r.cfg.EvmGasPriceDefault != nil {
		value := r.cfg.EvmGasPriceDefault.String()
		return &value
	}
	return nil
}

// EvmGasTipCapDefault returns the default tip cap as a string, or nil when unset.
func (r *ChainConfigResolver) EvmGasTipCapDefault() *string {
	if r.cfg.EvmGasTipCapDefault != nil {
		value := r.cfg.EvmGasTipCapDefault.String()
		return &value
	}
	return nil
}

// EvmGasTipCapMinimum returns the minimum tip cap as a string, or nil when unset.
func (r *ChainConfigResolver) EvmGasTipCapMinimum() *string {
	if r.cfg.EvmGasTipCapMinimum != nil {
		value := r.cfg.EvmGasTipCapMinimum.String()
		return &value
	}
	return nil
}

// EvmHeadTrackerHistoryDepth returns the head tracker history depth, or nil when unset.
func (r *ChainConfigResolver) EvmHeadTrackerHistoryDepth() *int32 {
	if r.cfg.EvmHeadTrackerHistoryDepth.Valid {
		val := r.cfg.EvmHeadTrackerHistoryDepth.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// EvmHeadTrackerMaxBufferSize returns the head tracker buffer size, or nil when unset.
func (r *ChainConfigResolver) EvmHeadTrackerMaxBufferSize() *int32 {
	if r.cfg.EvmHeadTrackerMaxBufferSize.Valid {
		val := r.cfg.EvmHeadTrackerMaxBufferSize.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// EvmHeadTrackerSamplingInterval returns the sampling interval as a
// duration string, or nil when unset.
func (r *ChainConfigResolver) EvmHeadTrackerSamplingInterval() *string {
	if r.cfg.EvmHeadTrackerSamplingInterval != nil {
		interval := r.cfg.EvmHeadTrackerSamplingInterval.Duration().String()
		return &interval
	}
	return nil
}

// EvmLogBackfillBatchSize returns the log backfill batch size, or nil when unset.
func (r *ChainConfigResolver) EvmLogBackfillBatchSize() *int32 {
	if r.cfg.EvmLogBackfillBatchSize.Valid {
		val := r.cfg.EvmLogBackfillBatchSize.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// EvmMaxGasPriceWei returns the maximum gas price in wei as a string, or nil when unset.
func (r *ChainConfigResolver) EvmMaxGasPriceWei() *string {
	if r.cfg.EvmMaxGasPriceWei != nil {
		value := r.cfg.EvmMaxGasPriceWei.String()
		return &value
	}
	return nil
}

// EvmNonceAutoSync returns whether nonce auto-sync is enabled, or nil when unset.
func (r *ChainConfigResolver) EvmNonceAutoSync() *bool {
	if r.cfg.EvmNonceAutoSync.Valid {
		return r.cfg.EvmNonceAutoSync.Ptr()
	}
	return nil
}

// EvmRPCDefaultBatchSize returns the default RPC batch size, or nil when unset.
func (r *ChainConfigResolver) EvmRPCDefaultBatchSize() *int32 {
	if r.cfg.EvmRPCDefaultBatchSize.Valid {
		val := r.cfg.EvmRPCDefaultBatchSize.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// FlagsContractAddress returns the flags contract address, or nil when unset.
func (r *ChainConfigResolver) FlagsContractAddress() *string {
	if r.cfg.FlagsContractAddress.Valid {
		value := r.cfg.FlagsContractAddress.String
		return &value
	}
	return nil
}

// GasEstimatorMode returns the configured mode as its GraphQL enum.
// NOTE(review): an unset value AND an unrecognized stored value both
// resolve to nil, silently hiding bad data — confirm this is intended.
func (r *ChainConfigResolver) GasEstimatorMode() *GasEstimatorMode {
	if r.cfg.GasEstimatorMode.Valid {
		value, err := ToGasEstimatorMode(r.cfg.GasEstimatorMode.String)
		if err != nil {
			return nil
		}
		return &value
	}
	return nil
}

// ChainType returns the configured chain type as its GraphQL enum.
// Unset and unrecognized values both resolve to nil (see note above
// on GasEstimatorMode — same pattern).
func (r *ChainConfigResolver) ChainType() *ChainType {
	if r.cfg.ChainType.Valid {
		value, err := ToChainType(r.cfg.ChainType.String)
		if err != nil {
			return nil
		}
		return &value
	}
	return nil
}

// MinIncomingConfirmations returns the minimum incoming confirmations, or nil when unset.
func (r *ChainConfigResolver) MinIncomingConfirmations() *int32 {
	if r.cfg.MinIncomingConfirmations.Valid {
		val := r.cfg.MinIncomingConfirmations.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// MinRequiredOutgoingConfirmations returns the minimum outgoing confirmations, or nil when unset.
func (r *ChainConfigResolver) MinRequiredOutgoingConfirmations() *int32 {
	if r.cfg.MinRequiredOutgoingConfirmations.Valid {
		val := r.cfg.MinRequiredOutgoingConfirmations.Int64
		intVal := int32(val)
		return &intVal
	}
	return nil
}

// MinimumContractPayment returns the minimum contract payment as a string, or nil when unset.
func (r *ChainConfigResolver) MinimumContractPayment() *string {
	if r.cfg.MinimumContractPayment != nil {
		value := r.cfg.MinimumContractPayment.String()
		return &value
	}
	return nil
}

// OCRObservationTimeout returns the OCR observation timeout as a duration
// string, or nil when unset.
func (r *ChainConfigResolver) OCRObservationTimeout() *string {
	if r.cfg.OCRObservationTimeout != nil {
		timeout := r.cfg.OCRObservationTimeout.Duration().String()
		return &timeout
	}
	return nil
}

// KeySpecificConfigs returns one resolver per key-specific override.
// Map iteration order is random, so the result order is unstable.
func (r *ChainConfigResolver) KeySpecificConfigs() []*KeySpecificChainConfigResolver {
	var resolvers []*KeySpecificChainConfigResolver
	for addr, cfg := range r.cfg.KeySpecific {
		resolvers = append(resolvers, NewKeySpecificChainConfig(addr, cfg))
	}
	return resolvers
}

// Address returns the key address this override applies to.
func (r *KeySpecificChainConfigResolver) Address() string {
	return r.address
}
|
<filename>src/sort/Boj17862.java
package sort;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 17862번: 나의 학점은?
*
* @see https://www.acmicpc.net/problem/17862/
*
*/
/**
 * Baekjoon 17862: "What's my grade?" — reads 50 scores plus a target
 * score, determines the target's rank (1 = highest), and maps the rank
 * to a letter grade on the fixed curve below.
 */
public class Boj17862 {
    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());

        // Exactly 50 scores on the first input line.
        int[] score = new int[50];
        for(int i = 0; i < score.length; i++) {
            score[i] = Integer.parseInt(st.nextToken());
        }
        int target = Integer.parseInt(br.readLine());

        System.out.println(result(score, target));
    }

    /**
     * Sorts ascending, then walks from the top until the target is found;
     * `count` ends up as the target's 1-based rank. Assumes t is present
     * in arr (guaranteed by the problem statement — if absent, count
     * becomes arr.length + 1 and the method returns "F").
     */
    private static String result(int[] arr, int t) {
        int count = 1;
        Arrays.sort(arr);

        for(int i = arr.length - 1; i >= 0; i--) {
            if(arr[i] == t) break;
            count++;
        }

        // Rank-to-grade curve: 1-5 A+, 6-15 A0, 16-30 B+, 31-35 B0,
        // 36-45 C+, 46-48 C0, otherwise F.
        if(count >= 1 && count <= 5) return "A+";
        else if(count >= 6 && count <= 15) return "A0";
        else if(count >= 16 && count <= 30) return "B+";
        else if(count >= 31 && count <= 35) return "B0";
        else if(count >= 36 && count <= 45) return "C+";
        else if(count >= 46 && count <= 48) return "C0";
        else return "F";
    }
}
|
<filename>snapx/snapx/utils/decorators.py
from .decorator import decorator
def nodes_or_number(which_args):
    """PORTED FROM NETWORKX
    Decorator to allow number of nodes or container of nodes.

    Parameters
    ----------
    which_args : int or sequence of ints
        Location of the node arguments in args. Even if the argument is a
        named positional argument (with a default value), you must specify its
        index as a positional argument.
        If more than one node argument is allowed, can be a list of locations.

    Returns
    -------
    _nodes_or_numbers : function
        Function which replaces int args with ranges.

    Examples
    --------
    Decorate functions like this::

       @nodes_or_number(0)
       def empty_graph(nodes):
           pass

       @nodes_or_number([0,1])
       def grid_2d_graph(m1, m2, periodic=False):
           pass

       @nodes_or_number(1)
       def full_rary_tree(r, n):
           # r is a number. n can be a number or a list of nodes
           pass
    """
    @decorator
    def _nodes_or_number(func_to_be_decorated, *args, **kw):
        # form tuple of arg positions to be converted.
        try:
            iter_wa = iter(which_args)
        except TypeError:
            # Single index given: wrap it so the loop below works uniformly.
            iter_wa = (which_args,)
        # change each argument in turn: an int n becomes (n, range(n));
        # an iterable of nodes becomes (nodes, nodes-as-tuple).
        new_args = list(args)
        for i in iter_wa:
            n = args[i]
            try:
                nodes = list(range(n))
            except TypeError:
                nodes = tuple(n)
            else:
                if n < 0:
                    msg = "Negative number of nodes not valid: {}".format(n)
                    # NOTE(review): `sx` is not imported in this module —
                    # this line raises NameError, not SnapXError, when hit.
                    # Likely needs `import snapx as sx` at module level.
                    raise sx.SnapXError(msg)
            new_args[i] = (n, nodes)
        return func_to_be_decorated(*new_args, **kw)
    return _nodes_or_number
|
# Define the SQLAlchemy model for Subscription
# Define the SQLAlchemy model for Subscription.
# NOTE(review): `db`, `app`, `func`, `jsonify` and `logger` are not defined
# in this snippet — they must come from the enclosing Flask application
# module; verify before reuse.
class Subscription(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    active = db.Column(db.Boolean)

# Route returning the total number of active subscriptions as JSON.
# (`Subscription.active == True` is a SQLAlchemy filter expression, not a
# Python comparison — do not "fix" it to `is True`.)
@app.route('/active_subscriptions', methods=['GET'])
def get_active_subscriptions():
    total_active_subscriptions = db.session.query(func.count(Subscription.id)).filter(Subscription.active == True).scalar()
    return jsonify({'total_active_subscriptions': total_active_subscriptions})

# Log the total number of active subscriptions to the system log file.
# NOTE(review): duplicates the query in the route above — consider a shared
# helper if this module is revisited.
def log_active_subscriptions():
    total_active_subscriptions = db.session.query(func.count(Subscription.id)).filter(Subscription.active == True).scalar()
    logger.info(f'Total active subscriptions: {total_active_subscriptions}')

# Call the log_active_subscriptions function to log the count at import time.
log_active_subscriptions()
<reponame>OSADP/C2C-RI
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.gui;
import java.util.ArrayList;
import javax.swing.JTable;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.AbstractTableModel;
import org.fhwa.c2cri.testmodel.NRTM;
import org.fhwa.c2cri.testmodel.OtherRequirement;
import org.fhwa.c2cri.testmodel.OtherRequirementsInterface;
import org.fhwa.c2cri.testmodel.Requirement;
/**
* The Class OtherRequirementsTableModel.
*
* @author TransCore ITS, LLC
* Last Updated: 1/8/2014
*/
/**
 * Table model for "other requirements" of the currently selected
 * need/requirement pair. Listens to the selection models of the need and
 * requirement tables and repopulates its rows accordingly.
 *
 * @author TransCore ITS, LLC
 * Last Updated: 1/8/2014
 */
public class OtherRequirementsTableModel extends AbstractTableModel implements ListSelectionListener {

    /** The Constant ReqID_Col. */
    public static final int ReqID_Col = 0;
    /** The Constant Text_Col. */
    public static final int Text_Col = 1;
    /** The Constant Value_Col. */
    public static final int Value_Col = 2;

    /** The nrtm (source of needs and requirements). */
    private NRTM nrtm;

    /** The other requirements currently displayed as rows. */
    private ArrayList<OtherRequirement> otherRequirements = new ArrayList<OtherRequirement>();

    /** Title of the currently selected need; null/empty when none. */
    private String currentNeedID;

    /** The need list table whose selection drives this model. */
    private JTable needListTable;

    /** The requirement list table whose selection drives this model. */
    private JTable requirementListTable;

    /** The column names. */
    private String[] columnNames = {OtherRequirementsInterface.ReqID_Header,
        OtherRequirementsInterface.OtherRequirement_Header,
        OtherRequirementsInterface.Value_Header};

    /**
     * Hidden no-arg constructor; an NRTM instance is required.
     */
    private OtherRequirementsTableModel() {
    }

    /**
     * Instantiates a new other requirements table model.
     *
     * @param nrtm the nrtm
     */
    public OtherRequirementsTableModel(NRTM nrtm) {
        super();
        this.nrtm = nrtm;
    }

    /* (non-Javadoc)
     * @see javax.swing.table.TableModel#getColumnCount()
     */
    @Override
    public int getColumnCount() {
        return columnNames.length;
    }

    /* (non-Javadoc)
     * @see javax.swing.table.TableModel#getRowCount()
     */
    @Override
    public int getRowCount() {
        return otherRequirements.size();
    }

    /* (non-Javadoc)
     * @see javax.swing.table.AbstractTableModel#getColumnName(int)
     */
    @Override
    public String getColumnName(int col) {
        return columnNames[col];
    }

    /* (non-Javadoc)
     * @see javax.swing.table.TableModel#getValueAt(int, int)
     */
    @Override
    public Object getValueAt(int row, int col) {
        OtherRequirement thisRequirement = otherRequirements.get(row);
        switch (col) {
            case ReqID_Col:
                return thisRequirement.getReqID();
            case Text_Col:
                return thisRequirement.getOtherRequirement();
            case Value_Col:
                return thisRequirement.getValue();
        }
        throw new IllegalArgumentException("Illegal column: "
                + col);
    }

    /*
     * JTable uses this method to determine the default renderer/editor for
     * each cell.
     */
    /* (non-Javadoc)
     * @see javax.swing.table.AbstractTableModel#getColumnClass(int)
     */
    @Override
    public Class getColumnClass(int c) {
        // FIX: guard against an empty model — getValueAt(0, c) would throw
        // IndexOutOfBoundsException before any rows are loaded.
        if (otherRequirements.isEmpty()) {
            return Object.class;
        }
        return getValueAt(0, c).getClass();
    }

    /* (non-Javadoc)
     * @see javax.swing.table.AbstractTableModel#isCellEditable(int, int)
     */
    @Override
    public boolean isCellEditable(int row, int col) {
        // Only the Value column is user-editable.
        return col == Value_Col;
    }

    /* (non-Javadoc)
     * @see javax.swing.table.AbstractTableModel#setValueAt(java.lang.Object, int, int)
     */
    @Override
    public void setValueAt(Object value, int row, int col) {
        if ((row > -1) && (col == Value_Col)) {
            otherRequirements.get(row).setValue((String) value);
            System.out.println(" Firing Row " + row);
            fireTableCellUpdated(row, col);
        }
    }

    /**
     * Sets the need list selection table.
     *
     * @param needTable the new need list selection table
     */
    public void setNeedListSelectionTable(JTable needTable) {
        this.needListTable = needTable;
    }

    /**
     * Sets the requirement list selection table.
     *
     * @param requirementTable the new requirement list selection table
     */
    public void setRequirementListSelectionTable(JTable requirementTable) {
        this.requirementListTable = requirementTable;
    }

    /* (non-Javadoc)
     * @see javax.swing.event.ListSelectionListener#valueChanged(javax.swing.event.ListSelectionEvent)
     */
    @Override
    public void valueChanged(ListSelectionEvent e) {
        int row;
        if (!e.getValueIsAdjusting()) {
            if (e.getSource() == needListTable.getSelectionModel()) { // Needs Table
                row = needListTable.getSelectedRow();
                if (row >= 0) {
                    // Selecting a need clears the rows until a requirement is chosen.
                    otherRequirements = new ArrayList<OtherRequirement>();
                    currentNeedID = nrtm.getUserNeeds().needs.get(row).getTitle();
                    System.out.println("OtherRequirementsTableModel2: Selected Need " + currentNeedID + " should show 0 requirements.");
                    this.fireTableDataChanged();
                }
            } else { // Requirements Table
                row = requirementListTable.getSelectedRow();
                if ((row >= 0) && (currentNeedID != null) && (!currentNeedID.isEmpty())) {
                    Requirement thisRequirement = (Requirement)nrtm.getUserNeeds().getNeed(currentNeedID).getProjectRequirements().requirements.get(row);
                    // FIX: parameterize the copy (was a raw ArrayList).
                    otherRequirements = new ArrayList<OtherRequirement>(thisRequirement.getOtherRequirements().otherRequirements);
                    this.fireTableDataChanged();
                    System.out.println("OtherRequirementsTableModel2: Reqrow=" + row + " Selected Need " + currentNeedID + " and Requirement " + nrtm.getRequirementsList(currentNeedID).get(row) + " should show " + otherRequirements.size() + " otherRequirements.");
                } else {
                    otherRequirements = new ArrayList<OtherRequirement>();
                    this.fireTableDataChanged();
                    System.out.println("OtherRequirementsTableModel2: Reqrow=" + row + " No Need Selected");
                }
            }
        }
    }
}
|
import turtle

# Define a function to get the coordinates of the mouse click.
def get_mouse_click_coor(x, y):
    # Implement the logic to check if the click is within the boundaries of a state
    # If the click is within a state, reveal the name of the state on the map
    pass  # Placeholder for the actual implementation

# Set up the initial state of the game.
screen = turtle.Screen()
screen.title("U.S States Game")
image = "blank_states_img.gif"  # Assume the image file "blank_states_img.gif" contains the map of the U.S.

# Load the map image onto the screen by registering it as a turtle shape.
screen.addshape(image)
turtle.shape(image)

# Set up the event listener to capture mouse clicks.
turtle.onscreenclick(get_mouse_click_coor)

# Start the game loop to handle user input and display the map.
turtle.mainloop()
# frozen_string_literal: true
require 'rails_helper'
# Feature spec: the contributors index can be filtered between active and
# inactive contributors, and each profile page links only to contributors
# in the same state (active profiles hide inactive ones, inactive profiles
# show both).
RSpec.feature 'Filter contributors', type: :feature do
  let(:user) { create(:user) }
  let!(:active_contributor) { create(:contributor, active: true) }
  let!(:inactive_contributor) { create(:contributor, active: false) }
  let!(:another_contributor) { create(:contributor, active: true) }

  scenario 'Editor lists contributors' do
    visit contributors_path(as: user)

    # Filter tabs show counts: 2 active, 1 inactive.
    expect(page).to have_link('Aktiv 2', href: contributors_path)
    expect(page).to have_link('Inaktiv 1', href: contributors_path(filter: :inactive))
    expect(page).to have_link(nil, href: contributor_path(active_contributor))
    expect(page).not_to have_link(nil, href: contributor_path(inactive_contributor))

    click_on 'Inaktiv'

    expect(page).to have_link(nil, href: contributor_path(inactive_contributor))
    expect(page).not_to have_link(nil, href: contributor_path(active_contributor))
  end

  scenario 'Editor views profile of an active contributor' do
    visit contributor_path(active_contributor, as: user)

    expect(page).to have_link(nil, href: contributor_path(active_contributor))
    expect(page).not_to have_link(nil, href: contributor_path(inactive_contributor))
  end

  scenario 'Editor views profile of an inactive contributor' do
    visit contributor_path(inactive_contributor, as: user)

    expect(page).to have_link(nil, href: contributor_path(active_contributor))
    expect(page).to have_link(nil, href: contributor_path(inactive_contributor))
  end
end
|
// Global test environment setup: Enzyme adapter, chai's expect, and a
// JSDOM-backed fake browser so React components can mount outside a browser.
import chai from 'chai'
import jsdom from 'jsdom'
import Enzyme from 'enzyme'
import Adapter from 'enzyme-adapter-react-16'

Enzyme.configure({ adapter: new Adapter() })

// Expose chai's expect globally for tests.
global.expect = chai.expect

// JSDOM browser globals: document, window, navigator.
const { JSDOM } = jsdom;
const { document } = (new JSDOM('')).window;
global.document = document;
global.window = document.defaultView
global.navigator = global.window.navigator
|
#!/bin/bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This shell script is used to build a cluster and create a namespace from our
# argo workflow
set -o errexit
set -o nounset
set -o pipefail

# Required env (nounset makes missing ones fatal): CLUSTER_NAME, GCP_ZONE,
# GCP_PROJECT, GOPATH, REPO_OWNER, REPO_NAME, GOOGLE_APPLICATION_CREDENTIALS.
# FIX: all expansions are quoted to prevent word-splitting/globbing.
CLUSTER_NAME="${CLUSTER_NAME}"
ZONE="${GCP_ZONE}"
PROJECT="${GCP_PROJECT}"
GO_DIR="${GOPATH}/src/github.com/${REPO_OWNER}/${REPO_NAME}"

echo "Activating service-account"
gcloud auth activate-service-account --key-file="${GOOGLE_APPLICATION_CREDENTIALS}"

echo "Configuring kubectl"
echo "CLUSTER_NAME: ${CLUSTER_NAME}"
echo "ZONE: ${GCP_ZONE}"
echo "PROJECT: ${GCP_PROJECT}"
gcloud --project "${PROJECT}" container clusters get-credentials "${CLUSTER_NAME}" \
  --zone "${ZONE}"
kubectl config set-context "$(kubectl config current-context)" --namespace=default

# FIX: $(...) instead of deprecated backticks. (Value currently unused;
# kept for parity with the original script.)
USER=$(gcloud config get-value account)

echo "All Katib components are running."
kubectl version
kubectl cluster-info

# Dump current Katib state for the CI log.
echo "Katib deployments"
kubectl -n kubeflow get deploy
echo "Katib services"
kubectl -n kubeflow get svc
echo "Katib pods"
kubectl -n kubeflow get pod

cd "${GO_DIR}/test/e2e/v1alpha3"

echo "Running e2e test for never resume experiment"
export KUBECONFIG="$HOME/.kube/config"
./run-e2e-experiment ../../../examples/v1alpha3/never-resume-example.yaml

# Describe for the log, then clean up the experiment.
kubectl -n kubeflow describe suggestion never-resume-example
kubectl -n kubeflow describe experiment never-resume-example
kubectl -n kubeflow delete experiment never-resume-example
exit 0
|
from __future__ import annotations
import typing
from datetime import datetime
import strawberry
from db import connect
@strawberry.type
class Game:
    # One NFL game; scores and quarter reflect current/final state.
    id: str
    home: str
    home_score: int
    visitor_score: int
    visitor: str
    quarter: int
    date: datetime


@strawberry.type
class Acca:
    # An accumulator bet: a named collection of bets owned by a user.
    id: str
    user: User
    name: str
    bets: typing.List[Bet]


@strawberry.type
class User:
    username: str


@strawberry.type
class Bet:
    # A single spread bet within an acca; home_bet is True when the bet
    # was placed on the home team.
    spread: float
    bet_on: str
    home_bet: bool
    net_score: int
    game_id: str
@connect
def get_accas(driver, week, year):
    """ Get a list of accas from neo4j """
    # `driver` is injected by the @connect decorator; week/year select the
    # Week node. Each row yields an Acca with its user and collected bets.
    with driver.session() as session:
        data = session.run(
            """
            MATCH (w:Week {week: $week, year: $year})
            MATCH (w)-[:HAS_GAME]->(g:Game)
            MATCH (h:Team)<-[:HOME_TEAM]-(g)
            MATCH (g)<-[:BET_PLACED]-(b:Bet)<-[:HAS_BET]-(a:Acca)
            MATCH (a)-[:HAS_USER]->(u:User)
            MATCH (b)-[:BET_ON]->(t:Team)
            WITH a, u, {
                spread: b.spread,
                bet_on: t.abbreviation,
                home_bet: t.abbreviation = h.abbreviation,
                net_score: toInteger(g.home_score) - toInteger(g.visitor_score),
                game_id: g.id
            } as bet
            RETURN a, u, COLLECT(bet) AS bets
            """,
            week=week,
            year=year
        ).data()
    # Node properties are splatted into the strawberry types; assumes the
    # neo4j property names match the dataclass fields exactly.
    return [
        Acca(
            **x['a'],
            user=User(**x['u']),
            bets=[Bet(**b) for b in x['bets']]
        ) for x in data
    ]


@connect
def get_games(driver, week, year):
    """ Get a list of games from neo4j """
    with driver.session() as session:
        data = session.run(
            """
            MATCH (w:Week {week: $week, year: $year})
            MATCH (w)-[:HAS_GAME]->(g:Game)
            MATCH (h:Team)<-[:HOME_TEAM]-(g)-[:VISITOR_TEAM]->(v:Team)
            RETURN g, h, v
            """,
            week=week,
            year=year
        ).data()
    return [
        Game(
            **x['g'],
            home=x['h']['abbreviation'],
            visitor=x['v']['abbreviation']
        ) for x in data
    ]


@strawberry.type
class Query:
    # GraphQL root query: games(week, year) and accas(week, year).
    @strawberry.field
    def games(self, week: int, year: int) -> typing.List[Game]:
        return get_games(week, year)

    @strawberry.field
    def accas(self, week: int, year: int) -> typing.List[Acca]:
        return get_accas(week, year)


schema = strawberry.Schema(
    query=Query
)
|
# Launch two pMHCpan training runs in the background on the same train /
# validation split, differing only in the output label.
python3 pMHCpan_v2.py \
    --input-train train_v4_el_single_HLA_9AA_0.txt.gz \
    --input-validate train_v4_el_single_HLA_9AA_1.txt.gz \
    --hidden-size1 800 --hidden-size2 400 -L 1 \
    --olabel split0_9AA_w_pep_len_Aug24_wsun \
    -e 5 --n_iter 5 --save_validate_pred \
    > logfiles/pMHCpan_v2_800_split0_9AA_w_pep_len_Aug24_wsun.log &

# NOTE(review): the log filename below has a double underscore
# ("pep_len__8to11") while --olabel has a single one — presumably a typo;
# confirm which spelling downstream tooling expects before changing.
python3 pMHCpan_v2.py \
    --input-train train_v4_el_single_HLA_9AA_0.txt.gz \
    --input-validate train_v4_el_single_HLA_9AA_1.txt.gz \
    --hidden-size1 800 --hidden-size2 400 -L 1 \
    --olabel split0_9AA_w_pep_len_8to11_Aug28_wsun \
    -e 5 --n_iter 5 --save_validate_pred \
    > logfiles/pMHCpan_v2_800_split0_9AA_w_pep_len__8to11_Aug28_wsun.log &
|
package org.kalima.kalimaandroidexample;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
import org.kalima.androidlib.general.KMsgParcelable;
import java.util.ArrayList;
/**
 * ListView adapter showing cached messages as key/body rows.
 */
public class CacheOverviewAdapter extends BaseAdapter {

    ArrayList<KMsgParcelable> messages;
    LayoutInflater inflater;

    public CacheOverviewAdapter(Context context, ArrayList<KMsgParcelable> messages) {
        this.messages = messages;
        this.inflater = LayoutInflater.from(context);
    }

    @Override
    public int getCount() {
        return messages.size();
    }

    @Override
    public Object getItem(int position) {
        return messages.get(position);
    }

    @Override
    public long getItemId(int position) {
        // FIX: return the position instead of a constant 0 so items have
        // distinct ids. (hasStableIds() is false, so this is safe.)
        return position;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        // FIX: recycle convertView instead of inflating a new view on every
        // call, and inflate with the parent so layout params are resolved.
        if (convertView == null) {
            convertView = inflater.inflate(R.layout.listview_cache_overview, parent, false);
        }
        TextView key = convertView.findViewById(R.id.tv_key);
        TextView body = convertView.findViewById(R.id.tv_body);

        KMsgParcelable message = messages.get(position);
        key.setText(message.getKey());
        body.setText(new String(message.getBody()));

        return convertView;
    }

    /**
     * Replaces the adapter's contents in place and refreshes the list.
     */
    public void setMessages(ArrayList<KMsgParcelable> messages) {
        this.messages.clear();
        this.messages.addAll(messages);
        this.notifyDataSetChanged();
    }
}
|
<reponame>unitasglobal/scalr
// NOTE(review): this is an April Fool's joke page that fakes an
// "infrastructure disaster" with a scripted progress bar. An in-code
// comment below already flags that shipping it is risky; nothing here
// touches real infrastructure — it is purely UI theater.
Scalr.regPage('Scalr.ui.core.disaster', function (loadParams, moduleParams) {
    // Progress bar that cycles through the fake "disaster" messages.
    var pbar2 = Ext.create('Ext.ProgressBar', {
        text:'Executing random scripts on your servers...',
        id:'pbar2',
        cls:'left-align',
        style: {
            margin: 20
        }
    });

    // Counts Abort clicks so each press shows a different error message.
    var eCnt = 0;

    var panel = Ext.create('Ext.form.Panel', {
        width: 700,
        title: 'Infrastructure disaster status',
        bodyPadding: 5,
        fieldDefaults: {
            anchor: '100%',
            labelWidth: 130
        },
        items: [{
            xtype: 'displayfield',
            hideLabel: true,
            value: '<br /><br />'
        }, pbar2, {
            xtype: 'displayfield',
            hideLabel: true,
            value: '<br /><br />'
        }],
        dockedItems: [{
            xtype: 'container',
            cls: 'x-docked-buttons',
            dock: 'bottom',
            layout: {
                type: 'hbox',
                pack: 'center'
            },
            items: [{
                xtype: 'button',
                text: 'Abort',
                handler: function() {
                    eCnt++;
                    if (eCnt > 2)
                        Scalr.message.Error('Not yet implemented // Guys, I know this is a joke for April Fool\'s, but this is actually dangerous code and can screw things up badly for people.');
                    else if (eCnt == 2)
                        Scalr.message.Error('Service temporary unavailable. Please try again.');
                    else
                        Scalr.message.Error('Unable to process your request at the moment. Please try again.');
                }
            }]
        }]
    });

    // Fake status messages; one is shown per 50 ticks of the runner below.
    var items = [
        'Removing backups',
        'Corrupting volumes',
        'Removing row(s) from user tables',
        'Reticulating splines',
        'Publishing SSH keys on 4chan',
        'Redirecting traffic to bit.ly/amazing_horse'
    ];

    // Drives the progress bar with `count` timer ticks (50 ms apart),
    // then invokes cb once v exceeds count.
    var Runner = function(){
        var f = function(v, pbar, btn, count, cb){
            return function(){
                if(v > count){
                    cb();
                }else{
                    // 50 ticks per message; n indexes into `items`.
                    var n = parseInt(v / 50);
                    pbar.updateProgress(v/count, items[n]);
                }
            };
        };
        return {
            run : function(pbar, btn, count, cb) {
                var ms = 50;
                for(var i = 1; i < (count+2); i++){
                    setTimeout(f(i, pbar, btn, count, cb), i*ms);
                }
            }
        };
    }();

    Runner.run(pbar2, null, parseInt(items.length)*50, function() {
        pbar2.reset();
        pbar2.updateText('$899 has been charged to your card ending in **** 6001. Thank you for your business!');
    });

    return panel;
});
|
/*
* Copyright (c) 2006-2007, AIOTrade Computing Co. and Contributors
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* o Neither the name of AIOTrade Computing Co. nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.aiotrade.modules.ui.options.general;
import java.awt.Image;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import org.netbeans.spi.options.OptionsCategory;
import org.netbeans.spi.options.OptionsPanelController;
import org.openide.util.ImageUtilities;
import org.openide.util.NbBundle;
/**
*
* @author <NAME>
*/
/**
 * NetBeans options category for the application's "General" settings page:
 * supplies icon, localized labels, and the panel controller.
 *
 * @author <NAME>
 */
public final class GeneralOptionsCategory extends OptionsCategory {

    @Override
    public Icon getIcon() {
        // NOTE(review): loads the icon from the platform's own options
        // module resources rather than this module — verify the resource
        // is on the classpath at runtime (loadImage returns null otherwise).
        Image image = ImageUtilities.loadImage("org/netbeans/modules/options/resources/generalOptions.png");
        return new ImageIcon(image);
    }

    public String getCategoryName() {
        return loc("CTL_General_Category_Name");
    }

    public String getTitle() {
        return loc("CTL_General_Title");
    }

    public OptionsPanelController create() {
        return new GeneralOptionsPanelController();
    }

    /** Looks up a localized string from this class's resource bundle. */
    private static String loc(String key) {
        return NbBundle.getMessage(GeneralOptionsCategory.class, key);
    }

    public String getDisplayName() {
        return loc("CTL_General_Title");
    }

    public String getTooltip() {
        return loc("CTL_General_Title");
    }
}
|
#pragma once
#include "rx.h"
#include <memory>
#include <utility>
namespace windberry {
namespace rx {
// Rate-limits a stream of progress values: a value p is forwarded only when
// it has advanced by more than 0.002 beyond the last emitted progress AND
// more than 0.01 time units (per get_now's clock) have elapsed since the
// last emission; otherwise an empty observable is produced.
// NOTE(review): assumes Time is comparable/addable with float literals —
// confirm for integer-typed clocks.
template <typename Clock, typename Observable>
auto throttle_progress(Clock get_now, Observable o) {
    using Time = typename function_traits<Clock>::result_type;
    // Shared mutable state so every invocation of the bound lambda sees the
    // most recently emitted progress/time pair.
    struct last_state {
        float progress = 0;
        Time update_time = 0;
    };
    std::shared_ptr<last_state> last = std::make_shared<last_state>();
    return o.bind([last = std::move(last), get_now](float p){
        if (p > last->progress + 0.002f) { // FIXME default frequency classes? (UI progress, frame rate, ...)
            Time now = get_now();
            if (now > last->update_time + 0.01f) {
                // Passed both thresholds: record and re-emit the value.
                last->progress = p;
                last->update_time = now;
                return pure_observable(p).any();
            }
        }
        // Suppressed update: emit nothing downstream.
        return empty_observable<float>().any();
    });
}
}
}
|
#!/bin/bash
# TMPDIR: prefer the RAM-backed /tmpfs mount when present, else /tmp.
if [ -d '/tmpfs' ]; then TMPDIR='/tmpfs'; else TMPDIR='/tmp'; fi
# LOGTO: log destination; defaults to <tmpdir>/<script-name>.log unless the caller overrides it.
if [ -z "${LOGTO:-}" ]; then LOGTO="${TMPDIR}/${0##*/}.log"; fi
## timezone
# set_timezone TZ
#
# Installs /usr/share/zoneinfo/<TZ> as /etc/localtime and echoes the
# zoneinfo path used.  Echoes an empty string (and logs a warning) when
# TZ is empty or does not name a regular zoneinfo file.
set_timezone()
{
  TZ=${1}
  ZONEINFO="/usr/share/zoneinfo/${TZ}"
  # Require a non-empty TZ naming a regular file; the previous "-e" test
  # also accepted directories (an empty TZ resolves to the zoneinfo root
  # directory, which "cp" cannot copy, yet the path was still echoed as
  # if installation had succeeded).
  if [ ! -z "${TZ}" ] && [ -f "${ZONEINFO}" ]; then
    cp "${ZONEINFO}" /etc/localtime
    if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- zoneinfo: ${ZONEINFO}" >> ${LOGTO} 2>&1; fi
  else
    echo "+++ WARN $0 $$ -- cannot locate time zone: ${TZ}" >> ${LOGTO} 2>&1
    ZONEINFO=
  fi
  # Function "returns" the path (or empty) on stdout for command substitution.
  echo "${ZONEINFO}"
}
## Bus 001 Device 004: ID 046d:0821 Logitech, Inc. HD Webcam C910
## Bus 001 Device 004: ID 1415:2000 Nam Tai E&E Products Ltd. or OmniVision Technologies, Inc. Sony Playstation Eye
###
### FUNCTIONS
###
# Helper libraries: motion_* and service_*/hzn_* functions come from these.
source /usr/bin/motion-tools.sh
source /usr/bin/service-tools.sh
###
### MAIN
###
## initialize horizon
hzn_init
## configure service
# Build the JSON configuration blobs from environment variables (with
# defaults).  The expansions are spliced in as raw JSON, not shell-escaped.
CONFIG_SERVICES='[{"name":"cpu","url":"http://cpu"},{"name":"mqtt","url":"http://mqtt"},{"name":"hal","url":"http://hal"}]'
CONFIG_MQTT='{"host":"'${MQTT_HOST:-}'","port":'${MQTT_PORT:-1883}',"username":"'${MQTT_USERNAME:-}'","password":"'${MQTT_PASSWORD:-}'"}'
CONFIG_MOTION='{"post_pictures":"'${MOTION_POST_PICTURES:-best}'","locate_mode":"'${MOTION_LOCATE_MODE:-off}'","event_gap":'${MOTION_EVENT_GAP:-60}',"framerate":'${MOTION_FRAMERATE:-5}',"threshold":'${MOTION_THRESHOLD:-5000}',"threshold_tune":'${MOTION_THRESHOLD_TUNE:-false}',"noise_level":'${MOTION_NOISE_LEVEL:-0}',"noise_tune":'${MOTION_NOISE_TUNE:-false}',"log_level":'${MOTION_LOG_LEVEL:-9}',"log_type":"'${MOTION_LOG_TYPE:-all}'"}'
CONFIG_SERVICE='{"log_level":"'${LOG_LEVEL:-}'","debug":'${DEBUG:-false}',"group":"'${MOTION_GROUP:-}'","device":"'$(motion_device)'","timezone":"'$(set_timezone ${MOTION_TIMEZONE:-})'","services":'"${CONFIG_SERVICES}"',"mqtt":'"${CONFIG_MQTT}"',"motion":'"${CONFIG_MOTION}"'}'
## initialize service
service_init ${CONFIG_SERVICE}
## initialize motion
motion_init ${CONFIG_MOTION}
## start motion
motion_start
if [ "${DEBUG}" == 'true' ]; then echo "--- INFO -- $0 $$ -- motion started; PID:" $(motion_pid) >> ${LOGTO} 2>&1; fi
## start motion watchdog
motion_watchdog
if [ "${DEBUG}" == 'true' ]; then echo "--- INFO -- $0 $$ -- motion watchdog started" >> ${LOGTO} 2>&1; fi
## initialize the service's output file, seeded with the startup timestamp
OUTPUT_FILE="${TMPDIR}/${0##*/}.${SERVICE_LABEL}.$$.json"
echo '{"timestamp":"'$(date -u +%FT%TZ)'","date":"'$(date +%s)'"}' > "${OUTPUT_FILE}"
## set directory to watch (motion writes its JSON/JPEG/GIF artifacts here)
DIR=/var/lib/motion
## forever
# Main loop: publish the current state, then block on filesystem events from
# motion's output directory and translate each artifact into service updates.
while true; do
  # update service
  service_update ${OUTPUT_FILE}
  if [ "${DEBUG}" == 'true' ]; then echo "--- INFO -- $0 $$ -- waiting on directory: ${DIR}" >> ${LOGTO} 2>&1; fi
  # wait (forever) on changes in ${DIR}
  # NOTE(review): the "while read" below runs in a pipeline subshell, so
  # variables it sets (OUT, IMG_B64_FILE, GIF_B64_FILE, ...) do not
  # propagate back to the outer loop — confirm this is intended.
  inotifywait -m -r -e close_write --format '%w%f' "${DIR}" | while read FULLPATH; do
    if [ "${DEBUG}" == 'true' ]; then echo "--- INFO -- $0 $$ -- inotifywait ${FULLPATH}" >> ${LOGTO} 2>&1; fi
    if [ ! -z "${FULLPATH}" ]; then
      # process updates
      case "${FULLPATH##*/}" in
        # JSON whose basename contains at least two hyphens — presumably a
        # per-image record with a companion .jpg; confirm against motion-tools.
        *-*-*.json)
          if [ -s "${FULLPATH}" ]; then
            OUT=$(jq '.' "${FULLPATH}")
            if [ -z "${OUT}" ]; then OUT='null'; fi
            # don't update always
            if [ "${MOTION_POST_PICTURES}" == 'all' ]; then
              # splice the image record into the output and stage the JPEG as base64
              jq '.motion.image='"${OUT}" "${OUTPUT_FILE}" > "${OUTPUT_FILE}.$$" && mv -f "${OUTPUT_FILE}.$$" "${OUTPUT_FILE}"
              IMAGE_PATH="${FULLPATH%.*}.jpg"
              if [ -s "${IMAGE_PATH}" ]; then
                IMG_B64_FILE="${TMPDIR}/${IMAGE_PATH##*/}"; IMG_B64_FILE="${IMG_B64_FILE%.*}.b64"
                base64 -w 0 "${IMAGE_PATH}" | sed -e 's|\(.*\)|{"motion":{"image":{"base64":"\1"}}}|' > "${IMG_B64_FILE}"
              fi
            fi
          else
            echo "+++ WARN $0 $$ -- no content in ${FULLPATH}; continuing..." >> ${LOGTO} 2>&1
            continue
          fi
          if [ "${MOTION_POST_PICTURES}" != 'all' ]; then
            if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- ${FULLPATH}: posting ONLY ${MOTION_POST_PICTURES} picture; continuing..." >> ${LOGTO} 2>&1; fi
            continue
          fi
          ;;
        # JSON with a single hyphen — an event record; an "images" array marks
        # the end of the event, its absence marks the start.
        *-*.json)
          if [ -s "${FULLPATH}" ]; then
            OUT=$(jq '.' "${FULLPATH}")
            if [ -z "${OUT}" ]; then OUT='null'; fi
            if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- EVENT:" $(echo "${OUT}" | jq -c .) >> ${LOGTO} 2>&1; fi
          else
            echo "+++ WARN $0 $$ -- EVENT: no content in ${FULLPATH}" >> ${LOGTO} 2>&1
            continue
          fi
          # test for end
          IMAGES=$(jq -r '.images[]?' "${FULLPATH}")
          if [ -z "${IMAGES}" ] || [ "${IMAGES}" == 'null' ]; then
            if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- ${FULLPATH}: EVENT start; continuing..." >> ${LOGTO} 2>&1; fi
            continue
          else
            if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- ${FULLPATH}: EVENT end" >> ${LOGTO} 2>&1; fi
            # update event
            jq '.motion.event='"${OUT}" "${OUTPUT_FILE}" > "${OUTPUT_FILE}.$$" && mv -f "${OUTPUT_FILE}.$$" "${OUTPUT_FILE}"
            if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- EVENT: updated ${OUTPUT_FILE} with event JSON:" $(echo "${OUT}" | jq -c) >> ${LOGTO} 2>&1; fi
            # check for GIF (animated capture of the whole event)
            IMAGE_PATH="${FULLPATH%.*}.gif"
            if [ -s "${IMAGE_PATH}" ]; then
              GIF_B64_FILE="${TMPDIR}/${IMAGE_PATH##*/}"; GIF_B64_FILE="${GIF_B64_FILE%.*}.b64"
              if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- EVENT: found GIF: ${IMAGE_PATH}; creating ${GIF_B64_FILE}" >> ${LOGTO} 2>&1; fi
              base64 -w 0 "${IMAGE_PATH}" | sed -e 's|\(.*\)|{"motion":{"event":{"base64":"\1"}}}|' > "${GIF_B64_FILE}"
            fi
            rm -f "${IMAGE_PATH}"
            # find posted picture (the representative image chosen for the event)
            POSTED_IMAGE_JSON=$(jq -r '.image?' "${FULLPATH}")
            if [ ! -z "${POSTED_IMAGE_JSON}" ] && [ "${POSTED_IMAGE_JSON}" != 'null' ]; then
              PID=$(echo "${POSTED_IMAGE_JSON}" | jq -r '.id?')
              if [ ! -z "${PID}" ] && [ "${PID}" != 'null' ]; then
                IMAGE_PATH="${FULLPATH%/*}/${PID}.jpg"
                if [ -s "${IMAGE_PATH}" ]; then
                  IMG_B64_FILE="${TMPDIR}/${IMAGE_PATH##*/}"; IMG_B64_FILE="${IMG_B64_FILE%.*}.b64"
                  base64 -w 0 "${IMAGE_PATH}" | sed -e 's|\(.*\)|{"motion":{"image":{"base64":"\1"}}}|' > "${IMG_B64_FILE}"
                fi
              fi
              rm -f "${IMAGE_PATH}"
              # update output to posted image
              jq '.motion.image='"${POSTED_IMAGE_JSON}" "${OUTPUT_FILE}" > "${OUTPUT_FILE}.$$" && mv -f "${OUTPUT_FILE}.$$" "${OUTPUT_FILE}"
            fi
            # cleanup all artifacts belonging to this event
            find "${FULLPATH%/*}" -name "${FULLPATH%.*}*" -print | xargs rm -f
          fi
          ;;
        *)
          # anything that is not a JSON record is ignored
          if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- ${FULLPATH}; continuing..." >> ${LOGTO} 2>&1; fi
          continue
          ;;
      esac
    else
      if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- timeout" >> ${LOGTO} 2>&1; fi
    fi
    # merge image base64 iff exists
    if [ ! -z "${IMG_B64_FILE:-}" ] && [ -s "${IMG_B64_FILE}" ]; then
      if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- found ${IMG_B64_FILE}" >> ${LOGTO} 2>&1; fi
      jq -s 'reduce .[] as $item ({}; . * $item)' "${OUTPUT_FILE}" "${IMG_B64_FILE}" > "${OUTPUT_FILE}.$$" && mv "${OUTPUT_FILE}.$$" "${OUTPUT_FILE}"
      rm -f "${IMG_B64_FILE}"
      IMG_B64_FILE=
    fi
    # merge GIF base64 iff exists
    if [ ! -z "${GIF_B64_FILE:-}" ] && [ -s "${GIF_B64_FILE}" ]; then
      if [ "${DEBUG}" == 'true' ]; then echo "??? DEBUG -- $0 $$ -- found ${GIF_B64_FILE}" >> ${LOGTO} 2>&1; fi
      jq -s 'reduce .[] as $item ({}; . * $item)' "${OUTPUT_FILE}" "${GIF_B64_FILE}" > "${OUTPUT_FILE}.$$" && mv "${OUTPUT_FILE}.$$" "${OUTPUT_FILE}"
      rm -f "${GIF_B64_FILE}"
      GIF_B64_FILE=
    fi
    # update output
    service_update ${OUTPUT_FILE}
  done
done
# unreachable under normal operation; signals abnormal exit to the supervisor
exit 1
|
class HTTPRequestHandler:
    def process_post_body(self, headers, rfile):
        """Read and decode the request body described by ``headers``.

        Args:
            headers: mapping of request headers; only ``Content-Length``
                is consulted.
            rfile: binary file-like object positioned at the body.

        Returns:
            The body decoded as UTF-8 with surrounding whitespace
            stripped; an empty string when no (or an invalid)
            Content-Length header is present.
        """
        # Previously int(headers.get('Content-Length')) raised TypeError when
        # the header was absent; treat missing/malformed lengths as "no body".
        try:
            content_len = int(headers.get('Content-Length', 0))
        except (TypeError, ValueError):
            content_len = 0
        if content_len <= 0:
            return ''
        return rfile.read(content_len).decode('utf-8').strip()
# Copyright 2016 The dev Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
# Demo 1: restart inotifywait for every event.  Without -m the process
# exits after the first matching event, so events arriving during the
# sleep are lost.
cmd_demo1() {
  # ./.git/ close_write,CLOSE index.lock
  # Pool performance
  while inotifywait -r -e close_write --exclude "./\.git/" ./; do
    # Unable to get the output of inotifywait this way.
    echo "changed"
    sleep 3
    echo "end"
  done
}
# Demo 2: -m keeps inotifywait running, but then it never exits, so the
# "while" condition never re-evaluates and the loop body never runs —
# the output of inotifywait cannot be captured this way.
cmd_demo2() {
  # ./.git/ close_write,CLOSE index.lock
  # The -m flag monitors the file, instead of exiting on the first event.
  while inotifywait -m -r -e close_write --exclude "./\.git/" ./; do
    # Nothing is caught here; inotifywait's output is not available.
    echo "changed"
    sleep 3
    echo "end"
  done
}
# Demo 3: pipe monitor-mode inotifywait into "read" — this is the working
# pattern: each event line is split into directory, event name, and file.
cmd_demo3() {
  # ./.git/ close_write,CLOSE index.lock
  # The -m flag monitors the file, instead of exiting on the first event.
  inotifywait -m -r -e close_write --exclude "./\.git/" ./ | while read dir event file; do
    echo "changed"
    sleep 3
    echo $dir
    echo "end"
  done
}
# Demo 4: one-shot inotifywait per iteration, watching writes, deletes and
# moves, excluding .git and build target directories.
cmd_demo4() {
  # ./.git/ close_write,CLOSE index.lock
  while inotifywait -r -e close_write,delete,move --exclude "\./(\.git|.*/target)/" ./; do
    echo "changed"
  done
}
|
import cors from '@koa/cors';
import bodyParser from 'koa-bodyparser';
import staticData from 'koa-static';
import authMiddleware from '../middlewares/auth.middleware';
import apiRoutes from './api';
// Wires the Koa middleware stack onto `app`.  Registration order matters:
// the error handler is first so it wraps everything downstream, and the
// 404 handler is last so it only fires when no other middleware responded.
export default (app) => {
  // Error handler: converts thrown errors into a JSON response.
  app.use(async (ctx, next) => {
    try {
      await next();
    } catch (err) {
      console.log('ErrorHandler: ', err);
      // Honor the error's own HTTP status when present.
      ctx.status = err.status || 500;
      ctx.body = {
        message: err.message,
        errors: err.errors
      };
    }
  });
  app.use(cors());
  app.use(bodyParser());
  // Request logger (verbose fields left commented out for debugging).
  app.use(async (ctx, next) => {
    console.log('------------------------------------------');
    console.log('Href: ', ctx.href);
    // console.log('Query: ', ctx.query);
    // console.log('Params: ', ctx.params);
    // console.log('Body: ', ctx.request.body);
    // console.log('Headers: ', ctx.request.headers);
    await next();
  });
  app.use(authMiddleware());
  app.use(apiRoutes());
  // Static assets served from the project's public/ directory.
  app.use(staticData(__dirname + '/../../public'));
  // Page not found handler
  app.use(async ctx => {
    ctx.status = 404;
    ctx.body = 'Page not found';
  });
}
|
<reponame>Kun-a-Kun/Algorithms-Fourth-Edition-Exercises<filename>src/Chapter1_2Text/Accumulator.java
package Chapter1_2Text;
import edu.princeton.cs.algs4.StdIn;
/**
 * Running mean/variance accumulator using Welford's online algorithm,
 * which avoids storing the samples and is numerically stable.
 */
public class Accumulator {
    private double m;   // running mean of the samples seen so far
    private double s;   // running sum of squared deviations from the mean
    private int N;      // number of samples seen so far

    /** Folds one observation into the running statistics (Welford's update). */
    public void addDataValue(double x) {
        N++;
        // The deviation must be taken against the mean *before* it is updated.
        double deviation = x - m;
        s += 1.0 * (N - 1) / N * deviation * deviation;
        m += deviation / N;
    }

    /** @return the sample mean (0 before any data has been added). */
    public double mean() {
        return m;
    }

    /** @return the sample variance s/(N-1); NaN until at least two samples exist. */
    public double var() {
        return s / (N - 1);
    }

    /** @return the sample standard deviation. */
    public double stddev() {
        return Math.sqrt(this.var());
    }

    /** Reads doubles from standard input and prints the summary statistics. */
    public static void main(String[] args) {
        Accumulator stats = new Accumulator();
        while (!StdIn.isEmpty()) {
            stats.addDataValue(StdIn.readDouble());
        }
        System.out.println("Mean: " + stats.mean());
        System.out.println("Var: " + stats.var());
        System.out.println("Stddev: " + stats.stddev());
    }
}
|
import java.io.*;
import java.net.*;
import java.nio.*;
import java.util.*;
/*
* Primitive TCP Tagging java client for OpenViBE 1.2.x
*
* @author <NAME> & <NAME> / Inria
* @date 25.Jan.2019
* @version 0.1
* @todo Add error handling
*/
/**
 * Minimal TCP Tagging client for the OpenViBE Acquisition Server.
 * Each stimulation is a 24-byte little-endian packet:
 * 8 unused bytes, the stimulation id, and a timestamp (0 = immediate).
 */
class StimulusSender
{
    Socket m_clientSocket;
    DataOutputStream m_outputStream;

    // Open connection to Acquisition Server TCP Tagging
    boolean open(String host, Integer port) throws Exception
    {
        m_clientSocket = new Socket(host, port);
        m_outputStream = new DataOutputStream(m_clientSocket.getOutputStream());
        return true;
    }

    // Close connection
    boolean close() throws Exception
    {
        m_clientSocket.close();
        return true;
    }

    // Send stimulation with a timestamp.
    boolean send(Long stimulation, Long timestamp) throws Exception
    {
        ByteBuffer b = ByteBuffer.allocate(24);
        b.order(ByteOrder.LITTLE_ENDIAN); // Assumes AS runs on LE architecture
        b.putLong(0); // Not used
        b.putLong(stimulation); // Stimulation id
        b.putLong(timestamp); // Timestamp: 0 = immediate
        m_outputStream.write(b.array());
        return true;
    }

    /**
     * Interactive console menu driving one or two tagging clients.
     * Choices: 1 connect, 2 send start marker, 3 send end marker,
     * 4 disconnect, 5 enable second client; any value outside 1..5 exits.
     */
    public static void main(String argv[]) throws Exception
    {
        StimulusSender sender1 = new StimulusSender();
        StimulusSender sender2 = new StimulusSender();
        String client1 = "127.0.0.1";
        String client2 = "172.16.17.32";
        Long startMarker = 1111L;
        Long endMarker = 1234L;
        Scanner in = new Scanner(System.in);
        int choice;
        boolean isConnected = false, enableSecondClient = false;
        do {
            System.out.print("\n\nTrigger for OpenViBE.\n********************* \n\n1)Connect\n2)Start Trigger\n3)End Trigger\n4)Disconnect\n5)Enable Second Client\n6)Exit. ");
            if(isConnected) {
                // BUG FIX: this println was missing its closing ");" and the
                // class did not compile; also fixed the "conneted" typo below.
                System.out.println("\nClient 1 connected at "+client1 + ".\n");
                if(enableSecondClient){
                    System.out.println("Client 2 connected at "+client2+".\n");
                }
            }
            System.out.println("\n\nEnter Selection : ");
            choice = in.nextInt();
            if (choice == 1) {
                System.out.print("\nOpening port for Client 1");
                sender1.open(client1, 15361);
                System.out.print("\n############Client 1 Connected at "+ client1 + ".############\n");
                if(enableSecondClient) {
                    System.out.print("\nOpening port for Client 2");
                    // Use the client2 variable instead of repeating the literal address.
                    sender2.open(client2, 15361);
                    System.out.print("\n############Client 2 Connected at "+ client2 + ".############\n");
                }
                isConnected = true;
            } else if (choice == 2) {
                if(!isConnected) {
                    System.out.println("\nPlease Connect to server first.");
                    continue;
                }
                System.out.print("\nSending Start marker..\n ");
                // Send identity of the event (stimulation id), time of occurrence.
                // The preferred mechanism is to use time '0' and call the send()
                // function immediately after each event has been rendered/played.
                //sender.send(278L, 0L); // Some event
                sender1.send(startMarker, 0L); // Another one...
                if(enableSecondClient) {
                    sender2.send(startMarker, 0L); // Another one...
                }
                System.out.print("\nSuccess! Look for "+startMarker+" in csv file.");
                // etc ...
                // To verify that the stimulations are received correctly by
                // AS, set LogLevel to Trace in 'openvibe.conf' before running AS.
                // Note that instead of stamp=0, AS may print the stamp it replaces
                // the 0 with. Finally, network-acquisition.xml (in box-tutorials/)
                // scenario can be used to display the events in Designer as combined
                // with the signal, for example using the Generic Oscillator driver
                // in AS.
            } else if (choice == 3) {
                if(!isConnected) {
                    System.out.println("\nPlease Connect to server first.");
                    continue;
                }
                System.out.print("\nSending End marker.. \n");
                sender1.send(endMarker, 0L);
                if(enableSecondClient) {
                    sender2.send(endMarker, 0L); // Another one...
                }
                System.out.print("\nSuccess! Look for "+endMarker+" in csv file.");
            } else if (choice == 4) {
                if(!isConnected) {
                    System.out.println("\nPlease Connect to server first.");
                    continue;
                }
                sender1.close();
                if(enableSecondClient) {
                    sender2.close();
                }
                isConnected = false;
                System.out.print("\nTerminated all connections");
            } else if (choice == 5) {
                enableSecondClient = true;
                System.out.print("\nSecond Client Enabled.\n");
            }
        } while(!(choice > 5 || choice < 1));
        // Tidy up any connections still open when the menu exits.
        if(isConnected) {
            sender1.close();
            if(enableSecondClient) {
                sender2.close();
            }
        }
        System.out.print("\nCheers");
    }
}
|
#ifndef CONFETTI_PAL_H
#define CONFETTI_PAL_H
/* This is adapted from the confetti routine created by <NAME> */
/* Usage - confetti_pal();
*
* thisfade
* thisdelay
* currentPalette and targetPalette
* thisdiff
* thisindex
* thisinc
* thisbright
*/
// Random colored speckles that blink in and fade smoothly.
// Relies on globals declared elsewhere: leds, NUM_LEDS, thisfade, thisindex,
// thisdiff, thisinc, thisbright, currentPalette, currentBlending.
void confetti_pal() { // random colored speckles that blink in and fade smoothly
  // EVERY_N_SECONDS(5) {
  // SetupSimilar4Palette();
  // }
  // Dim the whole strip a little each frame so old speckles fade out.
  fadeToBlackBy(leds, NUM_LEDS, thisfade);
  int pos = random8(NUM_LEDS); // Pick an LED at random.
  leds[pos] = ColorFromPalette(currentPalette, thisindex + random8(thisdiff)/4 , thisbright, currentBlending); // Munge the values and pick a colour from the palette
  thisindex = thisindex + thisinc; // base palette counter increments here.
} // confetti_pal()
#endif
|
<reponame>adligo/models_core.adligo.org<gh_stars>0
package org.adligo.models.core.shared;
import org.adligo.i.util.shared.I_Immutable;
import org.adligo.i.util.shared.StringUtils;
/**
 * Immutable, validated phone number: a non-empty string of ASCII digits.
 * Validation failures surface as InvalidParameterException tagged with
 * the originating method name.
 */
public class PhoneNumber implements I_Validateable, I_PhoneNumber, I_Immutable
{
    public static final String PHONE_NUMBER = "PhoneNumber";
    public static final String SET_NUMBER = "setNumber";
    private static final String DIGITS = "0123456789";

    protected String number;

    public PhoneNumber() {}

    public PhoneNumber(String number) throws InvalidParameterException {
        try {
            setNumber(number);
        } catch (InvalidParameterException e) {
            // Re-tag the failure with the constructor context.
            throw new InvalidParameterException(e.getMessage(), PHONE_NUMBER, e);
        }
    }

    public PhoneNumber(I_PhoneNumber p) throws InvalidParameterException {
        try {
            setNumber(p.getNumber());
        } catch (InvalidParameterException e) {
            throw new InvalidParameterException(e.getMessage(), PHONE_NUMBER, e);
        }
    }

    /* (non-Javadoc)
     * @see org.adligo.models.core.client.I_PhoneNumber#getNumber()
     */
    public String getNumber() {
        return number;
    }

    /**
     * Validates and stores the number: it must be non-empty and consist
     * solely of the characters '0'..'9'.
     */
    private void setNumber(String p) throws InvalidParameterException {
        if (StringUtils.isEmpty(p)) {
            throw new InvalidParameterException(ModelsCoreConstantsObtainer.getConstants()
                    .getPhoneEmptyError(),SET_NUMBER);
        }
        for (char c : p.toCharArray()) {
            if (DIGITS.indexOf(c) == -1) {
                throw new InvalidParameterException(ModelsCoreConstantsObtainer.getConstants()
                        .getPhoneInvalidCharacterError(),SET_NUMBER);
            }
        }
        number = p;
    }

    /** Re-runs validation by round-tripping through the copy constructor. */
    public void isValid() throws ValidationException {
        try {
            new PhoneNumber(this);
        } catch (InvalidParameterException e) {
            throw new ValidationException(e.getMessage(), I_Validateable.IS_VALID, e);
        }
    }

    public int hashCode() {
        return number == null ? 0 : number.hashCode();
    }

    /** Equality is by number against any I_PhoneNumber implementation. */
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!(obj instanceof I_PhoneNumber))
            return false;
        I_PhoneNumber other = (I_PhoneNumber) obj;
        if (number == null)
            return other.getNumber() == null;
        return number.equals(other.getNumber());
    }

    public String toString() {
        return number;
    }

    public String getImmutableFieldName() {
        return "number";
    }
}
|
#!/usr/bin/env bash
# This script is for releasing the Orange Judge web application.
# Currently it is used by Travis-CI.
# Files in target/release will be uploaded into a Google Cloud Storage after each compilation of master branch.
# Rebuild target/release from scratch so stale artifacts never ship.
if [ -d "target/release" ]; then
echo "Release directory exists, delete it."
rm -rf target/release
fi
echo "Create release directory."
mkdir target/release
echo "Copy files to release directory."
# Copy install script
cp scripts_deploy/install.sh target/release
# Copy packaged zip
cp target/universal/*.zip target/release
# Delete share folder (strip the docs/share tree from the distributed zip)
zip --delete target/release/*.zip oj_web-\*/share/\*
# Create latest zip
# NOTE(review): "LASTEST" looks like a typo for "LATEST", but downstream
# download links may depend on this exact name — confirm before renaming.
cp target/release/*.zip target/release/oj_web-LASTEST.zip
echo "Release process finished."
|
# Jump straight to the scarfvim checkout.
alias sv="cd $HOME/scarfvim/"
# SV: root of the scarfvim checkout; SVC: its configs directory.
export SV="$HOME/scarfvim"
export SVC="$SV/configs"
|
#!/bin/bash
# Copyright 2016 - 2018 Crunchy Data Solutions, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Tear down the pgbouncer example: delete its pods, services, and secrets,
# then wait for each pod to terminate.  CCP_CLI (kubectl/oc) and
# CCP_NAMESPACE must be set; ${VAR?} aborts with an error if they are not.
source ${CCPROOT}/examples/common.sh
echo_info "Cleaning up.."
${CCP_CLI?} delete --namespace=${CCP_NAMESPACE?} pod pg-primary pg-replica
${CCP_CLI?} delete --namespace=${CCP_NAMESPACE?} pod pgbouncer-primary pgbouncer-replica
${CCP_CLI?} delete --namespace=${CCP_NAMESPACE?} service pg-primary pg-replica
${CCP_CLI?} delete --namespace=${CCP_NAMESPACE?} service pgbouncer-primary pgbouncer-replica
${CCP_CLI?} delete --namespace=${CCP_NAMESPACE?} secret pgbouncer-secrets
${CCP_CLI?} delete --namespace=${CCP_NAMESPACE?} secret pgsql-secrets
# Block until each deleted pod is actually gone.
$CCPROOT/examples/waitforterm.sh pgbouncer-primary ${CCP_CLI?}
$CCPROOT/examples/waitforterm.sh pgbouncer-replica ${CCP_CLI?}
$CCPROOT/examples/waitforterm.sh pg-primary ${CCP_CLI?}
$CCPROOT/examples/waitforterm.sh pg-replica ${CCP_CLI?}
|
// Module descriptor: an IIFE that evaluates to this module's metadata,
// declaring a single dependency on "mve-DOM".
(function(){
    return {
        dependences:{
            "mve-DOM":1
        }
    };
})() |
import { GuildMember, PermissionResolvable } from "discord.js";
import { MessageButton } from "../buttons/MessageButton";
import { ActionRow } from "../buttons/ActionRow";
/** Union of supported message components (currently only buttons). */
declare type Component = MessageButton;
/** Presumably formats a millisecond duration as human-readable text — confirm in the implementation. */
declare function msToTime(ms: number): string;
/** Describes which of `perms` the member lacks, as a string. */
declare function missingPermissions(member: GuildMember, perms: PermissionResolvable): string;
/** Converts a component into its raw API payload, or undefined when it cannot be resolved. */
declare function resolveComponent(component: Component): {
    type: number;
    style: number;
    label: string;
    emoji: {
        id?: string | null | undefined;
        name?: string | null | undefined;
    };
    url: string;
    custom_id: string;
    disabled: boolean;
} | undefined;
/** Converts an action row into its raw API payload. */
declare function resolveActionRow(actionRow: ActionRow): {
    type: number;
    components: any[];
};
/** Whether the given string is (or contains) an emoji — see implementation for exact semantics. */
declare function isEmoji(emoji: string): boolean;
/** Whether the given value is a usable message component. */
declare function isComponent(component: any): boolean;
export { msToTime, missingPermissions, resolveComponent, isEmoji, resolveActionRow, isComponent, };
|
# frozen_string_literal: true
require 'json'
module Oso
module Polar
module FFI
# Wrapper class for Error FFI pointer + operations: reads the JSON error
# payload produced by the Rust core and maps it onto Ruby exceptions.
class Error < ::FFI::AutoPointer
  # Lazily read (and memoize) the C string behind the pointer as UTF-8.
  def to_s
    @to_s ||= read_string.force_encoding('UTF-8')
  end

  Rust = Module.new do
    extend ::FFI::Library
    ffi_lib FFI::LIB_PATH
    attach_function :get, :polar_get_error, [], Error
    attach_function :free, :string_free, [Error], :int32
  end
  private_constant :Rust

  # Check for an FFI error and convert it into a Ruby exception.
  #
  # @param enrich_message [#call] callback used to rewrite messages and stack traces.
  # @return [::Oso::Polar::Error] if there's an FFI error.
  # @return [::Oso::Polar::FFIErrorNotFound] if there isn't one.
  def self.get(enrich_message) # rubocop:disable Metrics/AbcSize, Metrics/CyclomaticComplexity, Metrics/MethodLength, Metrics/PerceivedComplexity
    error = Rust.get
    return ::Oso::Polar::FFIErrorNotFound if error.null?
    error = JSON.parse(error.to_s)
    msg = error['formatted']
    kind, body = error['kind'].first
    # Not all errors have subkind and details.
    # TODO (gj): This bug may exist in other libraries.
    if body.is_a? Hash
      subkind, details = body.first
    else
      # Parallel assignment from nil leaves both variables nil.
      subkind, details = nil
    end
    # Enrich error message and stack trace
    msg = enrich_message.call(msg) if msg
    if details
      details['stack_trace'] = enrich_message.call(details['stack_trace']) if details['stack_trace']
      details['msg'] = enrich_message.call(details['msg']) if details['msg']
    end
    # Dispatch on the top-level error kind; an unrecognized kind yields nil.
    case kind
    when 'Parse'
      parse_error(subkind, msg: msg, details: details)
    when 'Runtime'
      runtime_error(subkind, msg: msg, details: details)
    when 'Operational'
      operational_error(subkind, msg: msg, details: details)
    when 'Parameter'
      api_error(subkind, msg: msg, details: details)
    when 'Validation'
      validation_error(msg, details: details)
    end
  end

  # Map FFI parse errors into Ruby exceptions.
  #
  # @param kind [String]
  # @param msg [String]
  # @param details [Hash<String, Object>]
  # @return [::Oso::Polar::ParseError] the object converted into the expected format.
  private_class_method def self.parse_error(kind, msg:, details:) # rubocop:disable Metrics/MethodLength
    case kind
    when 'ExtraToken'
      ::Oso::Polar::ParseError::ExtraToken.new(msg, details: details)
    when 'IntegerOverflow'
      ::Oso::Polar::ParseError::IntegerOverflow.new(msg, details: details)
    when 'InvalidToken'
      ::Oso::Polar::ParseError::InvalidToken.new(msg, details: details)
    when 'InvalidTokenCharacter'
      ::Oso::Polar::ParseError::InvalidTokenCharacter.new(msg, details: details)
    when 'UnrecognizedEOF'
      ::Oso::Polar::ParseError::UnrecognizedEOF.new(msg, details: details)
    when 'UnrecognizedToken'
      ::Oso::Polar::ParseError::UnrecognizedToken.new(msg, details: details)
    else
      ::Oso::Polar::ParseError.new(msg, details: details)
    end
  end

  # Map FFI runtime errors into Ruby exceptions.
  #
  # @param kind [String]
  # @param msg [String]
  # @param details [Hash<String, Object>]
  # @return [::Oso::Polar::PolarRuntimeError] the object converted into the expected format.
  private_class_method def self.runtime_error(kind, msg:, details:) # rubocop:disable Metrics/MethodLength
    case kind
    when 'Serialization'
      ::Oso::Polar::SerializationError.new(msg, details: details)
    when 'Unsupported'
      ::Oso::Polar::UnsupportedError.new(msg, details: details)
    when 'TypeError'
      ::Oso::Polar::PolarTypeError.new(msg, details: details)
    when 'StackOverflow'
      ::Oso::Polar::StackOverflowError.new(msg, details: details)
    when 'FileLoading'
      ::Oso::Polar::FileLoadingError.new(msg, details: details)
    else
      ::Oso::Polar::PolarRuntimeError.new(msg, details: details)
    end
  end

  # Map FFI operational errors into Ruby exceptions.
  #
  # @param kind [String]
  # @param msg [String]
  # @param details [Hash<String, Object>]
  # @return [::Oso::Polar::OperationalError] the object converted into the expected format.
  private_class_method def self.operational_error(kind, msg:, details:)
    case kind
    when 'Unknown' # Rust panics.
      ::Oso::Polar::UnknownError.new(msg, details: details)
    else
      ::Oso::Polar::OperationalError.new(msg, details: details)
    end
  end

  # Map FFI API errors into Ruby exceptions.
  #
  # @param kind [String]
  # @param msg [String]
  # @param details [Hash<String, Object>]
  # @return [::Oso::Polar::ApiError] the object converted into the expected format.
  private_class_method def self.api_error(kind, msg:, details:)
    case kind
    when 'Parameter'
      ::Oso::Polar::ParameterError.new(msg, details: details)
    else
      ::Oso::Polar::ApiError.new(msg, details: details)
    end
  end

  # Map FFI Validation errors into Ruby exceptions.
  #
  # @param msg [String]
  # @param details [Hash<String, Object>]
  # @return [::Oso::Polar::ValidationError] the object converted into the expected format.
  private_class_method def self.validation_error(msg, details:)
    # This is currently the only type of validation error.
    ::Oso::Polar::ValidationError.new(msg, details: details)
  end
end
end
end
end
|
#!/bin/sh -e
#
# Copyright (C) 2004, 2006-2013 Internet Systems Consortium, Inc. ("ISC")
# Copyright (C) 2000-2002 Internet Software Consortium.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# $Id: sign.sh,v 1.43 2011/11/04 05:36:28 each Exp $
SYSTEMTESTTOP=../..
. $SYSTEMTESTTOP/conf.sh
RANDFILE=../random.data
zone=secure.example.
infile=secure.example.db.in
zonefile=secure.example.db
cnameandkey=`$KEYGEN -T KEY -q -r $RANDFILE -a RSASHA1 -b 768 -n host cnameandkey.$zone`
dnameandkey=`$KEYGEN -T KEY -q -r $RANDFILE -a RSASHA1 -b 768 -n host dnameandkey.$zone`
keyname=`$KEYGEN -q -r $RANDFILE -a RSASHA1 -b 768 -n zone $zone`
cat $infile $cnameandkey.key $dnameandkey.key $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
zone=bogus.example.
infile=bogus.example.db.in
zonefile=bogus.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a RSAMD5 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
zone=dynamic.example.
infile=dynamic.example.db.in
zonefile=dynamic.example.db
keyname1=`$KEYGEN -q -r $RANDFILE -a RSAMD5 -b 768 -n zone $zone`
keyname2=`$KEYGEN -q -r $RANDFILE -a RSAMD5 -b 1024 -n zone -f KSK $zone`
cat $infile $keyname1.key $keyname2.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
zone=keyless.example.
infile=keyless.example.db.in
zonefile=keyless.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a RSAMD5 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
# Change the signer field of the a.b.keyless.example SIG A
# to point to a provably nonexistent KEY record.
mv $zonefile.signed $zonefile.tmp
<$zonefile.tmp perl -p -e 's/ keyless.example/ b.keyless.example/
if /^a.b.keyless.example/../NXT/;' >$zonefile.signed
rm -f $zonefile.tmp
#
# NSEC3/NSEC test zone
#
zone=secure.nsec3.example.
infile=secure.nsec3.example.db.in
zonefile=secure.nsec3.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a RSAMD5 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# NSEC3/NSEC3 test zone
#
zone=nsec3.nsec3.example.
infile=nsec3.nsec3.example.db.in
zonefile=nsec3.nsec3.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -3 - -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# OPTOUT/NSEC3 test zone
#
zone=optout.nsec3.example.
infile=optout.nsec3.example.db.in
zonefile=optout.nsec3.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -3 - -A -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# An NSEC3 signed zone (non-optout).  "-3 -" selects NSEC3 with an empty salt.
#
zone=nsec3.example.
infile=nsec3.example.db.in
zonefile=nsec3.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -g -3 - -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# OPTOUT/NSEC test zone (NSEC-signed child under the optout tree).
# NOTE(review): RSAMD5 is obsolete; presumably chosen deliberately to
# exercise an old algorithm -- confirm before changing.
#
zone=secure.optout.example.
infile=secure.optout.example.db.in
zonefile=secure.optout.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a RSAMD5 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# OPTOUT/NSEC3 test zone (NSEC3-signed, non-optout).
#
zone=nsec3.optout.example.
infile=nsec3.optout.example.db.in
zonefile=nsec3.optout.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -3 - -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# OPTOUT/OPTOUT test zone ("-A" enables NSEC3 opt-out).
#
zone=optout.optout.example.
infile=optout.optout.example.db.in
zonefile=optout.optout.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -3 - -A -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# An optout NSEC3 zone.
#
zone=optout.example.
infile=optout.example.db.in
zonefile=optout.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -g -3 - -A -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# An NSEC3 zone (non-optout) with unknown hash algorithm ("-U").
#
zone=nsec3-unknown.example.
infile=nsec3-unknown.example.db.in
zonefile=nsec3-unknown.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -3 - -U -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# An optout NSEC3 zone with unknown hash algorithm ("-U" plus "-A").
#
zone=optout-unknown.example.
infile=optout-unknown.example.db.in
zonefile=optout-unknown.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -3 - -U -A -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# A multiple-parameter NSEC3 zone: re-sign repeatedly, each time adding
# an NSEC3 chain with a different salt ("-u3 <salt>" updates the chain).
#
zone=multiple.example.
infile=multiple.example.db.in
zonefile=multiple.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a NSEC3RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
mv $zonefile.signed $zonefile
$SIGNER -P -u3 - -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
mv $zonefile.signed $zonefile
$SIGNER -P -u3 AAAA -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
mv $zonefile.signed $zonefile
$SIGNER -P -u3 BBBB -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
mv $zonefile.signed $zonefile
$SIGNER -P -u3 CCCC -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
mv $zonefile.signed $zonefile
$SIGNER -P -u3 DDDD -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# A RSASHA256 zone.
#
zone=rsasha256.example.
infile=rsasha256.example.db.in
zonefile=rsasha256.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a RSASHA256 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# A RSASHA512 zone.
#
zone=rsasha512.example.
infile=rsasha512.example.db.in
zonefile=rsasha512.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a RSASHA512 -b 1024 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# A zone with the DNSKEY set only signed by the KSK ("-x").
#
zone=kskonly.example.
infile=kskonly.example.db.in
zonefile=kskonly.example.db
kskname=`$KEYGEN -q -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -r $RANDFILE $zone`
cat $infile $kskname.key $zskname.key >$zonefile
$SIGNER -x -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# A zone with expired signatures (inception 1 day back, expiry 1 hour out).
#
zone=expired.example.
infile=expired.example.db.in
zonefile=expired.example.db
kskname=`$KEYGEN -q -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -r $RANDFILE $zone`
cat $infile $kskname.key $zskname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone -s -1d -e +1h $zonefile > /dev/null 2>&1
# Remove the keys so the expired signatures cannot be regenerated.
rm -f $kskname.* $zskname.*
#
# A NSEC3 signed zone that will have a DNSKEY added to it via UPDATE.
#
zone=update-nsec3.example.
infile=update-nsec3.example.db.in
zonefile=update-nsec3.example.db
kskname=`$KEYGEN -q -3 -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -3 -r $RANDFILE $zone`
cat $infile $kskname.key $zskname.key >$zonefile
$SIGNER -P -3 - -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# A NSEC signed zone that will have auto-dnssec enabled and
# extra keys not in the initial signed zone.
#
zone=auto-nsec.example.
infile=auto-nsec.example.db.in
zonefile=auto-nsec.example.db
# The first key pair is generated but deliberately NOT included in the
# signed zone (the variables are overwritten below) -- these are the
# "extra keys" referenced in the comment above.
kskname=`$KEYGEN -q -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -r $RANDFILE $zone`
kskname=`$KEYGEN -q -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -r $RANDFILE $zone`
cat $infile $kskname.key $zskname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# A NSEC3 signed zone that will have auto-dnssec enabled and
# extra keys not in the initial signed zone.
#
zone=auto-nsec3.example.
infile=auto-nsec3.example.db.in
zonefile=auto-nsec3.example.db
# As above: the first pair exists on disk only, the second pair signs.
kskname=`$KEYGEN -q -3 -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -3 -r $RANDFILE $zone`
kskname=`$KEYGEN -q -3 -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -3 -r $RANDFILE $zone`
cat $infile $kskname.key $zskname.key >$zonefile
$SIGNER -P -3 - -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# Secure below cname test zone.
#
zone=secure.below-cname.example.
infile=secure.below-cname.example.db.in
zonefile=secure.below-cname.example.db
keyname=`$KEYGEN -q -r $RANDFILE -a RSASHA1 -b 1024 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# Patched TTL test zone: sign, then rewrite every TTL to 3600 with awk
# (invalidating the signatures' original TTLs on purpose).
#
zone=ttlpatch.example.
infile=ttlpatch.example.db.in
zonefile=ttlpatch.example.db
signedfile=ttlpatch.example.db.signed
patchedfile=ttlpatch.example.db.patched
keyname=`$KEYGEN -q -r $RANDFILE -a RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
$SIGNER -P -r $RANDFILE -f $signedfile -o $zone $zonefile > /dev/null 2>&1
$CHECKZONE -D -s full $zone $signedfile 2> /dev/null | \
    awk '{$2 = "3600"; print}' > $patchedfile
#
# Separate DNSSEC records: "-D" writes the DNSSEC records to
# $signedfile, which the zone file pulls back in via $INCLUDE.
#
zone=split-dnssec.example.
infile=split-dnssec.example.db.in
zonefile=split-dnssec.example.db
signedfile=split-dnssec.example.db.signed
keyname=`$KEYGEN -q -r $RANDFILE -a RSASHA1 -b 768 -n zone $zone`
cat $infile $keyname.key >$zonefile
echo '$INCLUDE "'"$signedfile"'"' >> $zonefile
: > $signedfile
$SIGNER -P -r $RANDFILE -D -o $zone $zonefile > /dev/null 2>&1
#
# Separate DNSSEC records with smart signing ("-S" pulls keys from the
# key repository rather than the zone file).
#
zone=split-smart.example.
infile=split-smart.example.db.in
zonefile=split-smart.example.db
signedfile=split-smart.example.db.signed
keyname=`$KEYGEN -q -r $RANDFILE -a RSASHA1 -b 768 -n zone $zone`
cp $infile $zonefile
echo '$INCLUDE "'"$signedfile"'"' >> $zonefile
: > $signedfile
$SIGNER -P -S -r $RANDFILE -D -o $zone $zonefile > /dev/null 2>&1
#
# Zone with signatures about to expire, but no private key to replace them
#
# BUGFIX(review): in this and the next three sections kskname/zskname were
# swapped -- the plain (ZSK) key was stored in kskname and the "-f KSK" key
# in zskname.  The commands executed are unchanged; only the variable names
# now match the keys they hold, consistent with the kskonly/expired
# sections above.
zone="expiring.example."
infile="expiring.example.db.in"
zonefile="expiring.example.db"
zskname=`$KEYGEN -q -r $RANDFILE $zone`
kskname=`$KEYGEN -q -r $RANDFILE -f KSK $zone`
cp $infile $zonefile
$SIGNER -S -r $RANDFILE -e now+1mi -o $zone $zonefile > /dev/null 2>&1
# Hide both private keys so the server cannot re-sign.
mv -f ${zskname}.private ${zskname}.private.moved
mv -f ${kskname}.private ${kskname}.private.moved
#
# A zone where the signer's name has been forced to uppercase.
#
zone="upper.example."
infile="upper.example.db.in"
zonefile="upper.example.db"
lower="upper.example.db.lower"
signedfile="upper.example.db.signed"
zskname=`$KEYGEN -q -r $RANDFILE $zone`
kskname=`$KEYGEN -q -r $RANDFILE -f KSK $zone`
cp $infile $zonefile
$SIGNER -P -S -r $RANDFILE -o $zone -f $lower $zonefile > /dev/null 2>&1
# Force the RRSIG signer name to uppercase in the served copy.
$CHECKZONE -D upper.example $lower 2>&- | \
    sed '/RRSIG/s/ upper.example. / UPPER.EXAMPLE. /' > $signedfile
#
# Check that the signer's name is in lower case when zone name is in
# upper case.
#
zone="LOWER.EXAMPLE."
infile="lower.example.db.in"
zonefile="lower.example.db"
signedfile="lower.example.db.signed"
zskname=`$KEYGEN -q -r $RANDFILE $zone`
kskname=`$KEYGEN -q -r $RANDFILE -f KSK $zone`
cp $infile $zonefile
$SIGNER -P -S -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# Zone with signatures about to expire, and dynamic, but configured
# not to resign with 'auto-resign no;'
#
zone="nosign.example."
infile="nosign.example.db.in"
zonefile="nosign.example.db"
signedfile="nosign.example.db.signed"
zskname=`$KEYGEN -q -r $RANDFILE $zone`
kskname=`$KEYGEN -q -r $RANDFILE -f KSK $zone`
cp $infile $zonefile
$SIGNER -S -r $RANDFILE -e now+1mi -o $zone $zonefile > /dev/null 2>&1
# preserve a normalized copy of the NS RRSIG for comparison later
$CHECKZONE -D nosign.example nosign.example.db.signed 2>&- | \
    awk '$4 == "RRSIG" && $5 == "NS" {$2 = ""; print}' | \
    sed 's/[ ][ ]*/ /g'> ../nosign.before
#
# An inline signing zone: keys are generated here, but the zone is
# signed by named itself at runtime, so there is no dnssec-signzone run.
#
zone=inline.example.
kskname=`$KEYGEN -q -3 -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -3 -r $RANDFILE $zone`
#
# publish a new key while deactivating another key at the same time.
#
zone=publish-inactive.example
infile=publish-inactive.example.db.in
zonefile=publish-inactive.example.db
now=`date -u +%Y%m%d%H%M%S`
# Three KSKs: one active now; one published in 90s and activated in 1h
# ("-P"/"-A"); and one that becomes inactive in 90s ("-I").  Only the
# last assignment of kskname is used below (the keys stay on disk).
kskname=`$KEYGEN -q -r $RANDFILE -f KSK $zone`
kskname=`$KEYGEN -P $now+90s -A $now+3600s -q -r $RANDFILE -f KSK $zone`
kskname=`$KEYGEN -I $now+90s -q -r $RANDFILE -f KSK $zone`
zskname=`$KEYGEN -q -r $RANDFILE $zone`
cp $infile $zonefile
$SIGNER -S -r $RANDFILE -o $zone $zonefile > /dev/null 2>&1
#
# A zone which will change its sig-validity-interval
#
zone=siginterval.example
infile=siginterval.example.db.in
zonefile=siginterval.example.db
kskname=`$KEYGEN -q -3 -r $RANDFILE -fk $zone`
zskname=`$KEYGEN -q -3 -r $RANDFILE $zone`
cp $infile $zonefile
|
#!/bin/bash
# Remember! Set your global variables in the stdst8-variables.sh file
stdst8.update_terragrunt() {
  # Install Terragrunt if absent; otherwise upgrade it when the installed
  # version does not match the pinned ${TG_VERSION}.
  #
  # Fixes vs. original: `which terragrunt | wc -l` was fragile (aliases,
  # multiple PATH hits) -- use `command -v`; the version string was stored
  # in a global V -- made local; `rm` without -f aborted noisily if the
  # symlink was already gone.
  if command -v terragrunt > /dev/null 2>&1; then
    echo "${ST8_PREFIX}Terragrunt installed, checking for update"
    local installed
    installed=$(terragrunt --version | head -n1)
    if [[ ${installed} == *"${TG_VERSION}"* ]]; then
      echo "${ST8_PREFIX}Current Terragrunt version (${TG_VERSION}) is the latest"
    else
      echo "${ST8_PREFIX}Updating Terragrunt to version ${TG_VERSION}"
      rm -f "${LOCAL_BIN}/terragrunt"
      stdst8.install_terragrunt
    fi
  else
    stdst8.install_terragrunt
  fi
  echo
}
stdst8.install_terragrunt() {
  # Download the pinned Terragrunt release into ${LOCAL_BIN}, install it
  # under a versioned name, and point the `terragrunt` symlink at it.
  echo "Installing Terragrunt ${TG_VERSION}"
  # BUGFIX: a failed download previously fell through to mv/chmod/ln,
  # installing a broken binary.
  wget -O "${LOCAL_BIN}/${TG_BIN_NAME}" "${TG_DOWNLOAD}" || return 1
  mv "${LOCAL_BIN}/${TG_BIN_NAME}" "${LOCAL_BIN}/terragrunt-${TG_VERSION}"
  chmod +x "${LOCAL_BIN}/terragrunt-${TG_VERSION}"
  # -sf: replace any stale symlink left behind by a previous install.
  ln -sf "${LOCAL_BIN}/terragrunt-${TG_VERSION}" "${LOCAL_BIN}/terragrunt"
  terragrunt --version
}
|
#!/bin/sh
# Compile every application's src/ directory into its ebin/.
# Apps are built in dependency order.
erlc -o ./tcp_interface/ebin ./tcp_interface/src/*.erl
erlc -o ./gen_web_server/ebin ./gen_web_server/src/*.erl
# http_interface builds against gen_web_server behaviours, so its compiled
# beams must already be on the code path (-pa).
erlc -pa ./gen_web_server/ebin -o ./http_interface/ebin ./http_interface/src/*.erl
erlc -o ./simple_cache/ebin ./simple_cache/src/*.erl
erlc -o ./resource_discovery/ebin ./resource_discovery/src/*.erl
|
'use strict';

// Package entry point: re-export the two public helpers.  Consumers see
// the same object shape as before ({ getSriHash, getParam }).
exports.getSriHash = require('./getSriHash');
exports.getParam = require('./getParam');
|
<?php
/**
 * Generate a random password.
 *
 * Replaces substr(str_shuffle(...)): str_shuffle() is not
 * cryptographically secure, and taking a prefix of a shuffle can never
 * repeat a character, which reduces entropy.  random_int() is a CSPRNG
 * and allows repeats.
 *
 * @param int $length Number of characters (default 10, matching the
 *                    original behavior).
 * @return string Password drawn from the same character set as before.
 */
function generatePassword($length = 10) {
    $chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*_-=+";
    $max = strlen($chars) - 1;
    $pwd = "";
    for ($i = 0; $i < $length; $i++) {
        $pwd .= $chars[random_int(0, $max)];
    }
    return $pwd;
}
?>
<filename>include/cat/heap.h
/*
* cat/heap.h -- Array-based heap implementation
*
* by <NAME>
*
* Copyright 2003-2017 -- See accompanying license
*
*/
#ifndef __cat_heap_h
#define __cat_heap_h
#include <cat/cat.h>
#include <cat/mem.h>
#if defined(CAT_USE_INLINE) && CAT_USE_INLINE
#define DECL static inline
#define PTRDECL static
#define LOCAL static inline
#define CAT_HEAP_DO_DECL 1
#else /* CAT_USE_INLINE */
#define DECL
#define PTRDECL
#define LOCAL static
#endif /* CAT_USE_INLINE */
/* Heap data structure: array-based binary heap ordered by 'cmp'
 * (the element for which cmp reports "smallest" sits at index 0). */
struct heap {
	int size;		/* current maximum number of elements */
	int fill;		/* Number of elements populated */
	void ** elem;		/* Pointer to array of elem pointers */
	cmp_f cmp;		/* Comparison function for the heap */
	struct memmgr * mm;	/* Memory manager for dynamic resize */
};

/*
 * Initialize a heap 'hp' so 'elem' points to an initial array of
 * 'size' element pointers.  'fill' points to the number of elements
 * that 'elem' currently has populated.  'cmp' is the comparison function.
 * If 'mm' is non-null, then it will be used to resize 'elem' as the heap
 * grows.  Otherwise the heap is treated as having a fixed maximum size.
 * The initialization process "sorts" the initial elements in the heap.
 */
DECL void hp_init(struct heap *hp, void **elem, int size, int fill,
		  cmp_f cmp, struct memmgr *mm);

/*
 * Add 'elem' into 'hp'.  Returns 0 on success or -1 if the heap is
 * full and can't be expanded.  On success, if 'pos' is non-NULL,
 * then on return *pos will return the position of 'elem' hp->elem.
 */
DECL int hp_add(struct heap *hp, void *elem, int *pos);

/* Return the index in 'hp->elem' of 'data' (-1 if not present). */
DECL int hp_find(struct heap *hp, void *data);

/*
 * Extract (remove) the element at the top of the heap and return it.
 * Return NULL if the heap is empty.
 */
DECL void * hp_extract(struct heap *hp);

/* Remove the element at position 'elem' (an index) from the heap. */
DECL void * hp_rem(struct heap *hp, int elem);

/* ----- Implementation ----- */
#if defined(CAT_HEAP_DO_DECL) && CAT_HEAP_DO_DECL
/*
 * Sift the element at index 'pos' up toward the root until its parent
 * no longer compares greater under hp->cmp.
 *
 * Returns the element's final index -- except that it returns 0 when
 * 'pos' is out of range (pos >= fill).  hp_rem() relies on comparing
 * the return value against the original index, so keep that contract.
 */
LOCAL int reheapup(struct heap *hp, int pos)
{
	int ppos = (pos-1) >> 1;	/* parent index */
	void *hold;

	abort_unless(hp);
	if ( pos >= hp->fill )
		return 0;
	/* Swap with the parent while the parent compares greater. */
	while ( (pos > 0) && (hp->cmp(hp->elem[ppos], hp->elem[pos]) > 0) ) {
		hold = hp->elem[pos];
		hp->elem[pos] = hp->elem[ppos];
		hp->elem[ppos] = hold;
		pos = ppos;
		ppos = (pos-1) >> 1;
	}
	return pos;
}
/*
 * Sift the element at index 'pos' down toward the leaves until it
 * compares <= both of its children, restoring the heap property for
 * the subtree rooted at 'pos'.  No-op if 'pos' is out of range.
 */
LOCAL void reheapdown(struct heap *hp, int pos)
{
	int didswap;
	int cld;
	void *hold;

	abort_unless(hp);
	if ( pos >= hp->fill )
		return;
	do {
		didswap = 0;
		/* Left child; stop when 'pos' is a leaf. */
		if ( (cld = (pos << 1) + 1) >= hp->fill )
			break;
		/* Prefer the child that compares smaller. */
		if ( ( cld + 1 < hp->fill ) &&
		     ( hp->cmp(hp->elem[cld], hp->elem[cld+1]) > 0 ) )
			cld += 1;
		if ( hp->cmp(hp->elem[pos], hp->elem[cld]) > 0 ) {
			didswap = 1;
			hold = hp->elem[cld];
			hp->elem[cld] = hp->elem[pos];
			hp->elem[pos] = hold;
			pos = cld;
		}
	} while (didswap);
}
/*
 * See the declaration above for the contract.  'fill' is clamped to
 * 'size'.  The bottom-up heapify loop starts at fill >> 1, one index
 * past the classic last-parent position (fill/2 - 1); the extra
 * iteration is harmless because reheapdown on a leaf is a no-op.
 */
DECL void hp_init(struct heap *hp, void **elem, int size, int fill,
		  cmp_f cmp, struct memmgr *mm)
{
	int i;

	abort_unless(hp);
	abort_unless(size >= 0);
	abort_unless(fill >= 0);
	abort_unless(cmp);
	hp->size = size;
	hp->elem = elem;
	hp->cmp = cmp;
	hp->mm = mm;
	if ( (hp->fill = fill) ) {
		if ( fill > size )	/* sanity check */
			hp->fill = size;
		for ( i = hp->fill >> 1 ; i >= 0 ; i-- )
			reheapdown(hp, i);
	}
}
/*
 * Append 'elem' and sift it up.  When the array is full and a memory
 * manager is present, the capacity doubles (starting from 32).
 *
 * NOTE(review): the growth check 'n < hp->size' is meant to catch the
 * wrap of 'hp->size << 1', but signed overflow is undefined behavior
 * in C, and 'n * sizeof(void *)' may still overflow on 32-bit targets;
 * the author's own XXX below flags this -- worth hardening.
 */
DECL int hp_add(struct heap *hp, void *elem, int *pos)
{
	void * p;
	int n;

	abort_unless(hp);
	if ( hp->fill == hp->size ) {
		if ( ! hp->mm )
			return -1;	/* fixed-size heap is full */
		if ( ! hp->size )
			n = 32;
		else
			n = hp->size << 1;
		if ( n < hp->size ) /* XXX check for overflow */
			return -1;
		p = mem_resize(hp->mm, hp->elem, n * sizeof(void *));
		if ( p == NULL )
			return -1;
		hp->elem = p;
		hp->size = n;
	}
	/* Place at the end; reheapup returns the element's final index. */
	hp->elem[n = hp->fill++] = elem;
	n = reheapup(hp, n);
	if ( pos )
		*pos = n;
	return 0;
}
/*
 * Linear scan for the first element that compares equal to 'data'
 * under the heap's comparison function.  Returns its index in
 * hp->elem, or -1 when no element matches.
 */
DECL int hp_find(struct heap *hp, void *data)
{
	int idx = 0;

	abort_unless(hp);
	while ( idx < hp->fill ) {
		if ( hp->cmp(hp->elem[idx], data) == 0 )
			return idx;
		++idx;
	}
	return -1;
}
/*
 * Remove and return the top element (smallest under 'cmp'), or NULL if
 * the heap is empty.  The last element is swapped into the root and
 * sifted down; the extracted element is parked at elem[fill] (just
 * past the live region) before being returned.
 */
DECL void * hp_extract(struct heap *hp)
{
	void *hold;
	int last;

	abort_unless(hp);
	last = hp->fill - 1;	/* may be -1; guarded by the check below */
	if ( hp->fill == 0 )
		return NULL;
	hold = hp->elem[0];
	hp->elem[0] = hp->elem[last];
	hp->elem[last] = hold;
	hp->fill -= 1;
	reheapdown(hp, 0);
	return hold;
}
/*
 * Remove the element at index 'elem' (an index, despite the name) and
 * return it, or NULL when the index is past the live region.
 *
 * The last element is swapped into the vacated slot and re-sifted:
 * first up; if reheapup did not move it (returned the same index) it
 * is sifted down instead.  reheapup returns 0 for an out-of-range
 * position, so removing the last element still behaves correctly.
 * Both sift passes only touch indices below the new fill, so
 * elem[last] still holds the removed element when it is returned.
 */
DECL void * hp_rem(struct heap *hp, int elem)
{
	void *hold;
	int last = hp->fill-1;

	abort_unless(hp);
	abort_unless(elem >= 0);
	if ( elem >= hp->fill )
		return NULL;
	hold = hp->elem[elem];
	hp->elem[elem] = hp->elem[last];
	hp->elem[last] = hold;
	hp->fill = last;
	if ( reheapup(hp, elem) == elem )
		reheapdown(hp, elem);
	return hp->elem[last];
}
#endif /* if defined(CAT_HEAP_DO_DECL) && CAT_HEAP_DO_DECL */
#undef PTRDECL
#undef DECL
#undef LOCAL
#endif /* __cat_heap_h */
|
#!/bin/bash
# Install buildx
# $ export DOCKER_BUILDKIT=1
# $ docker build --platform=local -o . git://github.com/docker/buildx
# $ mkdir -p ~/.docker/cli-plugins
# $ mv buildx ~/.docker/cli-plugins/docker-buildx
# Execute each time before building with buildx
# $ export DOCKER_BUILDKIT=1
# $ docker run --rm --privileged docker/binfmt:a7996909642ee92942dcd6cff44b9b95f08dad64
# $ cat /proc/sys/fs/binfmt_misc/qemu-aarch64
# $ chmod +x buildx-tags.sh
# $ ./buildx-tags.sh
set -e
echo "Start BUILDX"

# Build and push one image.
#   $1: platform list   $2: Dockerfile   $3: tag (without the repo prefix)
# Refactor of the original flat command list: same images, same tags,
# same order -- the 25 near-identical invocations are now data.
buildpush () {
  docker buildx build --platform "$1" -f "$2" -t "devdotnetorg/$3" . --push
}

#WS.Sensors -- Alpine 3.13
for tag in v1-aarch64 v1-alpine-aarch64 aarch64 alpine-aarch64; do
  buildpush linux/arm64 Dockerfile.WS.Sensors.alpine "dotnet-ws-sensors:${tag}"
done
for tag in v1-amd64 v1-alpine-amd64 amd64 alpine-amd64; do
  buildpush linux/amd64 Dockerfile.WS.Sensors.alpine "dotnet-ws-sensors:${tag}"
done
# multi-arch manifests
for tag in v1 v1-alpine latest; do
  buildpush linux/arm64,linux/amd64 Dockerfile.WS.Sensors.alpine "dotnet-ws-sensors:${tag}"
done

#WS.Panel -- Avalonia with Xfce4, Alpine 3.13
# "avaloniax11" is kept for compatibility with older consumers.
for tag in avaloniax11 avalonia-xfce4-alpine-aarch64 avalonia-xfce4-aarch64 aarch64; do
  buildpush linux/arm64 Dockerfile.WS.Panel.AvaloniaX11WithXfce4.alpine "dotnet-ws-panel:${tag}"
done
for tag in avalonia-xfce4-alpine-amd64 avalonia-xfce4-amd64 amd64; do
  buildpush linux/amd64 Dockerfile.WS.Panel.AvaloniaX11WithXfce4.alpine "dotnet-ws-panel:${tag}"
done
for tag in avalonia-xfce4 latest; do
  buildpush linux/arm64,linux/amd64 Dockerfile.WS.Panel.AvaloniaX11WithXfce4.alpine "dotnet-ws-panel:${tag}"
done

#WS.Panel -- Avalonia, Xorg only, Alpine 3.13
for tag in avalonia-xorg-alpine-aarch64 avalonia-xorg-aarch64; do
  buildpush linux/arm64 Dockerfile.WS.Panel.AvaloniaX11.alpine "dotnet-ws-panel:${tag}"
done
for tag in avalonia-xorg-alpine-amd64 avalonia-xorg-amd64; do
  buildpush linux/amd64 Dockerfile.WS.Panel.AvaloniaX11.alpine "dotnet-ws-panel:${tag}"
done
buildpush linux/arm64,linux/amd64 Dockerfile.WS.Panel.AvaloniaX11.alpine "dotnet-ws-panel:avalonia-xorg"

echo "BUILDX END"
|
public class DocumentManager
{
    // Every document opened through this manager, one per game object.
    private List<ObjectViewModel> documents = new List<ObjectViewModel>();

    /// <summary>
    /// Opens the document for <paramref name="gameObject"/>, creating and
    /// registering a new one when none exists yet.
    /// </summary>
    public void CreateDocument(GameObject gameObject, DocumentShell shell)
    {
        // Reuse the existing lookup instead of duplicating the scan.
        var existing = RetrieveDocument(gameObject);
        if (existing == null)
        {
            var document = new ObjectViewModel();
            document.SetGameObject(gameObject);
            // BUGFIX: the new document was never added to `documents`,
            // so RetrieveDocument could never find it and every call
            // created (and opened) a fresh document.
            documents.Add(document);
            shell.OpenDocument(document);
        }
    }

    /// <summary>
    /// Returns the document associated with <paramref name="gameObject"/>,
    /// or null when none has been created.
    /// </summary>
    public ObjectViewModel RetrieveDocument(GameObject gameObject)
    {
        foreach (var document in documents)
        {
            if (document.AssociatedGameObject == gameObject)
            {
                return document;
            }
        }
        return null;
    }
}
<gh_stars>0
/*global logger*/
/*
Remember search
========================
@file : RememberSearch.js
@version : 1.2.0
@author : <NAME>
@date : Mon, 12 Jun 2017 13:00:00 GMT
@copyright : Mendix
@license : Apache 2.0
Documentation
========================
Remembers the user's last search.
Modified by
========================
<NAME>
*/
define([
    "dojo/_base/declare",
    "mxui/widget/_WidgetBase",
    "mxui/dom",
    "dojo/dom-style",
    "dojo/dom-attr",
    "dojo/dom-construct",
    "dojo/_base/lang",
    "dijit/layout/LinkPane",
    "RememberSearch/lib/jquery-1.12.4"
], function (declare, _WidgetBase, dom, domStyle, domAttr, domConstruct, lang, LinkPane, _jQuery) {
    "use strict";

    return declare("RememberSearch.widget.RememberSearch", [_WidgetBase], {

        // Subscription handle for the context object; replaced on every
        // update so we never keep listening to a stale object.
        _objectChangeHandler: null,

        startup: function () {
            logger.debug(this.id + ".startup");
            this._execute();
        },

        postCreate: function () {
            logger.debug(this.id + ".postCreate");
        },

        // Called by the Mendix client when the context object changes.
        // refreshOnContextChange / refreshOnContextUpdate /
        // searchAutomatically are widget properties configured in the
        // modeler (not visible in this file).
        update: function (obj, callback) {
            logger.debug(this.id + ".update");
            if (this.refreshOnContextChange) {
                this._execute();
                if (this.refreshOnContextUpdate) {
                    if (this._objectChangeHandler !== null) {
                        this.unsubscribe(this._objectChangeHandler);
                    }
                    if (obj) {
                        this._objectChangeHandler = this.subscribe({
                            guid: obj.getGuid(),
                            callback: lang.hitch(this, function () {
                                this._execute();
                            })
                        });
                    }
                }
            }
            this._executeCallback(callback, "update");
        },

        // Core logic: restore remembered search values into every grid
        // search bar on the page and wire the search/enter/reset controls
        // to persist or clear them.
        _execute: function () {
            var getSearchBar = function (elem) {
                return elem.parents('.mx-grid-searchbar');
            };
            // One cookie per grid, keyed by the grid's mxid attribute.
            var getCookieName = function (searchBar) {
                return 'searchValues_' + searchBar.parent().attr('mxid');
            };
            // Guards against fillForm() re-saving the values it has just
            // restored when it clicks the search button itself.
            var preventSetCookie = false;
            var searchAutomatically = this.searchAutomatically;
            var fillForm = function () {
                preventSetCookie = true;
                // check if cookie exists, and if so, execute search
                _jQuery('.mx-grid-searchbar').each(function () {
                    var searchBar = _jQuery(this);
                    var cookieName = getCookieName(searchBar);
                    // Extracts this cookie's value from document.cookie;
                    // the trailing |^.*$ alternative yields "" when absent.
                    var regexp = '(?:(?:^|.*;\\s*)' + cookieName + '\\s*\\=\\s*([^;]*).*$)|^.*$';
                    var cookieValue = document.cookie.replace(new RegExp(regexp), "$1");
                    if (!cookieValue)
                        return;
                    var values = JSON.parse(decodeURIComponent(cookieValue));
                    if (!values)
                        return;
                    // Values were saved in DOM order; restore in the same
                    // order, skipping fields the user already filled in.
                    searchBar.find('input, textarea').each(function () {
                        if (_jQuery(this).val())
                            return;
                        _jQuery(this).val(values.shift());
                    });
                    searchBar.find('select').each(function () {
                        if (_jQuery(this).val())
                            return;
                        _jQuery(this).val(_jQuery(this).find('option:contains(\'' + values.shift() + '\')').attr('value'));
                    });
                    searchBar.show();
                    if (searchAutomatically) {
                        searchBar.find('.mx-grid-search-button').click();
                    }
                });
                preventSetCookie = false;
            };
            // Persist the search bar's current values as a cookie.
            // NOTE(review): no path/expires attributes are set, so this is
            // a session cookie scoped to the current path -- confirm that
            // is intended.
            var saveSearch = function (elem) {
                if (preventSetCookie)
                    return;
                var searchBar = getSearchBar(elem);
                var cookieName = getCookieName(searchBar);
                var values = [];
                searchBar.find('input, textarea').each(function () {
                    values.push(_jQuery(this).val());
                });
                searchBar.find('select').each(function () {
                    values.push(_jQuery(this).find('option:selected').text());
                });
                document.cookie = cookieName + '=' + encodeURIComponent(JSON.stringify(values));
            };
            _jQuery(function () {
                // Search button sets a cookie
                _jQuery('.mx-grid-search-button').click(function () {
                    saveSearch(_jQuery(this));
                });
                // Pressing enter sets the cookie, too (13 == Enter key)
                _jQuery('.mx-grid-search-inputs .form-control').keypress(function (event) {
                    if (event.which == 13)
                        saveSearch(_jQuery(this));
                });
                // Reset button clears the cookie
                _jQuery('.mx-grid-reset-button').click(function () {
                    var searchBar = getSearchBar(_jQuery(this));
                    var cookieName = getCookieName(searchBar);
                    document.cookie = cookieName + '=null';
                });
            });
            // Defer the restore one tick so the grid DOM exists.
            setTimeout(fillForm, 1);
        },

        _executeCallback: function (cb, from) {
            logger.debug(this.id + "._executeCallback" + (from ? " from " + from : ""));
            if (cb && typeof cb === "function") {
                cb();
            }
        }
    });
});

require(["RememberSearch/widget/RememberSearch"]);
|
/**
* Copyright(c) 2004-2018 bianfeng
*/
package com.shareyi.molicode.controller.loginfree;
import com.shareyi.molicode.common.web.CommonResult;
import com.shareyi.molicode.service.sys.AcUserService;
import com.shareyi.molicode.vo.user.LoginUserVo;
import com.shareyi.molicode.web.base.BaseController;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.annotation.Resource;
import javax.servlet.http.HttpServletResponse;
import java.util.Map;
@Controller
@RequestMapping("/loginfree")
public class LoginController extends BaseController {

    @Resource(name = "acUserService")
    private AcUserService acUserService;

    /**
     * Login entry point.
     *
     * @param loginUserVo credentials submitted by the client
     * @param response    passed through to the service layer (presumably
     *                    to set a session/token cookie -- confirm in
     *                    AcUserService)
     * @return the result map produced by {@code CommonResult#getReturnMap()}
     */
    @RequestMapping(value = "/login", method = RequestMethod.POST)
    @ResponseBody
    public Map login(LoginUserVo loginUserVo, HttpServletResponse response) {
        CommonResult result = acUserService.login(loginUserVo, response);
        return result.getReturnMap();
    }

    /**
     * Register a new user.
     *
     * @param loginUserVo registration data
     * @return the result map produced by {@code CommonResult#getReturnMap()}
     */
    @RequestMapping(value = "/register", method = RequestMethod.POST)
    @ResponseBody
    public Map register(LoginUserVo loginUserVo) {
        CommonResult result = acUserService.register(loginUserVo);
        return result.getReturnMap();
    }
}
|
import re


def parse_revision_script(revision_script: str) -> dict:
    """Extract Alembic revision metadata from a migration script's header.

    Scans ``revision_script`` for the ``Revision ID:``, ``Revises:`` and
    ``Create Date:`` header lines (as emitted by Alembic's script
    template) and collects whichever are present.

    Args:
        revision_script: Full text of an Alembic revision script.

    Returns:
        Dict containing any of the keys ``revision_id``, ``revises`` and
        ``create_date``; a key is omitted when its header line is absent.
    """
    # Field name -> regex.  ``.`` does not match newlines, so ``(.+)``
    # captures the remainder of the header's own line, exactly as the
    # original three hand-written searches did.
    header_patterns = {
        "revision_id": r"Revision ID: (.+)",
        "revises": r"Revises: (.+)",
        "create_date": r"Create Date: (.+)",
    }
    result = {}
    for key, pattern in header_patterns.items():
        match = re.search(pattern, revision_script)
        if match:
            result[key] = match.group(1)
    return result
<gh_stars>0
package com.sankuai.inf.leaf.segment.dao;
import com.sankuai.inf.leaf.segment.model.LeafWorkerIdAlloc;
import org.apache.ibatis.annotations.*;
/**
 * MyBatis mapper for the {@code leaf_workerid_alloc} table, which stores
 * worker-id allocation records keyed by the worker's ip:port.
 *
 * @author jiangyx3915
 */
public interface WorkerIdAllocMapper {
    /**
     * Insert a new allocation record.
     *
     * @param leafWorkerIdAlloc the LeafWorkerIdAlloc to persist
     * @return number of rows inserted; the generated primary key is
     *         written back into {@code leafWorkerIdAlloc.id} via
     *         {@code useGeneratedKeys}
     */
    @Insert({
            "insert into leaf_workerid_alloc (ip_port," +
                    "ip,port,max_timestamp,gmt_create,gmt_modified) ",
            "values (#{leafWorkId.ipPort} , #{leafWorkId.ip} , ",
            "#{leafWorkId.port} , #{leafWorkId.maxTimestamp} , ",
            "#{leafWorkId.gmtCreate} , #{leafWorkId.gmtModified} )"
    })
    @Options(useGeneratedKeys = true, keyProperty = "leafWorkId.id", keyColumn = "id")
    int insertIfNotExist(@Param("leafWorkId") LeafWorkerIdAlloc leafWorkerIdAlloc);

    /**
     * Look up a record by its ip:port key.
     *
     * @param ipPort "ip-port" key of the worker
     * @return the matching LeafWorkerIdAlloc, or null when absent
     */
    @Select("select `id`,`port`,`ip`,`ip_port`,`max_timestamp` " +
            "from leaf_workerid_alloc where ip_port=#{ipPort};")
    @Results({
            @Result(column = "ip_port", property = "ipPort"),
            @Result(column = "max_timestamp", property = "maxTimestamp")
    })
    LeafWorkerIdAlloc getByIpPort(@Param("ipPort") String ipPort);

    /**
     * Update a record's latest timestamp.
     *
     * @param workerIdAlloc LeafWorkerIdAlloc carrying the id and the new
     *                      {@code maxTimestamp}
     */
    @Update("update leaf_workerid_alloc " +
            "set max_timestamp = #{workerIdAlloc.maxTimestamp} " +
            "where id = #{workerIdAlloc.id} ;")
    void updateMaxTimestamp(@Param("workerIdAlloc") LeafWorkerIdAlloc workerIdAlloc);
}
|
<filename>opensoap/contrib/java/SocketService/OpenSoapConstants.java<gh_stars>100-1000
//----------------------------------------------------------------------------//
// MODEL : OpenSOAP
// GROUP : Use SAX Server Side Socket Service
// MODULE : OpenSoapConstants.java
// ABSTRACT : OpenSoap Constant Value Class
// DATE : 2002.02.20
// DESIGNED : Sunbit System k.Kuwa
//----------------------------------------------------------------------------//
// UpDate
// No. Registration Of Alteration Date User
//----------------------------------------------------------------------------//
//..+....1....+....2....+....3....+....4....+....5....+....6....+....7....+....8
// Namespace URIs and SAX parser settings shared by the socket-service samples.
public class OpenSoapConstants {
    /** Namespace of the sample Calc service. */
    public static final String CALC_URI = "http://services.opensoap.jp/samples/Calc/";
    /** SOAP 1.1 envelope namespace. */
    public static final String SOAPENV_URI = "http://schemas.xmlsoap.org/soap/envelope/";
    /** OpenSOAP extension header namespace. */
    public static final String SOAP_HEADER_URI = "http://header.opensoap.jp/1.0/";
    /** SOAP 1.1 encoding namespace. */
    public static final String SOAPENCODING_URI = "http://schemas.xmlsoap.org/soap/encoding/";
    /** SAX driver class name (Apache Xerces). */
    public static final String SAXPARSER = "org.apache.xerces.parsers.SAXParser";
    /** SAX feature URI enabling namespace processing. */
    public static final String SAXNAMESPACES = "http://xml.org/sax/features/namespaces";
    /** 1999 XML Schema instance namespace (pre-2001 draft, kept for interop). */
    public static final String XSI1999SCHEMA = "http://www.w3.org/1999/XMLSchema-instance";
    /** 1999 XML Schema datatypes namespace. */
    public static final String XSD1999SCHEMA = "http://www.w3.org/1999/XMLSchema";
}
import { launch as launchBrowser, Browser, Page } from 'puppeteer'
/**
 * Opens `url` in a headless browser and collects the href of every
 * album link (`.photo-list-album-interaction > a`).
 *
 * Fixes vs. original: `.catch(Promise.reject)` was a no-op re-rejection,
 * and any failure after launch (newPage/goto/evaluate) leaked the
 * browser process.  The browser is now always closed via try/finally.
 *
 * @param url   page to scrape
 * @param debug kept for interface compatibility (unused here)
 * @returns     list of album URLs found on the page
 */
export default async function getAlbumList(url: string, debug: boolean = false): Promise<string[]> {
  const browser: Browser = await launchBrowser()
  try {
    const page: Page = await browser.newPage()
    await page.goto(url)
    return await page.evaluate(() =>
      (Array.from(
        document.querySelectorAll('.photo-list-album-interaction > a')
      ) as HTMLLinkElement[]).map(e => e.href)
    )
  } finally {
    await browser.close()
  }
}
|
/*************Binary Search Tree Visualization using D3JS *************/
// Length (ms) of every animation step.
var duration = 400;
// Fixed horizontal separation between neighboring nodes.
var tree = d3.tree().separation(function () { return 40; });
var svg = d3.select('svg'),
    g = svg.append('g').attr('transform', 'translate(40,40)');
// Links live in their own group so nodes always render on top of them.
var gLinks = g.append('g'),
    gNodes = g.append('g');
svg.attr('width', '1280')
    .attr('height', '720');
// Last rendered [x, y] per node id; used as animation start positions.
var oldPos = {};
// Re-layout and animate the whole tree from the global `data` model.
// Relies on globals defined elsewhere: data (BST model), changeRoot
// (set by rotations -- TODO confirm), oldPos, duration, tree, gLinks,
// gNodes, svg.
var updateTree = function () {
    var root = d3.hierarchy(data);
    // Grow the layout area with the node count and tree height.
    var newTreeSize = [root.descendants().length * 40, ((root.height + 1) * 2 - 1) * 30];
    if (tree.size()[0] !== newTreeSize[0] || tree.size()[1] !== newTreeSize[1]) {
        tree.size(newTreeSize);
        // NOTE(review): `center` and `pos` are assigned without var --
        // implicit globals; presumably consumed elsewhere in the page.
        center = pos = svg.attr('width')/2 - tree.size()[0]/2;
    }
    tree(root);
    // Nodes whose payload is null are placeholders and are not drawn.
    var nodes = root.descendants().filter(function(d) {
        return d.data.data === null ? false : true;
    });
    var link = gLinks.selectAll('path')
        .data(nodes, function(d) { return d.data.id; });
    link.exit().remove();
    link.transition() // Update new position of old links
        .duration(duration)
        .attrTween('d', function(d) {
            // Parse the previous "M<x>,<y>L<x>,<y>" path string and tween
            // both endpoints to the new parent/child coordinates.
            var oldDraw = d3.select(this).attr('d');
            if (oldDraw) {
                oldDraw = oldDraw.match(/(M.*)(L.*)/);
                // NOTE(review): "oldMoveto = oldMoveto =" is a duplicated
                // assignment (harmless typo).
                var oldMoveto = oldMoveto = oldDraw[1].slice(1).split(',').map(Number),
                    oldLineto = oldDraw[2].slice(1).split(',').map(Number);
                // If root is changed, reverse to correctly animated if rotate left
                if (changeRoot && oldMoveto[1] === 0) { // Old root node
                    oldMoveto = oldDraw[2].slice(1).split(',').map(Number);
                    oldLineto = oldDraw[1].slice(1).split(',').map(Number);
                    changeRoot = false;
                }
                // NOTE(review): !== on freshly-built arrays compares by
                // reference, so both tests are always true and the branch
                // always runs.
                if ((oldLineto !== [d.x, d.y]) && (oldMoveto !== [d.parent.x, d.parent.y])) {
                    /*console.log(d.data.data, oldMoveto, oldLineto);
                    console.log(d.data.data, [d.parent.x, d.parent.y], [d.x, d.y]);*/
                    var interpolatorMX = d3.interpolateNumber(oldMoveto[0], d.parent.x);
                    var interpolatorMY = d3.interpolateNumber(oldMoveto[1], d.parent.y);
                    var interpolatorLX = d3.interpolateNumber(oldLineto[0], d.x);
                    var interpolatorLY = d3.interpolateNumber(oldLineto[1], d.y);
                    return function(t) {
                        return 'M' + interpolatorMX(t) + ',' + interpolatorMY(t) + 'L' + interpolatorLX(t) + ',' + interpolatorLY(t);
                    };
                }
            }
        });
    link.enter().append('path') // Add new element for new data
        .attr('class', 'link')
        .transition()
        .duration(duration)
        .attrTween('d', function(d) {
            // New links grow out of the parent's previous position.
            if(d.parent) {
                var parentOldPos = oldPos[d.parent.data.id.toString()];
                var interpolatorMX = d3.interpolateNumber(parentOldPos[0], d.parent.x);
                var interpolatorMY = d3.interpolateNumber(parentOldPos[1], d.parent.y);
                var interpolatorLX = d3.interpolateNumber(parentOldPos[0], d.x);
                var interpolatorLY = d3.interpolateNumber(parentOldPos[1], d.y);
                return function(t) {
                    return 'M' + interpolatorMX(t) + ',' + interpolatorMY(t) + 'L' + interpolatorLX(t) + ',' + interpolatorLY(t);
                };
            }
            else {
                // The root node has no incoming link.
                d3.select(this).remove();
            }
        });
    var node = gNodes.selectAll('g')
        .data(nodes, function(d) { return d.data.id; });
    node.exit().remove();
    node.transition()
        .duration(duration)
        .attr('transform', function(d) {
            setTimeout(function() { // Finish transition, update old position of this node
                oldPos[d.data.id.toString()] = [d.x, d.y];
            }, duration);
            return 'translate(' + d.x + ',' + d.y + ')';
        });
    // New nodes drop in from 30px above their parent's previous position.
    var newNode = node.enter().append('g')
        .attr('transform', function(d) {
            if (!d.parent) return 'translate(' + d.x + ',' + (d.y - 30) + ')';
            else return 'translate(' + oldPos[d.parent.data.id.toString()][0] + ',' + (oldPos[d.parent.data.id.toString()][1] - 30) + ')';
        })
        .attr('class', 'node');
    newNode.transition()
        .duration(duration)
        .attr('transform', function(d) {
            oldPos[d.data.id.toString()] = [d.x, d.y];
            return 'translate(' + d.x + ',' + d.y + ')';
        });
    newNode.append('circle')
        .attr('r', 20);
    newNode.append('text')
        .attr('class', 'text')
        .attr('text-anchor', 'middle')
        .attr('dy', 5)
        .text(function(d) { return d.data.data; });
};
// Form handler: reads the insert input, disables the insert controls
// while the animated insertion runs, and re-enables them on completion.
// Always returns false so the surrounding form never submits.
var handleInsert = function (event) {
    var value = document.getElementById('insertInput').value;
    if (!value) {
        return false;
    }
    document.getElementById('insertInput').value = '';
    // Lock the controls so overlapping animations cannot be triggered.
    d3.selectAll('#insertTree input').each(function () {
        d3.select(this).attr('disabled', '');
    });
    insert(parseInt(value), function () {
        // Animation finished -- unlock the controls.
        d3.selectAll('#insertTree input').each(function () {
            d3.select(this).attr('disabled', null);
        });
    });
    return false;
};
// Form handler for the delete input: ignores the request when the tree is
// empty (data.data === null), otherwise disables the delete controls while
// the removal animation runs and re-enables them afterwards.
// Always returns false so the surrounding form never actually submits.
var handleDelete = function(event) {
  var field = document.getElementById('deleteInput');
  var value = field.value;
  if (!value || data.data === null) {
    return false;
  }
  field.value = '';
  // Lock every input in the delete form for the duration of the animation.
  d3.selectAll('#deleteTree input').each(function() {
    d3.select(this).attr('disabled', '');
  });
  deleteTree(parseInt(value), function() {
    // Animation finished — unlock the delete form again.
    d3.selectAll('#deleteTree input').each(function() {
      d3.select(this).attr('disabled', null);
    });
  });
  return false;
};
package me.batizhao.dp.domain;
import java.util.List;
/**
* @author batizhao
* @date 2021/7/13
*/
/**
 * Form-designer configuration for a checkbox component.
 * Extends the shared {@link Config} with the list of values that are
 * pre-checked when the form is first rendered.
 */
public class CheckboxConfig extends Config {

    /** Values checked by default when the form is rendered. */
    private List<String> defaultValue;

    /**
     * Builds a checkbox configuration.
     *
     * @param label        display label of the field
     * @param tag          component tag
     * @param tagIcon      icon identifier for the designer palette
     * @param required     whether the field must be filled in
     * @param formId       id of the owning form
     * @param renderKey    unique render key of the component
     * @param optionType   how the options are rendered
     * @param border       whether the options are drawn with a border
     * @param defaultValue values pre-checked by default
     */
    public CheckboxConfig(String label, String tag, String tagIcon, boolean required, Integer formId, String renderKey, String optionType, Boolean border, List<String> defaultValue) {
        super(label, tag, tagIcon, required, formId, renderKey, optionType, border);
        this.defaultValue = defaultValue;
    }

    public List<String> getDefaultValue() {
        return defaultValue;
    }

    public void setDefaultValue(List<String> defaultValue) {
        this.defaultValue = defaultValue;
    }
}
|
<filename>CoreFoundation/compat4ce/include/sys/locking.h
/***
*sys/locking.h - flags for locking() function
*
* Copyright (c) Microsoft Corporation. All rights reserved.
*
*Purpose:
* This file defines the flags for the locking() function.
* [System V]
*
* [Public]
*
****/
#if _MSC_VER > 1000
#pragma once
#endif
#ifndef _INC_LOCKING
#define _INC_LOCKING
#if !defined(_WIN32)
#error ERROR: Only Win32 target supported!
#endif
/* Mode values for the second argument of the CRT _locking() function. */
#define _LK_UNLCK 0 /* unlock the file region */
#define _LK_LOCK 1 /* lock the file region */
#define _LK_NBLCK 2 /* non-blocking lock */
#define _LK_RLCK 3 /* lock for writing */
#define _LK_NBRLCK 4 /* non-blocking lock for writing */
#if !__STDC__
/* Non-ANSI names for compatibility */
#define LK_UNLCK _LK_UNLCK
#define LK_LOCK _LK_LOCK
#define LK_NBLCK _LK_NBLCK
#define LK_RLCK _LK_RLCK
#define LK_NBRLCK _LK_NBRLCK
#endif
#endif /* _INC_LOCKING */
|
package main
import (
"fmt"
"os"
sq "github.com/rumblefrog/go-a2s"
)
// main queries an A2S game server for its info and exits non-zero on any
// failure. The target address defaults to localhost:27015 and may be
// overridden with the first command-line argument.
func main() {
	address := "127.0.0.1:27015"
	if len(os.Args) >= 2 {
		address = os.Args[1]
	}

	client, err := sq.NewClient(address)
	if err != nil {
		fmt.Printf("configure: %v\n", err)
		os.Exit(1)
	}
	// The original ended with os.Exit(0), which bypasses deferred calls and
	// so never ran this Close. Returning from main keeps the zero exit
	// status while letting the deferred Close release the client's socket.
	defer client.Close()

	if _, err = client.QueryInfo(); err != nil {
		fmt.Printf("query: %v\n", err)
		os.Exit(1)
	}
}
|
def evaluate_equation(a, b, c, d):
    """Return True when ``a + b - c == d``.

    The original spelled this as ``if cond: return True else: return False``;
    returning the comparison directly is equivalent and idiomatic.
    """
    return a + b - c == d
<reponame>AY1920S1-CS2113T-W17-3/main<filename>src/main/java/owlmoney/logic/command/card/EditCardCommand.java
package owlmoney.logic.command.card;
import static owlmoney.commons.log.LogsCenter.getLogger;
import java.util.logging.Logger;
import owlmoney.logic.command.Command;
import owlmoney.model.card.exception.CardException;
import owlmoney.model.profile.Profile;
import owlmoney.ui.Ui;
/**
* Executes EditCardCommand to edit a credit card object.
*/
/**
 * Command that updates the details of an existing credit card in a profile.
 */
public class EditCardCommand extends Command {

    private static final Logger logger = getLogger(EditCardCommand.class);

    /** Card to edit, identified by its current name. */
    private final String name;
    /** Replacement credit limit, when supplied. */
    private final String limit;
    /** Replacement rebate, when supplied. */
    private final String rebate;
    /** Replacement card name, when supplied. */
    private final String newName;

    /**
     * Creates an instance of EditCardCommand.
     *
     * @param name    Name of credit card.
     * @param limit   New limit of credit card if any.
     * @param rebate  New rebate of credit card if any.
     * @param newName New name of credit card if any.
     */
    public EditCardCommand(String name, String limit, String rebate, String newName) {
        this.name = name;
        this.newName = newName;
        this.limit = limit;
        this.rebate = rebate;
    }

    /**
     * Applies the requested changes to the matching card in the profile.
     *
     * @param profile Profile of the user.
     * @param ui      Ui of OwlMoney.
     * @return false so OwlMoney will not terminate yet.
     * @throws CardException If card cannot be found.
     */
    @Override
    public boolean execute(Profile profile, Ui ui) throws CardException {
        profile.profileEditCardDetails(name, newName, limit, rebate, ui);
        logger.info("Successful execution of editing a card");
        return this.isExit;
    }
}
|
<reponame>MineCodeDEV/Language
package dev.minecode.language.spigot.listener;
import dev.minecode.core.api.CoreAPI;
import dev.minecode.core.api.object.CorePlayer;
import dev.minecode.core.api.object.Language;
import dev.minecode.language.api.LanguageAPI;
import dev.minecode.language.spigot.LanguageSpigot;
import org.bukkit.Bukkit;
import org.bukkit.entity.Player;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.util.UUID;
/**
 * Listens on the "minecode:language" plugin-messaging channel and opens the
 * language-selection GUI when the proxy requests it for a player.
 */
public class PluginMessageListener implements org.bukkit.plugin.messaging.PluginMessageListener {

    /** Registers this listener for both incoming and outgoing messages on the channel. */
    public PluginMessageListener() {
        Bukkit.getMessenger().registerIncomingPluginChannel(LanguageSpigot.getInstance(), "minecode:language", this);
        Bukkit.getMessenger().registerOutgoingPluginChannel(LanguageSpigot.getInstance(), "minecode:language");
    }

    /**
     * Handles "OpenLanguageChangeGUI" messages: reads the target player's UUID
     * from the payload and opens the language inventory for that player's
     * current language.
     */
    @Override
    public void onPluginMessageReceived(String channel, Player messagePlayer, byte[] bytes) {
        try {
            ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(bytes);
            DataInputStream dataInputStream = new DataInputStream(byteArrayInputStream);
            String identifier = dataInputStream.readUTF();
            if (identifier.equals("OpenLanguageChangeGUI")) {
                String playerUUID = dataInputStream.readUTF();
                Player player = Bukkit.getPlayer(UUID.fromString(playerUUID));
                if (player == null) {
                    // The player may have disconnected between the proxy sending
                    // the message and this server receiving it; the original
                    // dereferenced the null result and threw an NPE here.
                    return;
                }
                CorePlayer corePlayer = CoreAPI.getInstance().getPlayerManager().getPlayer(player.getUniqueId());
                Language language = corePlayer.getLanguage(LanguageAPI.getInstance().getThisCorePlugin());
                player.openInventory(LanguageSpigot.getInstance().getInventoryManager().getLanguageInventory().get(language));
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
|
#!/bin/sh
# Abort on the first failing command: without this, a failed `apk add`,
# `dep ensure` or `go build` was silently ignored and the script still
# exited 0 with stale or missing binaries.
set -e
# Install tools
apk --update add gcc git musl-dev
# Install dep
go get -u github.com/golang/dep/cmd/dep
# Install dependencies
dep ensure
# Build the service
go build -ldflags "-X 'main.commit=dev' -X 'main.tag=dev' -X 'main.buildDate=$(date -u)'" -a -o cmd/controller/kubernetes-vault ./cmd/controller/
# Build the init container
go build -ldflags "-X 'main.commit=dev' -X 'main.tag=dev' -X 'main.buildDate=$(date -u)'" -a -o cmd/init/kubernetes-vault-init ./cmd/init/
# Build the sample-app container
go build -ldflags "-X 'main.commit=dev' -X 'main.tag=dev' -X 'main.buildDate=$(date -u)'" -a -o cmd/sample-app/sample-app ./cmd/sample-app/
# Print each index 0..9 followed by its successor, two lines per iteration.
for value in range(10):
    print(value)
    print(value + 1)  # successor line, as in the original
#!/bin/bash
# Builds the clangd remote-index-server docker image and pushes it to GCR.
# NOTE(review): relies on args.sh to define INDEX_REPO, INDEX_ASSET_PORT_PAIRS,
# INDEXER_PROJECT_ROOT, PROJECT_ID and IMAGE_IN_GCR — confirm before running.
source args.sh
SERVER_ASSET_PREFIX="clangd_indexing_tools-linux"
OUTPUT_NAME="$SERVER_ASSET_PREFIX.zip"
TEMP_DIR="$(mktemp -d)"
# Make sure we delete TEMP_DIR on exit.
trap "rm -r $TEMP_DIR" EXIT
# Copy all the necessary files for docker image into a temp directory and move
# into it.
cp ../docker/Dockerfile "$TEMP_DIR/"
cp ../docker/index_fetcher.sh "$TEMP_DIR/"
cp ../docker/entry_point.sh "$TEMP_DIR/"
cp ../docker/status_updater.sh "$TEMP_DIR/"
cp ../docker/start_server.sh "$TEMP_DIR/"
cp -r ../docker/status_templates "$TEMP_DIR/"
cp -r ../docs "$TEMP_DIR/"
cp ../download_latest_release_assets.py "$TEMP_DIR/"
cd "$TEMP_DIR"
# Generate static pages for serving.
cd docs
REPOSITORY=$INDEX_REPO j2 ../status_templates/contact > contact.html
export GRIPHOME="$(pwd)"
export GRIPURL="$(pwd)"
echo "CACHE_DIRECTORY = '$(pwd)/asset'" > settings.py
# Render every markdown doc to HTML with grip, then post-process each page.
for f in *.md; do
  BASE_NAME="${f%.*}"
  OUT_FILE="${BASE_NAME}.html"
  grip --export - $OUT_FILE --no-inline < $f
  # Replace links to current directory with root.
  sed -i "s@$(pwd)@@g" $OUT_FILE
  # Rewrite the page title to "<doc> - <project>".
  sed -i "s@<title>.*</title>@<title>${BASE_NAME} - ${PROJECT_ID}</title>@g" \
    $OUT_FILE
  # Insert the footer section for the navbar.
  # (GNU sed `e`: pastes the contents of contact.html after each </article>.)
  sed -i "\@</article>@e cat contact.html" $OUT_FILE
done
# Reference every generated stylesheet from the shared header template.
for f in asset/*.css; do
  sed -i "\@</head>@i <link rel=\"stylesheet\" href=\"/$f\" />" ../status_templates/header
done
rm -f *.md settings.py footer.html
chmod -R a+rx *
cd ..
# First download and extract remote index server.
./download_latest_release_assets.py \
  --repository="clangd/clangd" \
  --asset-prefix="$SERVER_ASSET_PREFIX" \
  --output-name="$OUTPUT_NAME"
# Extract clangd-index-server and monitor.
unzip -j "$OUTPUT_NAME" "*/bin/clangd-index-server*"
chmod +x clangd-index-server clangd-index-server-monitor
# Build the image, tag it for GCR and push.
docker build --build-arg REPOSITORY="$INDEX_REPO" \
  --build-arg INDEX_ASSET_PORT_PAIRS="$INDEX_ASSET_PORT_PAIRS" \
  --build-arg INDEXER_PROJECT_ROOT="$INDEXER_PROJECT_ROOT" \
  --build-arg PROJECT_NAME="$PROJECT_ID" \
  -t "$IMAGE_IN_GCR" .
gcloud auth configure-docker
docker push "$IMAGE_IN_GCR"
#!/bin/bash
# Older path experiments, kept for reference:
# PYTHONPATH=/home/container/cli:$PYTHONPATH
# PYTHON=/home/container/cli:$PATH

# Expose the appinit helpers to both Python and the shell.
export PYTHONPATH="$PYTHONPATH:/home/container/appinit"
export PATH="$PATH:/home/container/appinit"

# Hand control to the container entry hook (sourced so its exports persist).
. /home/container/actions/entry.sh
package io.opensphere.core.appl;
/**
* Entry point for the OpenSphere application.
*/
public final class OpenSphere
{
    /**
     * A static reference to the Kernel to prevent it from being
     * garbage-collected.
     */
    @SuppressWarnings("unused")
    private static final Kernel INSTANCE = new Kernel();

    /**
     * The main main method for the OpenSphere application.
     *
     * @param args command-line arguments
     */
    public static void main(String[] args)
    {
        // Intentionally empty: the application bootstraps as a side effect of
        // the static Kernel initializer above, which runs when this class is
        // loaded to invoke main.
    }

    /** Disallow instantiation. */
    private OpenSphere()
    {
    }
}
|
# Migration: creates the users table with a polymorphic owner reference
# (adds owner_id + owner_type columns with a combined index).
class CreateUsers < ActiveRecord::Migration[5.2]
  def change
    create_table :users do |t|
      t.references :owner, polymorphic: true, index: true
      t.timestamps
    end
  end
end
# A user belongs to a polymorphic owner and can itself own posts
# (Post records whose owner_type is "User").
class User < ApplicationRecord
  belongs_to :owner, polymorphic: true
  has_many :posts, as: :owner
end
# A post's owner is polymorphic: any model declaring `has_many ..., as: :owner`
# (e.g. User above) can own posts.
class Post < ApplicationRecord
  belongs_to :owner, polymorphic: true
end
# Spanish (es) UI strings for the "email this page to a friend" widget.
# Placeholders in [[double brackets]] (e.g. [[field_name]]) are substituted
# by the consumer at render time.
translations = {
    "_EMAIL_FRIEND_HINT": "Enviar esta página a un amigo",
    "_EMAIL_FRIEND": "Enviar Página",
    "_POPUP_HEADER": "Use esta ventana para enviar nuestra página a un amigo",
    "_EMAIL_SENDING_ERR": "Error en envío",
    "_INVALID_EMAIL_ADDRESS": "Correo electrónico incorrecto en el campo [[field_name]]",
    "_FIELD_MISSING_EMAIL_TO": "Error. El siguiente campo obligatorio está vacío: Correo electrónico de su amigo",
    "_FIELD_MISSING_EMAIL": "Error. El siguiente campo obligatorio está vacío: Su correo electrónico",
    "_FIELD_MISSING_MSG": "Error. El siguiente campo obligatorio está vacío: Mensaje para su amigo",
    "_FIELD_MISSING_NAME": "Error. El siguiente campo obligatorio está vacío: Su nombre",
    "_EMAIL_SENT": "Su correo electrónico ha sido enviado con éxito. Gracias.",
    "_SENDER_NAME": "<NAME>",
    "_EMAIL_ADDRESS": "Correo electrónico de su amigo",
    "_EMAIL_FROM": "Su correo electrónico",
    "_MESSAGE": "Mensaje para su amigo"
}
def translatePhrase(phraseCode, language):
    """Look up the translated UI string for ``phraseCode``.

    Args:
        phraseCode: Key into the module-level ``translations`` table.
        language: Unused by the current implementation (only one language
            table exists); kept for interface compatibility with callers.

    Returns:
        The translated phrase, or ``"Phrase not found"`` when the code is
        unknown.
    """
    # dict.get expresses the membership-test-plus-lookup of the original
    # if/else in a single step.
    return translations.get(phraseCode, "Phrase not found")
<gh_stars>0
package com.example.android_tic_tac_toe;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
public class Choose extends AppCompatActivity {
Button single;
Button multiplayer;
Button Score;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_choose);
single = findViewById(R.id.singlePlayer);
multiplayer = findViewById(R.id.multiplayer);
Score = findViewById(R.id.score);
single.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(Choose.this, MainActivity.class);
startActivity(intent);
}
});
multiplayer.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent1 = new Intent(Choose.this, TenBoard.class);
startActivity(intent1);
}
});
Score.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent1 = new Intent(Choose.this, Score.class);
startActivity(intent1);
}
});
}
public void clickexit(View v){
moveTaskToBack(true);
android.os.Process.killProcess(android.os.Process.myPid());
System.exit(1);
}
} |
import base64
import hashlib
import hmac
def genDSA(message, private_key, algorithm):
    """Produce an HMAC signature over *message* with *private_key*.

    Args:
        message: Payload to sign; ``bytes`` or ``str`` (str is UTF-8 encoded).
        private_key: HMAC key; ``bytes`` or ``str`` (str is UTF-8 encoded).
        algorithm: Hash name, either ``'SHA256'`` or ``'SHA512'``.

    Returns:
        ``(True, signature, "Success")`` where *signature* is the raw HMAC
        digest (bytes).

    Raises:
        ValueError: If *algorithm* is not one of the supported names.
    """
    if algorithm == 'SHA256':
        hash_func = hashlib.sha256
    elif algorithm == 'SHA512':
        hash_func = hashlib.sha512
    else:
        raise ValueError("Unsupported algorithm")
    # hmac.new requires bytes-like inputs; accept str transparently so
    # callers don't have to pre-encode (backward compatible: bytes callers
    # are unaffected).
    if isinstance(message, str):
        message = message.encode('utf-8')
    if isinstance(private_key, str):
        private_key = private_key.encode('utf-8')
    # Generate the signature using HMAC with the private key
    signature = hmac.new(private_key, message, hash_func).digest()
    # Return the result, signature, and a status indicating success
    return True, signature, "Success"
/**
* This file is licensed under the MIT License (MIT).
* Copyright (c) 2021 RandomKiddo
**/
#include <iostream>
#include <string>
#include <fstream>
void compress(std::string input, std::string output);
std::string runLine(std::string line);
// Interactively asks for an input and output path, then run-length
// encodes the input file into the output file line by line.
int main(void) {
    std::string input;
    std::string output;
    std::cout << "Enter an input dir: ";
    std::cin >> input;
    std::cout << "\n" << "Enter an output dir: ";
    std::cin >> output;
    std::cout << "\n";
    compress(input, output);
    return 0;
}
// Run-length encodes every line of the file at `input`, writing one
// encoded line per source line to the file at `output`.
void compress(std::string input, std::string output) {
    std::ifstream source(input.c_str());
    std::ofstream sink(output.c_str());
    std::string line;
    while (std::getline(source, line)) {
        sink << runLine(line) << "\n";
    }
    source.close();
    sink.close();
}
// Run-length encodes a single line: each maximal run of a repeated
// character becomes the character followed by its decimal count,
// e.g. "aaab" -> "a3b1". An empty line encodes to an empty string.
std::string runLine(std::string line) {
    std::string buffer;
    // The encoding is at least as long as one char+count per run; reserving
    // the input size avoids repeated reallocation for typical text.
    buffer.reserve(line.size());
    // std::size_t index: the original used int against line.size(), a
    // signed/unsigned comparison.
    for (std::size_t i = 0; i < line.size(); ++i) {
        std::size_t count = 1;
        // Extend the current run while the next character matches.
        while (i + 1 < line.size() && line[i] == line[i + 1]) {
            ++count;
            ++i;
        }
        buffer += line[i];
        buffer += std::to_string(count);
    }
    return buffer;
}
#!/usr/bin/env bash
# Abort on the first failing step: without this, a failed composer install
# still let the migrations run against an incomplete vendor tree.
set -e
composer install
composer dump-autoload --optimize
php artisan ide-helper:generate
php artisan ide-helper:meta
php artisan migrate
<filename>src/main.cpp<gh_stars>0
#include <iostream>
#include <string>
#include <cstring>
#include <windows.h>
#include <psapi.h>
using namespace std;
const string GtaExe("GTA5.exe");
/**
 * Scans running processes for one whose module path ends in "GTA5.exe".
 *
 * Fixes over the original:
 *  - only the cbNeeded/sizeof(DWORD) entries actually filled in by
 *    EnumProcesses are scanned (the original walked all 1024 slots,
 *    reading uninitialized array entries);
 *  - GetModuleFileNameExW receives the buffer size in characters, not
 *    bytes (passing sizeof() overstated the capacity 2x);
 *  - paths shorter than "GTA5.exe" no longer underflow the substr offset;
 *  - every opened process handle is closed (the original leaked them all).
 *
 * @return the process id, or 0 when not found or enumeration failed.
 */
DWORD getGtaProcessId()
{
    DWORD aProcesses[1024], cbNeeded;
    if (!EnumProcesses(aProcesses, sizeof(aProcesses), &cbNeeded))
        return 0;
    const DWORD cProcesses = cbNeeded / sizeof(DWORD);
    for (DWORD i = 0; i < cProcesses; i++)
    {
        DWORD processID = aProcesses[i];
        // Get a handle to the process.
        HANDLE hProcess = OpenProcess(PROCESS_QUERY_INFORMATION | PROCESS_VM_READ, FALSE, processID);
        if (hProcess == nullptr)
            continue;
        bool found = false;
        HMODULE hMod;
        DWORD cbModNeeded;
        if (EnumProcessModules(hProcess, &hMod, sizeof(hMod), &cbModNeeded))
        {
            // Get the process name (nSize is in characters for the W variant).
            WCHAR szProcessName[512];
            GetModuleFileNameExW(hProcess, hMod, szProcessName, sizeof(szProcessName) / sizeof(WCHAR));
            wstring ws(szProcessName);
            string s(ws.begin(), ws.end());
            found = s.length() >= GtaExe.length()
                 && s.substr(s.length() - GtaExe.length()) == GtaExe;
        }
        CloseHandle(hProcess);
        if (found)
            return processID;
    }
    return 0;
}
typedef LONG(NTAPI* NtSuspendProcess)(IN HANDLE ProcessHandle);
typedef LONG(NTAPI* NtResumeProcess)(IN HANDLE ProcessHandle);
/**
 * Suspends the running GTA5 process for 12 seconds, then resumes it.
 *
 * Hardening over the original: the GetProcAddress and OpenProcess results
 * are checked before use — the original called a potentially-null function
 * pointer with a potentially-null handle.
 */
int main(int argC, char **argV)
{
    DWORD processID = getGtaProcessId();
    if (!processID)
    {
        cout << "No GTA process" << endl;
        exit(0);
    }
    cout << "GTA processID: " << processID << endl;
    NtSuspendProcess pfnNtSuspendProcess = (NtSuspendProcess)GetProcAddress(GetModuleHandle("ntdll"), "NtSuspendProcess");
    NtResumeProcess pfnNtResumeProcess = (NtResumeProcess)GetProcAddress(GetModuleHandle("ntdll"), "NtResumeProcess");
    if (pfnNtSuspendProcess == nullptr || pfnNtResumeProcess == nullptr)
    {
        cout << "Failed to resolve ntdll suspend/resume" << endl;
        return 1;
    }
    HANDLE processHandle = OpenProcess(PROCESS_ALL_ACCESS, FALSE, processID);
    if (processHandle == nullptr)
    {
        cout << "Failed to open process" << endl;
        return 1;
    }
    cout << "Suspend" << endl;
    pfnNtSuspendProcess(processHandle);
    cout << "Wait 12s" << endl;
    Sleep(12000);
    cout << "Resume" << endl;
    pfnNtResumeProcess(processHandle);
    CloseHandle(processHandle);
    return 0;
}
#!/bin/bash
# Runs the repliss image in the background, auto-restarting it, with the
# container's port 8080 exposed on host port 8081 and the model directory
# bind-mounted from the host.
docker run \
  --detach \
  --restart always \
  --publish "8081:8080" \
  --volume /home/peter/tmp/repliss:/opt/repliss/model/ \
  repliss
#!/usr/bin/env bash
# Compile main.cpp with debug info, capture its stdout in main.txt, then
# remove the binary. Each step only runs if the previous one succeeded.
g++ -g main.cpp -o main && ./main > main.txt && rm -f main
<reponame>shraddha-chadha/yelp-data-visualization<gh_stars>0
'use strict';
const e = React.createElement;
class CuisineDropdown extends React.Component {
constructor(props) {
super(props);
this.state = {
isLoading: true,
isMultiSelect: false,
isOpen: false,
searchText: '',
selectedCuisines: [],
searchResults: [],
cuisines: [
"American (New)",
"American (Traditional)",
"Asian Fusion",
"Bakeries",
"Barbeque",
"Bars",
"Breakfast & Brunch",
"Burgers",
"Buffets",
"Cafes",
"Chinese",
"Canadian (New)",
"Caribbean",
"Desserts",
"Fast Food",
"Fish & Chips",
"French",
"Indian",
"Italian",
"Japanese",
"Korean",
"Mexican",
"Mediterranean",
"Modern European",
"Persian/Iranian",
"Pizza",
"Sandwiches",
"Steakhouses",
"Sushi Bars",
"Thai",
"Vietnamese"
]
};
}
componentDidMount() {
this.setState({
searchResults: this.state.cuisines
});
}
/**
Function that returns the label of the dropdown
*/
getDropdownSelectionLabel() {
return this.state.selectedCuisines.join(',') || 'Select Cuisines';
}
/**
Function to toggle the dropdown
*/
toggleDropdown() {
this.setState({
isOpen: !this.state.isOpen,
searchText: ''
});
}
/**
Function to search
*/
searchList(event) {
let updatedList = this.state.cuisines.filter(function(item) {
var cuisine = item.toLowerCase(),
filter = event.target.value.toLowerCase();
return cuisine.includes(filter);
});
this.setState({
searchText: event.target.value,
searchResults: updatedList
});
}
/**
Function to add or remove state from the selected states array
*/
addOrRemoveCuisine(event) {
var cuisineValue = event.target.textContent,
indexOfCuisine = this.state.selectedCuisines.indexOf(cuisineValue);
this.state.searchText = '';
if (indexOfCuisine > -1) {
this.state.selectedCuisines.splice(indexOfCuisine, 1)
this.setState({
selectedCuisines: this.state.selectedCuisines
});
}
else {
// for single select, the selectedCuisines can have only one entry
if (!this.props.isMultiSelect && this.state.selectedCuisines.length) {
this.state.selectedCuisines.pop();
this.state.selectedCuisines.push(cuisineValue);
this.setState({
selectedCuisines: this.state.selectedCuisines
});
}
else { // IF MULTI SELECT, array length can be more that 1
this.state.selectedCuisines.push(cuisineValue);
this.setState({
selectedCuisines: this.state.selectedCuisines
});
}
}
this.props.onCuisineSelect(this.state.selectedCuisines);
}
/**
React's function to render the DOM
*/
render() {
let cuisineList, selectedList;
cuisineList = this.state.searchResults.map((cuisine) => {
if (this.state.selectedCuisines.indexOf(cuisine) < 0) {
return (
<div key={cuisine} onClick={this.addOrRemoveCuisine.bind(this)} className="dropdown-item">
{cuisine}
</div>
);
}
else {
return null;
}
});
selectedList = this.state.selectedCuisines.map((cuisine) => {
return (
<div key={cuisine} onClick={this.addOrRemoveCuisine.bind(this)} className="active dropdown-item">
{cuisine}
</div>
);
});
// HTML
return (
<div className="state-dropdown react-dropdown position-relative dropdown" className={this.state.isOpen ? 'active dropdown' : 'dropdown'}>
<button className="btn btn-secondary dropdown-toggle w-100" type="button" id="cuisineDropdownMenuButton" data-toggle="dropdown" aria-haspopup="true" aria-expanded="false">
{this.getDropdownSelectionLabel()}
</button>
<div className="dropdown-menu position-absolute w-100" aria-labelledby="cuisineDropdownMenuButton">
<div className="p-2">
<input type="text" placeholder="Search Cuisines" className="form-control" onChange={this.searchList.bind(this)}/>
</div>
<h6 className="p-2">Selected Cuisines</h6>
{selectedList}
<hr></hr>
{cuisineList}
</div>
</div>
);
}
} |
#!/bin/sh
# CocoaPods-generated embed-frameworks build phase.
# NOTE(review): `set -o pipefail`, `function`, arrays and `trap ... ERR` are
# bash-isms; under a strict POSIX /bin/sh this script would fail — presumably
# /bin/sh is bash on the target systems. Confirm, or change the shebang.
set -e
set -u
set -o pipefail

function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
BCSYMBOLMAP_DIR="BCSymbolMaps"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Copies and strips a vendored framework
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then
    # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied
    find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do
      echo "Installing $f"
      install_bcsymbolmap "$f" "$destination"
      rm "$f"
    done
    rmdir "${source}/${BCSYMBOLMAP_DIR}"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}

# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  warn_missing_arch=${2:-true}
  if [ -r "$source" ]; then
    # Copy the dSYM into the targets temp dir.
    # NOTE(review): this echo prints "${basename}.framework.dSYM" while the
    # rsync below copies "${basename}.dSYM" — the log message is misleading.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .dSYM "$source")"
    binary_name="$(ls "$source/Contents/Resources/DWARF")"
    binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}"

    # Strip invalid architectures from the dSYM.
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary" "$warn_missing_arch"
    fi
    if [[ $STRIP_BINARY_RETVAL == 0 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM"
    fi
  fi
}

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  warn_missing_arch=${2:-true}
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    if [[ "$warn_missing_arch" == "true" ]]; then
      echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    fi
    STRIP_BINARY_RETVAL=1
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=0
}

# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}

# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}

# Embed the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/ChartER/ChartER.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/ChartER/ChartER.framework"
fi
# When signing ran in parallel (backgrounded), wait for all signers to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
#!/usr/bin/env bash -vx
# Package the functions with Zappa and push the zip to the existing Lambda.
# -f keeps the cleanup from failing on a clean checkout where the zip does
# not exist yet (the bare `rm` printed an error on every first run).
rm -f ../build/functions.zip
zappa package -o ../build/functions.zip
aws lambda update-function-code --function-name handler --zip-file fileb://../build/functions.zip
#!/usr/bin/env bash
set -e

# Poll `metalctl machine ls` until at least $2 machines are in state $1,
# checking every 5 seconds and giving up after $3 attempts.
#   $1 state pattern grepped from the listing (e.g. Waiting, Phoned)
#   $2 minimum number of matching machines
#   $3 maximum number of polls before timing out
#   $4 progress suffix printed after "<count>/<min> "
#   $5 message printed when the timeout is reached
wait_for_machines() {
  local state="$1" minimum="$2" max_attempts="$3" progress="$4" timeout_msg="$5"
  declare -i attempts=0
  local count
  count=$(docker-compose run metalctl machine ls | grep "$state" | wc -l)
  until [ "$count" -ge "$minimum" ]
  do
    if [ "$attempts" -ge "$max_attempts" ]; then
      echo "$timeout_msg"
      exit 1
    fi
    echo "$count/$minimum $progress"
    sleep 5
    count=$(docker-compose run metalctl machine ls | grep "$state" | wc -l)
    attempts+=1
  done
  echo "$count/$minimum $progress"
}

echo "Starting mini-lab"
make up

echo "Waiting for machines to get to waiting state"
wait_for_machines Waiting 2 60 "machines are waiting" "not enough machines in waiting state - timeout reached"

echo "Create firewall and machine"
make firewall
make machine

echo "Waiting for machines to get to Phoned Home state"
wait_for_machines Phoned 2 120 "machines have phoned home" "not enough machines phoned home - timeout reached"

sleep 10

echo "Adding route to leaf01 and leaf02"
make route

echo "Adding iptables forwarding rules for libvirt networking"
make fwrules

echo "Check if SSH login to firewall works"
# FIXME: Again this is unstable in CI integration tests
# ssh -o StrictHostKeyChecking=no metal@100.255.254.1 -C exit

echo "Successfully started mini-lab"
<filename>src/infra/http/factories/controllers/SearchSendersControllerFactory.ts
import { Controller } from '@core/infra/Controller'
import { PrismaSendersRepository } from '@modules/senders/repositories/prisma/PrismaSendersRepository'
import { SearchSenders } from '@modules/senders/useCases/SearchSenders/SearchSenders'
import { SearchSendersController } from '@modules/senders/useCases/SearchSenders/SearchSendersController'
export function makeSearchSendersController(): Controller {
const prismaSendersRepository = new PrismaSendersRepository()
const searchSenders = new SearchSenders(prismaSendersRepository)
const searchSendersController = new SearchSendersController(searchSenders)
return searchSendersController
}
|
/*
* Copyright (c) 2011 Intel Corporation. All Rights Reserved.
* Copyright (c) Imagination Technologies Limited, UK
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
/******************************************************************************
@File msvdx_offsets.h
@Title MSVDX Offsets
@Platform
@Description
******************************************************************************/
#ifndef _MSVDX_OFFSETS_H_
#define _MSVDX_OFFSETS_H_
#include "msvdx_defs.h"
/* Aliases mapping per-codec register groups onto the underlying MSVDX
 * register banks defined in msvdx_defs.h. */
#define REG_MAC_OFFSET REG_MSVDX_DMAC_OFFSET
#define REG_MSVDX_CMDS_OFFSET REG_MSVDX_CMD_OFFSET
#define REG_MSVDX_VEC_MPEG2_OFFSET REG_MSVDX_VEC_OFFSET
#define REG_MSVDX_VEC_MPEG4_OFFSET REG_MSVDX_VEC_OFFSET
#define REG_MSVDX_VEC_H264_OFFSET REG_MSVDX_VEC_OFFSET
#define REG_MSVDX_VEC_VC1_OFFSET REG_MSVDX_VEC_OFFSET
#define REG_MSVDX_CORE_OFFSET REG_MSVDX_SYS_OFFSET
#define REG_DMAC_OFFSET REG_MSVDX_DMAC_OFFSET
#define RENDEC_REGISTER_OFFSET(__group__, __reg__ ) ( (__group__##_##__reg__##_OFFSET) + ( REG_##__group__##_OFFSET ) )
/* Not the best place for this */
#ifdef PLEASE_DONT_INCLUDE_REGISTER_BASE
/* This macro is used by KM gpu sim code */
#define REGISTER_OFFSET(__group__, __reg__ ) ( (__group__##_##__reg__##_OFFSET) )
#else
/* This is the macro used by UM drivers - it adds the MTX memory offset to the
 * MSVDX registers.
 * NOTE(review): 0x04800000 is presumably that MTX memory base — confirm
 * against the MSVDX memory map before changing. */
// #define REGISTER_OFFSET(__group__, __reg__ ) ( (__group__##_##__reg__##_OFFSET) + ( REG_##__group__##_OFFSET ) )
#define REGISTER_OFFSET(__group__, __reg__ ) ( (__group__##_##__reg__##_OFFSET) + ( REG_##__group__##_OFFSET ) + 0x04800000 )
#endif
#endif /*_MSVDX_OFFSETS_H_*/
|
<reponame>zekroTJA/discordgo-sonic
package base64x
import (
`reflect`
`unsafe`
)
// mem2str reinterprets the byte slice v as a string without copying.
// The returned string aliases v's backing array, so the caller must not
// mutate v while the string is in use (Go strings are assumed immutable).
// NOTE(review): reflect.StringHeader/SliceHeader are deprecated since
// Go 1.20 — prefer unsafe.String/unsafe.Slice when the toolchain allows.
func mem2str(v []byte) (s string) {
    (*reflect.StringHeader)(unsafe.Pointer(&s)).Len = (*reflect.SliceHeader)(unsafe.Pointer(&v)).Len
    (*reflect.StringHeader)(unsafe.Pointer(&s)).Data = (*reflect.SliceHeader)(unsafe.Pointer(&v)).Data
    return
}
// str2mem reinterprets the string s as a byte slice without copying
// (Cap is set equal to Len). The slice aliases the string's backing
// array; writing through it is undefined behavior — treat it read-only.
func str2mem(s string) (v []byte) {
    (*reflect.SliceHeader)(unsafe.Pointer(&v)).Cap = (*reflect.StringHeader)(unsafe.Pointer(&s)).Len
    (*reflect.SliceHeader)(unsafe.Pointer(&v)).Len = (*reflect.StringHeader)(unsafe.Pointer(&s)).Len
    (*reflect.SliceHeader)(unsafe.Pointer(&v)).Data = (*reflect.StringHeader)(unsafe.Pointer(&s)).Data
    return
}
// mem2addr returns the data pointer of the slice header (the address of
// v's first element, or whatever the header holds for an empty slice).
// It reads the header directly, so it does not panic on len(v) == 0.
func mem2addr(v []byte) unsafe.Pointer {
    return *(*unsafe.Pointer)(unsafe.Pointer(&v))
}
|
import {Edge} from '../../structs/edge'
import {Algorithm} from '../../utils/algorithm'
// import {Assert} from '../../utils/assert'
import {CancelToken} from '../../utils/cancelToken'
import {GeomEdge} from '../core/geomEdge'
import {GeomGraph} from '../core/GeomGraph'
import {LayoutSettings} from '../layered/SugiyamaLayoutSettings'
import {MdsGraphLayout} from './MDSGraphLayout'
import {MdsLayoutSettings} from './MDSLayoutSettings'
// Initial layout using PivotMDS method for a graph with subgraphs
// Initial layout using PivotMDS method for a graph with subgraphs
export class PivotMDS extends Algorithm {
  // Distance function: desired length of an edge, fed to the MDS layout.
  length: (e: GeomEdge) => number
  private graph: GeomGraph
  // NOTE(review): not referenced anywhere in this class — presumably
  // consumed via settings by MdsGraphLayout; confirm before removing.
  iterationsWithMajorization: number
  settings: MdsLayoutSettings
  layoutSettingsFunc: (g: GeomGraph) => LayoutSettings
  // scales the final layout by the specified factor on X
  public get scaleX(): number {
    return this.settings.ScaleX
  }
  public set scaleX(value: number) {
    /*Assert.assert(!isNaN(value))*/
    this.settings.ScaleX = value
  }
  // scales the final layout by the specified factor on Y
  public get scaleY(): number {
    return this.settings.ScaleY
  }
  public set scaleY(value: number) {
    /*Assert.assert(!isNaN(value))*/
    this.settings.ScaleY = value
  }
  // Layout graph by the PivotMds method. Uses spectral techniques to obtain a layout in
  // O(n^2) time when iterations with majorization are used, otherwise it is more like O(PivotNumber*n).
  constructor(
    graph: GeomGraph,
    cancelToken: CancelToken,
    length: (e: GeomEdge) => number,
    settings: MdsLayoutSettings,
    layoutSettingsFunc: (g: GeomGraph) => LayoutSettings,
  ) {
    super(cancelToken)
    this.graph = graph
    this.length = length
    this.settings = settings
    // NOTE(review): overwrites whatever scale the caller configured on the
    // passed-in settings object — a visible side effect on `settings`.
    this.settings.ScaleX = this.settings.ScaleY = 200
    this.layoutSettingsFunc = layoutSettingsFunc
  }
  // Executes the actual algorithm.
  run() {
    // with 0 majorization iterations we just do PivotMDS
    const mdsLayout = new MdsGraphLayout(
      this.settings,
      this.graph,
      this.cancelToken,
      this.length,
    )
    mdsLayout.run()
  }
}
// returns the map of pairs (new lifted GeomEdge, existing GeomEdge)
function CreateLiftedEdges(geomGraph: GeomGraph): Map<GeomEdge, GeomEdge> {
const liftedEdges = new Map<GeomEdge, GeomEdge>()
for (const u of geomGraph.deepNodes()) {
const liftedU = geomGraph.liftNode(u)
for (const uv of u.outEdges()) {
const v = uv.target
const liftedV = geomGraph.liftNode(v)
if (
liftedV == null ||
(liftedU == u && liftedV == v) ||
liftedU == liftedV
) {
continue
}
const newLiftedEdge = new Edge(liftedU.node, liftedV.node)
const newLiftedGeomEdge = new GeomEdge(newLiftedEdge)
liftedEdges.set(newLiftedGeomEdge, uv)
}
}
return liftedEdges
}
|
<reponame>camplight/hylo-evo
import PropTypes from 'prop-types'
import React from 'react'
import './ReplaceComponent.scss'
const { string } = PropTypes
// Minimal presentational component: renders the `example` string inside a
// div styled by the 'exampleName' class from ReplaceComponent.scss.
export default function ReplaceComponent ({ example }) {
  return <div styleName='exampleName'>{example}</div>
}
// `example` is the only prop and is expected to be a plain string.
ReplaceComponent.propTypes = {
  example: string
}
|
<filename>test/runTest.js
import { SampleBinary } from './SampleBinary';
// Smoke test for the SampleBinary wrapper: waits for the binary to signal
// readiness, then calls each exported routine once, logging results for
// manual inspection (no assertions).
const binary = new SampleBinary();
binary.ready.then(() => {
  // Arithmetic wrappers — 8 and 18 are arbitrary sample operands.
  console.log( binary.add(8,18) );
  console.log( binary.sub(8,18) );
  console.log( binary.multiply(8,18) );
  console.log( binary.divide(8,18) );
  // status() with and without an argument.
  console.log( binary.status() );
  console.log( binary.status(1) );
  // Routines that produce their own output / side effects.
  binary.echo();
  binary.number();
}).catch(error => console.error(error));
|
package org.zalando.intellij.swagger.examples.extensions.zalando.field.completion.swagger;
import com.google.common.collect.ImmutableList;
import java.util.List;
import org.zalando.intellij.swagger.completion.field.model.common.Field;
import org.zalando.intellij.swagger.completion.field.model.common.StringField;
class SwaggerFields {
  /**
   * Zalando-specific string fields offered for completion inside the
   * Swagger "info" object; both are marked as required (true).
   */
  static List<Field> info() {
    return ImmutableList.<Field>builder()
        .add(new StringField("x-api-id", true))
        .add(new StringField("x-audience", true))
        .build();
  }
}
|
#!/bin/bash
# MIT License
#
# (C) Copyright [2021] Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# Single source of truth for the image reference so build and push can't drift.
IMAGE="arti.dev.cray.com/csm-internal-docker-stable-local/craypc/vshasta-builder:latest"

# Abort on the first failing command so a broken build is never pushed.
set -e

docker build -t "${IMAGE}" .

# Push only when explicitly requested: ./<script> push
# ${1:-} keeps the test safe when no argument is supplied.
if [[ "${1:-}" == "push" ]]; then
    docker push "${IMAGE}"
fi
|
def sum_two_nums(x, y):
    """Return the sum of all integers in the inclusive range [x, y].

    Note: despite the name, this sums every integer from x through y,
    not just the two endpoints. Returns 0 when x > y (empty range),
    matching the original loop's behavior.
    """
    # Built-in sum over a range replaces the manual accumulator loop,
    # which also shadowed the builtin name `sum`.
    return sum(range(x, y + 1))
# FIX: x and y were used here without ever being defined, so the script
# crashed with a NameError. Provide example inputs before the call.
x, y = 1, 10
s = sum_two_nums(x, y)
print(s)
<filename>src/test/util/parser/VoteXmlParser.test.js
/* eslint-env jest */
const assert = require('chai').assert
const Parsers = require('../../../util/parser/parsers')
const VoteXmlParser = Parsers.VoteXmlParser
const VoteParticipantsXmlParser = Parsers.VoteParticipantsXmlParser
// Tests for VoteXmlParser: parsing vote records out of the generated XML
// fixture and the current-parliament guard.
describe('VoteXmlParser', () => {
  it('should return a final vote for bill C-19', () => {
    // Mix of records: empty, non-final reading, unrelated bill, and the
    // final third-reading vote we assert on (index 3 in the fixture).
    const nonFinalVote = { billNumber: 'C-19', name: '2nd Reading' }
    const nonBillVote = { name: '3rd reading and adoption of Bill C-17' }
    const finalBillVote = {
      id: 95,
      billNumber: 'C-19',
      name: '3rd reading and adoption of Bill C-19',
      yeas: 177,
      nays: 139
    }
    const xml = genVoteXml([{}, nonFinalVote, nonBillVote, finalBillVote])
    const parser = new VoteXmlParser(xml)
    const votes = parser.getAllFromXml()
    const vote = votes[3]
    assert.strictEqual(vote.id, 95)
    assert.strictEqual(vote.billNumber, 'C-19')
    assert.strictEqual(vote.name, '3rd reading and adoption of Bill C-19')
    assert.strictEqual(vote.yeas, 177)
    assert.strictEqual(vote.nays, 139)
  })
  it('should return false if current parliament not satisfied', () => {
    // Empty XML plus an explicit parliament descriptor: the parser should
    // report it is not in the current parliament.
    const parser = new VoteXmlParser('', { number: 42, session: 1 })
    assert.isFalse(parser.isInCurrentParliament())
  })
})
// Tests for VoteParticipantsXmlParser: mapping voter XML entries to voter
// objects, including paired votes and the empty-payload case.
describe('VoteParticipantsXmlParser', () => {
  it('should return dictionary of voters for bill C-47', () => {
    const yeaVoter = { firstName: 'Voter', lastName: 'One', yea: true }
    const nayVoter = { firstName: 'Michael', lastName: 'Chong', yea: false }
    const pairedVoter = { firstName: 'Jean-Yves', lastName: 'Duclos', yea: true, paired: true }
    const pairedVoterWithVote = { firstName: 'Marilène', lastName: 'Gill', yea: false, paired: true }
    const xml = genVotersXml([yeaVoter, nayVoter, pairedVoter, pairedVoterWithVote])
    const parser = new VoteParticipantsXmlParser(xml, 'someStringID')
    const voters = parser.getAllFromXml()
    assert(voters instanceof Array, 'should return an array')
    assert.lengthOf(voters, 4)
    // Tally checks: 2 yea, 2 nay, 2 paired in the fixture above.
    const yeaVoters = voters.filter(voter => voter.yea === true)
    assert.lengthOf(yeaVoters, 2)
    const nayVoters = voters.filter(voter => voter.yea !== true)
    assert.lengthOf(nayVoters, 2)
    const pairedVoters = voters.filter(voter => voter.paired === true)
    assert.lengthOf(pairedVoters, 2)
    // check case of normal vote
    assert.equal(voters[1].member, '<NAME>', '<NAME> is a voter')
    assert.strictEqual(voters[1].yea, false)
    assert.isFalse(voters[1].paired)
    // check case of paired vote with no Yea/nay
    assert.equal(voters[2].member, '<NAME>', '<NAME> is a voter')
    assert.strictEqual(voters[2].yea, true)
    assert.isTrue(voters[2].paired)
  })
  it('should return empty JSON if no participant data', () => {
    // A valid-but-empty ArrayOfVoteParticipants payload must parse to an
    // empty collection rather than throwing.
    const voteParticipantsXmlWithNoParticipantData = '<?xml version="1.0" encoding="utf-8"?>\n' +
      '<ArrayOfVoteParticipants xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" />'
    const parser = new VoteParticipantsXmlParser(voteParticipantsXmlWithNoParticipantData, 'someStringID')
    assert.isEmpty(parser.getAllFromXml())
  })
})
// Builds an <ArrayOfVote> XML fixture mimicking the parliament votes feed.
// Each entry may override parliamentNumber, sessionNumber, id, name, yeas,
// nays and billNumber; unspecified fields fall back to defaults.
function genVoteXml (voteList) {
  let xml = '<ArrayOfVote>'
  voteList.forEach((vote, i) => {
    // Votes without a bill number serialize as a self-closing element.
    const billNumberCode = (typeof vote.billNumber !== 'undefined')
      ? `<BillNumberCode>${vote.billNumber}</BillNumberCode>` : '<BillNumberCode />'
    const voteXml = `<Vote>
      <ParliamentNumber>${vote.parliamentNumber || 42}</ParliamentNumber>
      <DecisionEventDateTime>2014-02-18</DecisionEventDateTime>
      <SessionNumber>${vote.sessionNumber || 1}</SessionNumber>
      <DecisionDivisionNumber>${vote.id || i}</DecisionDivisionNumber>
      <DecisionDivisionSubject>${vote.name || 'Vote Subject Name'}</DecisionDivisionSubject>
      <DecisionDivisionNumberOfYeas>${vote.yeas || 0}</DecisionDivisionNumberOfYeas>
      <DecisionDivisionNumberOfNays>${vote.nays || 0}</DecisionDivisionNumberOfNays>
      ${billNumberCode}
    </Vote>`
    xml += voteXml
  })
  xml += '</ArrayOfVote>'
  return xml
}
// Builds an <ArrayOfVoteParticipant> XML fixture for voter entries.
// Note: yea/paired are interpolated verbatim, so undefined renders as the
// literal string "undefined" — the parser under test must tolerate this.
function genVotersXml (votersList) {
  let xml = '<ArrayOfVoteParticipant>'
  votersList.forEach((voter, i) => {
    const voterXml = `<VoteParticipant>
      <ParliamentNumber>${voter.parliamentNumber || 42}</ParliamentNumber>
      <SessionNumber>${voter.sessionNumber || 1}</SessionNumber>
      <DecisionDivisionNumber>${voter.voteNumber || 752}</DecisionDivisionNumber>
      <IsVoteYea>${voter.yea}</IsVoteYea>
      <PersonOfficialFirstName>${voter.firstName || 'PersonOfficialFirstName' + i}</PersonOfficialFirstName>
      <PersonOfficialLastName>${voter.lastName || 'PersonOfficialLastName' + i}</PersonOfficialLastName>
      <IsVotePaired>${voter.paired}</IsVotePaired>
    </VoteParticipant>`
    xml += voterXml
  })
  xml += '</ArrayOfVoteParticipant>'
  return xml
}
|
A better data structure than a plain array of objects, when fast lookup is important, is a hash table. A hash table stores items as key-value pairs and uses a hash function to compute an index from each key. This makes lookups faster: the average access time is constant, no matter how many elements are stored in the table.
<gh_stars>0
import React from 'react';
const SVG = ({
fill = '#000',
height = '100%',
width = '100%',
className = '',
viewBox = '0 0 16 16',
}) => (
<svg
className={className}
focusable="false"
height={height}
version="1.1"
viewBox={viewBox}
width={width}
x="0px"
xmlSpace="preserve"
xmlns="http://www.w3.org/2000/svg"
xmlnsXlink="http://www.w3.org/1999/xlink"
y="0px"
>
<g className="nc-icon-wrapper" fill={fill}>
<path
d="M13.3,5.2l1.1-2.1l-1.4-1.4l-2.1,1.1c-0.3-0.2-0.7-0.3-1.1-0.4L9,0H7L6.2,2.3C5.9,2.4,5.5,2.5,5.2,2.7 L3.1,1.6L1.6,3.1l1.1,2.1C2.5,5.5,2.4,5.9,2.3,6.2L0,7v2l2.3,0.8c0.1,0.4,0.3,0.7,0.4,1.1l-1.1,2.1l1.4,1.4l2.1-1.1 c0.3,0.2,0.7,0.3,1.1,0.4L7,16h2l0.8-2.3c0.4-0.1,0.7-0.3,1.1-0.4l2.1,1.1l1.4-1.4l-1.1-2.1c0.2-0.3,0.3-0.7,0.4-1.1L16,9V7 l-2.3-0.8C13.6,5.9,13.5,5.5,13.3,5.2z M8,11c-1.7,0-3-1.3-3-3s1.3-3,3-3s3,1.3,3,3S9.7,11,8,11z"
fill={fill}
/>
</g>
</svg>
);
export default SVG;
|
<gh_stars>1-10
require_relative 'utils.rb'
module Bankscrap
  module Openbank
    # Openbank account: fetches paginated transaction data from the
    # "movimientos" endpoint and maps each entry to a Bankscrap transaction.
    class Account < ::Bankscrap::Account
      include Utils
      # Contract identifier required by the remote API ("producto" field).
      attr_accessor :contract_id
      ACCOUNT_ENDPOINT = '/my-money/cuentas/movimientos'.freeze
      # Fetch transactions for the given account.
      # By default it fetches transactions for the last month,
      #
      # Returns an array of BankScrap::Transaction objects
      def fetch_transactions_for(connection, start_date: Date.today - 1.month, end_date: Date.today)
        transactions = []
        fields = { producto: contract_id,
                   numeroContrato: id,
                   fechaDesde: format_date(start_date),
                   fechaHasta: format_date(end_date),
                   concepto: '000' }
        # Loop over pagination: next_page_fields returns the query for the
        # following page, or an empty hash when there are no more pages.
        until fields.empty?
          data = connection.get(ACCOUNT_ENDPOINT, fields: fields)
          transactions += data['movimientos'].map { |item| build_transaction(item) }
          fields = next_page_fields(data)
        end
        transactions
      end
      # Build a transaction object from API data.
      # money/parse_date come from the included Utils module.
      def build_transaction(data)
        Transaction.new(
          account: self,
          id: data['nummov'],
          amount: money(data['importe']),
          description: data['conceptoTabla'],
          effective_date: parse_date(data['fechaValor']),
          operation_date: parse_date(data['fechaOperacion']),
          balance: money(data['saldo'])
        )
      end
    end
  end
end
|
<filename>akkaserver/src/main/scala/com/lightbend/modelserving/akka/ModelServerManagerBehavior.scala
/*
* Copyright (C) 2017-2019 Lightbend
*
* This file is part of the Lightbend model-serving-tutorial (https://github.com/lightbend/model-serving-tutorial)
*
* The model-serving-tutorial is free software: you can redistribute it and/or modify
* it under the terms of the Apache License Version 2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lightbend.modelserving.akka
import akka.actor.typed.scaladsl.{AbstractBehavior, ActorContext, Behaviors}
import akka.actor.typed.{ActorRef, Behavior}
import com.lightbend.modelserving.model.ModelToServeStats
/**
* Akka Typed actor that handles model updates, scoring records with the current model, retrieving the current
* models, and retrieving the current state to support external state queries.
* @param context
*/
class ModelServerManagerBehavior(context: ActorContext[ModelServerManagerActor]) extends AbstractBehavior[ModelServerManagerActor](context) {
  println("Creating Model Serving Manager")
  // Returns the per-dataType child actor, spawning it on first use.
  // The child's actor name is the dataType string itself.
  private def getModelServer(dataType: String): ActorRef[ModelServerActor] = {
    context.child(dataType) match {
      case Some(actorRef) => actorRef.asInstanceOf[ActorRef[ModelServerActor]]
      case _ => context.spawn(Behaviors.setup[ModelServerActor](
        context => new ModelServerBehavior(context, dataType)), dataType)
    }
  }
  // Lists the data types currently served (one child actor per type).
  private def getInstances : GetModelsResult = GetModelsResult(context.children.map(_.path.name).toSeq)
  override def onMessage(msg: ModelServerManagerActor): Behavior[ModelServerManagerActor] = {
    msg match {
      case updateModel : UpdateModel =>
        getModelServer(updateModel.model.dataType) tell updateModel
      case scoreData : ScoreData =>
        getModelServer(scoreData.record.getType) tell scoreData
      case getState : GetState => // Used for state queries
        // Unlike update/score, state queries do NOT spawn a missing child;
        // an unknown dataType answers with empty stats instead.
        context.child(getState.dataType) match{
          case Some(server) => server.asInstanceOf[ActorRef[ModelServerActor]] tell getState
          case _ => getState.reply ! ModelToServeStats()
        }
      case getModels : GetModels => // Get list of models
        getModels.reply ! getInstances
    }
    this
  }
}
|
<reponame>kulikulifoods/spree_analytics_trackers<filename>lib/spree_analytics_trackers.rb
require 'spree_core'
require 'spree_extension'
require 'spree_analytics_trackers/engine'
require 'spree_analytics_trackers/version'
require 'deface'
|
<reponame>JasonLiu798/javautil
package com.atjl.retry.api;
import com.atjl.retry.domain.RetryDataContextImpl;
public class DataContextFactory {
    /**
     * Wraps the given payload in a retry-aware {@link DataContext}.
     *
     * @param data the payload to carry through the retry pipeline
     * @return a fresh context holding {@code data}
     */
    public static <T> DataContext<T> build(T data) {
        return new RetryDataContextImpl<>(data);
    }
}
|
use std::collections::HashMap;
// Define the types for Inherent and InherentIdentifier
type Inherent = Vec<u8>;
type InherentIdentifier = u64;
// Define the InherentManager struct
pub struct InherentManager {
data: HashMap<InherentIdentifier, Inherent>,
}
impl InherentManager {
// Create a new instance of InherentManager
pub fn new() -> Self {
InherentManager {
data: HashMap::new(),
}
}
// Insert a new inherent into the collection
pub fn insert(&mut self, identifier: InherentIdentifier, inherent: Inherent) {
self.data.insert(identifier, inherent);
}
// Retrieve the inherent associated with the given identifier
pub fn get<I: Decodable>(&self, identifier: &InherentIdentifier) -> Result<Option<I>, Error> {
match self.data.get(identifier) {
Some(inherent) => I::decode(&mut &inherent[..])
.map_err(|e| Error::DecodingFailed(e, *identifier))
.map(Some),
None => Ok(None),
}
}
// Get the number of inherents in this instance
pub fn len(&self) -> usize {
self.data.len()
}
}
// Trait for types that can be decoded from a raw byte stream.
// `data` is a cursor over the remaining input; implementations may consume
// from it. Failures are reported through the crate-local `Error` type.
trait Decodable {
    fn decode(data: &mut &[u8]) -> Result<Self, Error>
    where
        Self: Sized;
}
// Error type for decoding failures: carries the decoder's message and the
// identifier of the inherent that failed.
// FIX: derives Debug + PartialEq so results containing `Error` can be
// compared and printed by `assert_eq!` in the unit tests.
#[derive(Debug, PartialEq)]
enum Error {
    DecodingFailed(String, InherentIdentifier),
}
// Implement the Decodable trait for Inherent.
impl Decodable for Inherent {
    fn decode(data: &mut &[u8]) -> Result<Self, Error> {
        // An `Inherent` is a plain byte vector, so decoding is simply a
        // copy of the remaining input. The original body was
        // `unimplemented!()`, which made every `get::<Inherent>` call panic.
        Ok(data.to_vec())
    }
}
// Unit tests for InherentManager methods.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_insert_and_get() {
        let mut manager = InherentManager::new();
        let identifier = 1;
        let inherent = vec![0, 1, 2, 3];
        manager.insert(identifier, inherent.clone());
        // FIX: `assert_eq!` on the full Result required Debug/PartialEq on
        // `Error`; `matches!` pins the same behavior without that coupling.
        assert!(matches!(
            manager.get::<Inherent>(&identifier),
            Ok(Some(v)) if v == inherent
        ));
        // Unknown identifiers decode to Ok(None), not an error.
        assert!(matches!(manager.get::<Inherent>(&99), Ok(None)));
    }

    #[test]
    fn test_len() {
        let mut manager = InherentManager::new();
        assert_eq!(manager.len(), 0);
        manager.insert(1, vec![0, 1, 2, 3]);
        assert_eq!(manager.len(), 1);
    }
}
<reponame>ASinanSaglam/BNG_cli
from bionetgen.modelapi.pattern import Molecule, Pattern
from bionetgen.modelapi.utils import ActionList
class ModelObj:
    """
    The base class for all items in a model (parameter, observable etc.).

    Attributes
    ----------
    comment : str
        comment at the end of the line/object
    line_label : str
        line label at the beginning of the line/object

    Methods
    -------
    print_line()
        generates the actual line string with line label and comments
        if applicable
    gen_string()
        generates the BNGL string of the object itself, separate from
        line attributes
    """

    def __init__(self):
        self._comment = None
        self._line_label = None

    def __str__(self) -> str:
        return self.gen_string()

    def __repr__(self) -> str:
        return self.gen_string()

    def __contains__(self, key):
        return hasattr(self, key)

    def __getitem__(self, key):
        return getattr(self, key)

    def __setitem__(self, key, value):
        setattr(self, key, value)

    def __delitem__(self, key):
        delattr(self, key)

    @property
    def comment(self) -> str:
        # FIX: return annotation was `-> None` even though the getter
        # returns the stored comment string (or None when unset).
        return self._comment

    @comment.setter
    def comment(self, val) -> None:
        # A leading '#' is stripped so gen/print code can re-add it uniformly.
        # TODO: regex handling of # instead
        if val.startswith("#"):
            self._comment = val[1:]
        else:
            self._comment = val

    @property
    def line_label(self) -> str:
        return self._line_label

    @line_label.setter
    def line_label(self, val) -> None:
        # Integer labels render as "N ", anything else as "label: ".
        # FIX: narrowed the bare `except:` so unrelated errors (e.g.
        # KeyboardInterrupt) are no longer swallowed.
        try:
            ll = int(val)
            self._line_label = "{} ".format(ll)
        except (TypeError, ValueError):
            self._line_label = "{}: ".format(val)

    def print_line(self) -> str:
        """Return the full BNGL line: indentation, optional line label,
        the object's own string, and an optional trailing comment."""
        s = " "
        # let's deal with line label
        if self.line_label is not None:
            s += self.line_label
        # start building the rest of the string
        s += str(self)
        if self.comment is not None:
            s += " #{}".format(self.comment)
        return s
class Parameter(ModelObj):
    """
    Class for all parameters in the model, subclass of ModelObj.

    In BNGL parameters are of the form
        parameter_name parameter_value/expression

    Attributes
    ----------
    name : str
        name of the parameter
    value : str
        value of the parameter, if loaded from XML this will always
        exist since NFsim needs the value and not the expression
    expr : str
        this exists if the parameter is a math expression, not necessary
    write_expr : bool
        True when gen_string should emit the expression, False for the value
    """

    def __init__(self, name, value, expr=None):
        super().__init__()
        self.name = name
        self.value = value
        self.expr = expr
        # If expr parses as a plain number it carries no extra information,
        # so the numeric value is written instead of the expression.
        # FIX: narrowed the bare `except:` to the errors float() can raise.
        try:
            test = float(expr)
            self.write_expr = False
        except (TypeError, ValueError):
            self.write_expr = True

    def gen_string(self) -> str:
        if self.write_expr:
            return "{} {}".format(self.name, self.expr)
        else:
            return "{} {}".format(self.name, self.value)
class Compartment(ModelObj):
    """
    Class for all compartments in the model, subclass of ModelObj.

    In BNGL the compartments are of the form
        compartment_name dimensions size [parent_compartment]
    where the parent is present only when this compartment is contained
    in another one.

    Attributes
    ----------
    name : str
        name of the compartment
    dim : str
        dimensionality of the compartment
    size : str
        size/volume of the compartment
    outside : str
        parent compartment, if exists
    write_expr : bool
        True when size is an expression rather than a plain number
    """

    def __init__(self, name, dim, size, outside=None):
        super().__init__()
        self.name = name
        self.dim = dim
        self.size = size
        # FIX: narrowed the bare `except:` to the errors float() can raise.
        try:
            test = float(size)
            self.write_expr = False
        except (TypeError, ValueError):
            self.write_expr = True
        self.outside = outside

    def gen_string(self) -> str:
        s = "{} {} {}".format(self.name, self.dim, self.size)
        if self.outside is not None:
            s += " {}".format(self.outside)
        return s
class Observable(ModelObj):
    """
    Class for all observables in the model, subclass of ModelObj.

    In BNGL the observables are of the form
        observable_type observable_name observable_patterns
    where patterns can include multiple patterns separated by commas.

    Attributes
    ----------
    name : str
        name of the observable
    type : str
        type of the observable, Molecules or Species
    patterns : list[Pattern]
        list of patterns of the observable

    Methods
    -------
    add_pattern
        add a Pattern object into the list of patterns
        for this observable
    """

    def __init__(self, name, otype, patterns=None):
        super().__init__()
        self.name = name
        self.type = otype
        # FIX: `patterns=[]` was a shared mutable default — every Observable
        # created without explicit patterns aliased the same list, so
        # add_pattern on one leaked into all of them.
        patterns = [] if patterns is None else patterns
        # Species observables implicitly match once; clear explicit flags.
        if self.type == "Species":
            for pat in patterns:
                if pat.MatchOnce:
                    pat.MatchOnce = False
        self.patterns = patterns

    def gen_string(self) -> str:
        s = "{} {} ".format(self.type, self.name)
        for ipat, pat in enumerate(self.patterns):
            if ipat > 0:
                s += ","
            s += str(pat)
        return s

    def add_pattern(self, pat) -> None:
        # if type is species, set MatchOnce to false since all species automatically match once
        if self.type == "Species":
            pat.MatchOnce = False
        self.patterns.append(pat)
class MoleculeType(ModelObj):
    """
    Class for molecule types in the model, subclass of ModelObj.

    In BNGL a molecule type lists every possible state of each component
    of a molecule, e.g.
        A(b, p~0~1, k~ON~OFF~NULL)

    Attributes
    ----------
    name : str
        name of the molecule type
    molecule : Molecule
        a molecule type only contains a molecule object which
        can also handle multiple component states
    """

    def __init__(self, name, components):
        super().__init__()
        self.name = name
        # Delegate all state/component formatting to the Molecule object.
        self.molecule = Molecule(name=name, components=components)

    def gen_string(self) -> str:
        return str(self.molecule)
class Species(ModelObj):
    """
    Class for seed species in the model, subclass of ModelObj.

    In BNGL the species/seed species are of the form
        species count
    where species is a single pattern and count is the starting
    value for that specific pattern.

    Attributes
    ----------
    pattern : Pattern
        pattern of the seed species
    count : str
        starting value of the seed species
    """

    def __init__(self, pattern=None, count=0):
        super().__init__()
        # FIX: the old default `pattern=Pattern()` was evaluated once at
        # class-definition time, so every default-constructed Species shared
        # one Pattern instance; mutating it affected all of them.
        self.pattern = Pattern() if pattern is None else pattern
        self.count = count
        self.name = str(self.pattern)

    def gen_string(self) -> str:
        s = "{} {}".format(self.pattern, self.count)
        return s
class Function(ModelObj):
    """
    Class for functions in the model, subclass of ModelObj.

    In BNGL a function is rendered as
        function_name = function_expression
    or, when it takes arguments,
        function_name(arg1,...,argN) = function_expression

    Attributes
    ----------
    name : str
        name of the function
    expr : str
        function expression
    args : list
        optional list of arguments for the function
    """

    def __init__(self, name, expr, args=None):
        super().__init__()
        self.name = name
        self.expr = expr
        self.args = args

    def gen_string(self) -> str:
        # No arguments: plain "name = expr" form.
        if self.args is None:
            return f"{self.name} = {self.expr}"
        arglist = ",".join(self.args)
        return f"{self.name}({arglist}) = {self.expr}"
class Action(ModelObj):
    """
    Class for all actions in the model, subclass of ModelObj.

    In BNGL actions are of the form
        action_type({arg1=>value1, arg2=>value2, ...})

    Attributes
    ----------
    type : str
        type of action, e.g. simulate or writeFile
    args : dict[arg_name] = arg_value
        action arguments as keys and their values as values

    Raises
    ------
    RuntimeError
        when the action type is unknown, when arguments are given to an
        action that takes none, or when an argument name is not valid
        for this action type.
    """

    def __init__(self, action_type=None, action_args=None) -> None:
        super().__init__()
        # FIX: `action_args={}` was a shared mutable default; every Action
        # created without explicit args aliased the same dict.
        if action_args is None:
            action_args = {}
        AList = ActionList()
        self.normal_types = AList.normal_types
        self.no_setter_syntax = AList.no_setter_syntax
        self.square_braces = AList.square_braces
        self.possible_types = AList.possible_types
        # Set initial values
        self.name = action_type
        self.type = action_type
        self.args = action_args
        # check type
        if self.type not in self.possible_types:
            raise RuntimeError(f"Action type {self.type} not recognized!")
        seen_args = []
        for arg in action_args:
            arg_name, arg_value = arg, action_args[arg]
            valid_arg_list = AList.arg_dict[self.type]
            # TODO: actions that don't take argument names should be parsed separately to check validity of arg-val tuples
            # TODO: currently not type checking arguments
            if valid_arg_list is None:
                raise RuntimeError(
                    f"Argument {arg_name} is given, but action {self.type} does not take arguments"
                )
            if len(valid_arg_list) > 0:
                if arg_name not in AList.arg_dict[self.type]:
                    raise RuntimeError(
                        f"Action argument {arg_name} not recognized!\nCheck to make sure action is correctly formatted"
                    )
            # TODO: If arg_value is the correct type
            # NOTE(review): dict keys are unique, so this duplicate warning
            # can never fire while action_args is a dict — dead code unless
            # action_args becomes a list of (name, value) tuples.
            if arg_name in seen_args:
                print(
                    f"Warning: argument {arg_name} already given, using latter value {arg_value}"
                )
            else:
                seen_args.append(arg_name)

    def gen_string(self) -> str:
        # TODO: figure out every argument that has special
        # requirements, e.g. method requires the value to
        # be a string
        action_str = "{}(".format(self.type)
        # we can skip curly if we don't have arguments
        # and we NEED to skip it for some actions
        if self.type in self.normal_types and not len(self.args) == 0:
            action_str += "{"
        elif self.type in self.square_braces:
            action_str += "["
        # add arguments
        for iarg, arg in enumerate(self.args):
            val = self.args[arg]
            if iarg > 0:
                action_str += ","
            # some actions need =>, some don't
            if self.type in self.normal_types:
                action_str += f"{arg}=>{val}"
            else:
                action_str += f"{arg}"
        # close the brace/bracket opened above, then the action itself
        if self.type in self.normal_types and not len(self.args) == 0:
            action_str += "}"
        elif self.type in self.square_braces:
            action_str += "]"
        # close up the action
        action_str += ")"
        return action_str

    def print_line(self) -> str:
        # Actions are printed without the leading space other objects get.
        s = ""
        # let's deal with line label
        if self.line_label is not None:
            s += self.line_label
        # start building the rest of the string
        s += str(self)
        if self.comment is not None:
            s += " #{}".format(self.comment)
        return s
class Rule(ModelObj):
    """
    Class for all rules in the model, subclass of ModelObj.

    Attributes
    ----------
    name : str
        name of the rule, optional
    reactants : list[Pattern]
        list of patterns for reactants
    products : list[Pattern]
        list of patterns for products
    bidirectional : bool
        True when two rate constants were given (a reversible rule)

    Methods
    -------
    set_rate_constants((k_fwd,k_bck))
        sets forward and backwards rate constants, backwards rate
        constants are optional and if not given, will set the rule
        to be a unidirectional rule
    side_string(list[Pattern])
        given a list of patterns, return a string formatted to be
        on one side of a rule definition
    """

    def __init__(self, name, reactants=None, products=None, rate_constants=()) -> None:
        super().__init__()
        self.name = name
        # FIX: `reactants=[]` / `products=[]` were shared mutable defaults;
        # appending to one default-constructed Rule mutated all of them.
        self.reactants = [] if reactants is None else reactants
        self.products = [] if products is None else products
        self.set_rate_constants(rate_constants)

    def set_rate_constants(self, rate_cts):
        # FIX: with 0 or 3+ rate constants the original left
        # self.rate_constants / self.bidirectional undefined, so gen_string
        # later crashed with AttributeError. Initialize safe defaults first.
        self.rate_constants = []
        self.bidirectional = False
        if len(rate_cts) == 1:
            self.rate_constants = [rate_cts[0]]
            self.bidirectional = False
        elif len(rate_cts) == 2:
            self.rate_constants = [rate_cts[0], rate_cts[1]]
            self.bidirectional = True
        else:
            print("1 or 2 rate constants allowed")

    def gen_string(self):
        if self.bidirectional:
            return "{}: {} <-> {} {},{}".format(
                self.name,
                self.side_string(self.reactants),
                self.side_string(self.products),
                self.rate_constants[0],
                self.rate_constants[1],
            )
        else:
            return "{}: {} -> {} {}".format(
                self.name,
                self.side_string(self.reactants),
                self.side_string(self.products),
                self.rate_constants[0],
            )

    def side_string(self, patterns):
        # Join patterns with " + " as they appear on one side of a rule.
        side_str = ""
        for ipat, pat in enumerate(patterns):
            if ipat > 0:
                side_str += " + "
            side_str += str(pat)
        return side_str
|
import isEqual from 'lodash/isEqual';
import createReactClass from 'create-react-class';
import React from 'react';
import Reflux from 'reflux';
import PropTypes from 'prop-types';
import BaseBadge from 'app/components/idBadge/baseBadge';
import BadgeDisplayName from 'app/components/idBadge/badgeDisplayName';
import TeamAvatar from 'app/components/avatar/teamAvatar';
import SentryTypes from 'app/sentryTypes';
import TeamStore from 'app/stores/teamStore';
import {Team} from 'app/types';
// Props with defaults supplied by TeamBadge.defaultProps.
type DefaultProps = {
  avatarSize: TeamAvatar['props']['size'];
  // If true, will use default max-width, or specify one as a string
  hideOverflow: boolean | string;
  hideAvatar: boolean;
};
// Full prop contract: the team to render is the only required prop.
type Props = DefaultProps & {
  team: Team;
  className?: string;
};
// Presentational badge for a team: avatar plus "#slug" display name.
// Stateless — live store updates are handled by TeamBadgeContainer below.
class TeamBadge extends React.Component<Props> {
  static propTypes = {
    ...BaseBadge.propTypes,
    team: SentryTypes.Team.isRequired,
    avatarSize: PropTypes.number,
    hideOverflow: PropTypes.oneOfType([PropTypes.bool, PropTypes.string]),
    hideAvatar: PropTypes.bool,
  };
  static defaultProps: DefaultProps = {
    avatarSize: 24,
    hideOverflow: true,
    hideAvatar: false,
  };
  render() {
    // hideOverflow is consumed here; everything else passes to BaseBadge.
    const {hideOverflow, team, ...props} = this.props;
    return (
      <BaseBadge
        displayName={
          <BadgeDisplayName hideOverflow={hideOverflow}>#{team.slug}</BadgeDisplayName>
        }
        team={team}
        {...props}
      />
    );
  }
}
type ContainerState = {
  team: Team;
};
// Subscribes TeamBadge to TeamStore so avatar changes propagate live.
const TeamBadgeContainer = createReactClass<Props, ContainerState>({
  displayName: 'TeamBadgeContainer',
  propTypes: {
    team: SentryTypes.Team.isRequired,
  },
  mixins: [Reflux.listenTo(TeamStore, 'onTeamStoreUpdate') as any],
  getInitialState() {
    return {
      team: this.props.team,
    };
  },
  // NOTE(review): componentWillReceiveProps is deprecated since React
  // 16.3 — consider getDerivedStateFromProps on the next React upgrade.
  componentWillReceiveProps(nextProps) {
    // Fast path: identical reference, nothing to update.
    if (this.state.team === nextProps.team) {
      return;
    }
    // Deep-equal team objects also require no state change.
    if (isEqual(this.state.team, nextProps.team)) {
      return;
    }
    this.setState({
      team: nextProps.team,
    });
  },
  // Store listener: refresh local state only when THIS team's avatar
  // actually changed in the store.
  onTeamStoreUpdate(updatedTeam: Set<string>) {
    if (!updatedTeam.has(this.state.team.id)) {
      return;
    }
    const team = TeamStore.getById(this.state.team.id);
    if (!team || isEqual(team.avatar, this.state.team.avatar)) {
      return;
    }
    this.setState({team});
  },
  render() {
    return <TeamBadge {...this.props} team={this.state.team} />;
  },
});
export default TeamBadgeContainer;
|
'''
Generate SFZ file from samples
'''
import os
import os.path
from collections import Counter, defaultdict
# For each open guitar string, the 22 chromatic notes playable on it
# (frets 1..22), in ascending order. 'E2' is the low E, 'E4' the high E.
string_range = {
    'E2': 'F2 Gb2 G2 Ab2 A2 Bb2 B2 C3 Db3 D3 Eb3 E3 F3 Gb3 G3 Ab3 A3 Bb3 B3 C4 Db4 D4'.split(' '),
    'A': 'Bb2 B2 C3 Db3 D3 Eb3 E3 F3 Gb3 G3 Ab3 A3 Bb3 B3 C4 Db4 D4 Eb4 E4 F4 Gb4 G4'.split(' '),
    'D': 'Eb3 E3 F3 Gb3 G3 Ab3 A3 Bb3 B3 C4 Db4 D4 Eb4 E4 F4 Gb4 G4 Ab4 A4 Bb4 B4 C5'.split(' '),
    'G': 'Ab3 A3 Bb3 B3 C4 Db4 D4 Eb4 E4 F4 Gb4 G4 Ab4 A4 Bb4 B4 C5 Db5 D5 Eb5 E5 F5'.split(' '),
    'B': 'C4 Db4 D4 Eb4 E4 F4 Gb4 G4 Ab4 A4 Bb4 B4 C5 Db5 D5 Eb5 E5 F5 Gb5 G5 Ab5 A5'.split(' '),
    'E4': 'F4 Gb4 G4 Ab4 A4 Bb4 B4 C5 Db5 D5 Eb5 E5 F5 Gb5 G5 Ab5 A5 Bb5 B5 C6 Db6 D6'.split(' '),
}
# Iteration order for generation: low string to high string.
OPEN_STRINGS = ['E2', 'A', 'D', 'G', 'B', 'E4']
# MIDI note number of the FIRST note (fret 1) of each string's range above.
MIDI_START = {
    'E4': 65,  # F4
    'B': 60,  # C4
    'G': 56,  # Ab3
    'D': 51,  # Eb3
    'A': 46,  # Bb2
    'E2': 41,  # F2
}
def gen():
    """Write 'strat_basic.sfz' mapping every note of every string to 3 samples.

    Notes reachable on several strings share a MIDI key; the velocity range
    [1, 127] is split evenly across those strings (via notes_sep/notes_cnt) so
    each string's samples occupy a distinct velocity layer. Within a layer,
    lorand/hirand opcodes round-robin the 3 sample takes at random.
    """
    notes_sep = {}
    notes_cnt = Counter()
    notes_string = defaultdict(list)
    # Map each note name to the list of strings it can be played on.
    for s, notes in string_range.items():
        for n in notes:
            notes_string[n].append(s)
    # notes_sep[n] = how many strings share note n = number of velocity layers.
    for n in notes_string.keys():
        notes_sep[n] = len(notes_string[n])
    with open('strat_basic.sfz', 'w') as fout:
        for s in OPEN_STRINGS:
            midi_key = MIDI_START[s]
            dynamics = 'mp'  # only the mezzo-piano sample set is generated
            for note in string_range[s]:
                group = f'''
// note {note}
<group>
key={midi_key}
'''
                # Velocity window for this string's layer of this note; +1 keeps
                # adjacent layers from overlapping.
                lovel, hivel = int(127 - 127 / notes_sep[note] * (notes_cnt[note] + 1)) + 1, \
                    int(127 - 127 / notes_sep[note] * notes_cnt[note])
                notes_cnt[note] += 1
                print(note, 'cnt:', notes_cnt[note], 'sep:', notes_sep[note], lovel, hivel)
                vel = f'''
lovel={lovel}
hivel={hivel}
'''
                # Three round-robin takes selected by random range (backslashes
                # are literal Windows-style path separators in the SFZ output).
                regions = f'''
<region>
sample=samples\{s.lower()}_{dynamics}\{s}_{note}_{dynamics}_1.wav
hirand=0.333
<region>
sample=samples\{s.lower()}_{dynamics}\{s}_{note}_{dynamics}_2.wav
lorand=0.333
hirand=0.666
<region>
sample=samples\{s.lower()}_{dynamics}\{s}_{note}_{dynamics}_3.wav
lorand=0.666
''' # noqa
                fout.write(group + vel + regions)
                midi_key += 1
def validate(sample_folder):
    """Return the number of expected sample .wav files missing under sample_folder.

    Expects three takes per note per dynamic, laid out as
    <string>_<dyn>/<String>_<Note>_<dyn>_<take>.wav.
    """
    takes_per_note = 3
    missing_count = 0
    for open_string in ['E2', 'A', 'D', 'G', 'B', 'E4']:
        print(f'open string {open_string}:')
        for dyn in ['mp']:
            for note in string_range[open_string]:
                for take in range(1, takes_per_note + 1):
                    rel = os.path.join(
                        f'{open_string.lower()}_{dyn}/',
                        f'{open_string}_{note}_{dyn}_{take}.wav',
                    )
                    if os.path.isfile(os.path.join(sample_folder, rel)):
                        continue
                    print(f'missing {rel}')
                    missing_count += 1
    return missing_count
if __name__ == '__main__':
    # Sample folder comes from the environment. The original called
    # os.get('BASE_PATH'), which does not exist and raises AttributeError;
    # os.environ.get is the intended lookup.
    BASE_PATH = os.environ.get('BASE_PATH')  # the sample folder
    # Refuse to generate the SFZ if any expected sample file is missing.
    assert validate(BASE_PATH) == 0
    gen()
|
// 1356. 유진수
// 2019.10.08
// 수학
#include<iostream>
#include<string>
using namespace std;
// BOJ 1356 "Yujin number": a number is a Yujin number if it can be split into
// two non-empty parts whose digit products are equal.
// Fixes vs. original: inner loop indices no longer shadow the outer `i`, and
// loop counters use size_t to match std::string::size() (no signed/unsigned
// comparison warnings). Output is unchanged ("YES"/"NO").
int main()
{
    string s;
    cin >> s;

    bool flag = false;
    // Try every split point: left = s[0..i-1], right = s[i..end].
    for (size_t i = 1; i < s.size(); i++)
    {
        string first = s.substr(0, i);
        string second = s.substr(i);

        int x = 1;
        int y = 1;
        // Product of the left part's digits.
        for (size_t j = 0; j < first.size(); j++)
        {
            x *= (first[j] - '0');
        }
        // Product of the right part's digits.
        for (size_t j = 0; j < second.size(); j++)
        {
            y *= (second[j] - '0');
        }
        // Equal products -> Yujin number.
        if (x == y)
        {
            flag = true;
            break;
        }
    }
    if (flag)
    {
        cout << "YES" << endl;
    }
    else
    {
        cout << "NO" << endl;
    }
    return 0;
}
|
<reponame>lizij/Leetcode
package Power_of_Four;
/**
 * LeetCode 342 "Power of Four" without loops or recursion.
 */
public class Solution {
    /**
     * A power of four is a power of two (exactly one bit set) whose
     * predecessor is divisible by 3, since 4^k - 1 = (4-1)(4^(k-1) + ... + 1).
     */
    public boolean isPowerOfFour(int num) {
        final boolean singleBitSet = (num & (num - 1)) == 0;
        final boolean predecessorDivisibleBy3 = ((num - 1) % 3) == 0;
        return singleBitSet && predecessorDivisibleBy3;
    }

    /** Tiny demo: expects true, false, false. */
    public static void main(String[] args) {
        Solution s = new Solution();
        System.out.println(s.isPowerOfFour(16));
        System.out.println(s.isPowerOfFour(5));
        System.out.println(s.isPowerOfFour(-128));
    }
}
#!/bin/bash
# Copyright 2020 Adap GmbH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# This script can be used to create an AWS EC2 AMI which contains the dependencies required
# to execute Flower PyTorch based baselines. The AWS EC2 AMI might not always reflect all
# dependencies listed in `pyproject.toml`, but it should at least have most of them.
# Prepare machine dependencies
sudo apt update
# Build toolchain plus the headers commonly needed to compile Python and
# native wheels (ssl, zlib, bz2, readline, sqlite, ncurses, lzma, ffi, ...).
# NOTE(review): `python-openssl` is a Python 2-era package name on newer
# Ubuntu releases — confirm it still resolves on the target AMI base image.
sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev \
    libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev \
    liblzma-dev python-openssl git
sudo apt install -y python3.7 python3-pip

# Install project dependencies
# Versions are pinned to keep the AMI reproducible (they may lag pyproject.toml).
python3.7 -m pip install -U pip==22.0.4 setuptools==60.9.3
python3.7 -m pip install -U numpy==1.18.1 grpcio==1.27.2 google==2.0.3 protobuf==3.12.1 \
    boto3==1.12.36 boto3_type_annotations==0.3.1 paramiko==2.7.1 docker==4.2.0 matplotlib==3.2.1 \
    tqdm==4.48.2 torch==1.6.0 torchvision==0.7.0

# Preload datasets
# Running the partitioning module once caches CIFAR locally so baselines
# don't download it at experiment time.
python3.7 -m flwr_experimental.baseline.dataset.pytorch_cifar_partitioned
|
package com.atjl.util.queue;
/**
 * Shared constant holder for queue configuration: default sizes, the config
 * file name, and the property keys / separator defaults used when parsing
 * queue definitions. Not instantiable.
 */
public class QueueConstant {
    /** Default capacity for a queue when none is configured. */
    public static final int CONF_DFT_QUEUE_SIZE = 100000;
    /** Properties file the queue configuration is read from. */
    public static final String DFT_QUEUE_CONFIG_FILE = "sysconfig.properties";
    /** Property key listing the queues. */
    public static final String CONF_QUEUE_KEY = "queue";
    /** Property key for the separator between queue entries; default below. */
    public static final String QUEUE_SEP_KEY = "queuesep";
    public static final String QUEUE_SEP_DFT = ",";
    /** Property key for the name/size separator within an entry; default below. */
    public static final String QUEUE_SIZE_SEP_KEY = "queuesizesep";
    public static final String QUEUE_SIZE_SEP_DFT = ":";

    // Constants-only class: constructor always throws to prevent instantiation.
    private QueueConstant(){
        throw new UnsupportedOperationException();
    }
}
|
import { app, BrowserWindow, /* session, */ nativeImage, Menu } from 'electron';
import * as path from 'path';
import * as Store from 'electron-store';
import * as windowStateKeeper from 'electron-window-state';
import * as remoteMain from '@electron/remote/main';
import ipcHandlers from './ipc-handlers';
// Let electron-store answer requests coming from renderer processes.
Store.initRenderer();

const isDevelopment = process.env.NODE_ENV !== 'production';
const isMacOS = process.platform === 'darwin';
// Single-instance guard; when false another instance already owns the lock.
const gotTheLock = app.requestSingleInstanceLock();

process.env.ELECTRON_DISABLE_SECURITY_WARNINGS = 'true';

// global reference to mainWindow (necessary to prevent window from being garbage collected)
let mainWindow: BrowserWindow;
let mainWindowState: windowStateKeeper.State;
/**
 * Creates and loads the main application window, restoring the previous
 * size/position from `mainWindowState` (which must be initialized first).
 * Returns the BrowserWindow once its content has loaded (or load failed).
 */
async function createMainWindow () {
   const icon = require('../renderer/images/logo-32.png');
   const window = new BrowserWindow({
      width: mainWindowState.width,
      height: mainWindowState.height,
      x: mainWindowState.x,
      y: mainWindowState.y,
      minWidth: 900,
      minHeight: 550,
      title: 'Antares SQL',
      autoHideMenuBar: true,
      icon: nativeImage.createFromDataURL(icon.default),
      webPreferences: {
         nodeIntegration: true,
         contextIsolation: false,
         spellcheck: false
      },
      frame: false,
      // Custom frameless title bar; macOS keeps native traffic lights.
      titleBarStyle: isMacOS ? 'hidden' : 'default',
      trafficLightPosition: isMacOS ? { x: 10, y: 8 } : undefined,
      backgroundColor: '#1d1d1d'
   });

   // Persist size/position changes; 'moved' also triggers an explicit save.
   mainWindowState.manage(window);
   window.on('moved', saveWindowState);

   // Enable @electron/remote for this window before loading content.
   remoteMain.enable(window.webContents);

   try {
      if (isDevelopment)
         await window.loadURL('http://localhost:9080');
      else {
         const indexPath = path.resolve(__dirname, 'index.html');
         await window.loadFile(indexPath);
      }
   }
   catch (err) {
      console.log(err);
   }

   window.on('closed', () => {
      // Drop the listener and the global ref so the window can be GC'd.
      window.removeListener('moved', saveWindowState);
      mainWindow = null;
   });

   return window;
}
// Quit immediately when another instance already holds the single-instance lock.
if (!gotTheLock) app.quit();
else {
   require('@electron/remote/main').initialize();

   // Initialize ipcHandlers
   ipcHandlers();

   // quit application when all windows are closed
   app.on('window-all-closed', () => {
      // on macOS it is common for applications to stay open until the user explicitly quits
      // NOTE(review): this quits ON macOS and stays alive elsewhere — the
      // inverse of the usual Electron pattern; confirm this is intentional.
      if (isMacOS) app.quit();
   });

   app.on('activate', async () => {
      // on macOS it is common to re-create a window even after all windows have been closed
      if (mainWindow === null)
         mainWindow = await createMainWindow();
   });

   // create main BrowserWindow when electron is ready
   app.on('ready', async () => {
      // Window-state keeper must exist before createMainWindow() reads it.
      mainWindowState = windowStateKeeper({
         defaultWidth: 1024,
         defaultHeight: 800
      });

      mainWindow = await createMainWindow();
      createAppMenu();

      if (isDevelopment)
         mainWindow.webContents.openDevTools();

      // Surface unexpected main-process errors in the renderer UI.
      process.on('uncaughtException', error => {
         mainWindow.webContents.send('unhandled-exception', error);
      });

      process.on('unhandledRejection', error => {
         mainWindow.webContents.send('unhandled-exception', error);
      });
   });

   app.on('browser-window-created', (event, window) => {
      // In development, load the devtools extension shipped under /misc.
      if (isDevelopment) {
         const { antares } = require('../../package.json');
         const extensionPath = path.resolve(__dirname, `../../misc/${antares.devtoolsId}`);
         window.webContents.session.loadExtension(extensionPath, { allowFileAccess: true }).catch(console.error);
      }
   });
}
function createAppMenu () {
let menu: Electron.Menu = null;
if (isMacOS) {
menu = Menu.buildFromTemplate([
{
label: app.name,
submenu: [
{ role: 'about' },
{ type: 'separator' },
{
label: 'Check for Updates...',
click: (_menuItem, win) => win.webContents.send('open-updates-preferences')
},
{
label: 'Preferences',
click: (_menuItem, win) => win.webContents.send('toggle-preferences'),
accelerator: 'CmdOrCtrl+,'
},
{ type: 'separator' },
{ role: 'hide' },
{ role: 'hideOthers' },
{ type: 'separator' },
{ role: 'quit' }
]
},
{
role: 'editMenu'
},
{
role: 'viewMenu'
},
{
role: 'windowMenu'
}
]);
}
Menu.setApplicationMenu(menu);
}
// Persist the main window's current bounds via electron-window-state.
function saveWindowState () {
   mainWindowState.saveState(mainWindow);
}
|
def string_length_sum(string1, string2):
    """Return the combined length of the two given strings."""
    return len(string1) + len(string2)


if __name__ == '__main__':
    # The original executed `result = string_length_sum(string1, string2)` at
    # module level with string1/string2 undefined, raising NameError on import.
    # Guarded example usage with concrete values instead.
    result = string_length_sum("hello", "world")
    print(result)
package squeek.spiceoflife;
import cpw.mods.fml.common.FMLCommonHandler;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.entity.player.EntityPlayerMP;
import net.minecraftforge.common.config.Configuration;
import net.minecraftforge.common.config.Property;
import squeek.spiceoflife.compat.IByteIO;
import squeek.spiceoflife.compat.PacketDispatcher;
import squeek.spiceoflife.foodtracker.FoodHistory;
import squeek.spiceoflife.foodtracker.FoodModifier;
import squeek.spiceoflife.foodtracker.foodgroups.FoodGroupConfig;
import squeek.spiceoflife.foodtracker.foodgroups.FoodGroupRegistry;
import squeek.spiceoflife.interfaces.IPackable;
import squeek.spiceoflife.interfaces.IPacketProcessor;
import squeek.spiceoflife.network.PacketBase;
import squeek.spiceoflife.network.PacketConfigSync;
import java.io.File;
import java.util.Locale;
/**
 * Central configuration for the Spice of Life mod.
 *
 * Values are read from a Forge {@link Configuration} file in {@link #init(File)}
 * and mirrored into public static fields. Server-side values are synced to
 * clients via {@link PacketConfigSync} ({@link #pack}/{@link #unpack}).
 *
 * NOTE: several *_COMMENT constants reference *_NAME constants in their text,
 * so static declaration order matters in a few places below.
 */
public class ModConfig implements IPackable, IPacketProcessor {
    public static final ModConfig instance = new ModConfig();

    public static final String ITEM_FOOD_JOURNAL_NAME = "bookfoodjournal";
    public static final String ITEM_LUNCH_BOX_NAME = "lunchbox";
    public static final String ITEM_LUNCH_BAG_NAME = "lunchbag";

    private static final String COMMENT_SERVER_SIDE_OPTIONS =
        "These config settings are server-side only\n"
        + "Their values will get synced to all clients on the server";

    /*
     * MAIN
     */
    // NOTE(review): the surrounding spaces in " main " are preserved as-is;
    // they become part of the category name in the config file.
    private static final String CATEGORY_MAIN = " main ";
    private static final String CATEGORY_MAIN_COMMENT =
        COMMENT_SERVER_SIDE_OPTIONS;
    private static final String FOOD_MODIFIER_ENABLED_NAME = "food.modifier.enabled";
    private static final boolean FOOD_MODIFIER_ENABLED_DEFAULT = true;
    private static final String FOOD_MODIFIER_ENABLED_COMMENT = "If false, disables the entire diminishing returns part of the mod";

    /*
     * DEV
     */
    private static final String CATEGORY_DEV = "dev";
    private static final String CATEGORY_DEV_COMMENT =
        "These config settings are only for developers";
    private static final String DEV_LOGGING_ENABLED_NAME = "dev.logging.enabled";
    private static final boolean DEV_LOGGING_ENABLED_DEFAULT = false;
    private static final String DEV_LOGGING_ENABLED_COMMENT = "If true, enables extra logging to help modpack developers";
    public static boolean DEV_LOGGING_ENABLED = ModConfig.DEV_LOGGING_ENABLED_DEFAULT;

    /*
     * SERVER
     */
    private static final String CATEGORY_SERVER = "server";
    private static final String CATEGORY_SERVER_COMMENT =
        COMMENT_SERVER_SIDE_OPTIONS;
    private static final String FOOD_HISTORY_LENGTH_NAME = "food.history.length";
    private static final int FOOD_HISTORY_LENGTH_DEFAULT = 12;
    private static final String FOOD_HISTORY_LENGTH_COMMENT =
        "The maximum amount of eaten foods stored in the history at a time";
    private static final String FOOD_HISTORY_PERSISTS_THROUGH_DEATH_NAME = "food.history.persists.through.death";
    private static final boolean FOOD_HISTORY_PERSISTS_THROUGH_DEATH_DEFAULT = false;
    private static final String FOOD_HISTORY_PERSISTS_THROUGH_DEATH_COMMENT =
        "If true, food history will not get reset after every death";
    private static final String FOOD_EATEN_THRESHOLD_NAME = "new.player.food.eaten.threshold";
    private static final String FOOD_EATEN_THRESHOLD_COMMENT =
        "The number of times a new player (by World) needs to eat before this mod has any effect";
    private static final String USE_FOOD_GROUPS_AS_WHITELISTS_NAME = "use.food.groups.as.whitelists";
    private static final boolean USE_FOOD_GROUPS_AS_WHITELISTS_DEFAULT = false;
    private static final String USE_FOOD_GROUPS_AS_WHITELISTS_COMMENT =
        "If true, any foods not in a food group will be excluded from diminishing returns";
    private static final String FOOD_HUNGER_ROUNDING_MODE_NAME = "food.hunger.rounding.mode";
    private static final String FOOD_HUNGER_ROUNDING_MODE_DEFAULT = "round";
    private static final String FOOD_HUNGER_ROUNDING_MODE_COMMENT =
        "Rounding mode used on the hunger value of foods\n"
        + "Valid options: 'round', 'floor', 'ceiling'";
    private static final String AFFECT_FOOD_HUNGER_VALUES_NAME = "affect.food.hunger.values";
    private static final boolean AFFECT_FOOD_HUNGER_VALUES_DEFAULT = true;
    private static final String AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_NAME = "affect.negative.food.hunger.values";
    private static final boolean AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_DEFAULT = false;
    private static final String AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_COMMENT =
        "If true, foods with negative hunger values will be made more negative as nutritional value decreases\n"
        + "NOTE: " + AFFECT_FOOD_HUNGER_VALUES_NAME + " must be true for this to have any affect";
    // Declared before AFFECT_FOOD_HUNGER_VALUES_COMMENT because that comment's
    // text references this name (static initialization order).
    private static final String AFFECT_FOOD_SATURATION_MODIFIERS_NAME = "affect.food.saturation.modifiers";
    private static final String AFFECT_FOOD_HUNGER_VALUES_COMMENT =
        "If true, foods' hunger value will be multiplied by the current nutritional value\n"
        + "Setting this to false and " + ModConfig.AFFECT_FOOD_SATURATION_MODIFIERS_NAME + " to true will make diminishing returns affect saturation only";
    private static final boolean AFFECT_FOOD_SATURATION_MODIFIERS_DEFAULT = false;
    private static final String AFFECT_FOOD_SATURATION_MODIFIERS_COMMENT =
        "If true, foods' saturation modifier will be multiplied by the current nutritional value\n"
        + "NOTE: When " + ModConfig.AFFECT_FOOD_HUNGER_VALUES_NAME + " is true, saturation bonuses of foods will automatically decrease as the hunger value of the food decreases\n"
        + "Setting this to true when " + ModConfig.AFFECT_FOOD_HUNGER_VALUES_NAME + " is true will make saturation bonuses decrease disproportionately more than hunger values\n"
        + "Setting this to true and " + ModConfig.AFFECT_FOOD_HUNGER_VALUES_NAME + " to false will make diminishing returns affect saturation only";
    private static final String AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_NAME = "affect.negative.food.saturation.modifiers";
    private static final boolean AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_DEFAULT = false;
    private static final String AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_COMMENT =
        "If true, foods with negative saturation modifiers will be made more negative as nutritional value decreases\n"
        + "NOTE: " + AFFECT_FOOD_SATURATION_MODIFIERS_NAME + " must be true for this to have any affect";
    private static final String FOOD_EATING_SPEED_MODIFIER_NAME = "food.eating.speed.modifier";
    private static final float FOOD_EATING_SPEED_MODIFIER_DEFAULT = 1;
    private static final String FOOD_EATING_SPEED_MODIFIER_COMMENT =
        "If set to greater than zero, food eating speed will be affected by nutritional value\n"
        + "(meaning the lower the nutrtional value, the longer it will take to eat it)\n"
        + "Eating duration is calcualted using the formula (eating_duration / (nutritional_value^eating_speed_modifier))";
    private static final String FOOD_EATING_DURATION_MAX_NAME = "food.eating.duration.max";
    private static final int FOOD_EATING_DURATION_MAX_DEFAULT = 0;
    private static final String FOOD_EATING_DURATION_MAX_COMMENT =
        "The maximum time it takes to eat a food after being modified by " + ModConfig.FOOD_EATING_SPEED_MODIFIER_NAME + "\n"
        + "The default eating duration is 32. Set this to 0 to remove the limit on eating speed.\n"
        + "Note: If this is set to 0 and " + ModConfig.FOOD_EATING_SPEED_MODIFIER_NAME + " is > 0, a food with 0% nutrtional value will take nearly infinite time to eat";
    private static final String FOOD_MODIFIER_FORMULA_STRING_NAME = "food.modifier.formula";
    private static final String FOOD_MODIFIER_FORMULA_STRING_DEFAULT = "MAX(0, (1 - count/12))^MIN(8, food_hunger_value)";
    private static final String FOOD_MODIFIER_FORMULA_STRING_COMMENT =
        "Uses the EvalEx expression parser\n"
        + "See: https://github.com/uklimaschewski/EvalEx for syntax/function documentation\n\n"
        + "Available variables:\n"
        + "\tcount : The number of times the food (or its food group) has been eaten within the food history\n"
        + "\thunger_count : The total amount of hunger that the food (or its food group) has restored within the food history (1 hunger unit = 1/2 hunger bar)\n"
        + "\tsaturation_count : The total amount of saturation that the food (or its food group) has restored within the food history (1 saturation unit = 1/2 saturation bar)\n"
        + "\tmax_history_length : The maximum length of the food history (see " + FOOD_HISTORY_LENGTH_NAME + ")\n"
        + "\tcur_history_length : The current length of the food history (<= max_history_length)\n"
        + "\tfood_hunger_value : The default amount of hunger the food would restore in hunger units (1 hunger unit = 1/2 hunger bar)\n"
        + "\tfood_saturation_mod : The default saturation modifier of the food\n"
        + "\tcur_hunger : The current hunger value of the player in hunger units (20 = full)\n"
        + "\tcur_saturation : The current saturation value of the player\n"
        + "\ttotal_food_eaten : The all-time total number of times any food has been eaten by the player\n"
        + "\tfood_group_count : The number of food groups that the food belongs to\n"
        + "\tdistinct_food_groups_eaten : The number of distinct food groups in the player's current food history\n"
        + "\ttotal_food_groups : The total number of enabled food groups\n"
        + "\texact_count : The number of times the food (ignoring food groups) has been eaten within the food history\n";
    private static final String GIVE_FOOD_JOURNAL_ON_START_NAME = "give.food.journal.as.starting.item";
    private static final boolean GIVE_FOOD_JOURNAL_ON_START_DEFAULT = false;
    private static final String GIVE_FOOD_JOURNAL_ON_START_COMMENT =
        "If true, a food journal will be given to each player as a starting item";
    private static final String FOOD_CONTAINERS_MAX_STACKSIZE_NAME = "food.containers.max.stacksize";
    private static final int FOOD_CONTAINERS_MAX_STACKSIZE_DEFAULT = 2;
    private static final String FOOD_CONTAINERS_MAX_STACKSIZE_COMMENT =
        "The maximum stacksize per slot in a food container";

    /*
     * CLIENT
     */
    private static final String CATEGORY_CLIENT = "client";
    private static final String CATEGORY_CLIENT_COMMENT =
        "These config settings are client-side only";
    private static final String LEFT_CLICK_OPENS_FOOD_CONTAINERS_NAME = "left.click.opens.food.containers";
    private static final boolean LEFT_CLICK_OPENS_FOOD_CONTAINERS_DEFAULT = false;
    private static final String LEFT_CLICK_OPENS_FOOD_CONTAINERS_COMMENT =
        "If true, left clicking the air while holding a food container will open it (so that it can be eaten from)";

    /*
     * FOOD GROUPS
     */
    // Deprecated category: food groups moved to .json files; the category is
    // cleared in init() after setting its pointer comment.
    @Deprecated
    private static final String CATEGORY_FOODGROUPS = "foodgroups";
    private static final String CATEGORY_FOODGROUPS_COMMENT =
        "Food groups are defined using .json files in /config/SpiceOfLife/\n"
        + "See /config/SpiceOfLife/example-food-group.json";

    // whether or not food modifier is actually enabled (we either are the server or know the server has it enabled)
    public static boolean FOOD_MODIFIER_ENABLED = false;
    // the value written in the config file
    public static boolean FOOD_MODIFIER_ENABLED_CONFIG_VAL = ModConfig.FOOD_MODIFIER_ENABLED_DEFAULT;
    public static int FOOD_HISTORY_LENGTH = ModConfig.FOOD_HISTORY_LENGTH_DEFAULT;
    // Declared here (after FOOD_HISTORY_LENGTH) because the default is derived
    // from it; static initialization order makes this work.
    private static final int FOOD_EATEN_THRESHOLD_DEFAULT = ModConfig.FOOD_HISTORY_LENGTH / 2;
    public static boolean FOOD_HISTORY_PERSISTS_THROUGH_DEATH = ModConfig.FOOD_HISTORY_PERSISTS_THROUGH_DEATH_DEFAULT;
    public static int FOOD_EATEN_THRESHOLD = ModConfig.FOOD_EATEN_THRESHOLD_DEFAULT;
    public static boolean USE_FOOD_GROUPS_AS_WHITELISTS = ModConfig.USE_FOOD_GROUPS_AS_WHITELISTS_DEFAULT;
    public static RoundingMode FOOD_HUNGER_ROUNDING_MODE = null;
    public static String FOOD_HUNGER_ROUNDING_MODE_STRING = ModConfig.FOOD_HUNGER_ROUNDING_MODE_DEFAULT;
    public static boolean AFFECT_FOOD_HUNGER_VALUES = ModConfig.AFFECT_FOOD_HUNGER_VALUES_DEFAULT;
    public static boolean AFFECT_NEGATIVE_FOOD_HUNGER_VALUES = ModConfig.AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_DEFAULT;
    public static boolean AFFECT_FOOD_SATURATION_MODIFIERS = ModConfig.AFFECT_FOOD_SATURATION_MODIFIERS_DEFAULT;
    public static boolean AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS = ModConfig.AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_DEFAULT;
    public static float FOOD_EATING_SPEED_MODIFIER = ModConfig.FOOD_EATING_SPEED_MODIFIER_DEFAULT;
    public static int FOOD_EATING_DURATION_MAX = ModConfig.FOOD_EATING_DURATION_MAX_DEFAULT;
    public static String FOOD_MODIFIER_FORMULA = ModConfig.FOOD_MODIFIER_FORMULA_STRING_DEFAULT;
    public static boolean GIVE_FOOD_JOURNAL_ON_START = ModConfig.GIVE_FOOD_JOURNAL_ON_START_DEFAULT;

    /*
     * ITEMS
     */
    public static int FOOD_CONTAINERS_MAX_STACKSIZE = ModConfig.FOOD_CONTAINERS_MAX_STACKSIZE_DEFAULT;
    public static boolean LEFT_CLICK_OPENS_FOOD_CONTAINERS = ModConfig.LEFT_CLICK_OPENS_FOOD_CONTAINERS_DEFAULT;

    private static Configuration config;

    // Singleton; use ModConfig.instance.
    protected ModConfig() {
    }

    /**
     * Loads (or creates) the config file, pulls every option into the static
     * fields above, migrates obsolete entries, and saves the result back.
     */
    public static void init(File file) {
        config = new Configuration(file);

        load();

        /*
         * MAIN
         */
        config.getCategory(CATEGORY_MAIN).setComment(CATEGORY_MAIN_COMMENT);

        FOOD_MODIFIER_ENABLED_CONFIG_VAL = config.get(CATEGORY_MAIN, FOOD_MODIFIER_ENABLED_NAME, FOOD_MODIFIER_ENABLED_DEFAULT, FOOD_MODIFIER_ENABLED_COMMENT).getBoolean(FOOD_MODIFIER_ENABLED_DEFAULT);
        // only use the config value immediately when server-side; the client assumes false until the server syncs the config
        if (FMLCommonHandler.instance().getSide() == Side.SERVER)
            FOOD_MODIFIER_ENABLED = FOOD_MODIFIER_ENABLED_CONFIG_VAL;

        /*
         * DEV
         */
        config.getCategory(CATEGORY_DEV).setComment(CATEGORY_DEV_COMMENT);
        DEV_LOGGING_ENABLED = config.get(CATEGORY_DEV, DEV_LOGGING_ENABLED_NAME, DEV_LOGGING_ENABLED_DEFAULT, DEV_LOGGING_ENABLED_COMMENT).getBoolean(DEV_LOGGING_ENABLED_DEFAULT);

        /*
         * SERVER
         */
        config.getCategory(CATEGORY_SERVER).setComment(CATEGORY_SERVER_COMMENT);

        Property FOOD_MODIFIER_PROPERTY = config.get(CATEGORY_SERVER, FOOD_MODIFIER_FORMULA_STRING_NAME, FOOD_MODIFIER_FORMULA_STRING_DEFAULT, FOOD_MODIFIER_FORMULA_STRING_COMMENT);
        // enforce the new default if the config has the old default
        if (FOOD_MODIFIER_PROPERTY.getString().equals("MAX(0, (1 - count/12))^MAX(0, food_hunger_value-ROUND(MAX(0, 1 - count/12), 0))"))
            FOOD_MODIFIER_PROPERTY.set(FOOD_MODIFIER_FORMULA_STRING_DEFAULT);
        FOOD_MODIFIER_FORMULA = FOOD_MODIFIER_PROPERTY.getString();

        FOOD_HISTORY_LENGTH = config.get(CATEGORY_SERVER, FOOD_HISTORY_LENGTH_NAME, FOOD_HISTORY_LENGTH_DEFAULT, FOOD_HISTORY_LENGTH_COMMENT).getInt(FOOD_HISTORY_LENGTH_DEFAULT);
        FOOD_HISTORY_PERSISTS_THROUGH_DEATH = config.get(CATEGORY_SERVER, FOOD_HISTORY_PERSISTS_THROUGH_DEATH_NAME, FOOD_HISTORY_PERSISTS_THROUGH_DEATH_DEFAULT, FOOD_HISTORY_PERSISTS_THROUGH_DEATH_COMMENT).getBoolean(FOOD_HISTORY_PERSISTS_THROUGH_DEATH_DEFAULT);
        FOOD_EATEN_THRESHOLD = config.get(CATEGORY_SERVER, FOOD_EATEN_THRESHOLD_NAME, FOOD_EATEN_THRESHOLD_DEFAULT, FOOD_EATEN_THRESHOLD_COMMENT).getInt(FOOD_EATEN_THRESHOLD_DEFAULT);
        USE_FOOD_GROUPS_AS_WHITELISTS = config.get(CATEGORY_SERVER, USE_FOOD_GROUPS_AS_WHITELISTS_NAME, USE_FOOD_GROUPS_AS_WHITELISTS_DEFAULT, USE_FOOD_GROUPS_AS_WHITELISTS_COMMENT).getBoolean(USE_FOOD_GROUPS_AS_WHITELISTS_DEFAULT);
        AFFECT_FOOD_HUNGER_VALUES = config.get(CATEGORY_SERVER, AFFECT_FOOD_HUNGER_VALUES_NAME, AFFECT_FOOD_HUNGER_VALUES_DEFAULT, AFFECT_FOOD_HUNGER_VALUES_COMMENT).getBoolean(AFFECT_FOOD_HUNGER_VALUES_DEFAULT);
        AFFECT_NEGATIVE_FOOD_HUNGER_VALUES = config.get(CATEGORY_SERVER, AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_NAME, AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_DEFAULT, AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_COMMENT).getBoolean(AFFECT_NEGATIVE_FOOD_HUNGER_VALUES_DEFAULT);
        AFFECT_FOOD_SATURATION_MODIFIERS = config.get(CATEGORY_SERVER, AFFECT_FOOD_SATURATION_MODIFIERS_NAME, AFFECT_FOOD_SATURATION_MODIFIERS_DEFAULT, AFFECT_FOOD_SATURATION_MODIFIERS_COMMENT).getBoolean(AFFECT_FOOD_SATURATION_MODIFIERS_DEFAULT);
        AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS = config.get(CATEGORY_SERVER, AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_NAME, AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_DEFAULT, AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_COMMENT).getBoolean(AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS_DEFAULT);
        FOOD_EATING_SPEED_MODIFIER = (float) config.get(CATEGORY_SERVER, FOOD_EATING_SPEED_MODIFIER_NAME, FOOD_EATING_SPEED_MODIFIER_DEFAULT, FOOD_EATING_SPEED_MODIFIER_COMMENT).getDouble(FOOD_EATING_SPEED_MODIFIER_DEFAULT);
        FOOD_EATING_DURATION_MAX = config.get(CATEGORY_SERVER, FOOD_EATING_DURATION_MAX_NAME, FOOD_EATING_DURATION_MAX_DEFAULT, FOOD_EATING_DURATION_MAX_COMMENT).getInt(FOOD_EATING_DURATION_MAX_DEFAULT);
        GIVE_FOOD_JOURNAL_ON_START = config.get(CATEGORY_SERVER, GIVE_FOOD_JOURNAL_ON_START_NAME, GIVE_FOOD_JOURNAL_ON_START_DEFAULT, GIVE_FOOD_JOURNAL_ON_START_COMMENT).getBoolean(GIVE_FOOD_JOURNAL_ON_START_DEFAULT);
        FOOD_CONTAINERS_MAX_STACKSIZE = config.get(CATEGORY_SERVER, FOOD_CONTAINERS_MAX_STACKSIZE_NAME, FOOD_CONTAINERS_MAX_STACKSIZE_DEFAULT, FOOD_CONTAINERS_MAX_STACKSIZE_COMMENT).getInt(FOOD_CONTAINERS_MAX_STACKSIZE_DEFAULT);
        FOOD_HUNGER_ROUNDING_MODE_STRING = config.get(CATEGORY_SERVER, FOOD_HUNGER_ROUNDING_MODE_NAME, FOOD_HUNGER_ROUNDING_MODE_DEFAULT, FOOD_HUNGER_ROUNDING_MODE_COMMENT).getString();
        setRoundingMode();

        /*
         * CLIENT
         */
        config.getCategory(CATEGORY_CLIENT).setComment(CATEGORY_CLIENT_COMMENT);
        LEFT_CLICK_OPENS_FOOD_CONTAINERS = config.get(CATEGORY_CLIENT, LEFT_CLICK_OPENS_FOOD_CONTAINERS_NAME, LEFT_CLICK_OPENS_FOOD_CONTAINERS_DEFAULT, LEFT_CLICK_OPENS_FOOD_CONTAINERS_COMMENT).getBoolean(LEFT_CLICK_OPENS_FOOD_CONTAINERS_DEFAULT);

        /*
         * FOOD GROUPS
         */
        config.getCategory(CATEGORY_FOODGROUPS).setComment(CATEGORY_FOODGROUPS_COMMENT);
        FoodGroupConfig.setup(file.getParentFile());

        // remove obsolete config options
        config.getCategory(CATEGORY_SERVER).remove("use.food.groups");
        config.getCategory(CATEGORY_FOODGROUPS).clear();

        save();
    }

    /*
     * OBSOLETED
     */
    // nothing here

    /** (Re)reads the backing Forge config file from disk. */
    public static void load() {
        config.load();
    }

    /**
     * Resolves FOOD_HUNGER_ROUNDING_MODE from its config string
     * (case-insensitive); unknown values fall back to 'round' with a warning.
     */
    public static void setRoundingMode() {
        for (RoundingMode roundingMode : RoundingMode.values()) {
            if (roundingMode.id.equals(FOOD_HUNGER_ROUNDING_MODE_STRING.toLowerCase(Locale.ROOT))) {
                FOOD_HUNGER_ROUNDING_MODE = roundingMode;
                break;
            }
        }
        if (FOOD_HUNGER_ROUNDING_MODE == null) {
            ModSpiceOfLife.Log.warn("Rounding mode '" + FOOD_HUNGER_ROUNDING_MODE_STRING + "' not recognized; defaulting to 'round'");
            FOOD_HUNGER_ROUNDING_MODE_STRING = "round";
            FOOD_HUNGER_ROUNDING_MODE = RoundingMode.ROUND;
        }
    }

    /** Writes the current config back to disk. */
    public static void save() {
        config.save();
    }

    /** Sends the server-side config values to the given player. */
    public static void sync(EntityPlayerMP player) {
        PacketDispatcher.get().sendTo(new PacketConfigSync(), player);
    }

    @SideOnly(Side.CLIENT)
    public static void assumeClientOnly() {
        // assume false until the server syncs
        FOOD_MODIFIER_ENABLED = false;
    }

    /**
     * Serializes the synced config values. Field order here MUST match
     * {@link #unpack(IByteIO)}; the detailed options are only written when the
     * modifier is enabled.
     */
    @Override
    public void pack(IByteIO data) {
        data.writeBoolean(FOOD_MODIFIER_ENABLED_CONFIG_VAL);
        if (FOOD_MODIFIER_ENABLED_CONFIG_VAL) {
            data.writeUTF(FOOD_MODIFIER_FORMULA);
            data.writeShort(FOOD_HISTORY_LENGTH);
            data.writeBoolean(FOOD_HISTORY_PERSISTS_THROUGH_DEATH);
            data.writeInt(FOOD_EATEN_THRESHOLD);
            data.writeBoolean(USE_FOOD_GROUPS_AS_WHITELISTS);
            data.writeBoolean(AFFECT_FOOD_SATURATION_MODIFIERS);
            data.writeBoolean(AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS);
            data.writeFloat(FOOD_EATING_SPEED_MODIFIER);
            data.writeInt(FOOD_EATING_DURATION_MAX);
            data.writeUTF(FOOD_HUNGER_ROUNDING_MODE_STRING);
        }
        data.writeInt(FOOD_CONTAINERS_MAX_STACKSIZE);
    }

    /** Deserializes the values written by {@link #pack(IByteIO)}, same order. */
    @Override
    public void unpack(IByteIO data) {
        FOOD_MODIFIER_ENABLED = data.readBoolean();
        if (FOOD_MODIFIER_ENABLED) {
            FOOD_MODIFIER_FORMULA = data.readUTF();
            FOOD_HISTORY_LENGTH = data.readShort();
            FOOD_HISTORY_PERSISTS_THROUGH_DEATH = data.readBoolean();
            FOOD_EATEN_THRESHOLD = data.readInt();
            USE_FOOD_GROUPS_AS_WHITELISTS = data.readBoolean();
            AFFECT_FOOD_SATURATION_MODIFIERS = data.readBoolean();
            AFFECT_NEGATIVE_FOOD_SATURATION_MODIFIERS = data.readBoolean();
            FOOD_EATING_SPEED_MODIFIER = data.readFloat();
            FOOD_EATING_DURATION_MAX = data.readInt();
            FOOD_HUNGER_ROUNDING_MODE_STRING = data.readUTF();
        }
        FOOD_CONTAINERS_MAX_STACKSIZE = data.readInt();
    }

    /**
     * Runs on the receiving side after a config sync: refreshes derived state
     * (rounding mode, formula, history, food groups). Returns no reply packet.
     */
    @Override
    public PacketBase processAndReply(Side side, EntityPlayer player) {
        if (FOOD_MODIFIER_ENABLED) {
            setRoundingMode();
            FoodModifier.onGlobalFormulaChanged();
            FoodHistory.get(player).onHistoryTypeChanged();
            FoodGroupRegistry.clear();
        }
        return null;
    }

    /** Rounding strategies selectable via food.hunger.rounding.mode. */
    public enum RoundingMode {
        ROUND("round") {
            @Override
            public double round(double val) {
                return Math.round(val);
            }
        },
        FLOOR("floor") {
            @Override
            public double round(double val) {
                return Math.floor(val);
            }
        },
        CEILING("ceiling") {
            @Override
            public double round(double val) {
                return Math.ceil(val);
            }
        };

        // Lowercase identifier as written in the config file.
        public final String id;

        RoundingMode(String id) {
            this.id = id;
        }

        public abstract double round(double val);
    }
}
|
#!/bin/bash
# Wrapper that runs the `play` binary with a fixed rack and board.
set -eo pipefail

# Absolute directory containing this script (for locating tiles.txt).
SCRIPT_DIR=$(cd "$(dirname "$0")"; pwd)
# First argument: project root holding the boards/ directory.
PROJECT_DIR=$1
shift
# Any remaining arguments are prepended to the command line (e.g. a wrapper
# such as a debugger or profiler); with none given, play runs directly.
"$@" ./src/play/play \
    VUYAOCS \
    "${SCRIPT_DIR}/tiles.txt" \
    "${PROJECT_DIR}/boards/wwf_regular.txt"
|
#!/vendor/bin/sh
# Android vendor init script preamble: option parsing and shared paths for the
# hardware-config / touch handling logic defined further down the file.
BASEDIR=vendor
PATH=/sbin:/$BASEDIR/sbin:/$BASEDIR/bin:/$BASEDIR/xbin
export PATH

# Option flags:
#   -d  enable debug logging      -p  populate UTAGs only
#   -f  handle dead touch         -r  reset touch
#   -M  second-stage mount mode
while getopts dpfrM op;
do
	case $op in
		d) dbg_on=1;;
		p) populate_only=1;;
		f) dead_touch=1;;
		r) reset_touch=1;;
		M) mount_2nd_stage=1;;
	esac
done
shift $(($OPTIND-1))

# Basename of this script (used in log tags elsewhere in the file).
scriptname=${0##*/}

# procfs mount points for hardware and config UTAG data.
hw_mp=/proc/hw
config_mp=/proc/config
reboot_utag=$config_mp/.reboot
# System property reporting touch status.
touch_status_prop=vendor.hw.touch.status
hw_cfg_file=hw_config.xml
vhw_file=/$BASEDIR/etc/vhw.xml
bp_file=/system/build.prop
oem_file=/oem/oem.prop

# Reload state codes for the hw config procedure.
load_error=3
need_to_reload=2
reload_in_progress=1
reload_done=0

ver_utag=".version"
version_fs="unknown"
xml_version="unknown"
device_params=""
xml_file=""
utag_update_fail="false"

# Properties consumed by super_image_detection below.
modem_ver_prop=ro.vendor.hw.modem_version
policy_prop=ro.vendor.super_image_policy
super_image_detection()
{
# For each firmware subsystem (modem, fsg) decide which image file should be
# mounted, preferring a version-specific file inside a "super" image.
# Exports ro.vendor.hw.{modem,fsg}_super_image / _mount_file on success, or
# ro.vendor.hw.super_image_failure=yes when the "enforce" policy cannot be met.
local subsys
local file2mount
local basefile
local version
# NOTE(review): 'extention' is assigned below but never used — candidate for removal.
local extention
local image_dir
local is_super_image
local super_image_prop
local file_mount_prop
local modem_version=$(getprop $modem_ver_prop)
local policy=$(getprop $policy_prop)
debug "'$policy_prop' is '$policy'"
for subsys in modem fsg; do
debug "Processing [${subsys}]..."
is_super_image=""
case ${subsys} in
modem) image_dir=/vendor/firmware_mnt;;
fsg) image_dir=/vendor/fsg;;
esac
# A 'super_modem' marker file identifies a super image partition.
[ -f $image_dir/super_modem ] && is_super_image="true"
debug "super image '$is_super_image'"
if [ "$is_super_image" == "true" ]; then
file2mount=""
case ${subsys} in
modem) super_image_prop="ro.vendor.hw.modem_super_image"
file_mount_prop="ro.vendor.hw.modem_mount_file"
basefile="NON-HLOS.bin"
extention=".bin"
# NOTE(review): printf places $modem_version between base name and
# "bin", so the version string appears to carry its own separators
# (e.g. ".xyz.") — confirm the expected format.
[ "$modem_version" ] && file2mount=$(printf "NON-HLOS%sbin" $modem_version)
;;
fsg) super_image_prop="ro.vendor.hw.fsg_super_image"
file_mount_prop="ro.vendor.hw.fsg_mount_file"
basefile="fsg.mbn"
extention=".mbn"
[ "$modem_version" ] && file2mount=$(printf "fsg%smbn" $modem_version)
;;
esac
if [ -z "$file2mount" ]; then
notice "'$modem_ver_prop' not set, but [$subsys] is super image!"
else
# modem_version matches existing file in super image
if [ -f $image_dir/$file2mount ]; then
notice "[$subsys] is super image. '$file2mount' will be mounted"
setprop $file_mount_prop $file2mount
setprop $super_image_prop yes
continue
fi
notice "[$subsys] is super image. '$file2mount' not found"
fi
else
notice "[$subsys] non-super image"
fi
# check super image policy
if [ "$policy" == "enforce" ]; then
notice "[$subsys] strict super image policy! Rebooting to recovery..."
debug "'ro.vendor.hw.super_image_failure' -> 'yes'"
setprop ro.vendor.hw.super_image_failure yes
return
fi
notice "[$subsys] super image policy not enforced"
# proceed with non-super image if policy allows
if [ -z "$is_super_image" ]; then
notice "[$subsys] proceed with non-super image!"
continue
fi
# retrieve default version if available
# (the marker file's content, when non-empty, overrides the default name)
version=$(cat $image_dir/super_modem)
if [ "$version" ]; then
basefile=$version
notice "default file override '$basefile'"
else
notice "use default file: '$basefile' instead of '$file2mount'"
fi
notice "Searching for '$basefile' in $image_dir..."
debug "checking file '$image_dir/$basefile'"
if [ -f $image_dir/$basefile ]; then
notice "[$subsys] is super image! '$basefile' will be mounted"
debug "'$file_mount_prop' -> '$basefile'"
setprop $file_mount_prop $basefile
debug "'$super_image_prop' -> 'yes'"
setprop $super_image_prop yes
continue
fi
# set to fail
notice "Unable to mount '$basefile'! Rebooting to recovery..."
debug "'ro.vendor.hw.super_image_failure' -> 'yes'"
setprop ro.vendor.hw.super_image_failure yes
return
done
}
set_reboot_counter()
{
# Writes the touch reboot counter UTAG. $1 == 0 resets the counter;
# any positive $1 increments the currently stored value by one.
local value=$1
local reboot_cnt=0
# mount point is the UTAG path with the trailing ".reboot" stripped
local reboot_mp=${reboot_utag%.*}
local tag_name=${reboot_utag##*/}
if [ $((value)) -gt 0 ]; then
notice "increase reboot counter"
# read the existing count only if the UTAG already exists
[ -d $reboot_utag ] && reboot_cnt=$(cat $reboot_utag/ascii)
value=$(($reboot_cnt + 1))
fi
# create the UTAG on first use
if [ ! -d $reboot_utag ]; then
echo ${reboot_utag##*/} > $reboot_mp/all/new
[ $? != 0 ] && notice "error creating UTAG $tag_name"
fi
echo "$value" > $reboot_utag/ascii
[ $? != 0 ] && notice "error updating UTAG $tag_name"
notice "UTAG $tag_name is [`cat $reboot_utag/ascii`]"
}
set_reboot_counter_property()
{
# Mirrors the reboot-counter UTAG into $touch_status_prop (defaults to 0
# when the UTAG does not exist).
local reboot_cnt=0
local tag_name=${reboot_utag##*/}
if [ -d $reboot_utag ]; then
reboot_cnt=$(cat $reboot_utag/ascii)
notice "UTAG $tag_name has value [$reboot_cnt]"
else
notice "UTAG $tag_name does not exist"
fi
setprop $touch_status_prop $reboot_cnt
notice "property [$touch_status_prop] is set to [`getprop $touch_status_prop`]"
}
debug()
{
# Prints to stdout only when -d was given; otherwise a silent no-op.
# (Returns non-zero when debug is off — callers never check the status.)
[ $dbg_on ] && echo "Debug: $*"
}
notice()
{
# Logs to stdout and to the kernel log (visible in dmesg) with a script tag.
echo "$*"
echo "$scriptname: $*" > /dev/kmsg
}
add_device_params()
{
# Appends the given token(s) to the persistent XML-parser selector list.
device_params=$device_params" $@"
debug "add_device_params='$device_params'"
}
drop_device_parameter()
{
# Removes the most recently appended selector token (everything after the
# last space is stripped).
device_params=${device_params% *}
debug "drop_device_parameter='$device_params'"
}
set_xml_file()
{
# Selects the XML file that subsequent exec_parser calls operate on.
xml_file=$@
debug "working with XML file='$xml_file'"
}
exec_parser()
{
# Runs the expat-based XML parser over $xml_file with the accumulated
# $device_params selectors plus any extra args; stderr is discarded.
eval motobox expat -u -f $xml_file $device_params "$@" 2>/dev/null
}
reload_utags()
{
# Triggers a UTAG reload on the given procfs mount point and blocks until
# the driver reports it is no longer in progress.
local mp=$1
local value
echo "1" > $mp/reload
value=$(cat $mp/reload)
while [ "$value" == "$reload_in_progress" ]; do
notice "waiting for loading to complete"
sleep 1;
value=$(cat $mp/reload)
notice "'$mp' current status [$value]"
done
}
procfs_wait_for_device()
{
# Waits (up to ~10s) for both UTAG procfs mount points to appear, forces a
# reload where needed, then waits for the reloads to finish. Stores the
# final reload status into the variable named by $1 ("" on timeout,
# "$reload_done" on success).
local __result=$1
local status
local mpi
local IFS=' '
local device_timeout_count=0
while [ ! -f $hw_mp/reload ] || [ ! -f $config_mp/reload ]; do
notice "waiting for devices"
sleep 1;
if [ "$device_timeout_count" -eq "10" ];then
notice "waiting for devices timeout"
eval $__result=""
return
fi
device_timeout_count=$(($device_timeout_count + 1))
done
# first pass: kick off reloads where the driver asks for one
for mpi in $hw_mp $config_mp; do
status=$(cat $mpi/reload)
notice "mount point '$mpi' status [$status]"
if [ "$status" == "$need_to_reload" ]; then
notice "force $mpi reloading"
reload_utags $mpi
fi
done
# second pass: block until both mount points report reload_done
for mpi in $hw_mp $config_mp; do
status=$(cat $mpi/reload)
notice "$mpi reload is [$status]"
while [ "$status" != "$reload_done" ]; do
notice "waiting for loading to complete"
sleep 1;
status=$(cat $mpi/reload)
done
done
eval $__result=$status
}
get_attr_data_by_name()
{
# Scans space-separated "attr=value" words ($3...) for the attribute named
# $2 and stores its value into the variable named by $1 ("" if absent).
local __result=$1
local attr=$2
shift 2
local IFS=' '
eval $__result=""
for arg in ${@}; do
[ "${arg%=*}" == "$attr" ] || continue
debug "attr_data='${arg#*=}'"
eval $__result="${arg#*=}"
break
done
}
get_tag_data()
{
# Splits a parser output line ($3...) into a tag name and its value,
# storing them into the variables named by $1 and $2 respectively.
# Words look like "name=<tag>" or "<x><data>"; the literal keywords
# "string-array" / "string" are skipped.
local __name=$1
local __value=$2
shift 2
local dataval
local IFS=' '
eval $__name=""
eval $__value=""
for arg in ${@}; do
case $arg in
string-array | string)
debug "---/ skip keyword: '$arg'"
continue;;
esac
debug "---> arg='$arg'"
if [ "${arg%=*}" == "name" ]; then
eval $__name=${arg#*=}
continue
fi
# eval treats ';' as a separator, thus make it '\;'
# (the leading character of the word is dropped before un-escaping —
# NOTE(review): presumably a type-marker prefix; confirm parser format)
dataval=$(echo ${arg#?} | sed 's/;/\\;/g')
debug "<--- dataval='$dataval'"
eval $__value=$dataval
done
}
update_utag()
{
# Writes $2 into the hw UTAG named $1, creating the UTAG if needed, then
# reads it back to verify. Sets the global utag_update_fail flag on write
# failure (triggers the property-export fallback at the end of the script).
local utag=$1
local payload=$2
local verify
local rc
if [ ! -d $hw_mp/$utag ]; then
notice "creating utag '$utag'"
echo $utag > $hw_mp/all/new
rc=$?
[ "$rc" != "0" ] && notice "'$utag' create dir failed rc=$rc"
fi
debug "writing '$payload' to '$hw_mp/$utag/ascii'"
echo "$payload" > $hw_mp/$utag/ascii
rc=$?
if [ "$rc" != "0" ]; then
utag_update_fail="true"
notice "'$utag' write file failed rc=$rc"
fi
# read-back verification; mismatch is logged but not treated as fatal
verify=$(cat $hw_mp/$utag/ascii)
debug "read '$verify' from '$hw_mp/$utag/ascii'"
[ "$verify" != "$payload" ] && notice "'$utag' payload validation failed"
}
populate_utags()
{
# Parses the XML section selected by "$@" and writes every tag/value pair
# it yields into the hw UTAG procfs via update_utag.
local selection="$@"
local pline
local ptag
local pvalue
for pline in $(exec_parser $selection); do
get_tag_data ptag pvalue $pline
debug "tag='$ptag' value='$pvalue'"
update_utag $ptag $pvalue
done
}
set_ro_hw_properties_upgrade()
{
	# Fallback exporter used when a UTAG write failed (utag_update_fail=true):
	# every UTAG carrying a '.system' marker is exported under the fixed
	# "ro.vendor.hw." prefix, ignoring the prefix stored in the marker file
	# (unlike set_ro_hw_properties, which honours it).
	local utag_path
	local utag_name
	local prop_prefix
	local utag_value
	for hwtag in $(find $hw_mp -name '.system'); do
		debug "path $hwtag has '.system' in its name"
		prop_prefix="ro.vendor.hw."
		# UTAG directory is the parent of the '.system' marker
		utag_path=${hwtag%/*}
		utag_name=${utag_path##*/}
		utag_value=$(cat $utag_path/ascii)
		setprop $prop_prefix$utag_name "$utag_value"
		notice "ro.vendor.hw.$utag_name='$utag_value'"
	done
}
set_ro_hw_properties()
{
# Exports every UTAG carrying a '.system' marker as a system property.
# The marker file's content supplies the property prefix (a trailing '.'
# is appended if missing); the UTAG's own value becomes the property value.
local utag_path
local utag_name
local prop_prefix
local utag_value
local verify
for hwtag in $(find $hw_mp -name '.system'); do
debug "path $hwtag has '.system' in its name"
prop_prefix=$(cat $hwtag/ascii)
verify=${prop_prefix%.}
# ensure property ends with '.'
if [ "$prop_prefix" == "$verify" ]; then
prop_prefix="$prop_prefix."
debug "added '.' at the end of [$prop_prefix]"
fi
# UTAG directory is the parent of the '.system' marker
utag_path=${hwtag%/*}
utag_name=${utag_path##*/}
utag_value=$(cat $utag_path/ascii)
setprop $prop_prefix$utag_name "$utag_value"
notice "$prop_prefix$utag_name='$utag_value'"
done
}
set_ro_vendor_incremental()
{
	# Mirrors ro.build.version.incremental into the vendor namespace.
	# Prefers the live property; falls back to parsing build.prop/oem.prop
	# when the property is not (yet) set.
	local vendor_incremental="ro.vendor.build.version.incremental"
	local vendor_incremental_value
	local fetch_prop="ro.build.version.incremental"
	# BUGFIX: the fetched value was previously stored in a separate variable
	# and never copied into vendor_incremental_value, so a non-empty getprop
	# result caused the vendor property to be set to an empty string.
	vendor_incremental_value=$(getprop $fetch_prop)
	[ -z "$vendor_incremental_value" ] && prefetch_from_file $fetch_prop vendor_incremental_value
	setprop $vendor_incremental "$vendor_incremental_value"
	notice "$vendor_incremental='$vendor_incremental_value'"
}
smart_value()
{
# Resolves $1 from one of three sources, chosen by its spelling, and stores
# the result into the variable named by $2:
#   starts with '/'  -> file contents
#   contains '.'     -> system property (getprop)
#   otherwise        -> hw UTAG ascii value
local mtag=$1
local __result=$2
local value
eval $__result=""
local tmp=${mtag#?}
# absolute path to the file starts with '/'
if [ "${mtag%$tmp}" == "/" ]; then
value=$(cat $mtag)
# property likely to have '.'
elif [ "$mtag" != "${mtag%.*}" ]; then
value=$(getprop $mtag)
else # tag otherwise
value=$(cat $hw_mp/$mtag/ascii)
fi
eval $__result='$value'
}
url_style_off()
{
	# Decodes the minimal set of URL escapes used in XML attribute data
	# (%20 -> space, %28 -> '(', %29 -> ')') and stores the result into the
	# variable named by $1.
	local __dest=$1
	local decoded=$2
	# Only fork sed when an escape marker is actually present.
	if [[ $decoded == *%* ]]; then
		decoded=$(echo $decoded | sed 's/%20/ /g; s/%28/(/g; s/%29/)/g')
	fi
	eval $__dest='$decoded'
}
match()
{
# Evaluates the match criteria named by $1: every tag/value pair produced
# by the parser for that mapping must equal the live value resolved via
# smart_value. Returns 0 when all pairs match, 1 otherwise.
local mapping
local mline
local mtag
local fs_value
local mvalue
local matched
url_style_off mapping $1
debug "match mapping='$mapping'"
# put '\"' around $mapping to ensure XML
# parser takes it as a single argument
for mline in $(exec_parser \"$mapping\"); do
get_tag_data mtag mvalue $mline
url_style_off mvalue $mvalue
# obtain value based on data source: utag, property or file
smart_value $mtag fs_value
if [ "$fs_value" == "$mvalue" ]; then
matched="true";
else
matched="false";
fi
debug "cmp utag='$mtag' values '$mvalue' & '$fs_value' is \"$matched\""
# first mismatch short-circuits the whole mapping
[ "$matched" == "false" ] && break
done
[ "$matched" == "true" ] && return 0
return 1
}
find_match()
{
# Walks every subsection of the current parser selection, testing each
# named entry with match(); the first matching entry's name is stored into
# the variable named by $1 ("" when nothing matches).
local __retval=$1
local tag_name
local fline
local line
local subsection
local matched="false"
eval $__retval=""
for fline in $(exec_parser); do
subsection=${fline%% *}
# temporarily descend into the subsection
add_device_params $subsection
for line in $(exec_parser); do
get_attr_data_by_name tag_name "name" $line
debug "tag_name='$tag_name'"
match $tag_name
[ "$?" != "0" ] && continue
eval $__retval=$tag_name
matched="true"
break
done
# pop the subsection selector before moving on
drop_device_parameter
[ "$matched" == "true" ] && break
done
}
prefetch_from_file()
{
# Reads property $1 directly from build.prop, with /oem/oem.prop taking
# precedence when it defines the same property; the result is stored into
# the variable named by $2. Blank, comment and import lines are ignored.
local pname=$1
local __result=$2
local value
local override
eval $__result=""
# NOTE(review): the filter matches "$pname=" anywhere in a line and takes
# the text after the LAST '=', so a property whose name is a suffix of
# another could also match — acceptable for the props queried here.
value=$(cat $bp_file 2>/dev/null | sed '/^$/d' | sed '/^#/d' | sed '/^import/d' | sed -n "/$pname=/p" | sed 's/.*=//')
debug "'$pname' from '$bp_file': '$value'"
if [ -f $oem_file ]; then
override=$(cat $oem_file 2>/dev/null | sed '/^$/d' | sed '/^#/d' | sed '/^import/d' | sed -n "/$pname=/p" | sed 's/.*=//')
[ "$override" ] && value=$override && debug "'$pname' from '$oem_file': '$value'"
fi
eval $__result=$value
}
append_match()
{
	# For each comma-separated vendor property in $1, fetches the matching
	# non-vendor property's value (live, or from build.prop/oem.prop) and sets
	# the vendor property to that value with suffix $2 appended.
	# Example: ro.vendor.product.name <- getprop(ro.product.name) + suffix.
	local prop_list=$1
	local suffix="$2"
	local dest_prop
	# BUGFIX: was declared as 'fetched_prop' (never used) while the loop used
	# an undeclared 'fetch_prop', leaking it into the global scope.
	local fetch_prop
	local prop_value
	local IFS=','
	# properties list to put the result of appending hw suffix to
	# example: appended="ro.vendor.product.name,ro.vendor.product.device"
	for dest_prop in $prop_list; do
		fetch_prop=${dest_prop}
		# only alter property name that has "vendor" in it
		if [ "${fetch_prop//.vendor}" != "$dest_prop" ]; then
			fetch_prop=${fetch_prop//.vendor}
			prop_value=$(getprop $fetch_prop)
			[ -z "$prop_value" ] && prefetch_from_file $fetch_prop prop_value
			# finally set destination property to appended value
			setprop $dest_prop "$prop_value$suffix"
			notice "$dest_prop='$prop_value$suffix'"
		fi
	done
}
process_mappings()
{
# Processes each <mapping> subsection: finds the matching entry via
# find_match, then exports it to a property ('export'), applies a default
# ('default'), appends it to other properties ('append') and/or writes it
# back into a UTAG ('writeback'). Recurses once for unnamed subsections
# (e.g. quirks).
local pname=""
local pexport=""
local pdefault=""
local pappend=""
local putag=""
local subsection
local pline
local matched_val
local whitespace_val
local export_val
local utag_val
for pline in $(exec_parser); do
subsection=${pline%% *}
debug "subsection is '$subsection'"
get_attr_data_by_name pname "name" $pline
get_attr_data_by_name pexport "export" $pline
get_attr_data_by_name pdefault "default" $pline
get_attr_data_by_name pappend "append" $pline
get_attr_data_by_name putag "writeback" $pline
[ "$pname" ] && url_style_off pname $pname && debug "name='$pname'"
[ "$pexport" ] && url_style_off pexport $pexport && debug "export='$pexport'"
[ "$pdefault" ] && url_style_off pdefault $pdefault && debug "default='$pdefault'"
[ "$pappend" ] && url_style_off pappend $pappend && debug "append='$pappend'"
# add 'subsection' to permanent parameters
add_device_params $subsection
# call itself here to handle nonamed subsection, like quirks
[ -z "$pname" ] && [ -z "$pexport" ] && [ -z "$pdefault" ] && [ -z "$pappend" ] && [ -z "$putag" ] && process_mappings && continue
find_match matched_val
[ "$matched_val" ] && url_style_off matched_val $matched_val
# append_match handles OEM overrides, thus has to be called even with empty value
[ "$pappend" ] && append_match $pappend "$matched_val"
if [ "$matched_val" ]; then
if [ "$pexport" ]; then
setprop $pexport "$matched_val"
notice "exporting '$matched_val' into property $pexport"
fi
elif [ "$pexport" -a "$pdefault" ]; then
# if match is not found, proceed with default
setprop $pexport "$pdefault"
notice "defaulting '$pdefault' into property $pexport"
fi
if [ "$putag" ] && [ -d $hw_mp/$putag ]; then
export_val=$(getprop $pexport)
utag_val=$(cat $hw_mp/$putag/ascii)
debug "writeback compare $utag_val,$export_val"
# if property is empty value, clear the utag.
# if property and writeback utag are empty value, don't update utag
if [ "$export_val" -o "$utag_val" != "(null)" ] && [ "$utag_val" != "$export_val" ]; then
update_utag $putag $export_val
notice "writeback '$export_val' into utag $putag"
fi
fi
# remove the last added parameter
drop_device_parameter
done
}
# Main starts here
# NOTE(review): top-level 'return' statements below imply this script is
# sourced (or run by mksh, which treats return as exit) — confirm the caller.
IFS=$'\n'
# Single-purpose modes selected by flags; each returns immediately.
if [ ! -z "$mount_2nd_stage" ]; then
notice "Super image detection"
super_image_detection
return 0
fi
if [ ! -z "$reset_touch" ]; then
notice "reset reboot counter"
set_reboot_counter 0
return 0
fi
if [ ! -z "$dead_touch" ]; then
notice "property [$touch_status_prop] set to [dead]"
set_reboot_counter 1
return 0
fi
# Load the UTAG driver when shipped as a module.
if [ -f /vendor/lib/modules/utags.ko ]; then
notice "loading utag driver"
insmod /vendor/lib/modules/utags.ko
fi
notice "checking integrity"
# check necessary components exist and just proceed
# with RO properties setup otherwise
if [ ! -f /$BASEDIR/bin/expat ] || [ ! -f $vhw_file ]; then
notice "warning: missing expat or xml"
set_ro_hw_properties
return 0
fi
if [ ! -z "$populate_only" ]; then
# special handling for factory UTAGs provisioning
for path in /data/local/tmp /pds/factory; do
[ -f $path/$hw_cfg_file ] && break
done
notice "populating hw config from '$path/$hw_cfg_file'"
set_xml_file $path/$hw_cfg_file
populate_utags hardware
return 0
fi
notice "initializing procfs"
procfs_wait_for_device readiness
if [ "$readiness" != "0" ]; then
notice "no access to hw utags procfs"
return 1
fi
# populate touch status property with reboot counter
set_reboot_counter_property &
# XML parsing starts here
set_xml_file $vhw_file
# Determine which property identifies the boot device, then normalize it.
get_attr_data_by_name boot_device_prop "match" $(exec_parser)
debug "attr='get' value='$boot_device_prop'"
if [ -z $boot_device_prop ]; then
notice "fatal: undefined boot device property"
return 1
fi
# ensure lower case
typeset -l boot_device=$(getprop $boot_device_prop)
# drop suffixes
boot_device=${boot_device%[_-]*}
notice "matching to boot device '$boot_device'"
# add 'validation' to permanent parameters
add_device_params validation
# Look up this device's XML descriptor version.
for line in $(exec_parser); do
get_attr_data_by_name product "name" $line
debug "attr='name' value='$product'"
if [ "$product" == "$boot_device" ]; then
get_attr_data_by_name xml_version "version" $line
[ "$xml_version" != "unknown" ] && notice "device '$boot_device' xml version='$xml_version'"
break
fi
done
[ "$xml_version" == "unknown" ] && notice "no match found for device '$boot_device'"
# delete obsolete 'version' utag if exists
[ -d $hw_mp/${ver_utag#?} ] && $(echo ${ver_utag#?} > $hw_mp/all/delete)
# read procfs version
[ -d $hw_mp/$ver_utag ] && version_fs=$(cat $hw_mp/$ver_utag/ascii)
notice "procfs version='$version_fs'"
# add 'device' and '$boot_device' to permanent parameters
add_device_params device $boot_device
[ "$xml_version" == "$version_fs" ] && notice "hw descriptor is up to date"
# Mappings are always processed (async); other sections are repopulated
# only when the stored procfs version differs from the XML version.
for section in $(exec_parser); do
debug "section='$section'"
case $section in
mappings)
# add 'mappings' to permanent parameters
add_device_params $section
process_mappings &
;;
*)
[ "$xml_version" == "$version_fs" ] && continue
populate_utags $section;;
esac
done
if [ "$xml_version" != "$version_fs" ]; then
# create version utag if it's missing
[ ! -d $hw_mp/$ver_utag ] && $(echo "$ver_utag" > $hw_mp/all/new)
# update procfs version
[ -d $hw_mp/$ver_utag ] && $(echo "$xml_version" > $hw_mp/$ver_utag/ascii)
fi
set_ro_vendor_incremental &
set_ro_hw_properties
# Fallback export path when any UTAG write failed during this run.
if [ "$utag_update_fail" == "true" ]; then
set_ro_hw_properties_upgrade
fi
# Wait for the background jobs (reboot counter, mappings, incremental).
wait
notice "script init.oem.hw.sh finish "
return 0
|
<reponame>boost-entropy-golang/buildbuddy<gh_stars>1-10
import React from "react";
import rpcService from "../../../app/service/rpc_service";
import { invocation } from "../../../proto/invocation_ts_proto";
import format from "../../../app/format/format";
import { Code } from "lucide-react";
// Local state for the Code editor's empty-state screen.
interface State {
// Per-repo invocation aggregates returned by getInvocationStat.
repoStats: invocation.IInvocationStat[];
// True while the stats RPC is in flight.
loading: boolean;
}
// Well-known public repos shown when the user has no repo history of their own.
const RECOMMENDED_REPOS = [
"buildbuddy-io/buildbuddy",
"bazelbuild/bazel",
"bazelbuild/bazel-gazelle",
"abseil/abseil-cpp",
"tensorflow/tensorflow",
"cockroachdb/cockroach",
];
/**
 * Empty state for the Code editor: shows the user's own repos (from
 * invocation stats) plus a hard-coded list of recommended public repos.
 */
export default class CodeEmptyStateComponent extends React.Component<{}, State> {
  // Typed state generic added so setState calls are checked against State.
  state: State = {
    repoStats: [],
    loading: false,
  };

  componentDidMount() {
    // Fetch invocation stats aggregated by repo URL so we can preview the
    // repos this user/org has built.
    const request = new invocation.GetInvocationStatRequest({
      aggregationType: invocation.AggType.REPO_URL_AGGREGATION_TYPE,
    });
    // Track the in-flight request (previously `loading` was never set true,
    // making the .finally() reset a no-op).
    this.setState({ loading: true });
    rpcService.service
      .getInvocationStat(request)
      .then((response) => {
        // Drop aggregate rows with no repo name (e.g. builds without a repo URL).
        this.setState({ repoStats: response.invocationStat.filter((stat) => stat.name) });
      })
      .finally(() => this.setState({ loading: false }));
  }

  render() {
    return (
      <div className="">
        <div className="code-menu">
          <div className="code-menu-logo">
            <a href="/">
              <img alt="BuildBuddy Code" src="/image/logo_dark.svg" className="logo" /> Code{" "}
              <Code className="icon code-logo" />
            </a>
          </div>
        </div>
        <div className="repo-previews">
          {this.state.repoStats?.length > 0 && (
            <>
              <div className="repo-previews-title">Your Repos</div>
              <div className="repo-previews-section">
                {this.state.repoStats.map((repo) => this.renderRepo(format.formatGitUrl(repo.name)))}
              </div>
            </>
          )}
          <div className="repo-previews-title">Recommended Repos</div>
          <div className="repo-previews-section">{RECOMMENDED_REPOS.map(this.renderRepo)}</div>
        </div>
      </div>
    );
  }

  // Renders a single repo card linking into the editor. Safe to pass unbound
  // to .map() since it does not reference `this`.
  renderRepo(repo: string) {
    return (
      <div className="repo-preview">
        <a href={`/code/${repo}`}>
          <img
            src={`https://opengraph.githubassets.com/678d0aac73c1525b882f63e6a2978a53b80d99d1788ddb16863183a38a66f98a/${repo}`}
          />
        </a>
      </div>
    );
  }
}
|
<filename>workflow/app/omnifocus.rb
require 'appscript'
# Thin wrapper around the OmniFocus OS X scripting interface (via appscript).
class Omnifocus
  # Brings OmniFocus to the foreground unless it is already running.
  def activate_if_not_running!
    app.activate unless app.is_running?
  end

  # Returns the names of all projects in the default document as UTF-8
  # strings; completed projects are excluded unless without_completed is false.
  def projects(without_completed: true)
    found = doc.flattened_projects
    found = found[whose.completed.eq(false)] if without_completed
    found.name.get.map { |project_name| project_name.force_encoding("UTF-8") }
  end

  # Returns the names of all contexts in the default document as UTF-8 strings.
  def contexts
    doc.flattened_contexts.name.get.map { |context_name| context_name.force_encoding("UTF-8") }
  end

  # Creates task(s) in the default document from OmniFocus transport text.
  def create_task(task_string)
    app.parse_tasks_into(doc, with_transport_text: task_string)
  end

  private

  # The OmniFocus application object.
  def app
    Appscript.app("OmniFocus")
  end

  # Query builder ("its") for whose-clause filters.
  def whose
    Appscript.its
  end

  # The default OmniFocus document.
  def doc
    app.default_document
  end
end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.