text stringlengths 1 1.05M |
|---|
#!/usr/bin/env bash
# Bootstrap a macOS dev environment for OCR / browser automation:
# Xcode CLI tools, Homebrew, required binaries, RVM + Ruby, and the gems
# that wrap those binaries.
# BUG FIX: shebang was "#!usr/bash", which is not a valid interpreter path.

# A popup window should appear which says:
# The "xcode-select" command requires the
# command line developer tools. Would you like
# to install the tools now?
# Click "Install"
xcode-select --install
# Install Homebrew
# NOTE(review): the ruby-based installer is deprecated upstream; Homebrew now
# ships a bash install.sh -- confirm this URL still works before relying on it.
ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
# Install necessary binaries (OCR engine, image tooling, Selenium driver)
brew install tesseract
brew install imagemagick
brew install chromedriver
# Install RVM and get latest Ruby version
\curl -sSL https://get.rvm.io | bash -s stable --ruby
source /Users/$USER/.rvm/scripts/rvm
# Ruby bindings for the binaries installed above
gem install rtesseract
gem install rmagick
gem install selenium-webdriver
|
#!/usr/bin/env bash
# Copyright 2017 Hossein Hadian

# Builds a character-based pronunciation dictionary: each word is "spelled
# out" as the sequence of its characters (punctuation characters skipped).

phone_dir=data/local/dict_nosp
dir=data/local/dict_char
mkdir -p $dir
[ -f path.sh ] && . ./path.sh

# Simply transcribe each word with its comprising characters:
# We keep only one pronunciation for each word. Other alternative pronunciations are discarded.
# The perl one-liner keeps only the first line seen for each word (column 1).
cat $phone_dir/lexicon1_raw_nosil.txt | \
perl -e 'while(<>){@A = split; if(! $seen{$A[0]}) {$seen{$A[0]} = 1; print $_;}}' \
> $phone_dir/lexicon2_raw_nosil.txt || exit 1;

# Rewrite each entry as "<word> <c1> <c2> ...", skipping punctuation chars.
cat $phone_dir/lexicon2_raw_nosil.txt | python -c 'import sys
for l in sys.stdin:
    w = l.strip().split(" ")[0]
    r = w
    for c in w:
        if c not in "!~@#$%^&*()+=/\",;:?_{}-":
            r += " " + c
    print(r)
' > $dir/lexicon2_raw_nosil.txt || exit 1;

# Standard silence/noise phone lists.
(echo SIL; echo SPN; echo NSN) > $dir/silence_phones.txt
echo SIL > $dir/optional_silence.txt

# Prepend the special entries and build the final sorted, de-duplicated lexicon.
(echo '!SIL SIL'; echo '<SPOKEN_NOISE> SPN'; \
echo '<UNK> SPN'; echo '<NOISE> NSN'; ) | \
cat - $dir/lexicon2_raw_nosil.txt | sort | uniq > $dir/lexicon.txt || exit 1;

# Get the set of non-silence phones
cut -d' ' -f2- $dir/lexicon2_raw_nosil.txt | tr ' ' '\n' | \
sort -u > $dir/nonsilence_phones.txt

echo "Character-based dictionary preparation succeeded."
|
#!/usr/bin/env bash
# Download PoseC3D skeleton annotation files for a dataset and move them into
# the shared data directory.  Usage: $0 <dataset>   (only "gym" is supported)

DATASET=$1
if [ "$DATASET" == "gym" ]; then
  echo "We are processing $DATASET"
else
  echo "Bad Argument, we only support gym now."
  # BUG FIX: exit non-zero so callers can detect the unsupported argument
  # (previously this exited 0, signalling success).
  exit 1
fi

DATA_DIR="../../../data/posec3d/"
if [[ ! -d "${DATA_DIR}" ]]; then
  echo "${DATA_DIR} does not exist. Creating";
  mkdir -p "${DATA_DIR}"
fi

wget "https://download.openmmlab.com/mmaction/posec3d/${DATASET}_train.pkl"
wget "https://download.openmmlab.com/mmaction/posec3d/${DATASET}_val.pkl"
mv "${DATASET}_train.pkl" "${DATA_DIR}"
mv "${DATASET}_val.pkl" "${DATA_DIR}"
|
package com.bv.eidss;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.ListFragment;
import android.support.v7.widget.PopupMenu;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ListAdapter;
import com.bv.eidss.model.ASDisease;
import com.bv.eidss.model.ASSession;
import com.bv.eidss.model.interfaces.Constants;
import com.bv.eidss.model.interfaces.IGet;
import com.bv.eidss.model.interfaces.IToChange;
/**
 * List fragment showing the diseases recorded for an active-surveillance (AS)
 * session.  Supports creating, editing, removing and saving disease records,
 * plus an online/offline sync popup anchored to the Refresh toolbar item.
 * NOTE(review): assumes the hosting activity is an ASSessionActivity that
 * implements IGet -- see the casts in onAttach() and mCase().
 */
public class ASDiseasesFragment extends ListFragment
        implements IToChange {

    // Hosting activity; assigned in onAttach().
    private ASSessionActivity mActivity;

    public ASDiseasesFragment() {
        // Required empty public constructor
    }

    //IToChange
    // Dirty flag exposed through the IToChange interface.
    private boolean mToChange;
    public boolean ToChange(){return mToChange;}
    public void setToChange(boolean value){mToChange = value;}
    //End IToChange

    // True when the session may no longer be edited (set in onCreateView).
    private boolean mReadonly;
    public boolean Readonly(){return mReadonly;}
    public void setReadonly(boolean value){mReadonly = value;}

    public static ASDiseasesFragment newInstance() {
        return new ASDiseasesFragment();
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setHasOptionsMenu(true);
    }

    @Override
    public void onAttach(Context context) {
        super.onAttach(context);
        if (context instanceof Activity){
            mActivity = (ASSessionActivity) context;
        }
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Back the list with the diseases of the current session.
        setListAdapter(new ASDiseasesListAdapter(this, mCase()));
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        //Bind
        // Sessions with a non-zero monitoring-session value, or a closed
        // status, become read-only.
        if (mCase().getMonitoringSession() != 0 || mCase().getMonitoringSessionStatus() == Constants.AsSessionStatus_Closed) {
            mReadonly = true;
        }
        return inflater.inflate(R.layout.list_m_choice_layout, null);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        inflater.inflate(R.menu.add_remove_with_sync, menu);
    }

    /* Called whenever we call invalidateOptionsMenu() */
    @Override
    public void onPrepareOptionsMenu(Menu menu) {
        final ASDiseasesListAdapter adapter = GetAdapter();
        if (adapter != null)
            adapter.updateMenuVisibility(menu);
        super.onPrepareOptionsMenu(menu);
    }

    /**
     * Handles the result of the ASDisease editor activity: when a list
     * position was supplied, the edited fields are copied onto the existing
     * row; otherwise the returned disease is appended as a new record.
     */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent intent) {
        if (requestCode == getResources().getInteger(R.integer.ACTIVITY_ID_ASDISEASE)) {
            if (resultCode == Activity.RESULT_OK) {
                int position = intent.getIntExtra("position", -1);
                final ASDisease gotasdisease = intent.getParcelableExtra(getResources().getString(R.string.EXTRA_ID_ITEM));
                if (position >= 0) {
                    final ASDiseasesListAdapter list = GetAdapter();
                    if (list != null) {
                        final ASDisease asdisease = list.getItem(position);
                        if (asdisease != null) {
                            // Copy the edited fields onto the existing row.
                            asdisease.setDiagnosis(gotasdisease.getDiagnosis());
                            asdisease.setSpeciesType(gotasdisease.getSpeciesType());
                            asdisease.setSampleType(gotasdisease.getSampleType());
                            updateListItemAtPosition(position);
                        }
                    }
                }
                else {
                    // No position supplied -> a newly created disease.
                    mCase().asDiseases.add(gotasdisease);
                    ((ASDiseasesListAdapter) getListView().getAdapter()).notifyDataSetChanged();
                }
            }
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                mActivity.Home();
                return true;
            case R.id.CreateNew:
                // Launch the editor for a brand-new disease record.
                final ASDisease asdisease = ASDisease.CreateNew(mCase().getMonitoringSession(), mCase().getId());
                final Intent intent = new Intent(getActivity(), ASDiseaseActivity.class);
                intent.putExtra(getResources().getString(R.string.EXTRA_ID_ITEM), asdisease);
                intent.putExtra(getResources().getString(R.string.EXTRA_ID_ASSESSION), mCase());
                startActivityForResult(intent, getResources().getInteger(R.integer.ACTIVITY_ID_ASDISEASE));
                return true;
            case R.id.Save:
                mActivity.Save();
                return true;
            case R.id.Remove:
                // Deleting requires at least one checked row, and all checked
                // rows must be deletable.
                final int sel = ((ASDiseasesListAdapter) getListView().getAdapter()).getCheckItemsCount();
                if(sel == 0) {
                    EidssAndroidHelpers.AlertOkDialog.Show(mActivity.getSupportFragmentManager(), R.string.NothingToDelete);
                    return true;
                }
                if (!CheckCanDeleteASDiseases()) {
                    EidssAndroidHelpers.AlertOkDialog.Show(mActivity.getSupportFragmentManager(), R.string.ErrDiseaseCantBeDeleted);
                    return true;
                }
                DeleteASDiseases();
                return true;
            case R.id.Refresh:
                // Anchor the online/offline popup to the Refresh toolbar item.
                final View menuItemView = mActivity.findViewById(R.id.Refresh);
                PopupMenu popupMenu = new PopupMenu(mActivity, menuItemView);//, R.style.PopupMenu
                popupMenu.inflate(R.menu.synchronize_session_one_menu);
                popupMenu.setOnMenuItemClickListener(new PopupMenu.OnMenuItemClickListener() {
                    @Override
                    public boolean onMenuItemClick(MenuItem item) {
                        onSyncMenuItemClick(item);
                        return true;
                    }
                });
                popupMenu.show();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }

    // Dispatches the online/offline choices of the Refresh popup menu.
    public boolean onSyncMenuItemClick(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.IDM_ONLINE:
                mActivity.OnLine();
                break;
            case R.id.IDM_OFFLINE:
                mActivity.OffLine();
                break;
            default:
                // NOTE(review): falls back to onContextItemSelected (not
                // onOptionsItemSelected) -- confirm this is intentional.
                return super.onContextItemSelected(item);
        }
        return true;
    }

    // Returns the list adapter, or null when the list/adapter is not ready.
    protected ASDiseasesListAdapter GetAdapter() {
        if(getListView() !=null) {
            ListAdapter adapter = getListView().getAdapter();
            if (adapter != null && adapter instanceof ASDiseasesListAdapter) {
                return (ASDiseasesListAdapter) adapter;
            }
        }
        return null;
    }

    // Re-renders a single visible row without rebuilding the whole list.
    public void updateListItemAtPosition(int position) {
        int visiblePosition = getListView().getFirstVisiblePosition();
        View view = getListView().getChildAt(position - visiblePosition);
        getListView().getAdapter().getView(position, view, getListView());
    }

    private void DeleteASDiseases()
    {
        ((ASDiseasesListAdapter)getListView().getAdapter()).DeleteASDiseases();
    }

    private boolean CheckCanDeleteASDiseases()
    {
        return ((ASDiseasesListAdapter)getListView().getAdapter()).CheckCanDeleteASDiseases();
    }

    // The AS session shared by the hosting activity (via its IGet interface).
    private ASSession mCase() {
        return (ASSession)((IGet)mActivity).get();
    }
}
|
"""
Example program to plot refractive index against wavelength is a default plot
"""
from poptics.wavelength import MaterialIndex
import matplotlib.pyplot as plt
def main():
    """Prompt for a material (default behaviour of MaterialIndex), draw its
    refractive-index-vs-wavelength curve, and show the plot window."""
    # Get a material index; the default is to prompt for a key
    index = MaterialIndex()
    index.draw()
    plt.title("Material : " + str(index))
    plt.show()

if __name__ == "__main__" :
    main()
#!/usr/bin/env bash
###############################################################################
# Copyright 2020 The Apollo Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################

# Builds and installs Qt5 (qtbase submodule only) from source into
# /usr/local/qt5, then registers it with the linker and shell profile.

# Fail on first error.
set -e

CURR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd -P)"
. ${CURR_DIR}/installer_base.sh

# References:
# 1) http://www.linuxfromscratch.org/blfs/view/svn/x/qt5.html
# 2) https://src.fedoraproject.org/rpms/qt5-qtbase/tree/master
# 3) https://launchpad.net/ubuntu/+source/qtbase-opensource-src/5.12.8+dfsg-0ubuntu1

# Build dependencies (system libraries used instead of Qt's bundled copies).
apt_get_update_and_install \
  libdbus-1-dev \
  libfontconfig1-dev \
  libfreetype6-dev \
  libgl1-mesa-dev \
  libharfbuzz-dev \
  libjpeg-dev \
  libpcre3-dev \
  libpng-dev \
  libsqlite3-dev \
  libssl-dev \
  libvulkan-dev \
  libxcb1-dev \
  libexpat1-dev \
  zlib1g-dev \
  libxcb-image0-dev \
  libxcb-keysyms1-dev \
  libxcb-render-util0-dev \
  libxcb-shm0-dev \
  libxcb-util1 \
  libxcb-xinerama0-dev \
  libxcb-xkb-dev \
  libxkbcommon-dev \
  libxkbcommon-x11-dev

VERSION="5.12.9"
MAJOR_VERSION="${VERSION%.*}"
PKG_NAME="qtbase-everywhere-src-${VERSION}.tar.xz"
CHECKSUM="331dafdd0f3e8623b51bd0da2266e7e7c53aa8e9dc28a8eb6f0b22609c5d337e"
DOWNLOAD_LINK="https://download.qt.io/official_releases/qt/${MAJOR_VERSION}/${VERSION}/submodules/${PKG_NAME}"

download_if_not_cached "${PKG_NAME}" "${CHECKSUM}" "${DOWNLOAD_LINK}"
tar xJf ${PKG_NAME}

# /usr/local/qt5 is a symlink to the versioned install directory.
QT5_PREFIX="/usr/local/qt5"
mkdir -p "/usr/local/Qt-${VERSION}"
ln -sfnv "Qt-${VERSION}" "${QT5_PREFIX}"

pushd qtbase-everywhere-src-${VERSION} >/dev/null
# Rewrite "python" to "python3" in all qmake project files.
find . -name "*.pr[io]" | xargs sed -i 's/python/&3/'
pushd src/3rdparty
# Move bundled third-party sources out of the way so the -system-* configure
# flags below pick up the system libraries instead.
[ -d UNUSED ] || mkdir UNUSED
mv freetype libjpeg libpng zlib sqlite UNUSED/ || true
popd
./configure \
  -verbose \
  -prefix $QT5_PREFIX \
  -sysconfdir /etc/xdg \
  -platform linux-g++ \
  -release \
  -optimized-qmake \
  -shared \
  -strip \
  -confirm-license \
  -opensource \
  -fontconfig \
  -dbus-linked \
  -openssl-linked \
  -system-harfbuzz \
  -system-freetype \
  -system-sqlite \
  -system-libjpeg \
  -system-libpng \
  -system-zlib \
  -nomake examples \
  -no-pch \
  -no-rpath \
  -skip qtwebengine
make -j$(nproc)
make install

# PostInstall
# Strip build-directory references from .prl files and delete libtool
# archives, which would otherwise embed paths from the build environment.
find $QT5_PREFIX/ -name \*.prl \
  -exec sed -i -e '/^QMAKE_PRL_BUILD_DIR/d' {} \;
find ${QT5_PREFIX}/lib -name "*.la" \
  -exec rm -f {} \;
popd >/dev/null

# Register the Qt libraries with the dynamic linker.
echo "${QT5_PREFIX}/lib" > /etc/ld.so.conf.d/qt.conf
ldconfig

# Append Qt environment setup to the Apollo shell profile.
__mytext="""
export QT5_PATH=\"${QT5_PREFIX}\"
export QT_QPA_PLATFORM_PLUGIN_PATH=\"\${QT5_PATH}/plugins\"
add_to_path \"\${QT5_PATH}/bin\"
"""
echo "${__mytext}" | tee -a "${APOLLO_PROFILE}"

ok "Successfully installed Qt5 qtbase-${VERSION} from src for $(uname -m) ."

rm -rf qtbase-everywhere-src-${VERSION} ${PKG_NAME}
# Clean up cache to reduce layer size.
apt-get clean && \
  rm -rf /var/lib/apt/lists/*
|
#include <fstream>
#include <iostream>
#include <memory>
#include <optional>
#include <string>
#include <yaml_library> // Assume the existence of a YAML library for parsing and serialization
// Define the AtomicOperation class to represent a single atomic operation
// Define the AtomicOperation class to represent a single atomic operation
// (currently an empty placeholder; fields/methods to be filled in later).
class AtomicOperation {
    // Define member variables and methods as per the requirements
};
// Define the Implementation class to handle file I/O operations for atomic operations
class Implementation {
public:
Implementation(const std::string& file_path) {
// Initialize the Implementation with the provided file path
}
void write_operation(const AtomicOperation& operation) {
// Write the atomic operation to the YAML file using the YAML library
}
std::optional<AtomicOperation> read_next_record() {
// Read the next atomic operation from the YAML file using the YAML library
// Return the read operation as an optional
}
};
// Complete the YamlLogger class using the provided code snippet and the defined classes
class YamlLogger {
private:
std::unique_ptr<Implementation> _pimpl;
public:
YamlLogger(const std::string& file_path) : _pimpl(std::make_unique<Implementation>(file_path)) {
// Do nothing in the constructor body
}
void write_operation(const AtomicOperation& operation) {
_pimpl->write_operation(operation);
}
std::optional<AtomicOperation> read_next_record() {
return _pimpl->read_next_record();
}
};
int main() {
    // Usage example of the YamlLogger class
    YamlLogger logger("log_file.yaml");
    // Create and write an atomic operation to the YAML file
    AtomicOperation operation;
    logger.write_operation(operation);
    // Read the next atomic operation from the YAML file
    // (the result is intentionally unused in this example)
    std::optional<AtomicOperation> next_operation = logger.read_next_record();
    return 0;
}
public class RestrictedZoneAlert extends AppCompatActivity implements LocationListener {
LocationManager locationManager;
boolean isWithinZone = false;
double latLowerBound=27.761045;
double latUpperBound=27.763889;
double longLowerBound=-82.638719;
double longUpperBound=-82.630029;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_restricted_zone_alert);
locationManager = (LocationManager) getSystemService(LOCATION_SERVICE);
if (locationManager.isProviderEnabled(LocationManager.GPS_PROVIDER)) {
locationManager.requestLocationUpdates(LocationManager.GPS_PROVIDER, 0, 0, this);
}
}
@Override
public void onLocationChanged(Location location) {
double latitude = location.getLatitude();
double longitude = location.getLongitude();
if (latitude >=latLowerBound && latitude <=latUpperBound ) {
if (longitude >= longLowerBound && longitude <= longUpperBound) {
if(!isWithinZone) {
isWithinZone = true;
Toast.makeText(this, "You have entered a restricted zone", Toast.LENGTH_LONG).show();
}
}
else {
isWithinZone = false;
}
}
}
// other Methods
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
}
@Override
public void onProviderEnabled(String provider) {
}
@Override
public void onProviderDisabled(String provider) {
Toast.makeText(RestrictedZoneAlert.this, "Please Enable GPS and Internet", Toast.LENGTH_SHORT).show();
}
} |
use std::io::{BufRead, Error, ErrorKind};
use csv::Reader;
use serde::Deserialize;
/// One row of the jobs CSV.
/// Field names are uppercase so serde can match them to the CSV header
/// columns verbatim (ID, LAT, LNG, ...).
#[derive(Debug, Deserialize)]
struct Job {
    ID: String,
    LAT: f64,
    LNG: f64,
    DEMAND: u32,
    DURATION: u32,
    // Optional time-window bounds; presumably empty cells become None --
    // verify against the csv/serde behaviour in use.
    TW_START: Option<String>,
    TW_END: Option<String>,
}
/// Parses the jobs CSV into a vector of `Job` records.
/// Any malformed row aborts parsing and is surfaced as an `io::Error`.
fn read_csv_problem(jobs_reader: impl BufRead, constraints_reader: impl BufRead) -> Result<Vec<Job>, Error> {
    let mut jobs_csv = Reader::from_reader(jobs_reader);
    // The constraints input is opened but not yet validated against the jobs.
    let _constraints_csv = Reader::from_reader(constraints_reader);

    let mut parsed = Vec::new();
    for row in jobs_csv.deserialize::<Job>() {
        parsed.push(row?);
    }

    // Additional validation logic can be added here based on constraints_reader
    Ok(parsed)
}
#[test]
fn can_propagate_format_error() {
    // The second data row has six fields while the header declares seven, so
    // deserialization must fail and surface as ErrorKind::InvalidData.
    let invalid_jobs = r"
ID,LAT,LNG,DEMAND,DURATION,TW_START,TW_END
job2,52.5225,13.4095,1,3,,
job2,52.5165,13.3808,3,,
";
    let result = read_csv_problem(invalid_jobs.as_bytes(), "".as_bytes())
        .err()
        .expect("Should return error!");
    assert_eq!(result.kind(), ErrorKind::InvalidData);
}
/*
* Copyright 2022 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moduliths.moments.autoconfigure;
import java.time.Clock;
import org.moduliths.moments.support.Moments;
import org.moduliths.moments.support.MomentsProperties;
import org.moduliths.moments.support.TimeMachine;
import org.springframework.beans.factory.ObjectProvider;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationEventPublisher;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;
/**
* Auto-configuration for {@link Moments}.
*
* @author <NAME>
* @since 1.3
*/
@EnableScheduling
@EnableConfigurationProperties(MomentsProperties.class)
@ConditionalOnProperty(name = "moduliths.moments.enabled", havingValue = "true", matchIfMissing = true)
@Configuration(proxyBeanMethods = false)
class MomentsAutoConfiguration {

	/**
	 * Registers the default {@link Moments} bean unless the time machine has been
	 * explicitly enabled. Falls back to the system UTC clock when no {@link Clock}
	 * bean is available.
	 */
	@Bean
	@ConditionalOnProperty(name = "moduliths.moments.enable-time-machine", havingValue = "false", matchIfMissing = true)
	Moments moments(ObjectProvider<Clock> clockProvider, ApplicationEventPublisher events, MomentsProperties properties) {
		return new Moments(clockProvider.getIfAvailable(Clock::systemUTC), events, properties);
	}

	/**
	 * Registers the {@link TimeMachine} variant, active only when
	 * {@code moduliths.moments.enable-time-machine=true}.
	 */
	@Bean
	@ConditionalOnProperty(name = "moduliths.moments.enable-time-machine", havingValue = "true", matchIfMissing = false)
	TimeMachine timeMachine(ObjectProvider<Clock> clockProvider, ApplicationEventPublisher events,
			MomentsProperties properties) {
		return new TimeMachine(clockProvider.getIfAvailable(Clock::systemUTC), events, properties);
	}
}
|
#!/bin/sh
# Flag fcitx as enabled for this user, then ask the user to re-login so the
# change takes effect.
printf '1\n' > "$HOME/.fcitx-enable"
echo "Please relogin" >&2
|
#!/usr/bin/env -S bash -euET -o pipefail -O inherit_errexit
# Resolve the script's real location and run from that directory.
SCRIPT=$(readlink -f "$0") && cd $(dirname "$SCRIPT")

# --- Script Init ---
mkdir -p log
rm -R -f log/*

# --- Setup run dirs ---
# Remove previous outputs, keeping summary-info and json artefacts.
find output -type f -not -name '*summary-info*' -not -name '*.json' -exec rm -R -f {} +

# %FIFO_DIR% is a placeholder substituted by the run-generation tooling.
rm -R -f /tmp/%FIFO_DIR%/fifo/*
rm -R -f work/*
mkdir work/kat/
mkdir work/gul_S1_summaryaalcalc

mkfifo /tmp/%FIFO_DIR%/fifo/gul_P19
mkfifo /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P19

# --- Do ground up loss computes ---
# tee copies the summary stream into the aalcalc work bin; pid1 is waited on
# below so the script only proceeds once the summary has been fully written.
tee < /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P19 work/gul_S1_summaryaalcalc/P19.bin > /dev/null & pid1=$!
summarycalc -m -i -1 /tmp/%FIFO_DIR%/fifo/gul_S1_summary_P19 < /tmp/%FIFO_DIR%/fifo/gul_P19 &
# Event range 19-20 -> model -> ground-up loss samples feeding the fifo chain.
eve 19 20 | getmodel | gulcalc -S100 -L100 -r -a1 -i - > /tmp/%FIFO_DIR%/fifo/gul_P19 &
wait $pid1

# --- Do ground up loss kats ---
|
-- Removes a user's out-of-office (OOF) entry for a given channel.
-- Rolls back and returns when the user is not registered in the channel;
-- succeeds silently when the user never had an OOF entry.
ALTER PROCEDURE usp_RemoveUserOof
    @channelId NVARCHAR(255),
    @userId NVARCHAR (255)
AS
SET NOCOUNT ON;
BEGIN TRANSACTION
    DECLARE @userFound INT;
    DECLARE @removeUser INT;
    DECLARE @otcuInternalTable TABLE (Internal_id INT);
    DECLARE @cobuoofInternalTable TABLE (Internal_id INT);
    SET @userFound = 0;
    -- Verify if the users exist on the db and in the right channel.
    INSERT INTO @otcuInternalTable
    SELECT otcu.Internal_id
    FROM ChannelsOtterBrassUser otcu
    INNER JOIN OtterBrassUser otus
        ON otus.Internal_id = otcu.User_internal_id
    INNER JOIN Channels otch
        ON otch.Internal_id = otcu.Channel_internal_id
    WHERE otus.Id = @userId
        AND otch.Id = @channelId
    SELECT @userFound = COUNT(Internal_id)
    FROM @otcuInternalTable
    -- User should be registered before continuing
    IF @userFound < 1
    BEGIN
        ROLLBACK
        RETURN;
    END
    -- Verify if the users exist on the oof table
    INSERT INTO @cobuoofInternalTable
    SELECT otcuInternal.Internal_id
    FROM @otcuInternalTable otcuInternal
    INNER JOIN ChannelsOtterBrassUserOOF cobuoof
        ON cobuoof.ChannelsOtterBrassUser_internal_id = otcuInternal.Internal_id
    SELECT @removeUser = COUNT(Internal_id)
    FROM @cobuoofInternalTable
    -- If user is not found in the OOF Table it means he has never used the feature
    IF @removeUser > 0
    BEGIN
        -- NOTE(review): @removeUser is reused to hold an Internal_id, and only
        -- the TOP 1 row is deleted -- confirm a user can have at most one OOF
        -- row per channel.
        SELECT TOP 1 @removeUser = Internal_id
        FROM @cobuoofInternalTable
        DELETE FROM [ChannelsOtterBrassUserOOF]
        WHERE ChannelsOtterBrassUser_internal_id = @removeUser
    END
    IF @@ERROR <> 0
    BEGIN
        -- Rollback the transaction
        ROLLBACK
        -- Raise an error and return
        RAISERROR ('Error while updating Random Table status.', 16, 1)
        RETURN
    END
COMMIT
GO
#!/bin/bash
# this scripts should be run under the root folder of kunpengsecl project
# Integration test: register a container and its IMA base values, then tamper
# with the IMA measurement log and verify that the reported trust status
# changes from "trusted" to "untrusted".
#set -eux
PROJROOT=.
# run number of rac clients to test
NUM=1
# include common part
. ${PROJROOT}/attestation/test/integration/common.sh
# above are common preparation steps, below are specific preparation step, scope includs:
# configure files, input files, environment variables, cmdline paramenters, flow control paramenters, etc.
### Start Preparation
echo "start test preparation..." | tee -a ${DST}/control.txt
pushd $(pwd)
cd ${PROJROOT}/attestation/quick-scripts
echo "clean database" | tee -a ${DST}/control.txt
sh clear-database.sh | tee -a ${DST}/control.txt
popd
# Shorten the trust duration so status changes become visible quickly.
sed -i --follow-symlinks "s/trustduration: 2m0s/trustduration: 20s/g" ${DST}/ras/config.yaml
### End Preparation

### start launching binaries for testing
# Run ras once with -T (its output is later grepped for a Bearer token),
# then start the server for real.
echo "start ras..." | tee -a ${DST}/control.txt
( cd ${DST}/ras ; ./ras -T &>${DST}/ras/echo.txt ; ./ras &>>${DST}/ras/echo.txt ;)&
echo "sleep 5s" | tee -a ${DST}/control.txt
sleep 5
# start number of rac
echo "start ${NUM} rac clients..." | tee -a ${DST}/control.txt
(( count=0 ))
for (( i=1; i<=${NUM}; i++ ))
do
    ( cd ${DST}/rac-${i} ; ${DST}/rac/raagent -t &>>${DST}/rac-${i}/echo.txt ; )&
    (( count++ ))
    if (( count >= 1 ))
    then
        (( count=0 ))
        echo "start ${i} rac clients at $(date)..." | tee -a ${DST}/control.txt
    fi
done

### start monitoring and control the testing
echo "start to perform test ..." | tee -a ${DST}/control.txt
echo "wait for 5s" | tee -a ${DST}/control.txt
sleep 5
# stop rac
echo "kill all raagent processes..." | tee -a ${DST}/control.txt
pkill -u ${USER} raagent
# modify ima file
# Append two IMA records for the test container's files; these match the base
# values registered below, so the first status check should be "trusted".
NEWLINE1="10 5a2842c1767f26defc2e96a01e46062524333501 ima 732458574c63c3790cad093a36eadfb990d11ee6 /var/lib/docker/containers/9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9/hosts"
NEWLINE2="10 8c4c4953a4cc0b1a73ee54fcc72540a6834e8f49 ima 186d209e1d331e9160d6d0b03c523d862ae9d2a4 /var/lib/docker/containers/9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9/resolv.conf"
echo "${NEWLINE1}" >> ${RACDIR}/${IMAFILE}
echo "${NEWLINE2}" >> ${RACDIR}/${IMAFILE}
# restart number of rac
echo "start ${NUM} rac clients..." | tee -a ${DST}/control.txt
(( count=0 ))
for (( i=1; i<=${NUM}; i++ ))
do
    ( cd ${DST}/rac-${i} ; ${DST}/rac/raagent -t &>>${DST}/rac-${i}/echo.txt ; )&
    (( count++ ))
    if (( count >= 1 ))
    then
        (( count=0 ))
        echo "start ${i} rac clients at $(date)..." | tee -a ${DST}/control.txt
    fi
done
# register container
AUTHTOKEN=$(grep "Bearer " ${DST}/ras/echo.txt)
echo "register container ing..." | tee -a ${DST}/control.txt
curl -X POST -H "Authorization: $AUTHTOKEN" -H "Content-Type: application/json" http://localhost:40002/container/9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9 --data '{"registered":true,"serverid":1,"uuid":"9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9"}'
# post basevalue
echo "post basevalue ing..." | tee -a ${DST}/control.txt
curl -X PUT -H "Authorization: $AUTHTOKEN" -H "Content-Type: application/json" http://localhost:40002/container/basevalue/9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9 --data '{"measurements":[{"name":"/var/lib/docker/containers/9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9/hosts","type":"ima","value":"732458574c63c3790cad093a36eadfb990d11ee6"},{"name":"/var/lib/docker/containers/9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9/resolv.conf","type":"ima","value":"186d209e1d331e9160d6d0b03c523d862ae9d2a4"}]}'
echo "wait for 15s" | tee -a ${DST}/control.txt
sleep 15
# get-response
echo "get-response ing..." | tee -a ${DST}/control.txt
RESPONSE1=$(curl http://localhost:40002/container/status 2>/dev/null)
echo ${RESPONSE1} | tee -a ${DST}/control.txt
# stop rac
echo "kill all raagent processes..." | tee -a ${DST}/control.txt
pkill -u ${USER} raagent
# modify ima file
# Tamper with the hosts measurement so it no longer matches the registered
# base value; the second status check should then report "untrusted".
OLDLINE="10 5a2842c1767f26defc2e96a01e46062524333501 ima 732458574c63c3790cad093a36eadfb990d11ee6 \/var\/lib\/docker\/containers\/9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9\/hosts"
NEWLINE="10 5a4442c1767f26defc2e96a01e46062524333501 ima 712456674c63c3790cad093a36eadfb990d11ee6 \/var\/lib\/docker\/containers\/9b954212d796863e9f2c04372d4ab7e39fe0b62870c82a9e83c3ec326e5fb9b9\/hosts"
sed -i --follow-symlinks "s/${OLDLINE}/${NEWLINE}/g" ${RACDIR}/${IMAFILE}
# restart number of rac
echo "start ${NUM} rac clients..." | tee -a ${DST}/control.txt
(( count=0 ))
for (( i=1; i<=${NUM}; i++ ))
do
    ( cd ${DST}/rac-${i} ; ${DST}/rac/raagent -t &>>${DST}/rac-${i}/echo.txt ; )&
    (( count++ ))
    if (( count >= 1 ))
    then
        (( count=0 ))
        echo "start ${i} rac clients at $(date)..." | tee -a ${DST}/control.txt
    fi
done
echo "wait for 15s" | tee -a ${DST}/control.txt
sleep 15
# get-response
RESPONSE2=$(curl http://localhost:40002/container/status 2>/dev/null)
echo ${RESPONSE2} | tee -a ${DST}/control.txt

### stop testing
echo "kill all test processes..." | tee -a ${DST}/control.txt
pkill -u ${USER} ras
pkill -u ${USER} raagent
echo "test DONE!!!" | tee -a ${DST}/control.txt

### analyse the testing data
# Pull ClientID and Status out of each JSON response.
CLIENTID1=$(echo ${RESPONSE1} | jq -r '.' | awk '/ClientID/ {gsub(",","",$2);print $2}')
STATUS1=$(echo ${RESPONSE1} | jq -r '.' | awk '/Status/ {gsub(",","",$2);gsub("\"","",$2);print $2}')
CLIENTID2=$(echo ${RESPONSE2} | jq -r '.' | awk '/ClientID/ {gsub(",","",$2);print $2}')
STATUS2=$(echo ${RESPONSE2} | jq -r '.' | awk '/Status/ {gsub(",","",$2);gsub("\"","",$2);print $2}')

### generate the test report
echo "First time: ClientID:${CLIENTID1}, Status:${STATUS1}" | tee -a ${DST}/control.txt
echo "Second time: ClientID:${CLIENTID2}, Status:${STATUS2}" | tee -a ${DST}/control.txt
# Pass only when the tampering flipped the status from trusted to untrusted.
if [[ ${STATUS1} == "trusted" && ${STATUS2} == "untrusted" ]]
then
    echo "test succeeded!" | tee -a ${DST}/control.txt
    exit 0
else
    echo "test failed!" | tee -a ${DST}/control.txt
    exit 1
fi
def most_common_word(sentence):
    """Return ``(word, count)`` for the most frequent word in ``sentence``.

    Counting is case-insensitive ('The' and 'the' are the same word), which
    matches the documented example output below.  BUG FIX: the previous
    version compared words case-sensitively, so the example actually
    produced ('The', 1) instead of the documented ('the', 2).
    Ties are broken by first occurrence; an empty sentence raises ValueError
    (from ``max`` on an empty dict), as before.
    """
    freq_dict = {}
    for word in sentence.lower().split():
        freq_dict[word] = freq_dict.get(word, 0) + 1
    most_common = max(freq_dict, key=freq_dict.get)
    return (most_common, freq_dict[most_common])

sentence = 'The quick brown fox jumps over the lazy dog'
print(most_common_word(sentence))  # Output = ('the', 2)
-- Employees earning more than 20000 whose bonus exceeds 5% of their salary.
SELECT *
FROM employees
WHERE
    salary > 20000 AND
    bonus > ((salary * 5) / 100);
<filename>FairyPlugin/src/main/java/com/limpoxe/fairy/core/android/HackWindow.java
package com.limpoxe.fairy.core.android;
import android.content.Context;
import android.view.LayoutInflater;
import com.limpoxe.fairy.util.RefInvoker;
/**
* Created by cailiming on 16/10/30.
*/
/**
 * Reflection helper for poking private fields of android.view.Window
 * (used by the plugin framework to swap in plugin-aware resources).
 */
public class HackWindow {

    private static final String WINDOW_CLASS = "android.view.Window";
    private static final String FIELD_CONTEXT = "mContext";
    private static final String FIELD_WINDOW_STYLE = "mWindowStyle";
    private static final String FIELD_LAYOUT_INFLATER = "mLayoutInflater";

    // The Window object being manipulated (kept as Object; access is via reflection).
    private final Object instance;

    public HackWindow(Object instance) {
        this.instance = instance;
    }

    /** Overwrites the window's mContext field. */
    public void setContext(Context context) {
        RefInvoker.setField(instance, WINDOW_CLASS, FIELD_CONTEXT, context);
    }

    /** Overwrites the window's mWindowStyle field. */
    public void setWindowStyle(Object style) {
        RefInvoker.setField(instance, WINDOW_CLASS, FIELD_WINDOW_STYLE, style);
    }

    /** Overwrites mLayoutInflater on the given class (caller picks the Window subclass). */
    public void setLayoutInflater(String className, LayoutInflater layoutInflater) {
        RefInvoker.setField(instance, className, FIELD_LAYOUT_INFLATER, layoutInflater);
    }
}
|
#@IgnoreInspection BashAddShebang
# Copyright (c) YugaByte, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations
# under the License.
#
# This is common between build and test scripts.
set -euo pipefail

# This file only defines environment/functions, so it must be sourced.
if [[ $BASH_SOURCE == $0 ]]; then
  echo "$BASH_SOURCE must be sourced, not executed" >&2
  exit 1
fi

# Guard against multiple inclusions.
if [[ -n ${YB_COMMON_BUILD_ENV_SOURCED:-} ]]; then
  # Return to the executing script.
  return
fi
YB_COMMON_BUILD_ENV_SOURCED=1

# -------------------------------------------------------------------------------------------------
# Load yugabyte-bash-common
# -------------------------------------------------------------------------------------------------
# Records the repository root in YB_SRC_ROOT and derives the helper paths
# that depend on it (build-support dir, compiler wrappers, Java project dirs).
set_yb_src_root() {
  export YB_SRC_ROOT="$1"
  YB_BUILD_SUPPORT_DIR="$YB_SRC_ROOT/build-support"
  if [[ ! -d $YB_SRC_ROOT ]]; then
    fatal "YB_SRC_ROOT directory '$YB_SRC_ROOT' does not exist"
  fi
  YB_COMPILER_WRAPPER_CC="$YB_BUILD_SUPPORT_DIR/compiler-wrappers/cc"
  YB_COMPILER_WRAPPER_CXX="$YB_BUILD_SUPPORT_DIR/compiler-wrappers/c++"
  yb_java_project_dirs=( "$YB_SRC_ROOT/java" "$YB_SRC_ROOT/ent/java" )
}
# This script is expected to be in build-support, a subdirectory of the repository root directory.
set_yb_src_root "$( cd "$( dirname "$BASH_SOURCE" )"/.. && pwd )"

if [[ $YB_SRC_ROOT == */ ]]; then
  fatal "YB_SRC_ROOT ends with '/' (not allowed): '$YB_SRC_ROOT'"
fi

YB_BASH_COMMON_DIR=$YB_SRC_ROOT/submodules/yugabyte-bash-common

# An empty or missing submodule directory means it has not been checked out yet; initialize it.
if [[ ! -d $YB_BASH_COMMON_DIR || -z "$( ls -A "$YB_BASH_COMMON_DIR" )" ]]; then
  ( cd "$YB_SRC_ROOT"; git submodule update --init --recursive )
fi

. "$YB_SRC_ROOT/submodules/yugabyte-bash-common/src/yugabyte-bash-common.sh"
# -------------------------------------------------------------------------------------------------
# Constants
# -------------------------------------------------------------------------------------------------

# Retry cap used by build_yb_java_code_with_retries.
declare -i MAX_JAVA_BUILD_ATTEMPTS=5

# Reuse the C errno value for this.
declare -r -i YB_EXIT_CODE_NO_SUCH_FILE_OR_DIRECTORY=2

# What matches these expressions will be filtered out of Maven output.
MVN_OUTPUT_FILTER_REGEX='\[INFO\] (Download(ing|ed): '
MVN_OUTPUT_FILTER_REGEX+='|[^ ]+ already added, skipping$)'
MVN_OUTPUT_FILTER_REGEX+='|^Generating .*[.]html[.][.][.]$'
MVN_OUTPUT_FILTER_REGEX+='|^\[INFO\] Copying .*[.]jar to .*[.]jar$'

readonly YB_JENKINS_NFS_HOME_DIR=/n/jenkins

# In our NFS environment, we keep Linuxbrew builds in this directory.
readonly SHARED_LINUXBREW_BUILDS_DIR="$YB_JENKINS_NFS_HOME_DIR/linuxbrew"
readonly SHARED_CUSTOM_HOMEBREW_BUILDS_DIR="$YB_JENKINS_NFS_HOME_DIR/homebrew"

# We look for the list of distributed build worker nodes in this file. This gets populated by
# a cronjob on buildmaster running under the jenkins user (as of 06/20/2017).
YB_BUILD_WORKERS_FILE=${YB_BUILD_WORKERS_FILE:-$YB_JENKINS_NFS_HOME_DIR/run/build-workers}

# The assumed number of cores per build worker. This is used in the default make parallelism level
# calculation in yb_build.sh. This does not have to be the exact number of cores per worker, but
# will affect whether or not we force the auto-scaling group of workers to expand.
readonly YB_NUM_CORES_PER_BUILD_WORKER=8

# The "number of build workers" that we'll end up using to compute the parallelism (by multiplying
# it by YB_NUM_CORES_PER_BUILD_WORKER) will be first brought into this range.
readonly MIN_EFFECTIVE_NUM_BUILD_WORKERS=5
readonly MAX_EFFECTIVE_NUM_BUILD_WORKERS=10

# Freeze the Maven filter regex only now, after all alternatives have been appended above.
readonly MVN_OUTPUT_FILTER_REGEX

# An even faster alternative to downloading a pre-built third-party dependency tarball from S3
# or Google Storage: just use a pre-existing third-party build from NFS. This has to be maintained
# outside of main (non-thirdparty) YB codebase's build pipeline.
readonly NFS_PARENT_DIR_FOR_SHARED_THIRDPARTY="$YB_JENKINS_NFS_HOME_DIR/thirdparty"

# We create a Python Virtual Environment inside this directory in the build directory.
readonly YB_VIRTUALENV_BASENAME=python_virtual_env

readonly YB_LINUXBREW_LOCAL_ROOT=$HOME/.linuxbrew-yb-build

readonly YB_SHARED_MVN_LOCAL_REPO="$YB_JENKINS_NFS_HOME_DIR/m2_repository"
readonly YB_NON_SHARED_MVN_LOCAL_REPO=$HOME/.m2/repository
readonly YB_SHARED_MVN_SETTINGS="$YB_JENKINS_NFS_HOME_DIR/m2_settings.xml"

# Whether this file was sourced by the test-runner script; influences Maven repo selection in
# set_mvn_parameters.
if [[ -z ${is_run_test_script:-} ]]; then
  is_run_test_script=false
fi
readonly is_run_test_script

# Setting this to "true" will prevent any changes to the virtualenv (creating it or installing
# modules into it) as part of activate_virtualenv.
yb_readonly_virtualenv=false

# How long we'll wait while a concurrent process downloads and extracts a third-party archive.
declare -i -r YB_DEP_DOWNLOAD_LOCK_WAIT_SEC=120

YB_NFS_DOWNLOAD_CACHE_DIR=${YB_NFS_DOWNLOAD_CACHE_DIR:-$YB_JENKINS_NFS_HOME_DIR/download_cache}

readonly VALID_BUILD_TYPES=(
  asan
  debug
  fastdebug
  idebug
  irelease
  ifastdebug
  profile_build
  profile_gen
  release
  tsan
  tsan_slow
  compilecmds
)

# Valid values of CMAKE_BUILD_TYPE passed to the top-level CMake build. This is the same as the
# above with the exclusion of ASAN/TSAN.
readonly VALID_CMAKE_BUILD_TYPES=(
  debug
  fastdebug
  profile_build
  profile_gen
  release
)

readonly VALID_COMPILER_TYPES=( gcc clang zapcc gcc8 )

readonly VALID_LINKING_TYPES=( static dynamic )

# Derives the *_RE / *_RAW_RE regex variables used below (e.g. VALID_BUILD_TYPES_RAW_RE) —
# presumably defined in the sourced yugabyte-bash-common library.
make_regexes_from_lists \
  VALID_BUILD_TYPES \
  VALID_CMAKE_BUILD_TYPES \
  VALID_COMPILER_TYPES \
  VALID_LINKING_TYPES

# Build directory basenames look like "<build_type>-<compiler>-<linking>[-ninja][-clion]".
# The continuation lines start at column 0 so no whitespace leaks into the regex string.
readonly BUILD_ROOT_BASENAME_RE=\
"^($VALID_BUILD_TYPES_RAW_RE)-\
($VALID_COMPILER_TYPES_RAW_RE)-\
($VALID_LINKING_TYPES_RAW_RE)\
(-ninja)?\
(-clion)?$"

# We first use this to find ephemeral drives.
readonly EPHEMERAL_DRIVES_GLOB="/mnt/ephemeral* /mnt/d*"

# We then filter the drives found using this.
# The way we use this regex we expect it NOT to be anchored in the end.
readonly EPHEMERAL_DRIVES_FILTER_REGEX="^/mnt/(ephemeral|d)[0-9]+"  # No "$" in the end.

declare -i -r DIRECTORY_EXISTENCE_WAIT_TIMEOUT_SEC=100

declare -i -r YB_DOWNLOAD_LOCK_TIMEOUT_SEC=120
readonly YB_DOWNLOAD_LOCKS_DIR=/tmp/yb_download_locks

readonly YB_NFS_PATH_RE="^/(n|z|u|net|Volumes/net)/"
# -------------------------------------------------------------------------------------------------
# Functions
# -------------------------------------------------------------------------------------------------
# Succeeds (exit 0) iff this is a third-party dependency build, signaled by the
# YB_IS_THIRDPARTY_BUILD environment variable being exactly "1" (unset counts as "0").
is_thirdparty_build() {
  [[ "${YB_IS_THIRDPARTY_BUILD:-0}" == "1" ]]
}
# Ensures the global build_type variable is set (falling back to the BUILD_TYPE environment
# variable), validates it, and lower-cases it. The lowercase value is only assigned when it
# actually differs, because build_type may already have been made read-only by a caller.
normalize_build_type() {
  if [[ -z ${build_type:-} ]]; then
    if [[ -n ${BUILD_TYPE:-} ]]; then
      build_type=$BUILD_TYPE
    else
      fatal "Neither build_type or BUILD_TYPE are set"
    fi
  fi
  validate_build_type "$build_type"
  local lowercased
  lowercased=$( echo "$build_type" | to_lowercase )
  if [[ "$build_type" != "$lowercased" ]]; then
    build_type=$lowercased
  fi
}
# Sets the build directory based on the given build type (the build_type variable) and the value of
# the YB_COMPILER_TYPE environment variable.
# Usage: set_build_root [--no-readonly]
# With --no-readonly, BUILD_ROOT is left writable; otherwise it is frozen with 'readonly'.
# Exports BUILD_ROOT and YB_BUILD_ROOT; also freezes build_type.
set_build_root() {
  set_use_ninja
  if [[ ${1:-} == "--no-readonly" ]]; then
    local -r make_build_root_readonly=false
    shift
  else
    local -r make_build_root_readonly=true
  fi
  expect_num_args 0 "$@"
  normalize_build_type
  readonly build_type
  if [[ -z ${YB_COMPILER_TYPE:-} ]]; then
    fatal "YB_COMPILER_TYPE is not set"
  fi
  validate_compiler_type "$YB_COMPILER_TYPE"
  determine_linking_type
  # Directory basename follows the "<build_type>-<compiler>-<linking>[-ninja]" convention
  # (see BUILD_ROOT_BASENAME_RE above).
  BUILD_ROOT=$YB_BUILD_PARENT_DIR/$build_type-$YB_COMPILER_TYPE-$YB_LINK
  if using_ninja; then
    BUILD_ROOT+="-ninja"
  fi
  normalize_build_root
  if "$make_build_root_readonly"; then
    readonly BUILD_ROOT
  fi
  # If the caller predefined a build root, it must agree with the one computed here; path
  # aliasing (e.g. symlinks) is tolerated via is_same_path.py.
  if [[ -n ${predefined_build_root:-} && $predefined_build_root != $BUILD_ROOT ]] &&
     ! "$YB_BUILD_SUPPORT_DIR/is_same_path.py" "$predefined_build_root" "$BUILD_ROOT"; then
    fatal "An inconsistency between predefined BUILD_ROOT ('$predefined_build_root') and" \
          "computed BUILD_ROOT ('$BUILD_ROOT')."
  fi
  export BUILD_ROOT
  export YB_BUILD_ROOT=$BUILD_ROOT
}
# Resolve the BUILD_ROOT symlink and save the result to the real_build_root_path variable
# (canonicalized to an absolute physical path, then frozen with 'readonly').
set_real_build_root_path() {
  if [[ -h $BUILD_ROOT ]]; then
    real_build_root_path=$( readlink "$BUILD_ROOT" )
  else
    real_build_root_path="$BUILD_ROOT"
  fi
  # Assign first, then mark read-only. The previous 'readonly var=$(cmd)' form masked a failure
  # of cmd (the readonly builtin's own exit status wins), so a failing 'cd' would not stop the
  # script despite 'set -e' (ShellCheck SC2155).
  real_build_root_path=$( cd "$real_build_root_path" && pwd )
  readonly real_build_root_path
}
# Aborts with a helpful message unless the BUILD_ROOT environment variable is set and non-empty.
ensure_build_root_is_set() {
  if [[ -z ${BUILD_ROOT:-} ]]; then
    fatal "The BUILD_ROOT environment variable is not set. This must point to the absolute path" \
          "of the build root directory, e.g. '<yugabyte_src_dir>/build/debug'."
  fi
}
# Aborts unless BUILD_ROOT is set AND points to an existing directory.
ensure_build_root_exists() {
  ensure_build_root_is_set
  [[ -d $BUILD_ROOT ]] || fatal "The directory BUILD_ROOT ('$BUILD_ROOT') does not exist"
}
# Canonicalizes BUILD_ROOT to an absolute physical path, but only if the directory already exists
# (a not-yet-created build root is left untouched).
normalize_build_root() {
  ensure_build_root_is_set
  if [[ -d $BUILD_ROOT ]]; then
    BUILD_ROOT=$( cd "$BUILD_ROOT" && pwd )
  fi
}
# Decides between static and dynamic linking: defaults YB_LINK to "dynamic" when unset or empty,
# validates it against VALID_LINKING_TYPES_RE, then exports and freezes it.
determine_linking_type() {
  YB_LINK=${YB_LINK:-dynamic}
  if [[ ! "${YB_LINK:-}" =~ ^$VALID_LINKING_TYPES_RE$ ]]; then
    fatal "Expected YB_LINK to be set to \"static\" or \"dynamic\", got \"${YB_LINK:-}\""
  fi
  export YB_LINK
  readonly YB_LINK
}
# Aborts unless the given argument is a valid build type (case-insensitive).
validate_build_type() {
  expect_num_args 1 "$@"
  # Named _build_type to avoid clobbering the global build_type variable.
  local _build_type=$1
  is_valid_build_type "$_build_type" ||
    fatal "Invalid build type: '$_build_type'. Valid build types are: ${VALID_BUILD_TYPES[@]}" \
          "(case-insensitive)."
}
# Succeeds iff the given value, lower-cased, matches one of the valid build types.
is_valid_build_type() {
  expect_num_args 1 "$@"
  local -r candidate=$( echo "$1" | to_lowercase )
  [[ "$candidate" =~ $VALID_BUILD_TYPES_RE ]]
}
# Determines the global build_type from the Jenkins job name (a job whose lower-cased name
# contains e.g. "-tsan-" or "_release_" selects that build type), defaulting to "debug". If
# build_type is already set it is only normalized. Every exit path leaves build_type read-only.
set_build_type_based_on_jenkins_job_name() {
  if [[ -n "${build_type:-}" ]]; then
    if [[ -n "${JOB_NAME:-}" ]]; then
      # This message only makes sense if JOB_NAME is set.
      log "Build type is already set to '$build_type', not setting it based on Jenkins job name."
    fi
    normalize_build_type
    readonly build_type
    return
  fi
  build_type=debug
  if [[ -z "${JOB_NAME:-}" ]]; then
    log "Using build type '$build_type' by default because JOB_NAME is not set."
    readonly build_type
    return
  fi
  local _build_type  # to avoid collision with the global build_type variable
  local jenkins_job_name=$( echo "$JOB_NAME" | to_lowercase )
  for _build_type in "${VALID_BUILD_TYPES[@]}"; do
    # Wrapping the job name in "-...-" lets the [-_]...[-_] pattern also match build types at the
    # very beginning or end of the job name.
    if [[ "-$jenkins_job_name-" =~ [-_]$_build_type[-_] ]]; then
      log "Using build type '$_build_type' based on Jenkins job name '$JOB_NAME'."
      readonly build_type=$_build_type
      return
    fi
  done
  readonly build_type
  log "Using build type '$build_type' by default: could not determine from Jenkins job name" \
      "'$JOB_NAME'."
}
# If YB_COMPILER_TYPE is not yet set, picks the platform default (clang on macOS, gcc elsewhere),
# then exports and freezes it. A pre-set value is left untouched (and stays writable).
set_default_compiler_type() {
  if [[ -z "${YB_COMPILER_TYPE:-}" ]]; then
    case "$OSTYPE" in
      darwin*) YB_COMPILER_TYPE=clang ;;
      *)       YB_COMPILER_TYPE=gcc ;;
    esac
    export YB_COMPILER_TYPE
    readonly YB_COMPILER_TYPE
  fi
}
# Succeeds iff the current compiler type is clang.
is_clang() {
  # The [[ ]] test's exit status is the function's return value directly; the previous
  # if/return 0/return 1 scaffolding was redundant.
  [[ $YB_COMPILER_TYPE == "clang" ]]
}
# Succeeds iff the current compiler type is gcc.
is_gcc() {
  # Return the [[ ]] status directly instead of the verbose if/return 0/return 1 form.
  [[ $YB_COMPILER_TYPE == "gcc" ]]
}
# Succeeds iff the host looks like Ubuntu, based on /etc/issue containing the word "Ubuntu".
is_ubuntu() {
  [[ -f /etc/issue ]] && grep -q Ubuntu /etc/issue
}
# On Linux clang builds the clang toolchain itself comes from thirdparty and may have to be
# built before CMake can be pointed at it; on all other configurations this is a no-op.
build_compiler_if_necessary() {
  if ! is_clang || ! is_linux; then
    return
  fi
  log "Building clang before we can run CMake with compiler pointing to clang"
  "$YB_THIRDPARTY_DIR/build_thirdparty.sh" llvm
}
# Sets YB_COMPILER_TYPE from the Jenkins job name (a job containing e.g. "-clang-" selects clang)
# unless it is already set; then validates, freezes, and exports the result. Returns early (with
# YB_COMPILER_TYPE empty and unfrozen) when no compiler type can be derived from the job name.
set_compiler_type_based_on_jenkins_job_name() {
  if [[ -n "${YB_COMPILER_TYPE:-}" ]]; then
    if [[ -n "${JOB_NAME:-}" ]]; then
      log "The YB_COMPILER_TYPE variable is already set to '${YB_COMPILER_TYPE}', not setting it" \
          "based on the Jenkins job name."
    fi
  else
    local compiler_type
    # Guard JOB_NAME with :- so this does not trip 'set -u' when it is unset (e.g. outside
    # Jenkins), matching how set_build_type_based_on_jenkins_job_name treats JOB_NAME.
    local jenkins_job_name=$( echo "${JOB_NAME:-}" | to_lowercase )
    YB_COMPILER_TYPE=""
    for compiler_type in "${VALID_COMPILER_TYPES[@]}"; do
      if [[ "-$jenkins_job_name-" =~ [-_]$compiler_type[-_] ]]; then
        log "Setting YB_COMPILER_TYPE='$compiler_type' based on Jenkins job name '$JOB_NAME'."
        YB_COMPILER_TYPE=$compiler_type
        break
      fi
    done
    if [[ -z "$YB_COMPILER_TYPE" ]]; then
      log "Could not determine compiler type from Jenkins job name '${JOB_NAME:-}'," \
          "will use the default."
      return
    fi
  fi
  validate_compiler_type
  readonly YB_COMPILER_TYPE
  export YB_COMPILER_TYPE
}
# Validates a compiler type against VALID_COMPILER_TYPES_RE. With no argument, validates
# $YB_COMPILER_TYPE (which must then be set); with one argument, validates that value.
validate_compiler_type() {
  local compiler_type
  case $# in
    0)
      if [[ -z ${YB_COMPILER_TYPE:-} ]]; then
        fatal "$FUNCNAME is called with no arguments but YB_COMPILER_TYPE is not set or is empty"
      fi
      compiler_type=$YB_COMPILER_TYPE
      ;;
    1)
      compiler_type=$1
      ;;
    *)
      fatal "$FUNCNAME can only be called with 0 or 1 argument, got $# arguments: $*"
      ;;
  esac
  if [[ ! $compiler_type =~ $VALID_COMPILER_TYPES_RE ]]; then
    fatal "Invalid compiler type: YB_COMPILER_TYPE='$compiler_type'" \
          "(expected one of: ${VALID_COMPILER_TYPES[@]})."
  fi
}
# Aborts unless the given value (lower-cased) is a valid CMAKE_BUILD_TYPE for our CMake build.
validate_cmake_build_type() {
  expect_num_args 1 "$@"
  local _cmake_build_type
  _cmake_build_type=$( echo "$1" | tr A-Z a-z )
  [[ "$_cmake_build_type" =~ $VALID_CMAKE_BUILD_TYPES_RE ]] ||
    fatal "Invalid CMake build type (what we're about to pass to our CMake build as" \
          "_cmake_build_type): '$_cmake_build_type'." \
          "Valid CMake build types are: ${VALID_CMAKE_BUILD_TYPES[@]}."
}
# Forces the compiler type to clang (required for ASAN/TSAN builds), aborting if a different
# compiler was already explicitly selected.
ensure_using_clang() {
  if [[ -n ${YB_COMPILER_TYPE:-} && $YB_COMPILER_TYPE != "clang" ]]; then
    fatal "ASAN/TSAN builds require clang," \
          "but YB_COMPILER_TYPE is already set to '$YB_COMPILER_TYPE'"
  fi
  YB_COMPILER_TYPE="clang"
}
# Turns on ThreadSanitizer: adds the TSAN CMake flag and forces the compiler to clang (via
# ensure_using_clang, which fails fast if a non-clang compiler was already selected).
enable_tsan() {
  cmake_opts+=( -DYB_USE_TSAN=1 )
  ensure_using_clang
}
# This performs two configuration actions:
# - Sets cmake_build_type based on build_type. cmake_build_type is what's being passed to CMake
#   using the CMAKE_BUILD_TYPE variable. CMAKE_BUILD_TYPE can't be "asan" or "tsan".
# - Ensure the YB_COMPILER_TYPE environment variable is set. It is used by our compiler-wrapper.sh
#   script to invoke the appropriate C/C++ compiler.
# Also appends the resulting options to the cmake_opts array and picks the make program
# (make vs. ninja). Freezes build_type, cmake_build_type, and YB_COMPILER_TYPE.
set_cmake_build_type_and_compiler_type() {
  if [[ -z "${cmake_opts:-}" ]]; then
    cmake_opts=()
  fi
  if [[ -z ${build_type:-} ]]; then
    log "Setting build type to 'debug' by default"
    build_type=debug
  fi
  normalize_build_type
  # We're relying on build_type to set more variables, so make sure it does not change later.
  readonly build_type
  case "$build_type" in
    asan)
      cmake_opts+=( -DYB_USE_ASAN=1 -DYB_USE_UBSAN=1 )
      cmake_build_type=fastdebug
      ensure_using_clang
      ;;
    tsan)
      enable_tsan
      cmake_build_type=fastdebug
      ;;
    tsan_slow)
      enable_tsan
      cmake_build_type=debug
      ;;
    idebug|irelease|ifastdebug)
      # "Instrumented" build types: strip the leading "i" to recover the underlying CMake build
      # type and enable function instrumentation.
      cmake_build_type=${build_type:1}
      cmake_opts+=( -DYB_INSTRUMENT_FUNCTIONS=1 )
      ;;
    compilecmds)
      cmake_build_type=debug
      export CMAKE_EXPORT_COMPILE_COMMANDS=1
      export YB_EXPORT_COMPILE_COMMANDS=1
      ;;
    *)
      cmake_build_type=$build_type
  esac
  validate_cmake_build_type "$cmake_build_type"
  readonly cmake_build_type
  if is_mac; then
    if [[ -z ${YB_COMPILER_TYPE:-} ]]; then
      YB_COMPILER_TYPE=clang
    elif [[ $YB_COMPILER_TYPE != "clang" ]]; then
      fatal "YB_COMPILER_TYPE can only be 'clang' on Mac OS X," \
            "found YB_COMPILER_TYPE=$YB_COMPILER_TYPE."
    fi
  elif [[ -z ${YB_COMPILER_TYPE:-} ]]; then
    # The default on Linux.
    YB_COMPILER_TYPE=gcc
  fi
  validate_compiler_type
  readonly YB_COMPILER_TYPE
  export YB_COMPILER_TYPE
  # We need to set CMAKE_C_COMPILER and CMAKE_CXX_COMPILER outside of CMake. We used to do that
  # from CMakeLists.txt, and got into an infinite loop where CMake kept saying:
  #
  #   You have changed variables that require your cache to be deleted.
  #   Configure will be re-run and you may have to reset some variables.
  #   The following variables have changed:
  #   CMAKE_CXX_COMPILER= /usr/bin/c++
  #
  # Not sure why it printed the old value there, since we tried to assign it the new value, the
  # same as what's given below.
  #
  # So our new approach is to pass the correct command-line options to CMake, and still let CMake
  # use the default compiler in CLion-triggered builds.
  cmake_opts+=( "-DCMAKE_BUILD_TYPE=$cmake_build_type" )
  cmake_opts+=( "${YB_DEFAULT_CMAKE_OPTS[@]}" )
  if using_ninja; then
    cmake_opts+=( -G Ninja )
    make_program=ninja
    # If ninja is not on PATH, fall back to a brew-provided binary, or auto-install it on macOS.
    if ! which ninja &>/dev/null; then
      if using_linuxbrew; then
        export YB_NINJA_PATH=$YB_LINUXBREW_DIR/bin/ninja
        make_program=$YB_NINJA_PATH
      elif using_custom_homebrew; then
        export YB_NINJA_PATH=$YB_CUSTOM_HOMEBREW_DIR/bin/ninja
        make_program=$YB_NINJA_PATH
      elif is_mac; then
        log "Did not find the 'ninja' executable, auto-installing ninja using Homebrew"
        brew install ninja
      fi
    fi
    make_file=build.ninja
  else
    make_program=make
    make_file=Makefile
  fi
  cmake_opts+=( -DCMAKE_MAKE_PROGRAM=$make_program )
}
# Decides which Maven local repository and settings file to use (shared NFS locations on Jenkins
# hosts whose source root is on NFS, the user's ~/.m2 otherwise), exports YB_MVN_LOCAL_REPO and
# YB_MVN_SETTINGS_PATH, sets should_copy_artifacts_to_non_shared_repo (consumed by
# copy_artifacts_to_non_shared_mvn_repo), and fills the mvn_common_options array with flags common
# to all Maven invocations.
set_mvn_parameters() {
  local should_use_shared_dirs=false
  should_copy_artifacts_to_non_shared_repo=false
  if is_jenkins && is_src_root_on_nfs; then
    if is_mac && "$is_run_test_script" && [[ -n ${YB_TMP_GROUP_ID:-} ]]; then
      # Mac test runs copy the needed artifacts out of the shared repo instead of using it
      # directly.
      should_use_shared_dirs=false
      should_copy_artifacts_to_non_shared_repo=true
      log "Will not use shared Maven repository ($YB_SHARED_MVN_LOCAL_REPO), but will copy" \
          "the artifact with group id ${YB_TMP_GROUP_ID:-undefined} from it to" \
          "$YB_NON_SHARED_MVN_LOCAL_REPO"
    else
      should_use_shared_dirs=true
      log "Will use shared Maven repository ($YB_SHARED_MVN_LOCAL_REPO)." \
          "Based on parameters: is_run_test_script=$is_run_test_script," \
          "YB_TMP_GROUP_ID=${YB_TMP_GROUP_ID:-undefined}," \
          "OSTYPE=$OSTYPE"
    fi
  fi
  if [[ -z ${YB_MVN_LOCAL_REPO:-} ]]; then
    if "$should_use_shared_dirs"; then
      YB_MVN_LOCAL_REPO=$YB_SHARED_MVN_LOCAL_REPO
    else
      YB_MVN_LOCAL_REPO=$YB_NON_SHARED_MVN_LOCAL_REPO
    fi
  fi
  export YB_MVN_LOCAL_REPO
  if [[ -z ${YB_MVN_SETTINGS_PATH:-} ]]; then
    if "$should_use_shared_dirs"; then
      YB_MVN_SETTINGS_PATH=$YB_SHARED_MVN_SETTINGS
    else
      YB_MVN_SETTINGS_PATH=$HOME/.m2/settings.xml
    fi
  fi
  # Bug fix: this used to be 'export MVN_SETTINGS_PATH' — a variable never assigned anywhere —
  # leaving YB_MVN_SETTINGS_PATH unexported even though it is the variable computed above and
  # consumed by append_common_mvn_opts and child processes.
  export YB_MVN_SETTINGS_PATH
  mvn_common_options=(
    --batch-mode
    -Dmaven.repo.local="$YB_MVN_LOCAL_REPO"
    -Dyb.thirdparty.dir="$YB_THIRDPARTY_DIR"
    -DbinDir="$BUILD_ROOT/bin"
  )
  log "The result of set_mvn_parameters:" \
      "YB_MVN_LOCAL_REPO=$YB_MVN_LOCAL_REPO," \
      "YB_MVN_SETTINGS_PATH=$YB_MVN_SETTINGS_PATH," \
      "should_copy_artifacts_to_non_shared_repo=$should_copy_artifacts_to_non_shared_repo"
}
# Runs rsync with the given arguments, retrying up to 5 times with a 1-second delay. Multiple
# concurrent builds may rsync into the same destination at once, so transient failures are
# expected to converge quickly.
rsync_with_retries() {
  declare -i attempt=1
  declare -i -r max_attempts=5
  while ! ( set -x; rsync "$@" ); do
    if [[ $attempt -eq $max_attempts ]]; then
      log "rsync failed after $max_attempts attempts, giving up"
      return 1
    fi
    log "This was rsync attempt $attempt out of $max_attempts. Re-trying after a delay."
    sleep 1
    let attempt+=1
  done
}
# Copies the Maven artifacts for YB_TMP_GROUP_ID, plus all non-YB artifacts, from the shared NFS
# Maven repository into the local one. No-op unless set_mvn_parameters decided a copy is needed.
copy_artifacts_to_non_shared_mvn_repo() {
  if ! "$should_copy_artifacts_to_non_shared_repo"; then
    return
  fi
  # Turn the dotted group id into a repository-relative path (every "." becomes "/").
  local group_id_rel_path=${YB_TMP_GROUP_ID//./\/}
  local src_dir=$YB_SHARED_MVN_LOCAL_REPO/$group_id_rel_path
  local dest_dir=$YB_MVN_LOCAL_REPO/$group_id_rel_path
  log "Copying Maven artifacts from '$src_dir' to '$dest_dir'"
  mkdir -p "${dest_dir%/*}"
  rsync_with_retries -az "$src_dir/" "$dest_dir"
  log "Copying non-YB artifacts from '$YB_SHARED_MVN_LOCAL_REPO' to '$YB_MVN_LOCAL_REPO'"
  rsync_with_retries "$YB_SHARED_MVN_LOCAL_REPO/" "$YB_MVN_LOCAL_REPO" --exclude 'org/yb*'
}
# Appends the common Maven options (mvn_common_options), plus --settings with
# $YB_MVN_SETTINGS_PATH when that file exists, to the caller's mvn_opts array. Logs a warning
# when a non-default settings path was requested but is missing.
append_common_mvn_opts() {
  mvn_opts+=( "${mvn_common_options[@]}" )
  if [[ -f $YB_MVN_SETTINGS_PATH ]]; then
    mvn_opts+=( --settings "$YB_MVN_SETTINGS_PATH" )
  elif [[ $YB_MVN_SETTINGS_PATH != $HOME/.m2/settings.xml ]]; then
    log "Non-default maven user settings file specified by YB_MVN_SETTINGS_PATH does not exist:" \
        "'$YB_MVN_SETTINGS_PATH'"
  fi
}
# A utility function called by both 'build_yb_java_code' and 'build_yb_java_code_with_retries'.
# Runs Maven once with the common options, filters noisy output, and determines success by
# grepping the saved output for "BUILD SUCCESS" (since mvn may run with '--fail-never'). Honors
# a java_build_output_path variable from the caller's scope via bash dynamic scoping; when
# absent, logs to its own temp file, which is removed on the mvn-success path. On a pipeline
# failure the output file is dumped and removed regardless of who created it.
build_yb_java_code_filter_save_output() {
  set_mvn_parameters
  log "Building Java code in $PWD"
  # --batch-mode hides download progress.
  # We are filtering out some patterns from Maven output, e.g.:
  # [INFO] META-INF/NOTICE already added, skipping
  # [INFO] Downloaded: https://repo.maven.apache.org/maven2/org/codehaus/plexus/plexus-classworlds/2.4/plexus-classworlds-2.4.jar (46 KB at 148.2 KB/sec)
  # [INFO] Downloading: https://repo.maven.apache.org/maven2/org/apache/maven/doxia/doxia-logging-api/1.1.2/doxia-logging-api-1.1.2.jar
  local has_local_output=false  # default is output path variable is set by calling function
  if [[ -z ${java_build_output_path:-} ]]; then
    local java_build_output_path=/tmp/yb-java-build-$( get_timestamp ).$$.tmp
    has_local_output=true
  fi
  local mvn_opts=()
  append_common_mvn_opts
  if ! is_jenkins; then
    mvn_opts+=( -Dmaven.javadoc.skip )
  fi
  set +e -x  # +e: do not fail on grep failure, -x: print the command to stderr.
  if mvn "${mvn_opts[@]}" "$@" 2>&1 | \
     egrep -v --line-buffered "$MVN_OUTPUT_FILTER_REGEX" | \
     tee "$java_build_output_path"; then
    set +x  # stop printing commands
    # We are testing for mvn build failure with grep, since we run mvn with '--fail-never' which
    # always returns success. '--fail-at-end' could have been another possibility, but that mode
    # skips dependent modules so most tests are often not run. Therefore, we resort to grep.
    egrep "BUILD SUCCESS" "$java_build_output_path" &>/dev/null
    local mvn_exit_code=$?
    set -e
    if [[ $has_local_output == "true" ]]; then
      rm -f "$java_build_output_path"  # cleaning up
    fi
    if [[ $mvn_exit_code -eq 0 ]]; then
      log "Java build SUCCEEDED"
    else
      # Useful for searching in console output.
      log "Java build FAILED: could not find 'BUILD SUCCESS' in Maven output"
    fi
    return $mvn_exit_code
  fi
  set -e +x
  log "Java build or one of its output filters failed"
  if [[ -f $java_build_output_path ]]; then
    log "Java build output (from '$java_build_output_path'):"
    cat "$java_build_output_path"
    log "(End of Java build output)"
    rm -f "$java_build_output_path"
  else
    log "Java build output path file not found at '$java_build_output_path'"
  fi
  return 1
}
# Runs the Java build once and returns its effective exit code. The java_build_output_path local
# is intentionally visible to build_yb_java_code_filter_save_output through bash dynamic scoping,
# so the callee logs into this file instead of creating its own; it is removed afterwards here.
build_yb_java_code() {
  local java_build_output_path=/tmp/yb-java-build-$( get_timestamp ).$$.tmp
  build_yb_java_code_filter_save_output "$@"
  local mvn_exit_code=$?
  rm -f "$java_build_output_path"
  return $mvn_exit_code
}
# Runs the Java build up to MAX_JAVA_BUILD_ATTEMPTS times, retrying only when the saved output
# indicates a transient artifact-download ("Could not transfer artifact") failure.
build_yb_java_code_with_retries() {
  # Shared with build_yb_java_code_filter_save_output via dynamic scoping (see build_yb_java_code).
  local java_build_output_path=/tmp/yb-java-build-$( get_timestamp ).$$.tmp
  declare -i attempt=1
  while [[ $attempt -le $MAX_JAVA_BUILD_ATTEMPTS ]]; do
    if build_yb_java_code_filter_save_output "$@"; then
      rm -f "$java_build_output_path"
      return 0
    fi
    # NOTE(review): on its failure path build_yb_java_code_filter_save_output appears to delete
    # $java_build_output_path before returning, in which case this grep can never match and no
    # retry happens — verify whether the retry-on-connectivity-issue logic still works.
    if grep "Could not transfer artifact" "$java_build_output_path" >/dev/null; then
      log "Java build attempt $attempt failed due to temporary connectivity issues, re-trying."
    else
      return 1
    fi
    rm -f "$java_build_output_path"
    let attempt+=1
  done
  return 1
}
# Runs the retrying Java build in every directory listed in yb_java_project_dirs, timing each
# one; stops and fails on the first directory whose build does not succeed.
build_yb_java_code_in_all_dirs() {
  local dir
  for dir in "${yb_java_project_dirs[@]}"; do
    pushd "$dir"
    if ! time build_yb_java_code_with_retries "$@"; then
      log "Failed to build Java code in directory '$dir'" \
          "with these Maven arguments: $*"
      return 1
    fi
    popd
  done
}
# Create a directory on an ephemeral drive and link it into the given target location. If there are
# no ephemeral drives, create the directory in place.
# Parameters:
#   target_path - The target path to create the directory or symlink at.
#   directory_identifier - A unique identifier that will be used in naming the new directory
#                          created on an ephemeral drive.
create_dir_on_ephemeral_drive() {
  expect_num_args 2 "$@"
  local target_path=$1
  local directory_identifier=$2
  if [[ -z ${num_ephemeral_drives:-} ]]; then
    # Collect available ephemeral drives. This is only done once.
    local ephemeral_mountpoint
    # EPHEMERAL_DRIVES_FILTER_REGEX is not supposed to be anchored in the end, so we need to add
    # a "$" to filter ephemeral mountpoints correctly.
    ephemeral_drives=()
    for ephemeral_mountpoint in $EPHEMERAL_DRIVES_GLOB; do
      if [[ -d $ephemeral_mountpoint &&
            $ephemeral_mountpoint =~ $EPHEMERAL_DRIVES_FILTER_REGEX$ ]]; then
        ephemeral_drives+=( "$ephemeral_mountpoint" )
      fi
    done
    declare -r -i num_ephemeral_drives=${#ephemeral_drives[@]}  # "-r -i" means readonly integer.
  fi
  if [[ $num_ephemeral_drives -eq 0 ]]; then
    if [[ -n ${YB_VERBOSE:-} && ! -d $target_path ]]; then
      log "No ephemeral drives found, creating directory '$target_path' in place."
    fi
    mkdir_safe "$target_path"
  else
    # Pick a random ephemeral drive, presumably to spread usage across the drives — confirm.
    local random_drive=${ephemeral_drives[$RANDOM % $num_ephemeral_drives]}
    # NOTE(review): relies on a jenkins_job_and_build variable from the surrounding environment;
    # confirm it is always set before this function is called.
    local actual_dir=$random_drive/${USER}__$jenkins_job_and_build/$directory_identifier
    mkdir_safe "$actual_dir"
    # Create the parent directory that we'll be creating a link in, if necessary.
    if [[ ! -d ${target_path%/*} ]]; then
      log "Directory $target_path does not exist, creating it before creating a symlink inside."
      mkdir_safe "${target_path%/*}"
    fi
    ln -s "$actual_dir" "$target_path"
    log "Created '$target_path' as a symlink to an ephemeral drive location '$actual_dir'."
  fi
}
# Skip the most part of the normal C++ build output. Still keep the "100%" lines so we can see
# if the build runs to completion. This only filters stdin, so it is expected that stderr is
# redirected to stdout when invoking the C++ build.
filter_boring_cpp_build_output() {
  # NOTE: the regex continuation lines below intentionally start at column 0 — any leading
  # whitespace would become part of the pattern.
  egrep -v --line-buffered "\
^(\[ *[0-9]{1,2}%\] +)*(\
Building C(XX)? object |\
Running C[+][+] protocol buffer compiler (with YRPC plugin )?on |\
Linking CXX ((static|shared )?library|executable) |\
Built target \
)|\
Scanning dependencies of target |\
^ssh: connect to host .* port [0-9]+: Connection (timed out|refused)|\
Host .* seems to be down, retrying on a different host|\
Connection to .* closed by remote host.|\
ssh: Could not resolve hostname build-workers-.*: Name or service not known"
}
# Moves (or adds) the given directory to the front of PATH so it takes precedence.
put_path_entry_first() {
  expect_num_args 1 "$@"
  local entry=$1
  remove_path_entry "$entry"
  export PATH=$entry:$PATH
}
# Appends the given directory to PATH unless it is already present as a component. Uses the
# ":$PATH:" containment idiom, which — unlike the previous three-pattern check — also covers the
# edge case where PATH consists of exactly that single entry.
add_path_entry() {
  expect_num_args 1 "$@"
  local path_entry=$1
  if [[ ":$PATH:" != *":$path_entry:"* ]]; then
    export PATH+=:$path_entry
  fi
}
# Removes the ccache wrapper directory from PATH so we can find the real path to a compiler, e.g.
# /usr/bin/gcc instead of /usr/lib64/ccache/gcc. This is expected to run in a subshell so that we
# don't make any unexpected changes to the script's PATH.
# TODO: how to do this properly on Mac OS X?
# (The /usr/lib64/ccache location is the conventional RHEL/CentOS ccache symlink dir.)
remove_ccache_dir_from_path() {
  remove_path_entry /usr/lib64/ccache
}
# Given a compiler type, e.g. gcc or clang, find the actual compiler executable (not a wrapper
# provided by ccache). Takes into account YB_GCC_PREFIX and YB_CLANG_PREFIX variables that allow to
# use custom gcc and clang installations. Sets cc_executable and cxx_executable variables. This is
# used in compiler-wrapper.sh.
find_compiler_by_type() {
  # NOTE(review): this non-local assignment predates the 'local' declaration two lines down and
  # leaks compiler_type into the caller's scope; kept as-is in case external scripts rely on that
  # side effect — confirm before removing.
  compiler_type=$1
  validate_compiler_type "$1"
  local compiler_type=$1
  unset cc_executable
  unset cxx_executable
  case "$compiler_type" in
    gcc)
      if [[ -n ${YB_GCC_PREFIX:-} ]]; then
        if [[ ! -d $YB_GCC_PREFIX/bin ]]; then
          fatal "Directory YB_GCC_PREFIX/bin ($YB_GCC_PREFIX/bin) does not exist"
        fi
        cc_executable=$YB_GCC_PREFIX/bin/gcc
        cxx_executable=$YB_GCC_PREFIX/bin/g++
      elif using_linuxbrew; then
        cc_executable=$YB_LINUXBREW_DIR/bin/gcc
        cxx_executable=$YB_LINUXBREW_DIR/bin/g++
      else
        # Bare command names; resolved to full paths by the validation loop below.
        cc_executable=gcc
        cxx_executable=g++
      fi
      ;;
    gcc8)
      if [[ -n ${YB_GCC8_PREFIX:-} ]]; then
        if [[ ! -d $YB_GCC8_PREFIX/bin ]]; then
          # Bug fix: this message used to reference $YB_GCC_PREFIX — the wrong variable, which
          # may be unset here and would trip 'set -u' — even though this branch validates
          # YB_GCC8_PREFIX.
          fatal "Directory YB_GCC8_PREFIX/bin ($YB_GCC8_PREFIX/bin) does not exist"
        fi
        cc_executable=$YB_GCC8_PREFIX/bin/gcc-8
        cxx_executable=$YB_GCC8_PREFIX/bin/g++-8
      else
        cc_executable=$(which gcc-8)
        cxx_executable=$(which g++-8)
      fi
      ;;
    clang)
      if [[ -n ${YB_CLANG_PREFIX:-} ]]; then
        if [[ ! -d $YB_CLANG_PREFIX/bin ]]; then
          fatal "Directory \$YB_CLANG_PREFIX/bin ($YB_CLANG_PREFIX/bin) does not exist"
        fi
        cc_executable=$YB_CLANG_PREFIX/bin/clang
      elif [[ $OSTYPE =~ ^darwin ]]; then
        cc_executable=/usr/bin/clang
      else
        local clang_path
        local clang_found=false
        local clang_paths_to_try=(
          "$YB_THIRDPARTY_DIR/clang-toolchain/bin/clang"
          # clang is present in this location in pre-built third-party archives built before
          # the transition to Linuxbrew (https://phabricator.dev.yugabyte.com/D982). This can be
          # removed when the transition is complete.
          "$YB_THIRDPARTY_DIR/installed/common/bin/clang"
        )
        for clang_path in "${clang_paths_to_try[@]}"; do
          if [[ -f $clang_path ]]; then
            cc_executable=$clang_path
            clang_found=true
            break
          fi
        done
        if ! "$clang_found"; then
          fatal "Failed to find clang at the following locations: ${clang_paths_to_try[@]}"
        fi
      fi
      if [[ -z ${cxx_executable:-} ]]; then
        cxx_executable=$cc_executable++  # clang -> clang++
      fi
      ;;
    zapcc)
      if [[ -n ${YB_ZAPCC_INSTALL_PATH:-} ]]; then
        cc_executable=$YB_ZAPCC_INSTALL_PATH/bin/zapcc
        cxx_executable=$YB_ZAPCC_INSTALL_PATH/bin/zapcc++
      else
        cc_executable=zapcc
        cxx_executable=zapcc++
      fi
      ;;
    *)
      fatal "Unknown compiler type '$compiler_type'"
  esac
  # -----------------------------------------------------------------------------------------------
  # Validate existence of compiler executables.
  # -----------------------------------------------------------------------------------------------
  local compiler_var_name
  for compiler_var_name in cc_executable cxx_executable; do
    if [[ -n ${!compiler_var_name:-} ]]; then
      local compiler_path=${!compiler_var_name}
      if [[ ! -x $compiler_path && $compiler_path =~ ^[a-z+]+$ ]]; then
        # This is a plain "gcc/g++/clang/clang++" compiler command name. Try to find the exact
        # compiler path using the "which" command.
        set +e
        compiler_path=$( remove_ccache_dir_from_path && which "${!compiler_var_name}" )
        if [[ $? -ne 0 ]]; then
          # "which" did not work, revert to the old value.
          compiler_path=${!compiler_var_name}
        fi
        set -e
      fi
      if [[ ! -x $compiler_path ]]; then
        fatal "Compiler executable does not exist at the path we set $compiler_var_name to" \
              "(possibly applying 'which' expansion): $compiler_path" \
              "(trying to use compiler type '$compiler_type')."
      fi
      eval $compiler_var_name=\"$compiler_path\"
    fi
  done
}
# Make pushd and popd quiet.
# http://stackoverflow.com/questions/25288194/dont-display-pushd-popd-stack-accross-several-bash-scripts-quiet-pushd-popd
# Also fails fast with a clear error when the first argument is not an existing directory.
pushd() {
  local dir_name=$1
  [[ -d $dir_name ]] || fatal "Directory '$dir_name' does not exist"
  command pushd "$@" > /dev/null
}
# Quiet counterpart to the pushd wrapper above: delegates to the builtin and suppresses the
# directory-stack printout.
popd() {
  command popd "$@" > /dev/null
}
# Dispatches to the platform-appropriate "brew" detection: Linuxbrew on Linux, the custom
# Homebrew installation on macOS; a no-op (with a log message) elsewhere.
detect_brew() {
  if is_linux; then
    detect_linuxbrew
    return
  fi
  if is_mac; then
    detect_custom_homebrew
    return
  fi
  log "Not a Linux or a macOS platform, the detect_brew function is a no-op."
}
# Downloads and extracts a .tar.gz archive (URL in $1) under the parent directory $2, unless the
# destination directory already exists. Concurrent attempts across processes are serialized via a
# flock-protected lock file. Sets the global extracted_dir to the final destination directory.
download_and_extract_archive() {
  expect_num_args 2 "$@"
  extracted_dir=""
  local url=$1
  local dest_dir_parent=$2
  local tar_gz_name=${url##*/}
  local install_dir_name=${tar_gz_name%.tar.gz}
  local dest_dir=$dest_dir_parent/$install_dir_name
  if [[ ! -d $dest_dir && ! -L $dest_dir ]]; then
    if [[ ! -d $YB_DOWNLOAD_LOCKS_DIR ]]; then
      # umask 0 in a subshell so the shared locks directory is writable by all users without
      # changing this process's umask.
      ( umask 0; mkdir -p "$YB_DOWNLOAD_LOCKS_DIR" )
    fi
    (
      umask 0
      lock_path=$YB_DOWNLOAD_LOCKS_DIR/$install_dir_name
      (
        # Wait up to YB_DOWNLOAD_LOCK_TIMEOUT_SEC seconds for an exclusive lock on fd 200.
        flock -w "$YB_DOWNLOAD_LOCK_TIMEOUT_SEC" 200
        # Re-check under the lock: another process may have finished the extraction meanwhile.
        if [[ ! -d $dest_dir && ! -L $dest_dir ]]; then
          log "[Host $(hostname)] Acquired lock $lock_path, proceeding with archive installation."
          (
            set -x
            "$YB_SRC_ROOT/python/yb/download_and_extract_archive.py" \
              --url "$url" --dest-dir-parent "$dest_dir_parent"
          )
        else
          log "[Host $(hostname)] Acquired lock $lock_path but directory $dest_dir already" \
              "exists. This is OK."
        fi
      ) 200>"$lock_path"
    )
  fi
  extracted_dir=$dest_dir
}
# Downloads and extracts the third-party dependencies archive referenced by YB_THIRDPARTY_URL,
# then the matching Linuxbrew archive whose URL is recorded in linuxbrew_url.txt inside the
# extracted thirdparty directory. Exports YB_THIRDPARTY_DIR and YB_LINUXBREW_DIR.
download_thirdparty() {
  download_and_extract_archive "$YB_THIRDPARTY_URL" /opt/yb-build/thirdparty
  export YB_THIRDPARTY_DIR=$extracted_dir
  local linuxbrew_url_path=$YB_THIRDPARTY_DIR/linuxbrew_url.txt
  if [[ -f $linuxbrew_url_path ]]; then
    local linuxbrew_url=$(<"$linuxbrew_url_path")
    download_and_extract_archive "$linuxbrew_url" /opt/yb-build/brew
    export YB_LINUXBREW_DIR=$extracted_dir
  else
    fatal "Cannot download Linuxbrew: file $linuxbrew_url_path does not exist"
  fi
}
# Downloads the pre-built Linuxbrew archive for the given version, verifies its SHA-256 checksum,
# extracts it under YB_LINUXBREW_LOCAL_ROOT, and runs its post_install.sh. Linux-only; a no-op on
# Ubuntu, where Linuxbrew is not used.
install_linuxbrew() {
  if ! is_linux; then
    fatal "Expected this function to only be called on Linux"
  fi
  if is_ubuntu; then
    return
  fi
  local version=$1
  local linuxbrew_dirname=linuxbrew-$version
  local linuxbrew_dir=$YB_LINUXBREW_LOCAL_ROOT/$linuxbrew_dirname
  local linuxbrew_archive="${linuxbrew_dir}.tar.gz"
  local linuxbrew_archive_checksum="${linuxbrew_archive}.sha256"
  # The continuation line starts at column 0 so no whitespace sneaks into the URL string.
  local url="https://github.com/YugaByte/brew-build/releases/download/$version/\
linuxbrew-$version.tar.gz"
  mkdir -p "$YB_LINUXBREW_LOCAL_ROOT"
  if [[ ! -f $linuxbrew_archive ]]; then
    echo "Downloading Linuxbrew from $url..."
    # A previously downloaded checksum file would not match the fresh archive; remove it so it
    # gets re-fetched below.
    rm -f "$linuxbrew_archive_checksum"
    curl -L "$url" -o "$linuxbrew_archive"
  fi
  if [[ ! -f $linuxbrew_archive_checksum ]]; then
    echo "Downloading Linuxbrew archive checksum file for $url..."
    curl -L "$url.sha256" -o "$linuxbrew_archive_checksum"
  fi
  echo "Verifying Linuxbrew archive checksum ..."
  pushd "$YB_LINUXBREW_LOCAL_ROOT"
  sha256sum -c --strict "$linuxbrew_archive_checksum"
  popd
  echo "Installing Linuxbrew into $linuxbrew_dir..."
  # Extract into a unique temp directory first, then move into place, so a failed or concurrent
  # extraction never leaves a partial tree at the final location.
  local tmp=$YB_LINUXBREW_LOCAL_ROOT/tmp/$$_$RANDOM$RANDOM
  mkdir -p "$tmp"
  tar zxf "$linuxbrew_archive" -C "$tmp"
  if mv "$tmp/$linuxbrew_dirname" "$YB_LINUXBREW_LOCAL_ROOT/"; then
    pushd "$linuxbrew_dir"
    ./post_install.sh
    popd
  fi
}
# If the given directory looks like a complete Linuxbrew installation (the directory itself plus
# its bin/, lib/ and include/ subdirectories all exist), exports its resolved real path as
# YB_LINUXBREW_DIR and succeeds; otherwise fails without side effects.
try_set_linuxbrew_dir() {
  local candidate=$1
  local subdir
  for subdir in "" /bin /lib /include; do
    if [[ ! -d "$candidate$subdir" ]]; then
      return 1
    fi
  done
  export YB_LINUXBREW_DIR=$(realpath "$candidate")
  return 0
}
# -------------------------------------------------------------------------------------------------
# Detecting Homebrew/Linuxbrew
# -------------------------------------------------------------------------------------------------
# Block until the given directory exists, polling once per second for up to
# $DIRECTORY_EXISTENCE_WAIT_TIMEOUT_SEC seconds; fatal on timeout. Used for
# NFS-mounted directories that may appear with a delay.
wait_for_directory_existence() {
expect_num_args 1 "$@"
local dir_path=$1
declare -i attempt=0
while [[ ! -d $dir_path ]]; do
if [[ $attempt -ge $DIRECTORY_EXISTENCE_WAIT_TIMEOUT_SEC ]]; then
fatal "Gave up waiting for directory '$dir_path' to appear after $attempt seconds"
fi
log "Directory '$dir_path' not found, waiting for it to mount"
# Listing the path, ignoring errors -- presumably to nudge an NFS
# automounter into mounting it; confirm against the build infra setup.
( set +e; ls "$dir_path"/* >/dev/null )
let attempt+=1
sleep 1
done
}
# Determine which Linuxbrew installation to use and export YB_LINUXBREW_DIR.
# Resolution order:
#   1. A pre-set YB_LINUXBREW_DIR (just re-exported, no validation).
#   2. The shared Jenkins build under $SHARED_LINUXBREW_BUILDS_DIR for the
#      version listed in thirdparty/linuxbrew_version.txt (waiting for NFS to
#      mount when on Jenkins).
#   3. A local install under $YB_LINUXBREW_LOCAL_ROOT, installed on demand
#      when not on Jenkins.
# No-op on Ubuntu (Linuxbrew not used there); fatal if called on non-Linux.
detect_linuxbrew() {
if ! is_linux; then
fatal "Expected this function to only be called on Linux"
fi
if [[ -n ${YB_LINUXBREW_DIR:-} ]]; then
export YB_LINUXBREW_DIR
return
fi
unset YB_LINUXBREW_DIR
if ! is_linux; then
return
fi
if is_ubuntu; then
# Not using Linuxbrew on Ubuntu.
return
fi
local version_file=$YB_SRC_ROOT/thirdparty/linuxbrew_version.txt
if [[ ! -f $version_file ]]; then
fatal "'$version_file' does not exist"
fi
local linuxbrew_version=$( read_file_and_trim "$version_file" )
local linuxbrew_dirname="linuxbrew-$linuxbrew_version"
local candidates=()
local jenkins_linuxbrew_dir="$SHARED_LINUXBREW_BUILDS_DIR/$linuxbrew_dirname"
if [[ -d $jenkins_linuxbrew_dir ]]; then
candidates=( "$jenkins_linuxbrew_dir" )
elif is_jenkins; then
if is_src_root_on_nfs; then
# On Jenkins with NFS sources the shared directory may still be mounting.
wait_for_directory_existence "$jenkins_linuxbrew_dir"
candidates=( "$jenkins_linuxbrew_dir" )
else
yb_fatal_exit_code=$YB_EXIT_CODE_NO_SUCH_FILE_OR_DIRECTORY
fatal "Warning: Linuxbrew directory referenced by '$version_file' does not" \
"exist: '$jenkins_linuxbrew_dir', refusing to proceed to prevent " \
"non-deterministic builds."
fi
fi
# Take the first candidate that looks like a complete Linuxbrew install.
if [[ ${#candidates[@]} -gt 0 ]]; then
local linuxbrew_dir
for linuxbrew_dir in "${candidates[@]}"; do
if try_set_linuxbrew_dir "$linuxbrew_dir"; then
return
fi
done
fi
# Fall back to a local installation, installing it on demand (never on Jenkins).
local linuxbrew_local_dir="$YB_LINUXBREW_LOCAL_ROOT/$linuxbrew_dirname"
if ! is_jenkins && [[ ! -d $linuxbrew_local_dir ]]; then
install_linuxbrew "$linuxbrew_version"
fi
if ! try_set_linuxbrew_dir "$linuxbrew_local_dir"; then
if [[ ${#candidates[@]} -gt 0 ]]; then
log "Could not find Linuxbrew in any of these directories: ${candidates[@]}."
else
log "Could not find Linuxbrew candidate directories."
fi
log "Failed to install Linuxbrew $linuxbrew_version into $linuxbrew_local_dir."
fi
}
# -------------------------------------------------------------------------------------------------
# Similar to detect_linuxbrew, but for macOS.
# This function was created by copying detect_linuxbrew and replacing Linuxbrew with Homebrew
# in a few places. This was done to avoid destabilizing the Linux environment. Rather than
# refactoring detect_custom_homebrew and detect_linuxbrew functions to extract common parts, we will
# leave that until our whole build environment framework is rewritten in Python.
# Mikhail Bautin, 11/14/2018
# -------------------------------------------------------------------------------------------------
# Determine which custom Homebrew installation to use on macOS and export
# YB_CUSTOM_HOMEBREW_DIR. Resolution order:
#   1. A pre-set YB_CUSTOM_HOMEBREW_DIR (just re-exported, no validation).
#   2. Candidate directories: the user's ~/.homebrew-yb-build plus the shared
#      "Homebrew for Jenkins" build for the version listed in
#      thirdparty/homebrew_version_for_jenkins.txt; on Jenkins the shared
#      build takes precedence, otherwise the user's local build does.
# The first candidate with bin/, lib/ and include/ subdirectories wins.
# Fatal if called on non-macOS.
detect_custom_homebrew() {
  if ! is_mac; then
    fatal "Expected this function to only be called on macOS"
  fi
  if [[ -n ${YB_CUSTOM_HOMEBREW_DIR:-} ]]; then
    export YB_CUSTOM_HOMEBREW_DIR
    return
  fi
  local candidates=(
    "$HOME/.homebrew-yb-build"
  )
  # Fixed: preferred_homebrew_dir was previously assigned without 'local',
  # leaking a global variable into the caller's environment.
  local preferred_homebrew_dir
  local version_for_jenkins_file=$YB_SRC_ROOT/thirdparty/homebrew_version_for_jenkins.txt
  if [[ -f $version_for_jenkins_file ]]; then
    local version_for_jenkins=$( read_file_and_trim "$version_for_jenkins_file" )
    preferred_homebrew_dir="$SHARED_CUSTOM_HOMEBREW_BUILDS_DIR/homebrew_$version_for_jenkins"
    if [[ -d $preferred_homebrew_dir ]]; then
      if is_jenkins_user; then
        # If we're running on Jenkins (or building something for consumption by Jenkins under the
        # "jenkins" user), then the "Homebrew for Jenkins" directory takes precedence.
        candidates=( "$preferred_homebrew_dir" "${candidates[@]}" )
      else
        # Otherwise, the user's local Homebrew build takes precedence.
        candidates=( "${candidates[@]}" "$preferred_homebrew_dir" )
      fi
    elif is_jenkins; then
      if is_src_root_on_nfs; then
        # The shared directory may still be mounting over NFS.
        wait_for_directory_existence "$preferred_homebrew_dir"
      else
        yb_fatal_exit_code=$YB_EXIT_CODE_NO_SUCH_FILE_OR_DIRECTORY
        fatal "Warning: Homebrew directory referenced by '$version_for_jenkins_file' does not" \
              "exist: '$preferred_homebrew_dir', refusing to proceed to prevent " \
              "non-deterministic builds."
      fi
    fi
  elif is_jenkins; then
    log "Warning: '$version_for_jenkins_file' does not exist"
  fi
  local homebrew_dir
  for homebrew_dir in "${candidates[@]}"; do
    if [[ -d "$homebrew_dir" &&
          -d "$homebrew_dir/bin" &&
          -d "$homebrew_dir/lib" &&
          -d "$homebrew_dir/include" ]]; then
      export YB_CUSTOM_HOMEBREW_DIR=$homebrew_dir
      break
    fi
  done
}
# -------------------------------------------------------------------------------------------------
# End of the detect_custom_homebrew function that was created by copying/pasting and editing
# the detect_linuxbrew function.
# -------------------------------------------------------------------------------------------------
# True (exit 0) when we are on Linux and a Linuxbrew directory has been
# detected/configured via YB_LINUXBREW_DIR.
using_linuxbrew() {
  if ! is_linux; then
    return 1
  fi
  [[ -n ${YB_LINUXBREW_DIR:-} ]]
}
# True (exit 0) when we are on macOS and a custom Homebrew directory has been
# detected/configured via YB_CUSTOM_HOMEBREW_DIR.
using_custom_homebrew() {
  if ! is_mac; then
    return 1
  fi
  [[ -n ${YB_CUSTOM_HOMEBREW_DIR:-} ]]
}
# Decide whether to build with Ninja and export YB_USE_NINJA (1/0).
# If YB_USE_NINJA is unset, infer it from the BUILD_ROOT name suffix
# ("-ninja"), or from ninja being available on PATH or inside Linuxbrew.
# When Ninja is used, also resolve and validate YB_NINJA_PATH.
set_use_ninja() {
  if [[ -z ${YB_USE_NINJA:-} ]]; then
    if [[ -n ${BUILD_ROOT:-} ]]; then
      # A predefined build root encodes the generator choice in its name.
      if [[ $BUILD_ROOT == *-ninja ]]; then
        export YB_USE_NINJA=1
      else
        export YB_USE_NINJA=0
      fi
    else
      if which ninja &>/dev/null; then
        export YB_USE_NINJA=1
      elif using_linuxbrew; then
        local yb_ninja_path=$YB_LINUXBREW_DIR/bin/ninja
        if [[ -f $yb_ninja_path ]]; then
          export YB_USE_NINJA=1
          export YB_NINJA_PATH=$yb_ninja_path
        fi
      fi
    fi
    # Resolve the actual ninja executable path if we decided to use Ninja above
    # but have not located the binary yet.
    if using_ninja && [[ -z ${yb_ninja_path:-} ]]; then
      set +e
      local which_ninja=$( which ninja 2>/dev/null )
      set -e
      if using_linuxbrew; then
        local yb_ninja_path=$YB_LINUXBREW_DIR/bin/ninja
        if [[ ! -f $yb_ninja_path ]]; then
          fatal "When using Linuxbrew, Ninja must be installed as part of Linuxbrew, but this" \
                "file does not exist: $yb_ninja_path"
        fi
      elif using_custom_homebrew; then
        local yb_ninja_path=$YB_CUSTOM_HOMEBREW_DIR/bin/ninja
        if [[ ! -f $yb_ninja_path ]]; then
          # Fixed: this message previously said "part of Linuxbrew" in the
          # custom-Homebrew branch.
          fatal "When using custom Homebrew, Ninja must be installed as part of Homebrew, but" \
                "this file does not exist: $yb_ninja_path"
        fi
      elif [[ -f $which_ninja ]]; then
        local yb_ninja_path=$which_ninja
      else
        fatal "Could not set yb_ninja_path, ninja not found on PATH: $PATH"
      fi
    fi
  fi
}
# True (exit 0) iff YB_USE_NINJA is set to exactly "1".
using_ninja() {
  [[ ${YB_USE_NINJA:-} == "1" ]]
}
# Prepend the Linuxbrew or custom-Homebrew bin directory (when one is in use)
# to PATH so that its compiler and linker are found before the system ones.
add_brew_bin_to_path() {
if using_linuxbrew; then
# We need to add Linuxbrew's bin directory to PATH so that we can find the right compiler and
# linker.
put_path_entry_first "$YB_LINUXBREW_DIR/bin"
fi
if using_custom_homebrew; then
# The same for a custom Homebrew installation on macOS.
put_path_entry_first "$YB_CUSTOM_HOMEBREW_DIR/bin"
fi
}
# Detect the number of CPUs into YB_NUM_CPUS unless it already holds a valid
# number (which allows a manual override). Fatal on unsupported OSes or if
# detection produces a non-numeric value.
detect_num_cpus() {
  if [[ ${YB_NUM_CPUS:-} =~ ^[0-9]+$ ]]; then
    return
  fi
  if is_linux; then
    YB_NUM_CPUS=$(grep -c processor /proc/cpuinfo)
  elif is_mac; then
    YB_NUM_CPUS=$(sysctl -n hw.ncpu)
  else
    fatal "Don't know how to detect the number of CPUs on OS $OSTYPE."
  fi
  if [[ ! $YB_NUM_CPUS =~ ^[0-9]+$ ]]; then
    fatal "Invalid number of CPUs detected: '$YB_NUM_CPUS' (expected a number)."
  fi
}
# Compute and export YB_MAKE_PARALLELISM unless already set. With remote
# compilation enabled, parallelism is (number of build workers listed in
# $YB_BUILD_WORKERS_FILE + 1), clamped to
# [MIN_EFFECTIVE_NUM_BUILD_WORKERS, MAX_EFFECTIVE_NUM_BUILD_WORKERS], times
# $YB_NUM_CORES_PER_BUILD_WORKER. Otherwise it is the local CPU count.
detect_num_cpus_and_set_make_parallelism() {
detect_num_cpus
if [[ -z ${YB_MAKE_PARALLELISM:-} ]]; then
if [[ ${YB_REMOTE_COMPILATION:-} == "1" ]]; then
declare -i num_build_workers=$( wc -l "$YB_BUILD_WORKERS_FILE" | awk '{print $1}' )
# Add one to the number of workers so that we cause the auto-scaling group to scale up a bit
# by stressing the CPU on each worker a bit more.
declare -i effective_num_build_workers=$(( $num_build_workers + 1 ))
# However, make sure this number is within a reasonable range.
if [[ $effective_num_build_workers -lt $MIN_EFFECTIVE_NUM_BUILD_WORKERS ]]; then
effective_num_build_workers=$MIN_EFFECTIVE_NUM_BUILD_WORKERS
fi
if [[ $effective_num_build_workers -gt $MAX_EFFECTIVE_NUM_BUILD_WORKERS ]]; then
effective_num_build_workers=$MAX_EFFECTIVE_NUM_BUILD_WORKERS
fi
YB_MAKE_PARALLELISM=$(( $effective_num_build_workers * $YB_NUM_CORES_PER_BUILD_WORKER ))
else
YB_MAKE_PARALLELISM=$YB_NUM_CPUS
fi
fi
export YB_MAKE_PARALLELISM
}
# Sanity-check that YB_THIRDPARTY_DIR points at a real third-party source tree
# (fails fatally via the ensure_* helpers otherwise).
validate_thirdparty_dir() {
ensure_directory_exists "$YB_THIRDPARTY_DIR/build_definitions"
ensure_directory_exists "$YB_THIRDPARTY_DIR/patches"
ensure_file_exists "$YB_THIRDPARTY_DIR/build_definitions/__init__.py"
}
# Detect if we're running on Google Compute Platform. We perform this check lazily as there might be
# a bit of a delay resolving the domain name.
# Detect whether we are running on Google Compute Engine and record the result
# in the (readonly) global is_running_on_gcp_exit_code (0 = yes, 1 = no).
# Detection method per
# https://cloud.google.com/compute/docs/instances/managing-instances#dmi:
# the metadata server is only reachable from within GCE.
# YB_PRETEND_WE_ARE_ON_GCP forces the "on GCP" result.
detect_gcp() {
  local exit_code=1  # "false" by default
  if [[ -n ${YB_PRETEND_WE_ARE_ON_GCP:-} ]]; then
    exit_code=0
  elif curl metadata.google.internal --silent --output /dev/null --connect-timeout 1; then
    exit_code=0
  fi
  readonly is_running_on_gcp_exit_code=$exit_code
}
# Lazily-memoized predicate: are we running on Google Compute Engine?
# Runs detect_gcp on first use only, since the DNS lookup may be slow.
is_running_on_gcp() {
  [[ -n ${is_running_on_gcp_exit_code:-} ]] || detect_gcp
  return "$is_running_on_gcp_exit_code"
}
# True (exit 0) when the current user is "jenkins".
# Fixed: use ${USER:-} so this does not abort under 'set -u' in environments
# (e.g. some cron/daemon contexts) where USER is not exported.
is_jenkins_user() {
  [[ ${USER:-} == "jenkins" ]]
}
# True (exit 0) when this looks like a Jenkins build: JOB_NAME is set and we
# are running as the "jenkins" user.
is_jenkins() {
  if [[ -z ${JOB_NAME:-} ]]; then
    return 1  # Probably running locally.
  fi
  is_jenkins_user
}
# Compress test logs on Jenkins, or whenever YB_GZIP_TEST_LOGS=1 is set.
should_gzip_test_logs() {
  if is_jenkins; then
    return 0
  fi
  [[ ${YB_GZIP_TEST_LOGS:-0} == "1" ]]
}
# For each file provided as an argument, gzip the given file if it exists and is not already
# compressed.
# For each argument, gzip the file in place if it exists and does not already
# carry a .gz or .bz2 extension (i.e. is not already compressed).
gzip_if_exists() {
  local file_path
  for file_path in "$@"; do
    case $file_path in
      *.gz|*.bz2) continue ;;
    esac
    if [[ -f $file_path ]]; then
      gzip "$file_path"
    fi
  done
}
# Check if we're in a Jenkins master build.
# True (exit 0) when the Jenkins job name indicates a master-branch build
# (contains "-master-").
is_jenkins_master_build() {
  [[ -n ${JOB_NAME:-} && $JOB_NAME == *-master-* ]]
}
# Check if we're in a Jenkins Phabricator build (a pre-commit build).
# True (exit 0) when this is a Jenkins Phabricator (pre-commit) build: the
# job name contains "-phabricator-" or ends with "-phabricator".
is_jenkins_phabricator_build() {
  case ${JOB_NAME:-} in
    "")                             return 1 ;;  # Not running on Jenkins.
    *-phabricator-*|*-phabricator)  return 0 ;;
    *)                              return 1 ;;  # Some other kind of job.
  esac
}
# Check if we're using an NFS partition in YugaByte's build environment.
# True (exit 0) when YB_SRC_ROOT matches the NFS path regex used in
# YugaByte's build environment (YB_NFS_PATH_RE).
is_src_root_on_nfs() {
  [[ $YB_SRC_ROOT =~ $YB_NFS_PATH_RE ]]
}
# True (exit 0) iff remote (distributed) compilation is enabled.
# YB_REMOTE_COMPILATION must already be normalized to 0 or 1 by
# configure_remote_compilation; anything else is a programming error.
using_remote_compilation() {
  if [[ ! $YB_REMOTE_COMPILATION =~ ^(0|1)$ ]]; then
    # TODO: this will still return from the function as if the value is false. This is how bash
    # return values work.
    fatal "YB_REMOTE_COMPILATION is supposed to be 0 or 1 by the time using_remote_compilation is" \
          "called."
  fi
  [[ ${YB_REMOTE_COMPILATION:-} == "1" ]]
}
# This is used for escaping command lines for remote execution.
# From StackOverflow: https://goo.gl/sTKReB
# Using this approach: "Put the whole string in single quotes. This works for all chars except
# single quote itself. To escape the single quote, close the quoting before it, insert the single
# quote, and re-open the quoting."
#
# Escape a command line for remote execution, storing the result in the
# global escape_cmd_line_rv. Approach from https://goo.gl/sTKReB: wrap each
# argument in single quotes, and encode every embedded single quote as '\''
# (close quoting, escaped quote, reopen quoting).
escape_cmd_line() {
  escape_cmd_line_rv=""
  local arg
  for arg in "$@"; do
    # Fixed: ${arg//...} (double slash) replaces ALL single quotes. The
    # previous single-slash form only replaced the first occurrence, producing
    # broken quoting for arguments containing more than one single quote.
    escape_cmd_line_rv+=" '"${arg//\'/\'\\\'\'}"'"
    # This should be equivalent to the sed command below. The quadruple backslash encodes one
    # backslash in the replacement string. We don't need that in the pure-bash implementation above.
    # sed -e "s/'/'\\\\''/g; 1s/^/'/; \$s/\$/'/"
  done
  # Remove the leading space if necessary.
  escape_cmd_line_rv=${escape_cmd_line_rv# }
}
# True (exit 0) only when YB_DEBUG_REMOTE_COMPILATION is set to exactly "1".
debugging_remote_compilation() {
  case ${YB_DEBUG_REMOTE_COMPILATION:-undefined} in
    1) return 0 ;;
    *) return 1 ;;
  esac
}
# Encode a command line into the YB_ENCODED_REMOTE_CMD_LINE environment
# variable so remote_cmd.sh can reconstruct it on the other side of ssh.
# Each argument is terminated by ARG_SEPARATOR, which must match the separator
# used in remote_cmd.sh.
cmd_line_to_env_vars_for_remote_cmd() {
  # Fixed: removed the unused counter 'declare -i i=1' and made the loop
  # variable local so it no longer leaks into the caller's scope.
  local arg
  YB_ENCODED_REMOTE_CMD_LINE=""
  # This must match the separator in remote_cmd.sh.
  declare -r ARG_SEPARATOR=$'=:\t:='
  for arg in "$@"; do
    YB_ENCODED_REMOTE_CMD_LINE+=$arg$ARG_SEPARATOR
  done
  # This variable must be accessible to remote_cmd.sh on the other side of ssh.
  export YB_ENCODED_REMOTE_CMD_LINE
}
# Run the given executable on a remote build host via ssh.
# Args: $1 - build host, $2 - executable, remaining args - arguments for the
# executable. The arguments travel in the YB_ENCODED_REMOTE_CMD_LINE env var
# (set by cmd_line_to_env_vars_for_remote_cmd) rather than on the ssh command
# line, to sidestep quoting issues. With remote-compilation debugging enabled,
# the ssh invocation is traced and stderr merged into stdout.
run_remote_cmd() {
  local build_host=$1
  local executable=$2
  shift 2
  cmd_line_to_env_vars_for_remote_cmd "$@"
  local ssh_cmd=(
    ssh
    "$build_host"
    "$YB_BUILD_SUPPORT_DIR/remote_cmd.sh"
    "$PWD"
    "$PATH"
    "$executable"
  )
  if debugging_remote_compilation; then
    ( set -x; "${ssh_cmd[@]}" ) 2>&1
  else
    "${ssh_cmd[@]}"
  fi
}
# Normalize YB_REMOTE_COMPILATION from {0, 1, auto, unset} to an exported 0/1.
# "auto" (the default) enables remote compilation only in an NFS GCP
# environment; otherwise it is disabled, with extra diagnostics logged on
# Jenkins. Fatal on any other value.
configure_remote_compilation() {
if [[ ! ${YB_REMOTE_COMPILATION:-auto} =~ ^(0|1|auto)$ ]]; then
fatal "Invalid value of the YB_REMOTE_COMPILATION environment variable: can be '0', '1', or" \
"'auto'. Actual value: ${YB_REMOTE_COMPILATION:-undefined}."
fi
# Automatically set YB_REMOTE_COMPILATION in an NFS GCP environment.
if [[ ${YB_REMOTE_COMPILATION:-auto} == "auto" ]]; then
if is_running_on_gcp && is_src_root_on_nfs; then
log "Automatically enabling remote compilation (running in an NFS GCP environment). " \
"Use YB_REMOTE_COMPILATION=0 (or the --no-remote ybd option) to disable this behavior."
YB_REMOTE_COMPILATION=1
else
YB_REMOTE_COMPILATION=0
if is_jenkins; then
# Make it easier to diagnose why we're not using the distributed build. Only enable this on
# Jenkins to avoid confusing output during development.
log "Not using remote compilation: " \
"YB_REMOTE_COMPILATION=${YB_REMOTE_COMPILATION:-undefined}. " \
"See additional diagnostics below."
is_running_on_gcp && log "Running on GCP." || log "This is not GCP."
if is_src_root_on_nfs; then
log "YB_SRC_ROOT ($YB_SRC_ROOT) appears to be on NFS in YugaByte's distributed" \
"build setup."
fi
fi
fi
fi
export YB_REMOTE_COMPILATION
}
# Print the contents of the given file with leading and trailing whitespace
# stripped from every line. Logs a message and returns 1 if the file does not
# exist.
read_file_and_trim() {
  expect_num_args 1 "$@"
  local file_name=$1
  if [[ -f $file_name ]]; then
    # Run sed directly on the file (the previous version piped through a
    # "useless use of cat").
    sed -e 's/^[[:space:]]*//; s/[[:space:]]*$//' "$file_name"
  else
    log "File '$file_name' does not exist"
    return 1
  fi
}
# True (exit 0) unless YB_THIRDPARTY_DIR is set to something other than its
# default location, $YB_SRC_ROOT/thirdparty.
using_default_thirdparty_dir() {
  if [[ -z ${YB_THIRDPARTY_DIR:-} ]]; then
    return 0
  fi
  [[ $YB_THIRDPARTY_DIR == "$YB_SRC_ROOT/thirdparty" ]]
}
# If an explicit third-party archive URL (YB_THIRDPARTY_URL) is configured,
# download and extract it and mark third-party deps as not needing a rebuild.
find_thirdparty_by_url() {
  if [[ -z ${YB_THIRDPARTY_URL:-} ]]; then
    return
  fi
  download_thirdparty
  export NO_REBUILD_THIRDPARTY=1
}
# In our internal environment we build third-party dependencies in separate directories on NFS
# so that we can use them across many builds.
# Locate pre-built third-party dependencies, preferring (1) an explicit
# YB_THIRDPARTY_URL download, then (2) a shared NFS build under
# $NFS_PARENT_DIR_FOR_SHARED_THIRDPARTY for the version recorded in
# shared_thirdparty_version_for_jenkins_<os>.txt. Sets the global
# found_shared_thirdparty_dir (true/false) and, on success, exports
# YB_THIRDPARTY_DIR and NO_REBUILD_THIRDPARTY=1. Falls back silently to
# building third-party deps locally when nothing shared is available.
find_thirdparty_dir() {
if [[ -n ${YB_THIRDPARTY_URL:-} ]]; then
find_thirdparty_by_url
log "YB_THIRDPARTY_DIR=$YB_THIRDPARTY_DIR"
log "YB_LINUXBREW_DIR=$YB_LINUXBREW_DIR"
found_shared_thirdparty_dir=true
return
fi
found_shared_thirdparty_dir=false
local parent_dir_for_shared_thirdparty=$NFS_PARENT_DIR_FOR_SHARED_THIRDPARTY
if [[ ! -d $parent_dir_for_shared_thirdparty ]]; then
log "Parent directory for shared third-party directories" \
"('$NFS_PARENT_DIR_FOR_SHARED_THIRDPARTY') does not exist, cannot use pre-built" \
"third-party directory from there."
return
fi
# The shared build version is pinned per-OS by a file in build-support.
local version=$(
read_file_and_trim \
"$YB_BUILD_SUPPORT_DIR/shared_thirdparty_version_for_jenkins_${short_os_name}.txt"
)
local thirdparty_dir_suffix="yugabyte-thirdparty-${version}/thirdparty"
local existing_thirdparty_dir="${parent_dir_for_shared_thirdparty}/${thirdparty_dir_suffix}"
if [[ -d $existing_thirdparty_dir ]]; then
log "Using existing third-party dependencies from $existing_thirdparty_dir"
if is_jenkins; then
# Best-effort cleanup of any previously built in-tree third-party deps.
log "Cleaning the old dedicated third-party dependency build in '$YB_SRC_ROOT/thirdparty'"
unset YB_THIRDPARTY_DIR
if ! ( set -x; "$YB_SRC_ROOT/thirdparty/clean_thirdparty.sh" --all ); then
log "Failed to clean the old third-party directory. Ignoring this error."
fi
fi
export YB_THIRDPARTY_DIR=$existing_thirdparty_dir
found_shared_thirdparty_dir=true
export NO_REBUILD_THIRDPARTY=1
return
fi
log "Even though the top-level directory '$parent_dir_for_shared_thirdparty'" \
"exists, we could not find a prebuilt shared third-party directory there that exists. " \
"Falling back to building our own third-party dependencies."
}
# When true, suppress the informational log messages below (set as a function-
# local override by handle_build_root_from_current_dir).
handle_predefined_build_root_quietly=false
# Derive and validate build settings (build_type, YB_COMPILER_TYPE,
# YB_USE_NINJA) from the global 'predefined_build_root' path, whose basename
# must match BUILD_ROOT_BASENAME_RE. No-op when predefined_build_root is
# empty. Fatal on any mismatch between the path components and already-set
# values of those settings.
handle_predefined_build_root() {
expect_num_args 0 "$@"
if [[ -z ${predefined_build_root:-} ]]; then
return
fi
# Canonicalize: resolve a symlinked build root and normalize the directory path.
if [[ -L $predefined_build_root ]]; then
predefined_build_root=$( readlink "$predefined_build_root" )
fi
if [[ -d $predefined_build_root ]]; then
predefined_build_root=$( cd "$predefined_build_root" && pwd )
fi
if [[ $predefined_build_root != $YB_BUILD_INTERNAL_PARENT_DIR/* && \
$predefined_build_root != $YB_BUILD_EXTERNAL_PARENT_DIR/* ]]; then
# Sometimes $predefined_build_root contains symlinks on its path.
$YB_SRC_ROOT/build-support/validate_build_root.py \
"$predefined_build_root" \
"$YB_BUILD_INTERNAL_PARENT_DIR" \
"$YB_BUILD_EXTERNAL_PARENT_DIR"
fi
# Parse "<build_type>-<compiler>-<linking>[-ninja]" out of the basename.
local basename=${predefined_build_root##*/}
if [[ $basename =~ $BUILD_ROOT_BASENAME_RE ]]; then
local _build_type=${BASH_REMATCH[1]}
local _compiler_type=${BASH_REMATCH[2]}
local _linking_type=${BASH_REMATCH[3]}
local _dash_ninja=${BASH_REMATCH[4]}
else
fatal "Could not parse build root directory name '$basename'" \
"(full path: '$predefined_build_root'). Expected to match '$BUILD_ROOT_BASENAME_RE'."
fi
# Adopt the parsed build type, or verify it against a pre-set one.
if [[ -z ${build_type:-} ]]; then
build_type=$_build_type
if ! "$handle_predefined_build_root_quietly"; then
log "Setting build type to '$build_type' based on predefined build root ('$basename')"
fi
validate_build_type "$build_type"
elif [[ $build_type != $_build_type ]]; then
fatal "Build type from the build root ('$_build_type' from '$predefined_build_root') does " \
"not match current build type ('$build_type')."
fi
# Same for the compiler type.
if [[ -z ${YB_COMPILER_TYPE:-} ]]; then
export YB_COMPILER_TYPE=$_compiler_type
if ! "$handle_predefined_build_root_quietly"; then
log "Automatically setting compiler type to '$YB_COMPILER_TYPE' based on predefined build" \
"root ('$basename')"
fi
elif [[ $YB_COMPILER_TYPE != $_compiler_type ]]; then
fatal "Compiler type from the build root ('$_compiler_type' from '$predefined_build_root') " \
"does not match YB_COMPILER_TYPE ('$YB_COMPILER_TYPE')."
fi
# Same for the Ninja flag encoded as a "-ninja" suffix.
export YB_USE_NINJA=${YB_USE_NINJA:-}
if [[ $_dash_ninja == "-ninja" && -z ${YB_USE_NINJA:-} ]]; then
if ! "$handle_predefined_build_root_quietly"; then
log "Setting YB_USE_NINJA to 1 based on predefined build root ('$basename')"
fi
export YB_USE_NINJA=1
elif [[ $_dash_ninja == "-ninja" && $YB_USE_NINJA != "1" || \
$_dash_ninja != "-ninja" && $YB_USE_NINJA == "1" ]]; then
fatal "The use of ninja from build root ('$predefined_build_root') does not match that" \
"of the YB_USE_NINJA env var ('$YB_USE_NINJA')"
fi
set_use_ninja
}
# Remove the build/latest symlink to prevent Jenkins from showing every test twice in test results.
# We call this from a few different places just in case.
# Delete the build/latest symlink (if present) so Jenkins does not show every
# test twice in test results. Called from several places just in case.
remove_latest_symlink() {
  local latest_build_link=$YB_BUILD_PARENT_DIR/latest
  if [[ ! -h $latest_build_link ]]; then
    return
  fi
  log "Removing the latest symlink at '$latest_build_link'"
  ( set -x; unlink "$latest_build_link" )
}
# Assigns a random "test invocation id" that allows to kill stuck processes corresponding to this
# instance of a particular test or the whole test suite.
# Tag this test run with a unique id (exported as YB_TEST_INVOCATION_ID) so
# that stuck processes belonging to it can later be found and killed by
# kill_stuck_processes.
set_test_invocation_id() {
  local timestamp
  timestamp=$( get_timestamp_for_filenames )
  export YB_TEST_INVOCATION_ID=test_invocation_${timestamp}_${RANDOM}_${RANDOM}_$$
}
# Kills any processes that have YB_TEST_INVOCATION_ID in their command line. Sets
# killed_stuck_processes=true in case that happens.
# SIGKILL any processes whose command line contains the current
# YB_TEST_INVOCATION_ID (see set_test_invocation_id). Sets the global
# killed_stuck_processes=true if at least one process was killed. No-op when
# YB_TEST_INVOCATION_ID is unset.
kill_stuck_processes() {
expect_num_args 0 "$@"
killed_stuck_processes=false
if [[ -z ${YB_TEST_INVOCATION_ID:-} ]]; then
return
fi
local pid
for pid in $( pgrep -f "$YB_TEST_INVOCATION_ID" ); do
log "Found pid $pid from this test suite (YB_TEST_INVOCATION_ID=$YB_TEST_INVOCATION_ID)," \
"killing it with SIGKILL."
# Log the process details before killing it, for post-mortem debugging.
ps -p "$pid" -f
if kill -9 "$pid"; then
killed_stuck_processes=true
log "Killed process $pid with SIGKILL."
fi
done
}
# Infer the predefined build root by walking up from the current directory
# until a path component matching BUILD_ROOT_BASENAME_RE is found, then apply
# it via handle_predefined_build_root (quietly). Skipped entirely during
# third-party builds. Fatal if no ancestor directory matches.
handle_build_root_from_current_dir() {
predefined_build_root=""
if [[ ${YB_IS_THIRDPARTY_BUILD:-} == "1" ]]; then
return
fi
# Suppress handle_predefined_build_root's informational logging for this call.
local handle_predefined_build_root_quietly=true
local d=$PWD
while [[ $d != "/" && $d != "" ]]; do
basename=${d##*/}
if [[ ${YB_DEBUG_BUILD_ROOT_BASENAME_VALIDATION:-0} == "1" ]]; then
log "Trying to match basename $basename to regex: $BUILD_ROOT_BASENAME_RE"
fi
if [[ $basename =~ $BUILD_ROOT_BASENAME_RE ]]; then
predefined_build_root=$d
handle_predefined_build_root
return
fi
# Strip the last path component and keep walking up.
d=${d%/*}
done
fatal "Working directory of the compiler '$PWD' is not within a valid Yugabyte build root: " \
"'$BUILD_ROOT_BASENAME_RE'"
}
# Validate that a command-line option value is a non-negative integer within
# [min_value, max_value]; fatal otherwise.
# Args: $1 - option name (for error messages), $2 - value, $3 - min, $4 - max.
validate_numeric_arg_range() {
  expect_num_args 4 "$@"
  local arg_name=$1
  local arg_value=$2
  local -r -i min_value=$3
  local -r -i max_value=$4
  case $arg_value in
    ''|*[!0-9]*)
      fatal "Invalid numeric argument value for --$arg_name: '$arg_value'"
      ;;
  esac
  if (( arg_value < min_value || arg_value > max_value )); then
    fatal "Value out of range for --$arg_name: $arg_value, must be between $min_value and" \
          "$max_value."
  fi
}
# -------------------------------------------------------------------------------------------------
# Python support
# -------------------------------------------------------------------------------------------------
# Checks syntax of all Python scripts in the repository.
# Check the syntax of all Python scripts tracked by git in the repository,
# running check_python_syntax.py on up to 8 files in parallel.
check_python_script_syntax() {
if [[ -n ${YB_VERBOSE:-} ]]; then
log "Checking syntax of Python scripts"
fi
pushd "$YB_SRC_ROOT"
# Split xargs input on newlines only, so paths with spaces survive.
local IFS=$'\n'
git ls-files '*.py' | xargs -P 8 -n 1 "$YB_BUILD_SUPPORT_DIR/check_python_syntax.py"
popd
}
# Ensure $YB_PYTHON_WRAPPERS_DIR is the first entry on PATH (removing any
# existing occurrence first) so our Python wrapper scripts shadow the system
# interpreters.
add_python_wrappers_dir_to_path() {
# Make sure the Python wrappers directory is the first on PATH
remove_path_entry "$YB_PYTHON_WRAPPERS_DIR"
export PATH=$YB_PYTHON_WRAPPERS_DIR:$PATH
}
# Activate (creating if necessary) the project Python 2 virtualenv under
# $YB_BUILD_PARENT_DIR/$YB_VIRTUALENV_BASENAME and install the frozen
# requirements into it. Honors:
#   YB_RECREATE_VIRTUALENV=1 - delete and recreate the virtualenv first;
#   yb_readonly_virtualenv   - never create/modify the virtualenv;
#   YB_PIP_NO_CACHE          - pass --no-cache-dir to pip;
#   YB_DEBUG_VIRTUALENV=1    - dump environment diagnostics to stderr.
# Exports VIRTUAL_ENV. Order of operations matters: PATH setup, deactivation
# of any other active virtualenv, creation, then activation.
activate_virtualenv() {
local virtualenv_parent_dir=$YB_BUILD_PARENT_DIR
local virtualenv_dir=$virtualenv_parent_dir/$YB_VIRTUALENV_BASENAME
if [[ ! $virtualenv_dir = */$YB_VIRTUALENV_BASENAME ]]; then
fatal "Internal error: virtualenv_dir ('$virtualenv_dir') must end" \
"with YB_VIRTUALENV_BASENAME ('$YB_VIRTUALENV_BASENAME')"
fi
if [[ ${YB_RECREATE_VIRTUALENV:-} == "1" && -d $virtualenv_dir ]] && \
! "$yb_readonly_virtualenv"; then
log "YB_RECREATE_VIRTUALENV is set, deleting virtualenv at '$virtualenv_dir'"
rm -rf "$virtualenv_dir"
unset YB_RECREATE_VIRTUALENV
fi
# To run pip2 itself we already need to add our Python wrappers directory to PATH.
add_python_wrappers_dir_to_path
if [[ ! -d $virtualenv_dir ]]; then
if "$yb_readonly_virtualenv"; then
fatal "virtualenv does not exist at '$virtualenv_dir', and we are not allowed to create it"
fi
if [[ -n ${VIRTUAL_ENV:-} && -f $VIRTUAL_ENV/bin/activate ]]; then
local old_virtual_env=$VIRTUAL_ENV
# Re-activate and deactivate the other virtualenv we're in. Otherwise the deactivate
# function might not even be present in our current shell. This is necessary because otherwise
# the --user installation below will fail.
set +eu
. "$VIRTUAL_ENV/bin/activate"
deactivate
set -eu
# Not clear why deactivate does not do this.
remove_path_entry "$old_virtual_env/bin"
fi
# We need to be using system python to install the virtualenv module or create a new virtualenv.
(
set -x
pip2 install virtualenv --user
mkdir -p "$virtualenv_parent_dir"
cd "$virtualenv_parent_dir"
python2 -m virtualenv "$YB_VIRTUALENV_BASENAME"
)
fi
# The activate script references unset variables; relax nounset around it.
set +u
. "$virtualenv_dir"/bin/activate
set -u
local pip_no_cache=""
if [[ -n ${YB_PIP_NO_CACHE:-} ]]; then
pip_no_cache="--no-cache-dir"
fi
if ! "$yb_readonly_virtualenv"; then
local requirements_file_path="$YB_SRC_ROOT/python_requirements_frozen.txt"
local installed_requirements_file_path=$virtualenv_dir/${requirements_file_path##*/}
# Only re-run pip install when the frozen requirements file changed since the
# last successful install into this virtualenv.
if ! cmp --silent "$requirements_file_path" "$installed_requirements_file_path"; then
run_with_retries 10 0.5 pip2 install -r "$requirements_file_path" \
$pip_no_cache
fi
# To avoid re-running pip install, save the requirements that we've installed in the virtualenv.
cp "$requirements_file_path" "$installed_requirements_file_path"
fi
if [[ ${YB_DEBUG_VIRTUALENV:-0} == "1" ]]; then
echo >&2 "
VIRTUALENV DEBUGGING
--------------------

    Activated virtualenv in: $virtualenv_dir
    Executable: $0
    PATH: $PATH
    PYTHONPATH: ${PYTHONPATH:-undefined}
    VIRTUAL_ENV: ${VIRTUAL_ENV:-undefined}
"
fi
export VIRTUAL_ENV
}
# Verify that the given Python interpreter has the expected major version and
# at least the given minor version; fatal otherwise.
# Args: $1 - interpreter command, $2 - required major version,
#       $3 - minimum minor version.
check_python_interpreter_version() {
expect_num_args 3 "$@"
local python_interpreter=$1
local expected_major_version=$2
local minor_version_lower_bound=$3
# Get the Python interpreter version. Filter out debug output we may be adding if
# YB_PYTHON_WRAPPER_DEBUG is set.
local version_str=$(
export yb_log_quiet=true
"$python_interpreter" --version 2>&1 >/dev/null | grep -v "Invoking Python"
)
# Parse "Python X.Y.Z" into major (X) and minor (Y) components.
version_str=${version_str#Python }
local actual_major_version=${version_str%%.*}
local version_str_without_major=${version_str#*.}
local actual_minor_version=${version_str_without_major%%.*}
if [[ ! $actual_major_version =~ ^[0-9]+ ]]; then
fatal "Invalid format of Python major version: $actual_major_version." \
"Version string for interpreter $python_interpreter: $version_str"
fi
if [[ ! $actual_minor_version =~ ^[0-9]+ ]]; then
fatal "Invalid format of Python minor version: $actual_minor_version." \
"Version string for interpreter $python_interpreter: $version_str"
fi
if [[ $actual_major_version -ne $expected_major_version ]]; then
fatal "Expected major version for Python interpreter '$python_interpreter' to be" \
"'$expected_major_version', found '$actual_major_version'. Full Python version:" \
"'$version_str'."
fi
if [[ $actual_minor_version -lt $minor_version_lower_bound ]]; then
fatal "Expected minor version for Python interpreter '$python_interpreter' to be at least " \
"'$minor_version_lower_bound', found '$actual_minor_version'. Full Python version:" \
"'$version_str'."
fi
}
# Verify python2 is at least version 2.7, and on macOS additionally check that
# any Homebrew-managed Python entries in /usr/local/bin are healthy: each must
# be a symlink and must not point back into /usr/bin (either condition
# suggests a broken Homebrew installation).
check_python_interpreter_versions() {
  check_python_interpreter_version python2 2 7
  if is_mac; then
    local python_interpreter_basename
    # Fixed: the list previously contained "python 2.7" (two tokens), so the
    # intended python2.7 symlink was never checked and the bogus path
    # /usr/local/bin/2.7 was checked instead (it also listed "python" twice).
    for python_interpreter_basename in python python2 python2.7 python3; do
      local homebrew_interpreter_path=/usr/local/bin/$python_interpreter_basename
      if [[ -e $homebrew_interpreter_path ]]; then
        if [[ ! -L $homebrew_interpreter_path ]]; then
          fatal "$homebrew_interpreter_path exists but is not a symlink." \
                "Broken Homebrew installation?"
        fi
        local link_target=$( readlink "$homebrew_interpreter_path" )
        if [[ $link_target == /usr/bin/* ]]; then
          fatal "Found symlink $homebrew_interpreter_path -> $link_target." \
                "Broken Homebrew installation?"
        fi
      fi
    done
  fi
}
# Log a human-readable description of what (if anything) exists at the given
# path, distinguishing symlinks (and their targets' types), regular files,
# and directories. The symlink checks must come first: -f/-d follow symlinks.
log_file_existence() {
  expect_num_args 1 "$@"
  local file_name=$1
  local description
  if [[ -L $file_name && -f $file_name ]]; then
    description="Symlink exists and points to a file"
  elif [[ -L $file_name && -d $file_name ]]; then
    description="Symlink exists and points to a directory"
  elif [[ -L $file_name ]]; then
    description="Symlink exists but it might be broken"
  elif [[ -f $file_name ]]; then
    description="File exists"
  elif [[ -d $file_name ]]; then
    description="Directory exists"
  elif [[ ! -e $file_name ]]; then
    description="File does not exist"
  else
    description="File exists but we could not determine its type"
  fi
  log "$description: $file_name"
}
# True (exit 0) iff the argument is a full git SHA-1: exactly 40 lowercase
# hex digits. Fixed: the regex previously lacked the '$' anchor, so any
# string merely STARTING with 40 hex digits was accepted.
is_valid_git_sha1() {
  [[ $1 =~ ^[0-9a-f]{40}$ ]]
}
# Returns current git SHA1 in the variable current_git_sha1.
# Store the SHA1 of the current git HEAD (of the repo containing $PWD) in the
# global current_git_sha1; fatal if git output does not look like a SHA1.
get_current_git_sha1() {
current_git_sha1=$( git rev-parse HEAD )
if ! is_valid_git_sha1 "$current_git_sha1"; then
fatal "Could not get current git SHA1 in $PWD, got: $current_git_sha1"
fi
}
# sed -i works differently on Linux vs macOS.
# Portable in-place sed: BSD (macOS) sed requires an explicit (here: empty)
# backup suffix after -i, while GNU sed takes -i with no argument.
sed_i() {
  if is_mac; then
    sed -i "" "$@"
    return
  fi
  sed -i "$@"
}
# Light-weight lint of YB Java test files under the projects listed in the
# global array yb_java_project_dirs. Flags tests that (a) do not use one of
# the YB test runners (YBTestRunner, YBParameterizedTestRunner,
# YBTestRunnerNonTsanOnly) or (b) import org.junit.Assert directly instead of
# org.yb.AssertionWrappers. Returns 1 if any violations were found.
lint_java_code() {
local java_project_dir
declare -i num_errors=0
for java_project_dir in "${yb_java_project_dirs[@]}"; do
# Split find output on newlines only, so paths with spaces survive.
local IFS=$'\n'
# Collect Test*.java / *Test.java files, excluding utility and base classes.
local java_test_files=( $(
find "$java_project_dir" \( -name "Test*.java" -or -name "*Test.java" \) -and \
-not -name "TestUtils.java" -and \
-not -name "*Base.java" -and \
-not -name "Base*Test.java"
) )
local java_test_file
for java_test_file in "${java_test_files[@]}"; do
local log_prefix="YB JAVA LINT: $java_test_file"
# (a) Require one of the YB @RunWith runners (optionally written as
# "@RunWith(value = ...)").
if ! grep -Eq '@RunWith\((value[ ]*=[ ]*)?YBParameterizedTestRunner\.class\)' \
"$java_test_file" &&
! grep -Eq '@RunWith\((value[ ]*=[ ]*)?YBTestRunner\.class\)' \
"$java_test_file" &&
! grep -Eq '@RunWith\((value[ ]*=[ ]*)?YBTestRunnerNonTsanOnly\.class\)' \
"$java_test_file"
then
log "$log_prefix: neither YBTestRunner, YBParameterizedTestRunner, nor" \
"YBTestRunnerNonTsanOnly are being used in test"
num_errors+=1
fi
# (b) Forbid direct JUnit Assert imports.
if grep -Fq 'import static org.junit.Assert' "$java_test_file" ||
grep -Fq 'import org.junit.Assert' "$java_test_file"; then
log "$log_prefix: directly importing org.junit.Assert. Should use org.yb.AssertionWrappers."
num_errors+=1
fi
done
done
if [[ $num_errors -eq 0 ]]; then
log "Light-weight lint of YB Java code: SUCCESS"
else
log "Light-weight lint of YB Java code: FAILURE ($num_errors errors found)"
return 1
fi
}
# Run a command, retrying on failure.
# Args: $1 - max attempts, $2 - delay in seconds between attempts, remaining
# args - the command and its arguments. Returns as soon as the command
# succeeds; fatal after max_attempts failures.
run_with_retries() {
  # Fixed: the guard was '$# -lt 2', which (contradicting the error message
  # below) allowed calls with exactly two arguments and no command at all.
  if [[ $# -lt 3 ]]; then
    fatal "run_with_retries requires at least three arguments: max_attempts, delay_sec, and " \
          "the command to run (at least one additional argument)."
  fi
  declare -i -r max_attempts=$1
  declare -r delay_sec=$2
  shift 2
  declare -i attempt_index=1
  while [[ $attempt_index -le $max_attempts ]]; do
    # Temporarily disable errexit so a failing attempt does not kill the script.
    set +e
    "$@"
    declare exit_code=$?
    set -e
    if [[ $exit_code -eq 0 ]]; then
      return
    fi
    log "Warning: command failed with exit code $exit_code at attempt $attempt_index: $*." \
        "Waiting for $delay_sec sec, will then re-try for up to $max_attempts attempts."
    let attempt_index+=1
    sleep "$delay_sec"
  done
  fatal "Failed to execute command after $max_attempts attempts: $*"
}
# Invoke the named zero-argument predicate function and log whether it
# returned true or false.
debug_log_boolean_function_result() {
  expect_num_args 1 "$@"
  local fn_name=$1
  local result=false
  if "$fn_name"; then
    result=true
  fi
  log "$fn_name is $result"
}
# On macOS only: locate the JDK 1.8 home via /usr/libexec/java_home, export
# JAVA_HOME (warning if it changes an existing value), and put its bin
# directory first on PATH. No-op on other OSes.
set_java_home() {
if ! is_mac; then
return
fi
# macOS has a peculiar way of setting JAVA_HOME
local cmd_to_get_java_home="/usr/libexec/java_home --version 1.8"
local new_java_home=$( $cmd_to_get_java_home )
if [[ ! -d $new_java_home ]]; then
fatal "Directory returned by '$cmd_to_get_java_home' does not exist: $new_java_home"
fi
if [[ -n ${JAVA_HOME:-} && $JAVA_HOME != $new_java_home ]]; then
log "Warning: updating JAVA_HOME from $JAVA_HOME to $new_java_home"
else
log "Setting JAVA_HOME: $new_java_home"
fi
export JAVA_HOME=$new_java_home
put_path_entry_first "$JAVA_HOME/bin"
}
# Clone any missing git submodules and sync submodule working trees to the
# commits recorded by the superproject. This does NOT create any new commits
# in the top-level repository. See "update" in
# https://git-scm.com/docs/git-submodule.
update_submodules() {
  (
    cd "$YB_SRC_ROOT"
    git submodule update --init --recursive
  )
}
# Determine and export YB_THIRDPARTY_URL (and YB_DOWNLOAD_THIRDPARTY=1) for
# prebuilt third-party dependencies. Preference order: (1) the URL cached in
# $BUILD_ROOT/thirdparty_url.txt from a previous run; (2) when
# YB_DOWNLOAD_THIRDPARTY=1, an already-set YB_THIRDPARTY_URL or the per-OS
# default from build-support/thirdparty_url_<os>.txt. The chosen URL is
# written back to the build-dir cache file.
set_prebuilt_thirdparty_url() {
local build_thirdparty_url_file=$BUILD_ROOT/thirdparty_url.txt
if [[ -f $build_thirdparty_url_file ]]; then
export YB_THIRDPARTY_URL=$(<"$build_thirdparty_url_file")
export YB_DOWNLOAD_THIRDPARTY=1
log "Reusing previously used third-party URL from the build dir: $YB_THIRDPARTY_URL"
return
fi
if [[ ${YB_DOWNLOAD_THIRDPARTY:-} == "1" ]]; then
local auto_thirdparty_url=""
local thirdparty_url_file=$YB_BUILD_SUPPORT_DIR/thirdparty_url_${short_os_name}.txt
if [[ -f $thirdparty_url_file ]]; then
auto_thirdparty_url=$( read_file_and_trim "$thirdparty_url_file" )
if [[ $auto_thirdparty_url != http://* && $auto_thirdparty_url != https://* ]]; then
fatal "Invalid third-party URL: '$auto_thirdparty_url' (expected http:// or https://)." \
"From file: $thirdparty_url_file."
fi
elif [[ -z ${YB_THIRDPARTY_URL:-} ]]; then
fatal "File $thirdparty_url_file not found, cannot set YB_THIRDPARTY_URL"
fi
if [[ -z ${YB_THIRDPARTY_URL:-} ]]; then
export YB_THIRDPARTY_URL=$auto_thirdparty_url
log "Setting third-party URL to $auto_thirdparty_url"
elif [[ -n $auto_thirdparty_url ]]; then
# An explicitly pre-set URL wins over the per-OS default; just log the
# discrepancy when they differ.
if [[ $auto_thirdparty_url != $YB_THIRDPARTY_URL ]]; then
log "YB_THIRDPARTY_URL is already set to $YB_THIRDPARTY_URL, not trying to set it to" \
"the default value of $auto_thirdparty_url"
fi
else
fatal "YB_THIRDPARTY_URL is not set, and could not determine the default value."
fi
# Cache the decision so later runs in this build dir reuse the same URL.
mkdir -p "$BUILD_ROOT"
echo "$YB_THIRDPARTY_URL" >"$build_thirdparty_url_file"
fi
}
# -------------------------------------------------------------------------------------------------
# Initialization
# -------------------------------------------------------------------------------------------------

detect_os

# http://man7.org/linux/man-pages/man7/signal.7.html
# Exit code of a process killed by SIGUSR1: 128 + signal number, and the
# signal number differs between macOS (30) and Linux (10).
if is_mac; then
  declare -i -r SIGUSR1_EXIT_CODE=158  # 128 + 30
else
  # Linux
  declare -i -r SIGUSR1_EXIT_CODE=138  # 128 + 10
fi

# Parent directory for build directories of all build types.
YB_BUILD_INTERNAL_PARENT_DIR=$YB_SRC_ROOT/build
YB_BUILD_EXTERNAL_PARENT_DIR=${YB_SRC_ROOT}__build
if [[ ${YB_USE_EXTERNAL_BUILD_ROOT:-} == "1" ]]; then
  YB_BUILD_PARENT_DIR=$YB_BUILD_EXTERNAL_PARENT_DIR
else
  YB_BUILD_PARENT_DIR=$YB_BUILD_INTERNAL_PARENT_DIR
fi

# Sanity check that the source tree layout was detected correctly.
if [[ ! -d $YB_BUILD_SUPPORT_DIR ]]; then
  fatal "Could not determine YB source directory from '$BASH_SOURCE':" \
        "$YB_BUILD_SUPPORT_DIR does not exist."
fi

if [[ -z ${YB_THIRDPARTY_DIR:-} ]]; then
  export YB_THIRDPARTY_DIR=$YB_SRC_ROOT/thirdparty
fi

readonly YB_DEFAULT_CMAKE_OPTS=(
  "-DCMAKE_C_COMPILER=$YB_COMPILER_WRAPPER_CC"
  "-DCMAKE_CXX_COMPILER=$YB_COMPILER_WRAPPER_CXX"
)

YB_PYTHON_WRAPPERS_DIR=$YB_BUILD_SUPPORT_DIR/python-wrappers

# Python wrapper scripts set yb_is_python_wrapper_script=true; skip Homebrew
# detection and PATH manipulation for them.
if ! "${yb_is_python_wrapper_script:-false}"; then
  detect_brew
  add_python_wrappers_dir_to_path
fi
|
// import { Link } from "gatsby"
import React from "react"
import { Navbar, Container, Nav } from "react-bootstrap"
import { Link } from "gatsby"
// Site-wide header: fixed-top, collapsible Bootstrap navbar with links to
// the main pages. Gatsby <Link> gets the "active" class on the current page.
export default function Header() {
  return (
    <nav>
      <Navbar className="nav-bar" fixed="top" variant="dark" expand="md">
        <Container>
          <Navbar.Brand className="nav-brand" href="/">
            Green Team
          </Navbar.Brand>
          <Navbar.Toggle aria-controls="navbarResponsive" />
          <Navbar.Collapse id="navbarResponsive">
            {/* ms-auto pushes the link list to the right edge */}
            <Nav as="ul" className="ms-auto">
              <Nav.Item as="li">
                <Link to="/" className="nav-link" activeClassName="active">
                  Home
                </Link>
              </Nav.Item>
              <Nav.Item as="li">
                <Link to="/about" className="nav-link" activeClassName="active">
                  About
                </Link>
              </Nav.Item>
              <Nav.Item as="li">
                <Link
                  to="/#plans"
                  className="nav-link"
                  activeClassName="active"
                >
                  Energy Plans
                </Link>
              </Nav.Item>
              <Nav.Item as="li">
                <Link
                  to="/contact"
                  className="nav-link"
                  activeClassName="active"
                >
                  Contact
                </Link>
              </Nav.Item>
            </Nav>
          </Navbar.Collapse>
        </Container>
      </Navbar>
    </nav>
  )
}
|
/******************************************************************************
Copyright (c) 2001-2016, Intel Corporation
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the Intel Corporation nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
/*$FreeBSD$*/
#ifndef _IGB_H_DEFINED_
#define _IGB_H_DEFINED_

#include <sys/types.h>

/* Physical address and size of a DMA-able memory region. */
struct resource {
	u_int64_t paddr;
	u_int32_t mmap_size;
};

/* datastructure used to transmit a timed packet */
#define IGB_PACKET_LAUNCHTIME 1 /* control when packet transmitted */
#define IGB_PACKET_LATCHTIME 2 /* grab a timestamp of transmission */

struct igb_packet {
	struct resource map; /* bus_dma map for packet */
	unsigned int offset; /* offset into physical page */
	void *vaddr;         /* virtual address of the packet buffer */
	u_int32_t len;       /* packet length in bytes */
	u_int32_t flags;     /* IGB_PACKET_LAUNCHTIME / IGB_PACKET_LATCHTIME */
	u_int64_t attime;    /* launchtime */
	u_int64_t dmatime;   /* when dma tx desc wb*/
	struct igb_packet *next; /* used in the clean routine */
};

/* Identifies a PCI device by domain/bus/device/function plus vendor/device
 * IDs; private_data is driver-internal state. */
typedef struct _device_t {
	void *private_data;
	u_int16_t pci_vendor_id;
	u_int16_t pci_device_id;
	u_int16_t domain;
	u_int8_t bus;
	u_int8_t dev;
	u_int8_t func;
} device_t;

/*
 * Bus dma allocation structure used by
 * e1000_dma_malloc_page and e1000_dma_free_page.
 */
struct igb_dma_alloc {
	u_int64_t dma_paddr;
	void *dma_vaddr;
	unsigned int mmap_size;
};

/* Device lifecycle. */
int igb_probe(device_t *dev);
int igb_attach(char *dev_path, device_t *pdev);
int igb_attach_rx(device_t *pdev);
int igb_attach_tx(device_t *pdev);
int igb_detach(device_t *dev);
int igb_suspend(device_t *dev);
int igb_resume(device_t *dev);
int igb_init(device_t *dev);

/* DMA page allocation. */
int igb_dma_malloc_page(device_t *dev, struct igb_dma_alloc *page);
void igb_dma_free_page(device_t *dev, struct igb_dma_alloc *page);

/* Packet transmit/receive and descriptor reclamation. */
int igb_xmit(device_t *dev, unsigned int queue_index,
	     struct igb_packet *packet);
int igb_refresh_buffers(device_t *dev, u_int32_t idx,
			struct igb_packet **rxbuf_packets,
			u_int32_t num_bufs);
int igb_receive(device_t *dev, unsigned int queue_index,
		struct igb_packet **received_packets, u_int32_t *count);
void igb_clean(device_t *dev, struct igb_packet **cleaned_packets);

/* Time queries (hardware wallclock vs. system time). */
int igb_get_wallclock(device_t *dev, u_int64_t *curtime, u_int64_t *rdtsc);
int igb_gettime(device_t *dev, clockid_t clk_id, u_int64_t *curtime,
		struct timespec *system_time);

/* Credit-based shaper (AVB class A/B) bandwidth configuration. */
int igb_set_class_bandwidth(device_t *dev, u_int32_t class_a, u_int32_t class_b,
			    u_int32_t tpktsz_a, u_int32_t tpktsz_b);
int igb_set_class_bandwidth2(device_t *dev, u_int32_t class_a_bytes_per_second,
			     u_int32_t class_b_bytes_per_second);

/* Flexible RX filters. */
int igb_setup_flex_filter(device_t *dev, unsigned int queue_id,
			  unsigned int filter_id, unsigned int filter_len,
			  u_int8_t *filter, u_int8_t *mask);
int igb_clear_flex_filter(device_t *dev, unsigned int filter_id);

/* Raw register access and miscellaneous helpers. */
void igb_trigger(device_t *dev, u_int32_t data);
void igb_readreg(device_t *dev, u_int32_t reg, u_int32_t *data);
void igb_writereg(device_t *dev, u_int32_t reg, u_int32_t data);
int igb_lock(device_t *dev);
int igb_unlock(device_t *dev);
int igb_get_mac_addr(device_t *dev, u_int8_t mac_addr[6]);

#endif /* _IGB_H_DEFINED_ */
|
import { Component, OnInit } from '@angular/core';
@Component({
  selector: 'app-ticket-selection-info',
  templateUrl: './ticket-selection-info.component.html',
  styleUrls: ['./ticket-selection-info.component.css']
})
export class TicketSelectionInfoComponent implements OnInit {
  /** Whether the extended e-ticket text is expanded. */
  public show2: boolean = false;
  /** Label for the expand/collapse toggle; flips between the two strings below. */
  public moreEticket: string = "show more"; // was `any`; it only ever holds a string
  /** Whether the ticket list is visible. */
  showtickets: boolean = false;

  constructor() { }

  ngOnInit() {
  }

  /** Show/hide the ticket list. */
  clickEvent(): void {
    this.showtickets = !this.showtickets;
  }

  /** Expand/collapse the extended text and update the toggle label. */
  lessmore(): void {
    this.show2 = !this.show2;
    if (this.show2)
      this.moreEticket = "Show less";
    else
      this.moreEticket = "show more";
  }
}
|
<filename>detox/ios/Detox/Utilities/UIImage+DetoxUtils.h
//
// UIImage+DetoxUtils.h
// Detox
//
// Created by <NAME> on 9/13/20.
// Copyright © 2020 Wix. All rights reserved.
//
#import <UIKit/UIKit.h>
NS_ASSUME_NONNULL_BEGIN
@interface UIImage (DetoxUtils)

/// Counts the pixels considered visible at the given @c threshold and writes
/// the image's total pixel count through @c totalPixels.
/// NOTE(review): semantics inferred from the signature only — confirm against
/// the implementation (e.g. whether threshold applies to alpha).
- (NSUInteger)dtx_numberOfVisiblePixelsWithThreshold:(CGFloat)threshold totalPixels:(NSUInteger*)totalPixels;
#if DEBUG
/// Debug-only helper; the name suggests it writes the image to the Desktop.
- (void)dtx_saveToDesktop;
#endif
@end
NS_ASSUME_NONNULL_END
|
<filename>website/migrations/0009_auto_20180817_1922.py
# Generated by Django 2.0.7 on 2018-08-17 16:22
from django.db import migrations, models
class Migration(migrations.Migration):
    # Alters Subscriber.email: enforces uniqueness and a 50-character limit.

    dependencies = [
        ('website', '0008_product_is_featured'),
    ]

    operations = [
        migrations.AlterField(
            model_name='subscriber',
            name='email',
            field=models.EmailField(max_length=50, unique=True),
        ),
    ]
|
<filename>app/src/main/java/com/hapramp/notification/NotificationSubscriber.java<gh_stars>10-100
package com.hapramp.notification;
import com.google.android.gms.tasks.OnSuccessListener;
import com.google.firebase.messaging.FirebaseMessaging;
import com.hapramp.preferences.HaprampPreferenceManager;
import com.hapramp.utils.HashGenerator;
/**
 * Manages FCM topic (un)subscriptions for per-user notifications and the
 * "competitions" broadcast topic. All methods are static and asynchronous:
 * preference flags are updated in the Firebase success callbacks.
 */
public class NotificationSubscriber {
  /**
   * subscribe with `username` topic
   */
  public static void subscribeForUserTopic() {
    // The topic name is the MD5 hash of the current username.
    String username = HaprampPreferenceManager.getInstance().getCurrentSteemUsername();
    String topic = HashGenerator.getMD5Hash(username);
    FirebaseMessaging.getInstance().subscribeToTopic(topic).addOnSuccessListener(new OnSuccessListener<Void>() {
      @Override
      public void onSuccess(Void aVoid) {
        HaprampPreferenceManager.getInstance().setUserTopicSubscribed(true);
      }
    });
    // NOTE(review): this unsubscribes from the legacy plain-username topic,
    // and its success callback sets UserTopicSubscribed(false). If it fires
    // after the subscribe callback above, the flag ends up false even though
    // the hashed-topic subscription succeeded — confirm the intended ordering.
    unsubscribeToUnrequiredTopic();
  }

  /**
   * subscribe to listening to new competitions.
   */
  public static void subscribeForNewCompetition() {
    final String topic = "competitions";
    FirebaseMessaging.getInstance().subscribeToTopic(topic).addOnSuccessListener(new OnSuccessListener<Void>() {
      @Override
      public void onSuccess(Void aVoid) {
      }
    });
  }

  /**
   * unsubscribe from listening to new competitions.
   */
  public static void unsubscribeForNewCompetition() {
    String topic = "competitions";
    FirebaseMessaging.getInstance().unsubscribeFromTopic(topic).addOnSuccessListener(new OnSuccessListener<Void>() {
      @Override
      public void onSuccess(Void aVoid) {
      }
    });
  }

  /**
   * unsubscribe with `username` topic
   */
  public static void unsubscribeForUserTopic() {
    String username = HaprampPreferenceManager.getInstance().getCurrentSteemUsername();
    String topic = HashGenerator.getMD5Hash(username);
    FirebaseMessaging.getInstance().unsubscribeFromTopic(topic).addOnSuccessListener(new OnSuccessListener<Void>() {
      @Override
      public void onSuccess(Void aVoid) {
        HaprampPreferenceManager.getInstance().setUserTopicSubscribed(false);
      }
    });
  }

  // Unsubscribes from the un-hashed username topic (used before topics were
  // hashed) so devices stop receiving on the obsolete topic name.
  public static void unsubscribeToUnrequiredTopic(){
    String username = HaprampPreferenceManager.getInstance().getCurrentSteemUsername();
    FirebaseMessaging.getInstance().unsubscribeFromTopic(username).addOnSuccessListener(new OnSuccessListener<Void>() {
      @Override
      public void onSuccess(Void aVoid) {
        HaprampPreferenceManager.getInstance().setUserTopicSubscribed(false);
      }
    });
  }

  // Drops every subscription this class manages (e.g. on logout).
  public static void unsubscribeToAllTopics(){
    unsubscribeToUnrequiredTopic();
    unsubscribeForUserTopic();
    unsubscribeForNewCompetition();
  }
}
|
package com.leetcode;
public class Solution_203 {
    /**
     * Removes every node whose value equals {@code val} from the list and
     * returns the new head. A dummy node in front of the list lets removals
     * at the head be handled by the same code path as interior removals.
     */
    public ListNode removeElements(ListNode head, int val) {
        if (head == null) return null;
        // Dummy predecessor of the head; its stored value is never compared.
        ListNode dummy = new ListNode(val - 1);
        dummy.next = head;
        for (ListNode cur = dummy; cur != null; ) {
            if (cur.next != null && cur.next.val == val) {
                // Unlink the matching node and stay put, in case the next
                // node also matches.
                cur.next = cur.next.next;
            } else {
                cur = cur.next;
            }
        }
        return dummy.next;
    }
}
|
import { Explosion } from '../components/Explosion'
import GamePlay from '../scenes/GamePlay'
// Falling bomb sprite: plays a two-frame looping animation, tweens down to
// y=650, then explodes (Explosion + camera shake) and destroys itself.
export class Bomb extends Phaser.Physics.Arcade.Sprite {
  // variables
  protected currentScene: Phaser.Scene
  // Hit counter taken from the config; compared to the scene's delucaMaxHits
  // when the bomb lands to decide whether the player has won.
  private hits: number

  constructor(params: BombConfig) {
    super(params.scene, params.x, params.y, params.key)
    // variables
    this.hits = params.hit
    // NOTE(review): cast assumes every Bomb is created from a GamePlay scene,
    // yet the field is typed Phaser.Scene — confirm with callers.
    this.currentScene = <GamePlay>params.scene
    this.name = 'bomb'
    this.initSprite()
    this.currentScene.add.existing(this)
  }

  protected initSprite() {
    this.setDepth(1999)
    this.setOrigin(0.5).setScale(3)
    // Looping 2-frame fuse animation (frames 0 and 1).
    let _bomb: Phaser.Types.Animations.Animation = {
      key: 'bomb',
      frames: this.currentScene.anims.generateFrameNumbers('bomb', {
        frames: [0, 1]
      }),
      frameRate: 20,
      yoyo: false,
      repeat: -1
    }
    this.currentScene.anims.create(_bomb)
    this.play('bomb')
    this.setOrigin(0.5)
    // variables
    // Drop the bomb to ground level; the explosion happens on completion.
    this.currentScene.tweens.add({
      targets: this,
      y: 650,
      duration: 500,
      onComplete: () => {
        // @ts-ignore used because delucaMaxHits/win() live on GamePlay but
        // currentScene is typed as Phaser.Scene.
        //@ts-ignore
        if (this.hits == this.currentScene.delucaMaxHits) {
          //@ts-ignore
          this.currentScene.win()
        }
        new Explosion({ scene: this.currentScene, x: this.x, y: this.y, key: '' })
        this.destroy()
        this.currentScene.cameras.main.shake(200, 0.01, true)
      }
    })
  }
}
|
package request
// AlipayFundAuthOrderFreezeMethod is the Alipay API method name for
// freezing funds under a fund authorization.
const AlipayFundAuthOrderFreezeMethod = "alipay.fund.auth.order.freeze"

// AlipayFundAuthOrderFreezeRequest is the request payload for the
// alipay.fund.auth.order.freeze API. Fields tagged omitempty are optional.
type AlipayFundAuthOrderFreezeRequest struct {
	AuthCode     string `json:"auth_code"`      // payment auth code scanned from the user
	AuthCodeType string `json:"auth_code_type"` // type of the auth code
	OutOrderNo   string `json:"out_order_no"`   // merchant-side order number
	OutRequestNo string `json:"out_request_no"` // merchant-side request number
	OrderTitle   string `json:"order_title"`
	Amount       string `json:"amount"` // amount to freeze, as a decimal string
	PayeeLogonId string `json:"payee_logon_id,omitempty"`
	PayeeUserId  string `json:"payee_user_id,omitempty"`
	PayTimeout   string `json:"pay_timeout,omitempty"`
	ExtraParam   string `json:"extra_param,omitempty"`
	ProductCode  string `json:"product_code,omitempty"`
}
|
import { Body, Controller, Delete, Get, Logger, Param, Post as PostMethod, Put, UseGuards, Req, UseInterceptors } from '@nestjs/common';
import { ApiBearerAuth, ApiUseTags, ApiResponse, ApiOperation } from '@nestjs/swagger';
import { Request } from 'express';
import Solicitud from '../../domain/solicitud.entity';
import { SolicitudService } from '../../service/solicitud.service';
import { PageRequest, Page } from '../../domain/base/pagination.entity';
import { AuthGuard, Roles, RolesGuard, RoleType } from '../../security';
import { HeaderUtil } from '../../client/header-util';
import { LoggingInterceptor } from '../../client/interceptors/logging.interceptor';
@Controller('api/solicitudes')
@UseGuards(AuthGuard, RolesGuard)
@UseInterceptors(LoggingInterceptor)
@ApiBearerAuth()
@ApiUseTags('solicitudes')
export class SolicitudController {
logger = new Logger('SolicitudController');
constructor(private readonly solicitudService: SolicitudService) { }
@Get('/')
@Roles(RoleType.USER)
@ApiOperation({ title: 'Get all solicitudes' })
@ApiResponse({
status: 200,
description: 'List all records',
type: Solicitud
})
async getAll(): Promise<Solicitud[]> {
const results = await this.solicitudService.findAll();
return results;
}
@Get('/:id')
@Roles(RoleType.USER)
@ApiOperation({ title: 'Get a solicitud by id' })
@ApiResponse({
status: 200,
description: 'The found record',
type: Solicitud
})
async getOne(@Param('id') id: string): Promise<Solicitud> {
return await this.solicitudService.findById(id);
}
@PostMethod('/')
@Roles(RoleType.USER)
@ApiOperation({ title: 'Create solicitud' })
@ApiResponse({
status: 201,
description: 'The record has been successfully created.',
type: Solicitud
})
@ApiResponse({ status: 403, description: 'Forbidden.' })
async post(@Req() req: Request, @Body() solicitud: Solicitud): Promise<Solicitud> {
const created = await this.solicitudService.save(solicitud);
HeaderUtil.addEntityCreatedHeaders(req.res, 'Solicitud', created.id);
return created;
}
@Put('/')
@Roles(RoleType.USER)
@ApiOperation({ title: 'Update solicitud' })
@ApiResponse({
status: 200,
description: 'The record has been successfully updated.',
type: Solicitud
})
async put(@Req() req: Request, @Body() solicitud: Solicitud): Promise<Solicitud> {
HeaderUtil.addEntityCreatedHeaders(req.res, 'Solicitud', solicitud.id);
return await this.solicitudService.update(solicitud);
}
@Delete('/:id')
@Roles(RoleType.USER)
@ApiOperation({ title: 'Delete solicitud' })
@ApiResponse({
status: 204,
description: 'The record has been successfully deleted.'
})
async remove(@Req() req: Request, @Param('id') id: string): Promise<Solicitud> {
HeaderUtil.addEntityDeletedHeaders(req.res, 'Solicitud', id);
const toDelete = await this.solicitudService.findById(id);
return await this.solicitudService.delete(toDelete);
}
}
|
package constants
import (
"crypto/md5" //nolint:gosec
"encoding/base64"
"encoding/json"
"fmt"
"testing"
"github.com/qdm12/gluetun/internal/models"
"github.com/stretchr/testify/assert"
)
// digestServerModelVersion returns a base64 (raw std) MD5 digest of the JSON
// encoding of server concatenated with its version number, used to detect
// model-struct changes that were not accompanied by a version bump.
func digestServerModelVersion(t *testing.T, server interface{}, version uint16) string { //nolint:unparam
	payload, err := json.Marshal(server)
	if err != nil {
		t.Fatal(err)
	}
	payload = append(payload, fmt.Sprintf("%d", version)...)
	digest := md5.Sum(payload) //nolint:gosec
	return base64.RawStdEncoding.EncodeToString(digest[:])
}
// Test_versions pins the digest of each (zero-valued) server model struct
// combined with its declared version. If a model's fields change, its digest
// changes and this test fails until the version number is bumped and the
// expected digest updated.
func Test_versions(t *testing.T) {
	t.Parallel()
	allServers := GetAllServers()
	assert.Equal(t, "e8eLGRpb1sNX8mDNPOjA6g", digestServerModelVersion(t, models.CyberghostServer{}, allServers.Cyberghost.Version))
	assert.Equal(t, "4yL2lFcxXd/l1ByxBQ7d3g", digestServerModelVersion(t, models.MullvadServer{}, allServers.Mullvad.Version))
	assert.Equal(t, "fjzfUqJH0KvetGRdZYEtOg", digestServerModelVersion(t, models.NordvpnServer{}, allServers.Nordvpn.Version))
	assert.Equal(t, "gYO+bJZCtQvxVk2dTi5d5Q", digestServerModelVersion(t, models.PIAServer{}, allServers.Pia.Version))
	assert.Equal(t, "EZ/SBXQOCS/iJU7A9yc7vg", digestServerModelVersion(t, models.PurevpnServer{}, allServers.Purevpn.Version))
	assert.Equal(t, "7yfMpHwzRpEngA/6nYsNag", digestServerModelVersion(t, models.SurfsharkServer{}, allServers.Surfshark.Version))
	assert.Equal(t, "7yfMpHwzRpEngA/6nYsNag", digestServerModelVersion(t, models.VyprvpnServer{}, allServers.Vyprvpn.Version))
	assert.Equal(t, "7yfMpHwzRpEngA/6nYsNag", digestServerModelVersion(t, models.WindscribeServer{}, allServers.Windscribe.Version))
}
// digestServersTimestamp returns a base64 (raw std) MD5 digest of the JSON
// encoding of servers concatenated with their timestamp, used to detect
// server-data changes that were not accompanied by a timestamp update.
func digestServersTimestamp(t *testing.T, servers interface{}, timestamp int64) string { //nolint:unparam
	payload, err := json.Marshal(servers)
	if err != nil {
		t.Fatal(err)
	}
	payload = append(payload, fmt.Sprintf("%d", timestamp)...)
	digest := md5.Sum(payload) //nolint:gosec
	return base64.RawStdEncoding.EncodeToString(digest[:])
}
// Test_timestamps pins the digest of each provider's embedded server list
// combined with its timestamp. Regenerating server data without updating the
// timestamp (or vice versa) fails this test.
func Test_timestamps(t *testing.T) {
	t.Parallel()
	allServers := GetAllServers()
	assert.Equal(t, "EFMpdq2b9COLevjXmje5zg", digestServersTimestamp(t, allServers.Cyberghost.Servers, allServers.Cyberghost.Timestamp))
	assert.Equal(t, "6VjgHtTZOz+TDKpiQOweLA", digestServersTimestamp(t, allServers.Mullvad.Servers, allServers.Mullvad.Timestamp))
	assert.Equal(t, "OLI62FoTf2wis25Nw4FLpg", digestServersTimestamp(t, allServers.Nordvpn.Servers, allServers.Nordvpn.Timestamp))
	assert.Equal(t, "hAjEIo6FIrUsJuRmKOKPzA", digestServersTimestamp(t, allServers.Pia.Servers, allServers.Pia.Timestamp))
	assert.Equal(t, "uiMp4IqH7NmvCIQ7gvR05Q", digestServersTimestamp(t, allServers.PiaOld.Servers, allServers.PiaOld.Timestamp))
	assert.Equal(t, "kwJdVWTiBOspfrRwZIA+Sg", digestServersTimestamp(t, allServers.Purevpn.Servers, allServers.Purevpn.Timestamp))
	assert.Equal(t, "2rceMJexUNMv0VIqme34iA", digestServersTimestamp(t, allServers.Surfshark.Servers, allServers.Surfshark.Timestamp))
	assert.Equal(t, "KdIQWi2tYUM4aMXvWfVBEg", digestServersTimestamp(t, allServers.Vyprvpn.Servers, allServers.Vyprvpn.Timestamp))
	assert.Equal(t, "faQUVtOnLMVezN0giHSz3Q", digestServersTimestamp(t, allServers.Windscribe.Servers, allServers.Windscribe.Timestamp))
}
|
#!/bin/bash
# Fix: the shebang was #!/bin/sh, but this script relies on bash-only
# features — `set -o pipefail`, the `function` keyword, `trap ... ERR`,
# arrays, and `[[ ]]` — which fail under a strictly POSIX /bin/sh (e.g. dash).
set -e
set -u
set -o pipefail

# Report the script path and line number of any command that fails.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework into the app's Frameworks folder,
# resigning it and (pre-Xcode 7) embedding the Swift runtime it links.
install_framework()
{
  # Resolve the framework in BUILT_PRODUCTS_DIR (by full path, then by
  # basename) or fall back to the literal path given.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  # Locate the executable inside the copied bundle (or a bare dylib),
  # following a symlink if the binary is one.
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"

  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"

    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"

    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi

    # strip_invalid_archs sets STRIP_BINARY_RETVAL=1 when the dSYM matched at
    # least one build architecture (and was possibly thinned).
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    # Fix: the inner double quotes of the logged command were unescaped, so
    # they terminated the string early and the echoed line was mangled;
    # escape them the same way the sibling install_* functions do.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  # Only sign when an identity is available and signing is not disabled by
  # the build settings.
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"

    # Background the command when parallel signing is enabled; the main
    # script `wait`s for all of them at the end.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    # 0 tells the caller the binary was left untouched.
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  # 1 tells the caller the binary matched the build archs (and may have been thinned).
  STRIP_BINARY_RETVAL=1
}
# Embed the Pods frameworks for the active build configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/FRFoundation/FRFoundation.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SnapKit/SnapKit.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/FRFoundation/FRFoundation.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SnapKit/SnapKit.framework"
fi
# Wait for any backgrounded (parallel) codesign invocations to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
use bcrypt::{hash, verify, DEFAULT_COST};
/// In-memory user store mapping usernames to bcrypt password hashes.
pub struct Auth {
    users: std::collections::HashMap<String, String>,
}
impl Auth {
    /// Creates an empty credential store.
    pub fn new() -> Self {
        Self {
            users: std::collections::HashMap::new(),
        }
    }

    /// Hashes `password` with bcrypt at the default cost and stores it under
    /// `username`, replacing any previous entry. Panics if hashing fails.
    pub fn register_user(&mut self, username: &str, password: &str) {
        let digest = hash(password, DEFAULT_COST).unwrap();
        self.users.insert(username.to_owned(), digest);
    }

    /// Returns true only when `username` exists and `password` matches its
    /// stored hash; verification errors count as a failed login.
    pub fn login_user(&self, username: &str, password: &str) -> bool {
        match self.users.get(username) {
            Some(digest) => verify(password, digest).unwrap_or(false),
            None => false,
        }
    }
}
// Smoke test: register one user, then check a correct and an incorrect login.
fn main() {
    let mut auth = Auth::new();
    auth.register_user("user1", "password123");
    assert!(auth.login_user("user1", "password123"));
    assert!(!auth.login_user("user1", "wrongpassword"));
}
<reponame>pradotiago/devops_ac02<gh_stars>0
class Operacoes():
    """Simple accumulator operations over an iterable of numbers."""

    def soma(self, valores):
        """Return the sum of `valores` (0 for an empty iterable)."""
        total = 0
        for v in valores:
            total = total + v
        return total

    def subtracao(self, valores):
        """Return the negated running total: start at 0 and subtract each
        value in turn, so subtracao([1, 2]) == -3. (Kept as-is; this mirrors
        the original behavior.)"""
        total = 0
        for v in valores:
            total = total - v
        return total

    def mult(self, valores):
        """Return the product of `valores` (1 for an empty iterable).

        Bug fix: the accumulator previously started at 2, so every result
        was exactly twice the actual product.
        """
        produto = 1
        for v in valores:
            produto = produto * v
        return produto
|
#! /usr/bin/env python3
# -*-coding:utf-8 -*-
# @Time : 2019/06/18 15:34:39
# @Author : che
# @Email : <EMAIL>
# Print the key/value pairs of the SPSoftwareDataType section of a
# `system_profiler -xml` dump (macOS property-list XML).
import sys
from xml.etree import ElementTree as ET

e = ET.parse('system_profiler.xml')
# './array/dict' instead of '/array/dict': ElementTree paths are relative and
# a leading '/' is deprecated (it is internally rewritten to './...').
for d in e.findall('./array/dict'):
    if d.find('string').text == 'SPSoftwareDataType':
        sp_data = d.find('array').find('dict')
        break
else:
    print('SPSoftwareDataType Not Found')
    sys.exit(1)

record = []
# Bug fix: Element.getchildren() was removed in Python 3.9; iterating the
# element directly yields the same children.
for child in sp_data:
    record.append(child.text)
    if child.tag == 'string':
        # plist dict entries come in <key>/<value> pairs; the value element
        # (here a <string>) completes a pair, so print and reset.
        print('%-15s -> %s' % tuple(record))
        record = []
import { Form } from './Form';
import { FormGroup } from '@angular/forms';
/**
 * Payload handed to a dialog that renders a form.
 *
 * @typeParam T - shape of the record being edited
 */
export interface DialogFormData<T> {
  /** Declarative description of the form to render. */
  schema: Form;
  /** Initial values shown in the form. */
  data: T;
  /**
   * Invoked on submit with the edited data and the original snapshot.
   * NOTE(review): the boolean presumably signals success/whether the dialog
   * may close — confirm with the dialog component that consumes this.
   */
  submit: (data: T, originalData: T) => Promise<boolean>;
}
|
<reponame>KorAP/Kustvakt<gh_stars>1-10
package de.ids_mannheim.korap.utils;
/**
* @author hanl
* @date 19/02/2014
*/
public class BooleanUtils {

    /** Database name used to decide whether boolean/int conversion applies. */
    public static String dbname;

    /**
     * Normalizes a boolean-ish value for the configured database.
     * A {@code null} input is treated as {@code false}. For SQLite,
     * booleans are mapped to 0/1 and integers back to booleans
     * (1 -> true); for every other database the value passes through.
     */
    public static Object getBoolean (Object val) {
        Object value = (val == null) ? Boolean.FALSE : val;
        boolean isSqlite = dbname != null && dbname.equalsIgnoreCase("sqlite");
        if (!isSqlite) {
            return value;
        }
        if (value instanceof Boolean) {
            return ((Boolean) value) ? 1 : 0;
        }
        if (value instanceof Integer) {
            return ((Integer) value) == 1;
        }
        return value;
    }
}
|
def encrypt(text, key):
    """Caesar-shift each letter of `text` forward by `key` positions.

    Uppercase and lowercase letters wrap within their own case. Bug fix:
    non-alphabetic characters (spaces, digits, punctuation) previously fell
    through the lowercase branch and were shifted into garbage; they are now
    passed through unchanged.
    """
    encrypted_text = ""
    for char in text:
        if char.isupper():
            encrypted_text += chr((ord(char) + key - 65) % 26 + 65)
        elif char.islower():
            encrypted_text += chr((ord(char) + key - 97) % 26 + 97)
        else:
            # Leave non-letters (e.g. the space in "Hello World") untouched.
            encrypted_text += char
    return encrypted_text
# Demo: shift "Hello World" forward by 5 positions and print the result.
text = "Hello World"
key = 5
print("Encrypted Text: ", encrypt(text, key))
package io.smallrye.mutiny.groups;
import java.util.Collections;
import java.util.List;
import java.util.function.BiFunction;
import java.util.function.Function;
import io.smallrye.mutiny.CompositeException;
import io.smallrye.mutiny.Uni;
import io.smallrye.mutiny.tuples.Tuple2;
import io.smallrye.mutiny.tuples.Tuples;
/**
* Configures the combination of 2 {@link Uni unis}.
*
* @param <T1> the type of item of the first {@link Uni}
* @param <T2> the type of item of the second {@link Uni}
*/
public class UniAndGroup2<T1, T2> extends UniAndGroupIterable<T1> {

    public UniAndGroup2(Uni<? extends T1> source, Uni<? extends T2> other) {
        super(source, Collections.singletonList(other), false);
    }

    /**
     * Waits for both {@link Uni unis} to fire an event (item or failure)
     * before propagating a failure. When several failures were collected,
     * they are wrapped in a {@link CompositeException}.
     *
     * @return the current {@link UniAndGroup2}
     */
    public UniAndGroup2<T1, T2> collectFailures() {
        super.collectFailures();
        return this;
    }

    /**
     * @return the resulting {@link Uni}, combining both items into a
     *         {@link Tuple2 Tuple2&lt;T1, T2&gt;}
     */
    public Uni<Tuple2<T1, T2>> asTuple() {
        return combinedWith(Tuple2::of);
    }

    /**
     * Creates the resulting {@link Uni}, combining both items with the given
     * combinator function.
     *
     * @param combinator combines the two items, must not be {@code null}
     * @param <O> the type of the combined item
     * @return the resulting {@link Uni}
     */
    @SuppressWarnings("unchecked")
    public <O> Uni<O> combinedWith(BiFunction<T1, T2, O> combinator) {
        Function<List<?>, O> combine = items -> {
            Tuples.ensureArity(items, 2);
            return combinator.apply((T1) items.get(0), (T2) items.get(1));
        };
        return super.combinedWith(combine);
    }
}
|
<reponame>navikt/nav-analytics
const findEventFiles = require("../utils/find-event-files");
const readDefinitionFile = require("./read-definition-file");
const filenames = require("../source-files");
module.exports = ()=> {
const definitions = []
findEventFiles(filenames.DEFINITION_JSON).forEach(fp => definitions.push(readDefinitionFile(fp)));
return definitions;
}
|
import re
def parse_autotune_command(command: str) -> dict:
    """Parse a `ck autotune <type> pipeline_from_file=<in> @<out>` command.

    Returns a dict with keys action/type/input_file/output_file, or an empty
    dict when the command does not match the expected pattern.
    """
    pattern = (
        r'ck\s+autotune\s+(?P<type>[\w:]+)\s+'
        r'pipeline_from_file=(?P<input_file>[\w_.]+)\s+'
        r'@(?P<output_file>[\w_.]+)'
    )
    matched = re.match(pattern, command)
    if not matched:
        # Not an autotune command in the shape we understand.
        return {}
    parsed = {"action": "autotune"}
    parsed.update(matched.groupdict())
    return parsed
# Demonstration: parse a sample autotune invocation and display the result.
command = (
    "ck autotune pipeline:program "
    "pipeline_from_file=_setup_program_pipeline_tmp.json "
    "@autotune_program_pipeline_base_best.json"
)
parsed_info = parse_autotune_command(command)
# Expected: {'action': 'autotune', 'type': 'pipeline:program',
#            'input_file': '_setup_program_pipeline_tmp.json',
#            'output_file': 'autotune_program_pipeline_base_best.json'}
print(parsed_info)
#!/bin/sh
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e
# Print the installer's usage text and abort with a non-zero status.
# Called whenever options fail to parse or a required option is missing.
usage() {
echo "
usage: $0 <options>
Required not-so-options:
--build-dir=DIR path to Whirr dist.dir
--prefix=PREFIX path to install into
Optional options:
--doc-dir=DIR path to install docs into [/usr/share/doc/whirr]
--lib-dir=DIR path to install Whirr home [/usr/lib/whirr]
--installed-lib-dir=DIR path where lib-dir will end up on target system
--bin-dir=DIR path to install bins [/usr/bin]
--examples-dir=DIR path to install examples [doc-dir/examples]
... [ see source for more similar options ]
"
exit 1
}
# Normalise the long options with getopt(1).
OPTS=$(getopt \
-n $0 \
-o '' \
-l 'prefix:' \
-l 'doc-dir:' \
-l 'lib-dir:' \
-l 'installed-lib-dir:' \
-l 'bin-dir:' \
-l 'examples-dir:' \
-l 'build-dir:' -- "$@")
# NOTE(review): with `set -e` a getopt failure aborts on the assignment
# itself, so this $? check is likely never reached — confirm.
if [ $? != 0 ] ; then
usage
fi
# Re-split the normalised option string back into the positional parameters.
eval set -- "$OPTS"
# Consume "--opt value" pairs until the "--" terminator.
while true ; do
case "$1" in
--prefix)
PREFIX=$2 ; shift 2
;;
--build-dir)
BUILD_DIR=$2 ; shift 2
;;
--doc-dir)
DOC_DIR=$2 ; shift 2
;;
--lib-dir)
LIB_DIR=$2 ; shift 2
;;
--installed-lib-dir)
INSTALLED_LIB_DIR=$2 ; shift 2
;;
--bin-dir)
BIN_DIR=$2 ; shift 2
;;
--examples-dir)
EXAMPLES_DIR=$2 ; shift 2
;;
--)
shift ; break
;;
*)
echo "Unknown option: $1"
usage
exit 1
;;
esac
done
# PREFIX and BUILD_DIR have no defaults; both must come from the command line.
for var in PREFIX BUILD_DIR ; do
if [ -z "$(eval "echo \$$var")" ]; then
echo Missing param: $var
usage
fi
done
# Resolve destination directories, honouring command-line overrides.
MAN_DIR=$PREFIX/usr/share/man/man1
DOC_DIR=${DOC_DIR:-$PREFIX/usr/share/doc/whirr}
LIB_DIR=${LIB_DIR:-$PREFIX/usr/lib/whirr}
INSTALLED_LIB_DIR=${INSTALLED_LIB_DIR:-/usr/lib/whirr}
EXAMPLES_DIR=${EXAMPLES_DIR:-$DOC_DIR/examples}
BIN_DIR=${BIN_DIR:-$PREFIX/usr/bin}
# First we'll move everything into lib.
# All expansions are quoted so paths containing spaces survive word splitting.
install -d -m 0755 "$LIB_DIR"
(cd "$BUILD_DIR" && tar -cf - .) | (cd "$LIB_DIR" && tar -xf -)
# Copy in the /usr/bin/whirr wrapper, which dispatches to the installed tree.
install -d -m 0755 "$BIN_DIR"
cat > "$BIN_DIR/whirr" <<EOF
#!/bin/sh
exec $INSTALLED_LIB_DIR/bin/whirr "\$@"
EOF
chmod 755 "$BIN_DIR/whirr"
# Install the compressed man page.
# NOTE(review): whirr.1 is read from the current working directory — confirm
# the packaging harness always invokes this script from the right place.
install -d -m 0755 "$MAN_DIR"
gzip -c whirr.1 > "$MAN_DIR/whirr.1.gz"
|
#!/usr/bin/env bash
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
set -ex

# Require the install-directory argument. A brace group is used (not a
# subshell) so that `exit 1` terminates this script directly rather than
# relying on `set -e` to propagate the subshell's failure.
[ -n "$1" ] || { echo "Install dir missing"; exit 1; }

# Build prerequisites for the C++ toolchain and libraries.
sudo apt-get update
sudo apt-get install -y \
    autoconf \
    autoconf-archive \
    automake \
    binutils-dev \
    bison \
    cmake \
    flex \
    g++ \
    gcc \
    git \
    libboost-all-dev \
    libdouble-conversion-dev \
    libevent-dev \
    libgflags-dev \
    libgoogle-glog-dev \
    libiberty-dev \
    libjemalloc-dev \
    libkrb5-dev \
    liblzma-dev \
    libnuma-dev \
    libsnappy-dev \
    libsasl2-dev \
    libssl-dev \
    libtool \
    make \
    python-dev \
    ragel \
    scons \
    zlib1g-dev \
    zip

# Make gcc-5/g++-5 the default compilers.
sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-5 50
sudo update-alternatives --install /usr/bin/g++ g++ /usr/bin/g++-5 50

# Run the build from the directory containing this script.
cd "$(dirname "$0")" || { echo "cd fail"; exit 1; }
./get_and_build_everything.sh ubuntu-14.04 "$@"
|
def string_similarity(str1: str, str2: str) -> float:
    """Return the fraction of positions at which the two strings match.

    The comparison is case-insensitive. Positions beyond the end of the
    shorter string count as mismatches, and the count is normalised by the
    longer length, so the result is always in ``[0.0, 1.0]``.

    Two empty strings are considered identical (similarity ``1.0``).

    Args:
        str1: First string.
        str2: Second string.

    Returns:
        Matching-position ratio in ``[0.0, 1.0]``.
    """
    str1 = str1.lower()
    str2 = str2.lower()
    maxlen = max(len(str1), len(str2))
    if maxlen == 0:
        # Both strings empty: avoid the original's ZeroDivisionError.
        return 1.0
    # zip() stops at the shorter string, fixing the original's IndexError
    # when the lengths differ; unpaired tail positions simply don't match.
    similar = sum(1 for a, b in zip(str1, str2) if a == b)
    return similar / maxlen
# Sample value to check against the success threshold.
x = 15
# Values strictly greater than 10 count as a successful mission.
if x > 10:
    print("Mission successful")
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.hardware.DcMotor;
/**
* Created by kevinwang on 8/24/18.
*/
public class Whatever_Drive_Code {

    /**
     * Drives a four-motor mecanum chassis in one of several simple modes.
     *
     * <p>Modes: 0 = stop, 1 = drive forward/back (direction +1 forward,
     * -1 backwards), 2 = strafe (direction +1 left, -1 right),
     * 3 = rotate (direction +1 clockwise, -1 counter-clockwise; not used).
     * Any other mode stops all motors.
     *
     * @param frontLeft  front-left drive motor
     * @param backLeft   back-left drive motor
     * @param frontRight front-right drive motor
     * @param backRight  back-right drive motor
     * @param mode       motion mode as described above
     * @param direction  +1 or -1; meaning depends on {@code mode}
     */
    static void Drive(DcMotor frontLeft, DcMotor backLeft, DcMotor frontRight, DcMotor backRight, int mode, double direction) {
        double drive = 0;
        double strafe = 0;
        double rotate = 0;
        switch (mode) {
            case 0:
                // Full stop: all components stay zero.
                break;
            case 1:
                drive = direction;
                break;
            case 2:
                strafe = direction;
                break;
            case 3:
                rotate = direction; // not used in practice
                break;
            default:
                // Unknown mode: leave every component zero. The original
                // zeroed the motors and then re-applied the (all-zero) mix;
                // setting the powers once below is equivalent.
                break;
        }
        // Standard mecanum mixing of drive/strafe/rotate components.
        double frontLeftPower = drive - strafe - rotate;
        double backLeftPower = drive + strafe - rotate;
        double frontRightPower = drive + strafe + rotate;
        double backRightPower = drive - strafe + rotate;
        frontLeft.setPower(frontLeftPower);
        backLeft.setPower(backLeftPower);
        frontRight.setPower(frontRightPower);
        backRight.setPower(backRightPower);
    }
}
|
/*
Copyright (c) 2011, <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
var natural = require('lib/natural');
// Jasmine specs for the natural library's Naive Bayes classifier: training,
// classification, score retrieval, document removal, and (de)serialization.
describe('bayes classifier', function() {
  describe('classifier', function() {
    // Documents may be supplied as pre-tokenized arrays.
    it('should classify with arrays', function() {
      var classifier = new natural.BayesClassifier();
      classifier.addDocument(['fix', 'box'], 'computing');
      classifier.addDocument(['write', 'code'], 'computing');
      classifier.addDocument(['script', 'code'], 'computing');
      classifier.addDocument(['write', 'book'], 'literature');
      classifier.addDocument(['read', 'book'], 'literature');
      classifier.addDocument(['study', 'book'], 'literature');
      classifier.train();
      expect(classifier.classify(['bug', 'code'])).toBe('computing');
      expect(classifier.classify(['read', 'thing'])).toBe('literature');
    });
    // getClassifications returns every label ranked by score, best first.
    it('should provide all classification scores', function() {
      var classifier = new natural.BayesClassifier();
      classifier.addDocument(['fix', 'box'], 'computing');
      classifier.addDocument(['write', 'code'], 'computing');
      classifier.addDocument(['script', 'code'], 'computing');
      classifier.addDocument(['write', 'book'], 'literature');
      classifier.addDocument(['read', 'book'], 'literature');
      classifier.addDocument(['study', 'book'], 'literature');
      classifier.train();
      expect(classifier.getClassifications('i write code')[0].label).toBe('computing');
      expect(classifier.getClassifications('i write code')[1].label).toBe('literature');
    });
    // Raw strings are tokenized internally before training/classifying.
    it('should classify with strings', function() {
      var classifier = new natural.BayesClassifier();
      classifier.addDocument('i fixed the box', 'computing');
      classifier.addDocument('i write code', 'computing');
      classifier.addDocument('nasty script code', 'computing');
      classifier.addDocument('write a book', 'literature');
      classifier.addDocument('read a book', 'literature');
      classifier.addDocument('study the books', 'literature');
      classifier.train();
      expect(classifier.classify('a bug in the code')).toBe('computing');
      expect(classifier.classify('read all the books')).toBe('literature');
    });
    it('should classify and re-classify after document-removal', function() {
      var classifier = new natural.BayesClassifier()
        , arr
        , item
        , classifications = {};
      // Add some good/bad docs and train
      classifier.addDocument('foo bar baz', 'good');
      classifier.addDocument('qux zooby', 'bad');
      classifier.addDocument('asdf qwer', 'bad');
      classifier.train();
      expect(classifier.classify('foo')).toBe('good');
      expect(classifier.classify('qux')).toBe('bad');
      // Remove one of the bad docs, retrain
      classifier.removeDocument('qux zooby', 'bad');
      classifier.retrain();
      // Simple `classify` will still return a single result, even if
      // ratio for each side is equal -- have to compare actual values in
      // the classifications, should be equal since qux is unclassified
      arr = classifier.getClassifications('qux');
      for (var i = 0, ii = arr.length; i < ii; i++) {
        item = arr[i];
        classifications[item.label] = item.value;
      }
      expect(classifications.good).toEqual(classifications.bad);
      // Re-classify as good, retrain
      classifier.addDocument('qux zooby', 'good');
      classifier.retrain();
      // Should now be good, original docs should be unaffected
      expect(classifier.classify('foo')).toBe('good');
      expect(classifier.classify('qux')).toBe('good');
    });
    // Round-trips the classifier through JSON.stringify/parse + restore();
    // the restored instance must remain trainable with new categories.
    it('should serialize and deserialize a working classifier', function() {
      var classifier = new natural.BayesClassifier();
      classifier.addDocument('i fixed the box', 'computing');
      classifier.addDocument('i write code', 'computing');
      classifier.addDocument('nasty script code', 'computing');
      classifier.addDocument('write a book', 'literature');
      classifier.addDocument('read a book', 'literature');
      classifier.addDocument('study the books', 'literature');
      var obj = JSON.stringify(classifier);
      var newClassifier = natural.BayesClassifier.restore(JSON.parse(obj));
      newClassifier.addDocument('kick a ball', 'sports');
      newClassifier.addDocument('hit some balls', 'sports');
      newClassifier.addDocument('kick and punch', 'sports');
      newClassifier.train();
      expect(newClassifier.classify('a bug in the code')).toBe('computing');
      expect(newClassifier.classify('read all the books')).toBe('literature');
      expect(newClassifier.classify('kick butt')).toBe('sports');
    });
    // Persists to disk and reloads; writes bayes_classifier.json to the CWD.
    // NOTE(review): asyncSpecDone is the jasmine 1.x async API — confirm the
    // test runner still provides it.
    it('should save and load a working classifier', function() {
      var classifier = new natural.BayesClassifier();
      classifier.addDocument('i fixed the box', 'computing');
      classifier.addDocument('i write code', 'computing');
      classifier.addDocument('nasty script code', 'computing');
      classifier.addDocument('write a book', 'literature');
      classifier.addDocument('read a book', 'literature');
      classifier.addDocument('study the books', 'literature');
      classifier.train();
      classifier.save('bayes_classifier.json', function(err) {
        natural.BayesClassifier.load('bayes_classifier.json', null,
          function(err, newClassifier){
            newClassifier.addDocument('kick a ball', 'sports');
            newClassifier.addDocument('hit some balls', 'sports');
            newClassifier.addDocument('kick and punch', 'sports');
            newClassifier.train();
            expect(newClassifier.classify('a bug in the code')).toBe('computing');
            expect(newClassifier.classify('read all the books')).toBe('literature');
            expect(newClassifier.classify('kick butt')).toBe('sports');
            asyncSpecDone();
          });
      });
    });
  });
});
|
<filename>tapestry-core/src/test/java/org/apache/tapestry5/integration/app1/StuffTreeModelAdapter.java
// Copyright 2011 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.integration.app1;
import org.apache.tapestry5.tree.TreeModelAdapter;
import java.util.List;
public class StuffTreeModelAdapter implements TreeModelAdapter<Stuff>
{
    /**
     * A node is a leaf when it has no children — except for the node named
     * "Empty Folder", which is deliberately presented as a childless folder.
     */
    public boolean isLeaf(Stuff value)
    {
        return !value.name.equals("Empty Folder") && !hasChildren(value);
    }

    public boolean hasChildren(Stuff value)
    {
        List<Stuff> children = value.children;
        return children != null && !children.isEmpty();
    }

    public List<Stuff> getChildren(Stuff value)
    {
        return value.children;
    }

    /** The node's display label is simply its name. */
    public String getLabel(Stuff value)
    {
        return value.name;
    }
}
|
import url from 'url';
/**
 * Format a protocol and an optional host into a URL string via the legacy
 * `url.format` API, e.g. ("https", "example.com") -> "https://example.com".
 */
const getUrl = (protocol: string, host: string | undefined): string => {
  const parts = { protocol, host };
  return url.format(parts);
};

export default getUrl;
|
<filename>src/main/java/cloud/google/datastore/entity/ResponseHandle.java<gh_stars>1-10
/**
* Copyright (C) 2014 xuanhung2401.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cloud.google.datastore.entity;
import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.HashMap;
import java.util.List;
import cloud.google.datastore.entity.core.Entity;
import cloud.google.datastore.entity.core.Result;
import cloud.google.datastore.entity.lookup.LookupResponse;
import cloud.google.datastore.entity.query.QueryResponse;
import cloud.google.util.Utility;
import com.google.gson.internal.LinkedTreeMap;
/**
* @author xuanhung2401
*
*/
/**
* This class used to handle google api response, convert response string to
* entity or list entity by type T.
* */
public class ResponseHandle {

    /**
     * Converts a lookup-by-ids response body into a list of entities.
     *
     * @param clazz target entity type; must declare a no-arg constructor
     * @param resp  raw JSON response from the Datastore lookup API
     * @return converted entities; empty list when nothing was found
     */
    public static <T> List<T> handleLookupIdsResponse(Class<T> clazz,
            String resp) {
        List<T> list = new ArrayList<T>();
        LookupResponse l = Utility.fromJsonToObject(LookupResponse.class, resp);
        if (l != null && l.getFound() != null && l.getFound().size() > 0) {
            for (Result<?> res : l.getFound()) {
                T obj = convertEntity(res.getEntity(), clazz);
                if (obj != null) {
                    list.add(obj);
                }
            }
        }
        return list;
    }

    /**
     * Converts a lookup response into a single entity.
     *
     * @param clazz target entity type; must declare a no-arg constructor
     * @param resp  raw JSON response from the Datastore lookup API
     * @return the first convertible entity found, or {@code null}
     */
    public static <T> T handleLookupIdResponse(Class<T> clazz, String resp) {
        LookupResponse l = Utility.fromJsonToObject(LookupResponse.class, resp);
        if (l != null && l.getFound() != null && l.getFound().size() > 0) {
            for (Result<?> res : l.getFound()) {
                T obj = convertEntity(res.getEntity(), clazz);
                if (obj != null) {
                    return obj;
                }
            }
        }
        return null;
    }

    /**
     * Converts a query response body into a list of entities.
     *
     * @param clazz target entity type; must declare a no-arg constructor
     * @param resp  raw JSON response from the Datastore query API
     * @return converted entities; empty list when the response is unparseable
     */
    public static <T> List<T> handleQueryResponse(Class<T> clazz, String resp) {
        List<T> list = new ArrayList<T>();
        QueryResponse l = Utility.fromJsonToObject(QueryResponse.class, resp);
        if (l != null) {
            for (Result<?> res : l.getBatch().getEntityResults()) {
                T obj = convertEntity(res.getEntity(), clazz);
                if (obj != null) {
                    list.add(obj);
                }
            }
        }
        return list;
    }

    /**
     * Converts a raw Datastore {@link Entity} into an instance of
     * {@code clazz}, copying every declared field present in the entity's
     * property map. Supports String, Integer, Boolean, date-time fields,
     * and lists of String/Integer/Double.
     *
     * @param entity raw entity returned by the Datastore API
     * @param clazz  target type; must declare a no-arg constructor
     * @return the populated instance, or {@code null} when reflection fails
     */
    public static <T> T convertEntity(Entity<?> entity, Class<T> clazz) {
        HashMap<String, HashMap<String, Object>> properties = entity
                .getProperties();
        try {
            // Class.newInstance() is deprecated and swallows constructor
            // exceptions; use the declared no-arg constructor instead.
            Object obj = clazz.getDeclaredConstructor().newInstance();
            for (Field f : obj.getClass().getDeclaredFields()) {
                f.setAccessible(true);
                HashMap<String, Object> fValue = properties.get(f.getName());
                if (fValue == null) {
                    continue; // the entity has no property for this field
                }
                // The inner map is keyed by the Datastore value kind
                // (e.g. "stringValue") matching the field's Java type.
                String typeName = f.getGenericType().toString();
                Object value = fValue.get(Utility.generateGoogleDataType(typeName));
                if (Utility.isStringField(typeName)) {
                    f.set(obj, (String) value);
                } else if (Utility.isIntegerField(typeName)) {
                    f.set(obj, Integer.parseInt((String) value));
                } else if (Utility.isBooleanField(typeName)) {
                    f.set(obj, Boolean.parseBoolean((String) value));
                } else if (Utility.isDateTimeField(typeName)) {
                    // Datastore timestamps arrive as ISO-8601 strings.
                    Calendar cal = Calendar.getInstance();
                    SimpleDateFormat sdf = new SimpleDateFormat(
                            "yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
                    try {
                        cal.setTime(sdf.parse((String) value));
                    } catch (ParseException e) {
                        // NOTE(review): on parse failure the field silently
                        // receives "now" — confirm this is intended.
                        e.printStackTrace();
                    }
                    f.set(obj, cal.getTime());
                } else if (f.getGenericType() instanceof ParameterizedType) {
                    setListField(obj, f, value);
                }
            }
            return clazz.cast(obj);
        } catch (ReflectiveOperationException e) {
            // Covers NoSuchMethod/Instantiation/IllegalAccess/InvocationTarget.
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Populates a {@code List<String>}, {@code List<Integer>} or
     * {@code List<Double>} field from the raw Datastore list value.
     */
    @SuppressWarnings("unchecked")
    private static void setListField(Object obj, Field f, Object value)
            throws IllegalAccessException {
        ParameterizedType pt = (ParameterizedType) f.getGenericType();
        if (pt.getActualTypeArguments().length == 0) {
            return;
        }
        Type t = pt.getActualTypeArguments()[0];
        if (Utility.isStringField(t.toString())) {
            List<String> listObj = new ArrayList<String>();
            List<LinkedTreeMap<String, String>> raw = (List<LinkedTreeMap<String, String>>) value;
            for (LinkedTreeMap<String, String> item : raw) {
                listObj.add(item.get("stringValue"));
            }
            f.set(obj, listObj);
        } else if (Utility.isIntegerField(t.toString())) {
            List<Integer> listObj = new ArrayList<Integer>();
            List<LinkedTreeMap<String, String>> raw = (List<LinkedTreeMap<String, String>>) value;
            for (LinkedTreeMap<String, String> item : raw) {
                listObj.add(Integer.parseInt(item.get("integerValue")));
            }
            f.set(obj, listObj);
        } else if (Utility.isDoubleField(t.toString())) {
            List<Double> listObj = new ArrayList<Double>();
            List<LinkedTreeMap<String, Double>> raw = (List<LinkedTreeMap<String, Double>>) value;
            for (LinkedTreeMap<String, Double> item : raw) {
                listObj.add(item.get("doubleValue"));
            }
            f.set(obj, listObj);
        }
    }
}
|
# Debian packaging formula for Microsoft's azcopy (Azure Storage CLI).
class AzCopy < DebianFormula
  homepage 'https://github.com/Azure/azure-storage-azcopy/'
  # Upstream release; also interpolated into the source tarball URL below.
  version '10.7.0'
  url "https://github.com/Azure/azure-storage-azcopy/archive/v#{version}.tar.gz"
  # Checksum of the tarball above — must be updated together with `version`.
  sha256 'cfdc53dd2c5d30adddeb5270310ff566b4417a9f5eec6c9f6dfbe10d1feb6213'

  name 'azcopy'
  arch 'x86_64'

  # Pinned internal Go toolchain build.
  build_depends 'golang (= 1.15.6+thepwagner1)'
end
|
#!/bin/bash
# Deploys the pipeline in the Tools account
# Load the account ids/profiles and deployment settings.
source ../env/env_accounts.sh
source ../env/env_deployment.sh

# Replace with your docker build image repo name in ECR
BuildImageRepo=my-build-image

echo "Deploying Pipeline..."
# All variable expansions are quoted so values with spaces survive
# word splitting; the continuation after --stack-name gets a separating space.
aws cloudformation deploy --stack-name example-pipeline \
    --template-file example_pipeline.yaml \
    --capabilities CAPABILITY_NAMED_IAM \
    --profile "$ToolsAccountProfile" \
    --parameter-overrides \
        TestAccount="$TestAccount" \
        ProdAccount="$ProdAccount" \
        ExternPipelineAccessRoleName="$ExternPipelineAccessRoleName" \
        CloudformationDeployerRoleName="$CloudformationDeployerRoleName" \
        BuildImageRepo="$BuildImageRepo"
|
package com.kastking.supplierProduct.service;
import com.kastking.supplierProduct.domain.MisSupplierProduct;
import java.util.List;
/**
* 供应商产品Service接口
*
* @author James
* @date 2020-02-07
*/
/**
 * Service interface for supplier products.
 */
public interface IMisSupplierProductService {

    /**
     * Look up a supplier product by its id.
     *
     * @param productId the supplier product id
     * @return the supplier product
     */
    public MisSupplierProduct selectMisSupplierProductById(Long productId);

    /**
     * Query supplier products matching the given criteria.
     *
     * @param misSupplierProduct the query criteria
     * @return list of matching supplier products
     */
    public List<MisSupplierProduct> selectMisSupplierProductList(MisSupplierProduct misSupplierProduct);

    /**
     * Insert a new supplier product.
     *
     * @param misSupplierProduct the supplier product to insert
     * @return the number of rows affected
     */
    public int insertMisSupplierProduct(MisSupplierProduct misSupplierProduct);

    /**
     * Update an existing supplier product.
     *
     * @param misSupplierProduct the supplier product to update
     * @return the number of rows affected
     */
    public int updateMisSupplierProduct(MisSupplierProduct misSupplierProduct);

    /**
     * Batch-delete supplier products.
     *
     * @param ids ids of the records to delete (string format defined by the
     *            implementation — presumably comma-separated; confirm)
     * @return the number of rows affected
     */
    public int deleteMisSupplierProductByIds(String ids);

    /**
     * Delete a single supplier product.
     *
     * @param productId the supplier product id
     * @return the number of rows affected
     */
    public int deleteMisSupplierProductById(Long productId);

    /**
     * Import supplier product data.
     *
     * @param misSupplierProductList the supplier product records to import
     * @param isUpdateSupport whether existing records should be updated in place
     * @param operName the operating user
     * @return a result message describing the import outcome
     */
    public String importMisSupplierProduct(List<MisSupplierProduct> misSupplierProductList, Boolean isUpdateSupport, String operName);

    /**
     * Verify that a SKU is unique.
     *
     * @param misSupplierProduct the supplier product carrying the SKU to check
     * @return a product id (NOTE(review): presumably the id of an existing
     *         product with the same SKU, or null when unique — confirm with
     *         the implementation)
     */
    public Long verificationSku(MisSupplierProduct misSupplierProduct);
}
|
from oeqa.oetest import oeRuntimeTest
class OpenCLTest(oeRuntimeTest):
    """Runtime smoke test: verify the OpenCL sample binary runs on target."""

    def test_opencl_app_can_execute(self):
        # Run the benchmark binary on the device under test; any non-zero
        # exit status is reported along with the program's output.
        (status, output) = self.target.run('/usr/bin/opencl-bench-opencl')
        self.assertEqual(status, 0, msg="OpenCL error messages: %s" % output)
|
<reponame>i509VCB/Spunbric<gh_stars>1-10
package org.spongepowered.spunbric.mod.data.value.mutable;
import org.spongepowered.api.data.Key;
import org.spongepowered.api.data.value.Value;
import org.spongepowered.spunbric.mod.data.value.AbstractBaseValue;
import org.spongepowered.spunbric.mod.data.value.immutable.ImmutableFabricValue;
import java.util.function.Function;
/**
 * Mutable {@link Value} implementation backed by {@link AbstractBaseValue}.
 *
 * @param <E> the type of the underlying value
 */
public class MutableFabricValue<E> extends AbstractBaseValue<E> implements Value.Mutable<E> {

    /** Creates a value whose actual value starts out equal to the default. */
    public MutableFabricValue(Key<? extends Value<E>> key, E defaultValue) {
        super(key, defaultValue, defaultValue);
    }

    public MutableFabricValue(Key<? extends Value<E>> key, E defaultValue, E actualValue) {
        super(key, defaultValue, actualValue);
    }

    @Override
    public Mutable<E> set(E value) {
        this.actualValue = value;
        return this;
    }

    @Override
    public Mutable<E> transform(Function<E, E> function) {
        this.actualValue = function.apply(this.actualValue);
        return this;
    }

    @Override
    public Immutable<E> asImmutable() {
        // Diamond operator replaces the original raw ImmutableFabricValue
        // construction, keeping the call type-checked.
        // TODO: cache immutable instances instead of allocating on each call.
        return new ImmutableFabricValue<>(this.getKey(), this.defaultValue, this.actualValue);
    }

    @Override
    public Mutable<E> copy() {
        return new MutableFabricValue<>(this.getKey(), this.defaultValue, this.actualValue);
    }
}
|
#!/bin/sh
# Configure BeagleBone UART pins, but only when the universal emmc cape
# overlay is loaded in the cape manager.
# (In the original, `cd`, `File=slots` and the `if` were fused onto the
# shebang line — making them comments that never executed, with no `fi`.)
cd /sys/devices/platform/bone_capemgr
File=slots
if grep -q "Override Board Name,00A0,Override Manuf,univ-emmc" "$File"; then
    cd

    ### Override capes with eeprom
    # NOTE(review): plain shell assignments have no effect here; these
    # uboot_overlay_addr* settings presumably belong in /boot/uEnv.txt — confirm.
    uboot_overlay_addr0=/lib/firmware/BB-UART1-00A0.dtbo
    uboot_overlay_addr1=/lib/firmware/BB-UART2-00A0.dtbo
    uboot_overlay_addr2=/lib/firmware/BB-UART4-00A0.dtbo
    uboot_overlay_addr3=/lib/firmware/BB-UART5-00A0.dtbo

    # NOTE(review): POSIX sh `echo` does not expand \n; printf would be
    # needed for literal newlines — output kept as the original wrote it.
    echo "\n Pin configuration available"
    echo "\n UART 4 configuration p9.11 and p9.13"
    sudo config-pin P9.11 uart
    sudo config-pin -q P9.11
    sudo config-pin P9.13 uart
    sudo config-pin -q P9.13
    echo "\n UART 1 configuration p9.26 and p9.24"
    sudo config-pin P9.24 uart
    sudo config-pin -q P9.24
    sudo config-pin P9.26 uart
    sudo config-pin -q P9.26
fi
<reponame>kodeaqua/script-praktikum<gh_stars>1-10
#include <gl/freeglut.h>
/* Clear the window to black and draw three transformed rectangles
 * (rotated blue, scaled green, translated red) in immediate mode. */
void draw()
{
    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glLoadIdentity();
    /* Blue rectangle, rotated 50 degrees about the z axis. */
    glPushMatrix();
    glRotatef(50.0f, 0.0f, 0.0f, 1.0f);
    glColor3f(0.0f, 0.0f, 1.0f);
    glRectf(-0.3, 0.18, 0.18, -0.3);
    glPopMatrix();
    /* Green rectangle, scaled by 2 on every axis. */
    glPushMatrix();
    glScalef(2.0f, 2.0f, 2.0f);
    glColor3f(0.0f, 1.0f, 0.0f);
    glRectf(-0.18, 0.18, 0.04, -0.04);
    glPopMatrix();
    /* Red rectangle, translated up and to the left. */
    glPushMatrix();
    glTranslatef(-0.50, 0.50, 0);
    glColor3f(1.0f, 0.0f, 0.0f);
    glRectf(-0.18, 0.18, 0.2, -0.18);
    glPopMatrix();
    /* Single-buffered display mode: flush instead of swapping buffers. */
    glFlush();
}
/* Create a 640x480 single-buffered RGBA window and enter the GLUT loop. */
int main(int argc, char **argv)
{
    glutInit(&argc, argv);
    glutInitWindowSize(640, 480);
    glutInitDisplayMode(GLUT_SINGLE | GLUT_RGBA);
    /* NOTE(review): the title says "Segitiga" (Indonesian for "triangle")
     * but draw() renders rectangles — confirm the intended title. */
    glutCreateWindow("Segitiga");
    glutDisplayFunc(draw);
    glutMainLoop();
    return 0;
}
|
#include "planet.h"
void planet::renderPlanet()
{
int num_segments = 8;
for(int ii = 0; ii < num_segments; ii++)
{
float theta = 2.0f * 3.1415926f * float(ii) / float(num_segments);//get the current angle
float x = radius * cosf(theta);//calculate the x component
float y = radius * sinf(theta);//calculate the y component
vertexArray[vertexArrayCount] = x;
vertexArrayCount++;
vertexArray[vertexArrayCount] = y;
vertexArrayCount++;
}
// VAO rendering
glEnableClientState(GL_VERTEX_ARRAY);
{
glVertexPointer(2, GL_FLOAT, 0, &vertexArray);
glDrawArrays(GL_LINE_LOOP, 0, vertexArrayCount/2);
}
glDisableClientState(GL_VERTEX_ARRAY);
vertexArrayCount = 0;
}
// Render planet to an x and y offset from the center
// Render planet to an x and y offset from the center
// Draws the planet as a coarse 8-segment line loop.
void planet::renderPlanet(int xOffset, int yOffset)
{
    int num_segments = 8;
    // Generate one (x, y) pair per segment around the circle.
    // NOTE(review): assumes vertexArray can hold 2 * num_segments floats —
    // confirm against the member's declared capacity.
    for(int ii = 0; ii < num_segments; ii++)
    {
        float theta = 2.0f * 3.1415926f * float(ii) / float(num_segments);//get the current angle
        float x = radius * cosf(theta);//calculate the x component
        float y = radius * sinf(theta);//calculate the y component
        vertexArray[vertexArrayCount] = x + xOffset;
        vertexArrayCount++;
        vertexArray[vertexArrayCount] = y + yOffset;
        vertexArrayCount++;
    }
    // VAO rendering
    glEnableClientState(GL_VERTEX_ARRAY);
    {
        glVertexPointer(2, GL_FLOAT, 0, &vertexArray);
        glDrawArrays(GL_LINE_LOOP, 0, vertexArrayCount/2);
    }
    glDisableClientState(GL_VERTEX_ARRAY);
    // Reset the fill cursor so the buffer is rebuilt on the next call.
    vertexArrayCount = 0;
}
// Accessor for the planet's radius.
// NOTE(review): returned as int while the render code uses it in float
// math — confirm the member's declared type matches.
int planet::getPlanetRadius()
{
    return radius;
}
|
<filename>src/main/java/io/github/vampirestudios/obsidian/api/fabric/TridentInterface.java
package io.github.vampirestudios.obsidian.api.fabric;
import net.minecraft.client.util.ModelIdentifier;
import net.minecraft.entity.projectile.TridentEntity;
import net.minecraft.util.Identifier;
/**
* An interface to implement for all custom tridents in fabric. <br>
* Note: This is meant to be used on a TridentItem class, the functionality will not work otherwise.
*
* @see SimpleTridentItem
*/
public interface TridentInterface {
    /**
     * The inventory (GUI) model for this trident item; the identifier
     * should be of the form `namespace:item_name#inventory`.
     *
     * @return the model identifier
     */
    ModelIdentifier getInventoryModelIdentifier();

    /**
     * @return the identifier for the texture of the thrown trident entity
     */
    Identifier getEntityTexture();

    /**
     * Modifies the trident entity for this trident item, allowing for custom tridents that have different features. <br>
     * Look at {@link SimpleTridentItem#modifyTridentEntity} for an example of how to construct a new trident entity from a vanilla one.
     *
     * @param trident the vanilla trident to base the custom trident off of
     * @return the customised trident entity
     */
    TridentEntity modifyTridentEntity(TridentEntity trident);
}
package cyclops.stream.spliterator;
/**
 * A {@link CopyableSpliterator} whose traversal direction can be flipped.
 * The direction is mutable state on the spliterator itself.
 */
public interface ReversableSpliterator<T> extends CopyableSpliterator<T> {

    /** @return true when this spliterator is currently set to traverse in reverse */
    boolean isReverse();

    /** @param reverse true to traverse in reverse, false for forward order */
    void setReverse(boolean reverse);

    /**
     * Flips the current traversal direction in place.
     *
     * @return this spliterator, for call chaining
     */
    default ReversableSpliterator<T> invert() {
        setReverse(!isReverse());
        return this;
    }

    /** @return an independent copy of this spliterator */
    ReversableSpliterator<T> copy();
}
|
<filename>src/stream/MarketDataFilter.cpp
/**
* Copyright 2017 <NAME>. Distributed under the MIT license.
*/
#include "greentop/stream/MarketDataFilter.h"
namespace greentop {
namespace stream {
// Constructs a filter restricting which market-data fields (and how many
// price-ladder levels) the stream will send.
MarketDataFilter::MarketDataFilter(const Optional<int32_t>& ladderLevels,
    const std::set<std::string>& fields) :
    ladderLevels(ladderLevels),
    fields(fields) {
}

// Populate this filter from a JSON message; absent members leave the
// corresponding field untouched.
// NOTE(review): existing entries in `fields` are not cleared first, so
// repeated fromJson calls accumulate values — confirm this is intended.
void MarketDataFilter::fromJson(const Json::Value& json) {
    if (json.isMember("ladderLevels")) {
        ladderLevels = json["ladderLevels"].asInt();
    }
    if (json.isMember("fields")) {
        for (unsigned i = 0; i < json["fields"].size(); ++i) {
            fields.insert(json["fields"][i].asString());
        }
    }
}

// Serialise this filter to JSON, omitting unset/empty members.
Json::Value MarketDataFilter::toJson() const {
    Json::Value json(Json::objectValue);
    if (ladderLevels.isValid()) {
        json["ladderLevels"] = ladderLevels.toJson();
    }
    if (fields.size() > 0) {
        for (std::set<std::string>::const_iterator it = fields.begin(); it != fields.end(); ++it) {
            json["fields"].append(*it);
        }
    }
    return json;
}

// Every combination of members is a valid filter.
bool MarketDataFilter::isValid() const {
    return true;
}

const Optional<int32_t>& MarketDataFilter::getLadderLevels() const {
    return ladderLevels;
}

void MarketDataFilter::setLadderLevels(const Optional<int32_t>& ladderLevels) {
    this->ladderLevels = ladderLevels;
}

const std::set<std::string>& MarketDataFilter::getFields() const {
    return fields;
}

void MarketDataFilter::setFields(const std::set<std::string>& fields) {
    this->fields = fields;
}
}
}
|
<gh_stars>0
/*
* Copyright 2014-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.cbean.result.grouping;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* The class of row resource for grouping making.
* @param <ENTITY> The type of entity.
* @author jflute
*/
public class GroupingRowResource<ENTITY> implements Serializable {

    // ===================================================================================
    //                                                                          Definition
    //                                                                          ==========
    /** The serial version UID for object serialization. (Default) */
    private static final long serialVersionUID = 1L;

    // ===================================================================================
    //                                                                           Attribute
    //                                                                           =========
    // Entities accumulated into the current grouping row.
    protected final List<ENTITY> _groupingRowList = new ArrayList<ENTITY>();
    // Index of the element currently being processed within the row.
    protected int _elementCurrentIndex;
    // Number of elements at which the grouping row is considered full.
    protected int _breakCount;

    // ===================================================================================
    //                                                                         Easy-to-Use
    //                                                                         ===========
    /**
     * @return Has the grouping row reached the break count? (i.e. is the
     * current element the last one of a full row)
     */
    public boolean isSizeUpBreakCount() {
        return _elementCurrentIndex == (_breakCount - 1);
    }

    // ===================================================================================
    //                                                                            Accessor
    //                                                                            ========
    /**
     * @return The list of grouping row. (NotNull and NotEmpty)
     */
    public List<ENTITY> getGroupingRowList() {
        return _groupingRowList;
    }

    /**
     * Add the element entity to the list of grouping row. {INTERNAL METHOD}
     * @param groupingRow The element entity to the list of grouping row.
     */
    public void addGroupingRowList(ENTITY groupingRow) {
        _groupingRowList.add(groupingRow);
    }

    /**
     * @return The entity at the current element index. (NotNull)
     */
    public ENTITY getCurrentEntity() {
        return _groupingRowList.get(_elementCurrentIndex);
    }

    /**
     * @return The index of the current element.
     */
    public int getElementCurrentIndex() {
        return _elementCurrentIndex;
    }

    /**
     * Set the index of the current element. {INTERNAL METHOD}
     * @param elementCurrentIndex The index of the current element.
     */
    public void setElementCurrentIndex(int elementCurrentIndex) {
        _elementCurrentIndex = elementCurrentIndex;
    }

    /**
     * @return The count of break loop.
     */
    public int getBreakCount() {
        return _breakCount;
    }

    /**
     * Set the count of break loop. {INTERNAL METHOD}
     * @param breakCount The count of break loop.
     */
    public void setBreakCount(int breakCount) {
        _breakCount = breakCount;
    }
}
|
<filename>src/movies/data/repositories/MoviesRepository.ts<gh_stars>0
// Need to use the React-specific entry point to import createApi
import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query/react';
import { IMoviesApiRest, IMoviesTransform } from '../models/interfaces';
import { toMovieDomain } from '../models/transformers';
// Define a service using a base URL and expected endpoints
/**
 * RTK Query API slice for The Movie Database (TMDB) v3.
 *
 * `getMoviesDiscovery` fetches the `/discover/movie` endpoint (API key taken
 * from `NEXT_PUBLIC_WEBSITE_MOVIE_API`) and maps the raw payload's `results`
 * into domain movies via `toMovieDomain`.
 */
export const MoviesRepository = createApi({
  reducerPath: 'MoviesRepository',
  baseQuery: fetchBaseQuery({ baseUrl: 'https://api.themoviedb.org/3/' }),
  endpoints: (builder) => ({
    getMoviesDiscovery: builder.query<IMoviesTransform, string>({
      // The string query argument is unused; the endpoint always hits
      // /discover/movie with the configured API key.
      query: () =>
        `discover/movie?api_key=${process.env.NEXT_PUBLIC_WEBSITE_MOVIE_API}`,
      // Convert the REST payload into the app's domain representation.
      transformResponse: (response: IMoviesApiRest): IMoviesTransform => ({
        ...response,
        results: toMovieDomain(response.results),
      }),
    }),
  }),
});

// Export hooks for usage in functional components, which are
// auto-generated based on the defined endpoints
export const { useGetMoviesDiscoveryQuery } = MoviesRepository;
|
#!/bin/bash
# Author Sudhir Reddy @ SAS
#
###------------------------------------------
#
# This script launches two daemons, both web servers.
#
# 1. SCR REST API server. This should correspond to the ENTRYPOINT set in the original SCR container image.
#    We need to set java.io.tmpdir because SageMaker-invoked docker containers (secured ones) do not let you write to /tmp.
#    Also override the port to 9090, as the original 8080 is used by SageMaker.
export _JAVA_OPTIONS=-Djava.io.tmpdir=/opt/tmp
#export CATALINA_BASE=/opt
#export CATALINA_TMPDIR=/opt/tmp
/usr/lib/jvm/jre/bin/java -Xrs -cp /opt/scr/viya/home/solo:/opt/scr/viya/home/solo/lib/* com.sas.mas.solo.Application -Djava.library.path=/opt/scr/viya/home/SASFoundation/sasexe --server.port=9090 &
#------
# 2. Launches a python server which redirects to the SCR server. SageMaker inference mandates the
#    /invocations and /ping URIs, which are not supported by the SCR REST API server;
#    that is the background of this second daemon.
gunicorn --bind 0.0.0.0:8080 sagemaker_server:app &
# Wait for any child process to exit (requires bash >= 4.3 for 'wait -n')
wait -n
# Exit with the status of the process that exited first
exit $?
|
<filename>opencga-storage/opencga-storage-hbase/src/test/java/org/opencb/opencga/storage/hbase/variant/VariantHbaseWriterTest.java<gh_stars>0
/*
* Copyright 2015 OpenCB
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.opencb.opencga.storage.hbase.variant;
import com.mongodb.DB;
import com.mongodb.MongoClient;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.VariantFactory;
import org.opencb.biodata.models.variant.VariantSource;
import org.opencb.opencga.core.auth.IllegalOpenCGACredentialsException;
import org.opencb.opencga.core.auth.MonbaseCredentials;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import static org.junit.Assert.*;
/**
* @author <NAME> <NAME> <<EMAIL>>
*/
/**
 * Integration tests for {@code VariantHbaseWriter}: opens the writer against
 * a live HBase/Monbase cluster, creates a test table, and (in the currently
 * disabled tests) verifies written variants, stats, effects and studies.
 *
 * NOTE(review): requires a running HBase/ZooKeeper (hard-coded host
 * 172.24.79.30) and a MongoDB instance — these are integration tests,
 * not unit tests; confirm environment before enabling.
 *
 * NOTE(review): JUnit 4 gives no ordering guarantee between the two
 * {@code @BeforeClass} methods below; they look independent of each other,
 * but confirm before relying on any ordering.
 *
 * @author <NAME> <NAME> <<EMAIL>>
 */
public class VariantHbaseWriterTest {

    // Table created for this run and dropped again in deleteTables().
    private static final String tableName = "test_VariantVcfMonbaseDataWriterTest";
    private static VariantSource study = new VariantSource("testStudy", "testAlias", "testStudy", null, null);
    private static MonbaseCredentials credentials;
    private static Configuration config;
    private static VariantHbaseWriter writer;
    // Five sample variants shared by the tests; built in initializeDataToInsert().
    private static List<Variant> variants;

    /**
     * Builds the credentials/configuration and opens the writer once for the
     * whole class, failing fast when credentials are rejected.
     */
    @BeforeClass
    public static void testConstructorAndOpen() {
        try {
            // Credentials for the writer
            credentials = new MonbaseCredentials("172.24.79.30", 60010, "172.24.79.30", 2181, "localhost", 9999, tableName, "cgonzalez", "cgonzalez");

            // HBase configuration with the active credentials
            config = HBaseConfiguration.create();
            config.set("hbase.master", credentials.getHbaseMasterHost() + ":" + credentials.getHbaseMasterPort());
            config.set("hbase.zookeeper.quorum", credentials.getHbaseZookeeperQuorum());
            config.set("hbase.zookeeper.property.clientPort", String.valueOf(credentials.getHbaseZookeeperClientPort()));

            // Monbase writer
            writer = new VariantHbaseWriter(study, tableName, credentials);
            assertTrue(writer.open());
        } catch (IllegalOpenCGACredentialsException e) {
            fail(e.getMessage());
        }

        assertNotNull("Monbase credentials must be not null", credentials);
        assertNotNull("Monbase writer must be not null", writer);
    }

    /**
     * Creates the shared fixture: five VCF-style records over three samples
     * (NA001-NA003), spanning several chromosomes and FILTER values.
     */
    @BeforeClass
    public static void initializeDataToInsert() {
        List<String> sampleNames = Arrays.asList( "NA001", "NA002", "NA003" );
        String[] fields1 = new String[] { "1", "100000", "rs1100000", "A", "T,G", "40", "PASS",
                "DP=5;AP=10;H2", "GT:DP", "1/1:4", "1/0:2", "0/0:3" };
        String[] fields2 = new String[] {"1", "200000", "rs1200000", "G", "T", "30", "LowQual",
                "DP=2;AP=5", "GT:DP", "1/1:3", "1/1:1", "0/0:5" };
        String[] fields3 = new String[] {"1", "300000", "rs1300000", "C", "T", "50", "PASS",
                "DP=1;AP=6", "GT:DP", "1/0:3", "0/1:1", "0/0:5" };
        String[] fields4 = new String[] {"2", "100000", "rs2100000", "G", "A", "60", "STD_FILTER",
                "DP=3;AP=8", "GT:DP", "1/1:3", "1/0:1", "0/0:5" };
        String[] fields5 = new String[] {"3", "200000", "rs3200000", "G", "C", "80", "LowQual;STD_FILTER",
                "DP=2;AP=6", "GT:DP", "1/0:3", "1/1:1", "0/1:5" };
        Variant rec1 = VariantFactory.createVariantFromVcf(sampleNames, fields1).get(0);
        Variant rec2 = VariantFactory.createVariantFromVcf(sampleNames, fields2).get(0);
        Variant rec3 = VariantFactory.createVariantFromVcf(sampleNames, fields3).get(0);
        Variant rec4 = VariantFactory.createVariantFromVcf(sampleNames, fields4).get(0);
        Variant rec5 = VariantFactory.createVariantFromVcf(sampleNames, fields5).get(0);

//        VariantStats stats1 = new VariantStats("1", 100000, "A", "T,G", 0.01, 0.30, "A", "A/T", 2, 0, 1, true, 0.02, 0.10, 0.30, 0.15);
//        VariantStats stats2 = new VariantStats("1", 200000, "G", "T", 0.05, 0.20, "T", "T/T", 1, 1, 0, true, 0.05, 0.30, 0.30, 0.10);
//        rec1.setStats(stats1);
//        rec2.setStats(stats2);

        variants = Arrays.asList(rec1, rec2, rec3, rec4, rec5);
    }

    /** pre() must create the HBase table used by the remaining tests. */
    @Test
    public void testPre() {
        assertTrue("Table creation could not be performed", writer.pre());
    }

    // NOTE(review): the following tests are disabled; kept verbatim as
    // reference for the intended write/verify round-trips.
//    @Test
//    public void testWriteBatch() throws IOException, InterruptedException, ClassNotFoundException {
//        HBaseAdmin admin = new HBaseAdmin(config);
//        assertTrue(admin.tableExists(tableName));
//        writer.writeBatch(variants);
//        writer.post();
//
//        Variant rec1 = variants.get(0);
//        Variant rec4 = variants.get(3);
//
//        // Query number of inserted records
//        Job job = RowCounter.createSubmittableJob(config, new String[] { tableName } );
//        job.waitForCompletion(true);
//        assertTrue(job.isSuccessful());
//        // How to count in HBase test suite: http://searchcode.com/codesearch/view/25291904
//        Counter counter = job.getCounters().findCounter("org.apache.hadoop.hbase.mapreduce.RowCounter$RowCounterMapper$Counters", "ROWS");
//        assertEquals("The number of inserted records is incorrect", 5, counter.getValue());
//
//        // Query information from a couple records
//        HTable table = new HTable(config, tableName);
//        Scan regionScan = new Scan("01_0000100000".getBytes(), "03_0000200001".getBytes());
//        ResultScanner variantScanner = table.getScanner(regionScan);
//        Result[] results = variantScanner.next((int) counter.getValue());
//        Result result1 = results[0];
//        Result result4 = results[3];
//
//        // Get basic variant fields from Protocol Buffers message
//        NavigableMap<byte[], byte[]> infoMap = result1.getFamilyMap("i".getBytes());
//        byte[] byteInfo = infoMap.get((studyName + "_data").getBytes());
//        VariantFieldsProtos.VariantInfo protoInfo = VariantFieldsProtos.VariantInfo.parseFrom(byteInfo);
//        assertEquals("rec1 reference must be A", rec1.getReference(), protoInfo.getReference());
//        assertEquals("rec1 alternate must be T", rec1.getAlternate(), StringUtils.join(protoInfo.getAlternateList(), ","));
//        assertEquals("rec1 format must be GT:DP", rec1.getFormat(), StringUtils.join(protoInfo.getFormatList(), ":"));
//
//        // Get samples
//        NavigableMap<byte[], byte[]> sampleMap = result1.getFamilyMap("d".getBytes());
//
//        for (Map.Entry<byte[], byte[]> entry : sampleMap.entrySet()) {
//            String name = (new String(entry.getKey(), Charset.forName("UTF-8"))).replaceAll(studyName + "_", "");
//            VariantFieldsProtos.VariantSample sample = VariantFieldsProtos.VariantSample.parseFrom(entry.getValue());
//            switch (name) {
//                case "NA001":
//                    assertEquals("Record 1, sample NA001 must be 1/1:4", "1/1:4", sample.getSample());
//                    break;
//                case "NA002":
//                    assertEquals("Record 1, sample NA002 must be 1/1:4", "1/0:2", sample.getSample());
//                    break;
//                case "NA003":
//                    assertEquals("Record 1, sample NA002 must be 1/1:4", "0/0:3", sample.getSample());
//            }
//        }
//
//        // Get basic variant fields from Protocol Buffers message
//        infoMap = result4.getFamilyMap("i".getBytes());
//        byteInfo = infoMap.get((studyName + "_data").getBytes());
//        protoInfo = VariantFieldsProtos.VariantInfo.parseFrom(byteInfo);
//        assertEquals("rec4 reference must be A", rec4.getReference(), protoInfo.getReference());
//        assertEquals("rec4 alternate must be T", rec4.getAlternate(), StringUtils.join(protoInfo.getAlternateList(), ","));
//        assertEquals("rec4 format must be GT:DP", rec4.getFormat(), StringUtils.join(protoInfo.getFormatList(), ":"));
//
//        // Get samples
//        sampleMap = result4.getFamilyMap("d".getBytes());
//
//        for (Map.Entry<byte[], byte[]> entry : sampleMap.entrySet()) {
//            String name = (new String(entry.getKey(), Charset.forName("UTF-8"))).replaceAll(studyName + "_", "");
//            VariantFieldsProtos.VariantSample sample = VariantFieldsProtos.VariantSample.parseFrom(entry.getValue());
//            switch (name) {
//                case "NA004":
//                    assertEquals("Record 4, sample NA001 must be 1/1:3", "1/1:3", sample.getSample());
//                    break;
//                case "NA002":
//                    assertEquals("Record 4, sample NA002 must be 1/0:1", "1/0:1", sample.getSample());
//                    break;
//                case "NA003":
//                    assertEquals("Record 4, sample NA002 must be 0/0:5", "0/0:5", sample.getSample());
//            }
//        }
//    }
//
//    @Test
//    public void testWriteVariantStats() throws UnknownHostException, IOException {
//        VariantStats stats1 = new VariantStats("1", 100000, "A", "T,G", 0.01, 0.30, "A", "A/T", 2, 0, 1, true, 0.02, 0.10, 0.30, 0.15);
//        VariantStats stats2 = new VariantStats("1", 200000, "G", "T", 0.05, 0.20, "T", "T/T", 1, 1, 0, true, 0.05, 0.30, 0.30, 0.10);
//
//        variants.get(0).setStats(stats1);
//        variants.get(1).setStats(stats2);
//
//        assertTrue(writer.writeVariantStats(variants));
//        writer.post();
//
//        // Query studyStats inserted in HBase
//        HTable table = new HTable(config, tableName);
//        Scan regionScan = new Scan("01_0000100000".getBytes(), "01_0000200001".getBytes());
//        ResultScanner variantScanner = table.getScanner(regionScan);
//        Result result1 = variantScanner.next();
//        Result result2 = variantScanner.next();
//
//        NavigableMap<byte[], byte[]> infoMap = result1.getFamilyMap("i".getBytes());
//        byte[] byteStats = infoMap.get((studyName + "_stats").getBytes());
//        VariantFieldsProtos.VariantStats protoInfo = VariantFieldsProtos.VariantStats.parseFrom(byteStats);
//        assertEquals(stats1.getMaf(), protoInfo.getMaf(), 0.001);
//        assertEquals(stats1.getCasesPercentDominant(), protoInfo.getCasesPercentDominant(), 0.001);
//        assertArrayEquals(stats1.getAltAlleles(), new String[] { "T", "G" });
//
//        infoMap = result2.getFamilyMap("i".getBytes());
//        byteStats = infoMap.get((studyName + "_stats").getBytes());
//        protoInfo = VariantFieldsProtos.VariantStats.parseFrom(byteStats);
//        assertEquals(stats2.getMaf(), protoInfo.getMaf(), 0.001);
//        assertEquals(stats2.getCasesPercentDominant(), protoInfo.getCasesPercentDominant(), 0.001);
//        assertArrayEquals(stats2.getAltAlleles(), new String[] { "T" });
//
//
//        // Query studyStats inserted in Mongo
//        MongoClient mongoClient = new MongoClient(credentials.getMongoHost());
//        DB db = mongoClient.getDB(credentials.getMongoDbName());
//        DBCollection variantsCollection = db.getCollection("variants");
//
//        DBObject query = new BasicDBObject("position", "01_0000100000");
//        query.put("studies.studyId", studyName);
//        DBObject returnValues = new BasicDBObject("studies.stats", 1);
//        DBObject variantsInStudy = variantsCollection.findOne(query, returnValues);
//        assertNotNull(variantsInStudy);
//
//        BasicDBList studiesDbObject = (BasicDBList) variantsInStudy.get("studies");
//        DBObject studyObj = (DBObject) studiesDbObject.get(0);
//        DBObject statsObj = (BasicDBObject) studyObj.get("stats");
//        double maf = ((Double) statsObj.get("maf")).doubleValue();
//        String alleleMaf = statsObj.get("alleleMaf").toString();
//        int missing = ((Integer) statsObj.get("missing")).intValue();
//
//        assertEquals(stats1.getMaf(), maf, 0.001);
//        assertEquals(stats1.getMafAllele(), alleleMaf);
//        assertEquals(stats1.getMissingGenotypes(), missing);
//
//        query = new BasicDBObject("position", "01_0000200000");
//        query.put("studies.studyId", studyName);
//        returnValues = new BasicDBObject("studies.stats", 1);
//        variantsInStudy = variantsCollection.findOne(query, returnValues);
//        assertNotNull(variantsInStudy);
//
//        studiesDbObject = (BasicDBList) variantsInStudy.get("studies");
//        studyObj = (DBObject) studiesDbObject.get(0);
//        statsObj = (BasicDBObject) studyObj.get("stats");
//        maf = ((Double) statsObj.get("maf")).doubleValue();
//        alleleMaf = statsObj.get("alleleMaf").toString();
//        missing = ((Integer) statsObj.get("missing")).intValue();
//
//        assertEquals(stats2.getMaf(), maf, 0.001);
//        assertEquals(stats2.getMafAllele(), alleleMaf);
//        assertEquals(stats2.getMissingGenotypes(), missing);
//
//        mongoClient.close();
//    }
//
//    @Test
//    public void testWriteVariantEffect() throws IOException, InterruptedException, ClassNotFoundException {
//        VariantEffect eff1 = new VariantEffect("1", 100000, "A", "T", "", "RP11-206L10.6",
//                "intron", "processed_transcript", "1", 714473, 739298, "1", "", "", "",
//                "ENSG00000237491", "ENST00000429505", "RP11-206L10.6", "SO:0001627",
//                "intron_variant", "In intron", "feature", -1, "", "");
//        VariantEffect eff2 = new VariantEffect("1", 100000, "A", "T", "ENST00000358533", "AL669831.1",
//                "downstream", "protein_coding", "1", 722513, 727513, "1", "", "", "",
//                "ENSG00000197049", "ENST00000358533", "AL669831.1", "SO:0001633",
//                "5KB_downstream_variant", "Within 5 kb downstream of the 3 prime end of a transcript", "feature", -1, "", "");
//        VariantEffect eff3 = new VariantEffect("1", 100000, "C", "A", "ENST00000434264", "RP11-206L10.7",
//                "downstream", "lincRNA", "1", 720070, 725070, "1", "", "", "",
//                "ENSG00000242937", "ENST00000434264", "RP11-206L10.7", "SO:0001633",
//                "5KB_downstream_variant", "Within 5 kb downstream of the 3 prime end of a transcript", "feature", -1, "", "");
//
//        variants.get(0).addEffect(eff1);
//        variants.get(0).addEffect(eff2);
//        variants.get(0).addEffect(eff3);
//
//        assertTrue(writer.writeVariantEffect(variants));
//        writer.post();
//
////        // TODO Query number of inserted records in HBase
////        Job job = RowCounter.createSubmittableJob(config, new String[] { tableName + "effect" } );
////        job.waitForCompletion(true);
////        assertTrue(job.isSuccessful());
////        // How to count in HBase test suite: http://searchcode.com/codesearch/view/25291904
////        Counter counter = job.getCounters().findCounter("org.apache.hadoop.hbase.mapreduce.RowCounter$RowCounterMapper$Counters", "ROWS");
////        assertEquals("The number of inserted effects is incorrect", 3, counter.getValue());
//
//        // Query effects inserted in Mongo
//        MongoClient mongoClient = new MongoClient(credentials.getMongoHost());
//        DB db = mongoClient.getDB(credentials.getMongoDbName());
//        DBCollection variantsCollection = db.getCollection("variants");
//
//        DBObject query = new BasicDBObject("position", "01_0000100000");
//        query.put("studies.studyId", studyName);
//        DBObject returnValues = new BasicDBObject("studies.effects", 1);
//        DBObject variantsInStudy = variantsCollection.findOne(query, returnValues);
//        assertNotNull(variantsInStudy);
//
//        BasicDBList studiesDbObject = (BasicDBList) variantsInStudy.get("studies");
//        DBObject studyObj = (DBObject) studiesDbObject.get(0);
//        Set<String> effectsObj = new HashSet<>((List<String>) studyObj.get("effects"));
//        Set<String> oboList = new HashSet<>(Arrays.asList("intron_variant", "5KB_downstream_variant"));
//
//        assertEquals(oboList, effectsObj);
//
//        mongoClient.close();
//    }

//    @Test
//    public void testWriteStudy() throws UnknownHostException {
//        VariantStudy study = new VariantStudy(studyName, "s1", "Study created for testing purposes",
//                Arrays.asList("Cristina", "Alex", "Jesus"), Arrays.asList("vcf", "ped"));
//        VariantGlobalStats studyStats = new VariantGlobalStats();
//        studyStats.setVariantsCount(5);
//        studyStats.setSnpsCount(3);
//        studyStats.setAccumQuality(45.0f);
//        study.setStats(studyStats);
//
////        assertTrue(writer.writeStudy(study));
//        writer.post();
//
//        // Query study inserted in Mongo
//        MongoClient mongoClient = new MongoClient(credentials.getMongoHost());
//        DB db = mongoClient.getDB(credentials.getMongoDbName());
//        DBCollection variantsCollection = db.getCollection("studies");
//
//        DBObject query = new BasicDBObject("name", studyName);
//        DBObject studyObj = variantsCollection.findOne(query);
//        assertNotNull(studyObj);
//
//        String alias = studyObj.get("alias").toString();
//        List<String> authors = (List<String>) studyObj.get("authors");
//        DBObject stats = (DBObject) studyObj.get("globalStats");
//        int variantsCount = ((Integer) stats.get("variantsCount")).intValue();
//        float accumQuality = ((Double) stats.get("accumulatedQuality")).floatValue();
//
//        assertEquals(study.getAlias(), alias);
//        assertEquals(study.getAuthors(), authors);
//        assertEquals(studyStats.getVariantsCount(), variantsCount);
//        assertEquals(studyStats.getAccumQuality(), accumQuality, 1e-6);
//    }

    /**
     * Drops the HBase test table and the Mongo database created by the run.
     * NOTE(review): disableTables/deleteTables take a regex pattern; the plain
     * table name works here because it contains no regex metacharacters.
     */
    @AfterClass
    public static void deleteTables() throws IOException {
        // Delete HBase tables
        HBaseAdmin admin = new HBaseAdmin(config);
        admin.disableTables(tableName);
        admin.deleteTables(tableName);

        // Delete Mongo collection
        MongoClient mongoClient = new MongoClient(credentials.getMongoHost());
        DB db = mongoClient.getDB(credentials.getMongoDbName());
        db.dropDatabase();
        mongoClient.close();
    }
}
|
// import 'mocha'
// import { expect } from 'chai'
// import { parseSource } from '../lib/parser'
// import { Query, Arg, QueryBlock, QueryColumn, SimpleTable, TableChain, WhereDirective, WhereType, ForeignKeyChain, KeyReference } from '../lib/ast/query'
// describe('query', () => {
// it('with no args, directives, or nested', () => {
// const queries = parseSource(`query thing: table [
// some_col, other_col
// ]`)
// expect(queries).lengthOf(1)
// const query = queries[0]
// expect(query).eql(new Query(
// 'thing', [],
// new QueryBlock(
// 'thing', 'table', new SimpleTable('table'), true,
// [new QueryColumn('some_col', 'some_col'), new QueryColumn('other_col', 'other_col')],
// [], [], undefined, undefined,
// ),
// ))
// })
// it('hella layers query', () => {
// const queries = parseSource(`query hellaLayersQuery($id_limit: int = 4): first_level(@where: id <= $id_limit) [
// id
// my_word: word
// seconds: second_level [
// id
// my_word: word
// thirds: third_level(@limit: 1) {
// id
// my_word: word
// }
// ]
// ]`)
// expect(queries).lengthOf(1)
// const query = queries[0]
// const arg = new Arg(1, 'id_limit', 'int', false, 4)
// expect(query).eql(new Query(
// 'hellaLayersQuery', [arg], new QueryBlock(
// 'hellaLayersQuery', 'first_level', new SimpleTable('first_level'), true,
// [
// new QueryColumn('id', 'id'),
// new QueryColumn('word', 'my_word'),
// new QueryBlock(
// 'seconds', 'second_level', new SimpleTable('second_level'), true,
// [
// new QueryColumn('id', 'id'),
// new QueryColumn('word', 'my_word'),
// new QueryBlock(
// 'thirds', 'third_level', new SimpleTable('third_level'), false,
// [
// new QueryColumn('id', 'id'),
// new QueryColumn('word', 'my_word'),
// ],
// [], [], 1,
// )
// ],
// [], [],
// )
// ],
// [new WhereDirective('id', arg, WhereType.Lte)], [],
// ))
// )
// })
// })
|
import style from '../styles/Home.module.css'
import {Component, createRef} from 'react'
// Clamp a number into the inclusive range [0, 1].
function clamp(value) {
  return Math.min(1, Math.max(0, value));
}
// Suppress the browser's default handling for the given event.
const preventDefault = (event) => {
  event.preventDefault();
};
// todo: make functional
export default class SplitSlider extends Component {
constructor(props) {
super(props);
this.state = {
sliderPos: 0.25,
active: false,
}
this.slider = createRef()
this.start = 0
this.width = 0
this.onMouseDown = this.onMouseDown.bind(this)
this.onMouseMove = this.onMouseMove.bind(this)
this.onTouchStart = this.onTouchStart.bind(this)
this.onTouchMove = this.onTouchMove.bind(this)
}
onMouseMove(event) {
const offset = event.clientX - this.start
const ratio = clamp(offset / this.width)
this.setState({sliderPos: ratio})
}
onMouseDown() {
this.setState({active: true})
const {left, right} = this.slider.current.getBoundingClientRect()
this.start = left
this.width = right - left
document.addEventListener('mousemove', this.onMouseMove)
document.addEventListener('mouseup', () => {
this.setState({active: false})
document.removeEventListener('mousemove', this.onMouseMove)
})
}
onTouchStart(event) {
// event.preventDefault()
this.setState({active: true})
const {left, right} = this.slider.current.getBoundingClientRect()
this.start = left
this.width = right - left
this.slider.current.addEventListener('touchmove', this.onTouchMove, {passive: false, capture: false})
this.slider.current.addEventListener('touchend', () => {
this.setState({active: false})
this.slider.current.removeEventListener('touchmove', this.onTouchMove)
})
}
onTouchMove(event) {
event.preventDefault()
const offset = event.touches[0].clientX - this.start
const ratio = clamp(offset / this.width)
this.setState({sliderPos: ratio})
}
render() {
const {sliderPos} = this.state
const {right, left} = this.props
return (
<div className={`${style.splitSlider} ${this.state.active ? style.splitSliderActive: ''}`} onMouseDown={this.onMouseDown} onTouchStart={this.onTouchStart} ref={this.slider}>
<div className={style.splitSliderOuter}>
<img onMouseDown={preventDefault} className={style.splitSliderImg} src={right} alt='' height={897} width={1500} />
<div className={style.splitSliderInner} style={{width:`${sliderPos * 100}%`}}>
<img onMouseDown={preventDefault} className={`${style.splitSliderImgFg}`} src={left} alt='' height={897} width={1500} />
</div>
</div>
{/*<div*/}
{/* className={style.splitSliderHandle}*/}
{/* style={{left: sliderPos * 100 + '%'}}*/}
{/* onMouseDown={this.onMouseDown}*/}
{/* onTouchStart={this.onTouchStart}>*/}
{/* <div className={style.splitSliderHandleBar}>*/}
{/* <div className={style.splitSliderHandleRidge} />*/}
{/* </div>*/}
{/*</div>*/}
</div>
)
}
}
|
<filename>server/src/test/java/com/breakersoft/plow/test/scheduler/dao/StatsDaoTests.java
package com.breakersoft.plow.test.scheduler.dao;
// Placeholder test class for the scheduler stats DAO — no tests implemented yet.
public class StatsDaoTests {
}
|
<filename>Esercitazioni/antoandgarEs1/E5/E5Main.scala
object E5Main extends App {
  // Number of passed tests / number of tests run.
  var score = 0
  var trials = 0

  /**
   * Runs one test case: builds g = E5.somma(f), applies it to (a, b),
   * prints the result next to the expected value r, and bumps the score
   * when they match.
   * The expected values below are consistent with somma(f)(a, b) being
   * the sum f(a) + f(a+1) + ... + f(b) (e.g. 2*5 + 2*6 + 2*7 = 36).
   */
  def test(f:Int=>Int, a:Int, b: Int, r:Int) = {
    trials += 1
    val g:(Int,Int)=>Int = E5.somma(f)
    val s:Int = g(a,b)
    println("Test "+trials+": " + s + " [corretto: " + r + "]")
    score += (if (s==r) 1 else 0)
  }

  test(x=>2*x, 5, 7, 36)
  test(x=>x+1, 1, 4, 14)
  test(x=>2*x+1, 0, 10, 121)
  println("Risultato: " + score + "/" + trials)
}
|
package coap
import (
"errors"
"net"
"net/url"
"time"
gocoap "github.com/dustin/go-coap"
)
const (
	network   = "udp"   // CoAP transport
	maxPktLen = 65536   // largest UDP datagram we are willing to read
	defPort   = ":5683" // IANA default CoAP port
)

// errInvalidScheme is returned when a URL does not use the "coap" scheme.
// (Fixed typo in the message: "porotcol" -> "protocol".)
var errInvalidScheme = errors.New("Invalid protocol scheme")

// conn bundles a UDP connection with a reusable receive buffer.
type conn struct {
	conn *net.UDPConn
	buf  []byte
}
// parseAddr validates the URL scheme and returns a host:port string,
// appending the default CoAP port when the URL carries none.
func parseAddr(addr *url.URL) (string, error) {
	if addr.Scheme != "coap" {
		return "", errInvalidScheme
	}
	host := addr.Host
	if addr.Port() == "" {
		host += defPort
	}
	return host, nil
}
// dial resolves addr, opens a UDP socket to it, and returns a CoAP conn
// with a freshly allocated receive buffer.
func dial(addr string) (*conn, error) {
	udpAddr, err := net.ResolveUDPAddr(network, addr)
	if err != nil {
		return nil, err
	}
	sock, err := net.DialUDP(network, nil, udpAddr)
	if err != nil {
		return nil, err
	}
	return &conn{sock, make([]byte, maxPktLen)}, nil
}
// send transmits req over the connection. For confirmable requests it waits
// for and returns the response; otherwise both results are nil.
func (c *conn) send(req gocoap.Message) (*gocoap.Message, error) {
	if err := transmit(c.conn, nil, req); err != nil {
		return nil, err
	}
	if !req.IsConfirmable() {
		return nil, nil
	}
	resp, err := receive(c.conn, c.buf)
	if err != nil {
		return nil, err
	}
	return &resp, nil
}
// receive reads a single CoAP message from the connection.
func (c *conn) receive() (*gocoap.Message, error) {
	msg, err := receive(c.conn, c.buf)
	if err != nil {
		return nil, err
	}
	return &msg, nil
}
// transmit marshals m and writes it on l — to the connected peer when a is
// nil, or to the explicit address a otherwise.
func transmit(l *net.UDPConn, a *net.UDPAddr, m gocoap.Message) error {
	data, err := m.MarshalBinary()
	if err != nil {
		return err
	}
	if a == nil {
		_, err = l.Write(data)
	} else {
		_, err = l.WriteTo(data, a)
	}
	return err
}
// receive reads one datagram into buf (with a 5-minute read deadline) and
// parses it as a CoAP message.
func receive(conn *net.UDPConn, buf []byte) (gocoap.Message, error) {
	conn.SetReadDeadline(time.Now().Add(time.Minute * 5))
	n, _, err := conn.ReadFromUDP(buf)
	if err != nil {
		return gocoap.Message{}, err
	}
	return gocoap.ParseMessage(buf[:n])
}
|
#! /bin/bash
# SLURM batch job: REXI parallel scalability run (m=256, 4096 MPI ranks) on
# the mpp2 cluster; generated launcher for the SWEET benchmark suite.
#SBATCH -o /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd/run_rexi_par_m000256_t001_n0064_r4096_a1.txt
###SBATCH -e /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd/run_rexi_par_m000256_t001_n0064_r4096_a1.err
#SBATCH -J rexi_par_m000256_t001_n0064_r4096_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=4096
#SBATCH --cpus-per-task=1
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=08:00:00

#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Pin one OpenMP thread per MPI rank.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=1
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo

# Load the compiler / MPI / python toolchain expected by SWEET.
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5

cd /home/martin/workspace/sweet/benchmarks/rexi_tests_lrz_freq_waves/2015_12_27_scalability_rexi_fd
cd ../../../
. local_software/env_vars.sh

# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"

# 4096 ranks, 28 per node, single-threaded per rank; timed with `time -p`.
time -p mpiexec.hydra -genv OMP_NUM_THREADS 1 -envall -ppn 28 -n 4096 ./build/rexi_par_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 64 -U 0 -S 0 --use-specdiff-for-complex-array 1 --rexi-h 0.2 --timestepping-mode 1 --staggering 0 --rexi-m=256 -C -5.0
|
<filename>src/main/java/com/smartbear/jenkins/plugins/loadcomplete/Utils.java
/*
* The MIT License
*
* Copyright (c) 2018, SmartBear Software
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.smartbear.jenkins.plugins.loadcomplete;
import hudson.model.Node;
import hudson.model.TaskListener;
import hudson.remoting.Callable;
import hudson.remoting.VirtualChannel;
import org.jenkinsci.remoting.RoleChecker;
import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Semaphore;
/**
 * Shared helper routines for the LoadComplete Jenkins plugin: remote-node
 * probes (OS check, clock read) and a per-node mutual-exclusion registry.
 */
public class Utils {

    // Static-only utility class; never instantiated.
    private Utils() {
    }

    /**
     * Asks the remote side of the given channel whether it runs Windows.
     * Returns false (and logs) when the remote call fails.
     */
    public static boolean isWindows(VirtualChannel channel, TaskListener listener) {
        try {
            return channel.call(new Callable<Boolean, Exception>() {
                @Override
                public void checkRoles(RoleChecker roleChecker) throws SecurityException {
                    // Stub
                }

                public Boolean call() throws Exception {
                    // Executed on the remote node: inspect its os.name property.
                    String os = System.getProperty("os.name");
                    if (os != null) {
                        os = os.toLowerCase();
                    }
                    return (os != null && os.contains("windows"));
                }
            });
        } catch (Exception e) {
            LCLog.error(listener, Messages.LCTestBuilder_RemoteCallingFailed(), e);
            return false;
        }
    }

    /**
     * Reads the current wall-clock time (millis since epoch) on the remote
     * side of the channel. Returns 0 (and logs) when the remote call fails.
     */
    public static long getSystemTime(VirtualChannel channel, TaskListener listener) {
        try {
            return channel.call(new Callable<Long, Exception>() {
                @Override
                public void checkRoles(RoleChecker roleChecker) throws SecurityException {
                    // Stub
                }

                public Long call() throws Exception {
                    return System.currentTimeMillis();
                }
            });
        } catch (Exception e) {
            LCLog.error(listener, Messages.LCTestBuilder_RemoteCallingFailed(), e);
            return 0;
        }
    }

    /**
     * Serializes access to build nodes: at most one holder per {@link Node}
     * at a time. Nodes are tracked via weak references so this registry does
     * not keep them alive.
     */
    public static class BusyNodeList {
        // One fair binary semaphore per tracked node.
        private Map<WeakReference<Node>, Semaphore> nodeLocks = new HashMap<WeakReference<Node>, Semaphore>();

        /**
         * Blocks until the given node is free, then marks it busy.
         * Logs a waiting message when another build already holds the node.
         */
        public void lock(Node node, TaskListener listener) throws InterruptedException {
            Semaphore semaphore = null;
            synchronized (this) {
                // Linear scan: the map cannot be probed by key because the
                // WeakReference wrapper is the key, not the Node itself.
                for (WeakReference<Node> nodeRef : nodeLocks.keySet()) {
                    Node actualNode = nodeRef.get();
                    if (actualNode != null && actualNode == node) {
                        semaphore = nodeLocks.get(nodeRef);
                    }
                }
                if (semaphore == null) {
                    semaphore = new Semaphore(1, true);
                    nodeLocks.put(new WeakReference<Node>(node), semaphore);
                } else {
                    listener.getLogger().println();
                    LCLog.info(listener, Messages.LCTestBuilder_WaitingForNodeRelease());
                }
            }
            // Acquire outside the monitor so waiting does not block other nodes.
            semaphore.acquire();
        }

        /**
         * Marks the given node free again and prunes its map entry when no
         * one else is holding it.
         */
        public void release(Node node) throws InterruptedException {
            Semaphore semaphore = null;
            synchronized (this) {
                for (WeakReference<Node> nodeRef : nodeLocks.keySet()) {
                    Node actualNode = nodeRef.get();
                    if (actualNode != null && actualNode == node) {
                        semaphore = nodeLocks.get(nodeRef);
                    }
                }
            }
            if (semaphore != null) {
                semaphore.release();
            }
            // Give a queued waiter a chance to acquire before the cleanup scan.
            Thread.sleep(200);
            // cleanup the unused items
            // NOTE(review): this scan only removes entries whose referent is
            // still this node; entries whose Node was garbage-collected
            // (nodeRef.get() == null) are never purged, so the map can grow
            // slowly over time — confirm whether that is acceptable.
            synchronized (this) {
                List<WeakReference<Node>> toRemove = new ArrayList<WeakReference<Node>>();
                for (WeakReference<Node> nodeRef : nodeLocks.keySet()) {
                    Node actualNode = nodeRef.get();
                    if (actualNode != null && actualNode == node) {
                        semaphore = nodeLocks.get(nodeRef);
                        if (semaphore.availablePermits() > 0) {
                            toRemove.add(nodeRef);
                        }
                    }
                }
                for (WeakReference<Node> nodeRef : toRemove) {
                    nodeLocks.remove(nodeRef);
                }
            }
        }
    }
}
/**
 * Validate an e-mail address using PHP's built-in filter.
 *
 * @param mixed $email candidate address
 * @return string|false the filtered e-mail on success, false otherwise
 *                      (note: NOT a plain boolean — callers may use the string)
 */
function validateEmail($email) {
    $result = filter_var($email, FILTER_VALIDATE_EMAIL);
    return $result;
}
<gh_stars>0
import { CommandResult } from './shell-api';
import { expect } from 'chai';
describe('CommandResult', () => {
describe('#shellApiType', () => {
it('returns the type', () => {
const commandResult = new CommandResult('ResultType', 'value');
expect(commandResult.shellApiType()).to.equal('ResultType');
});
});
describe('#toReplString', () => {
it('returns the value', () => {
const commandResult = new CommandResult('ResultType', 'value');
expect(commandResult.toReplString()).to.equal('value');
});
});
});
|
#!/bin/bash
# Provision script: fetch the monolith Reddit app, install its gems, and
# enable the puma systemd service.
# Abort on the first failing step — previously a failed clone or bundle
# install would not stop the service from being enabled.
set -e
git clone -b monolith https://github.com/express42/reddit.git
cd reddit && bundle install
systemctl daemon-reload
systemctl enable puma
|
echo "1st Argument: $1"

# Add up every positional argument (assumed numeric).
sum=0
for num in "$@"; do
  sum=$(( sum + num ))
done

echo $sum
|
<reponame>ministryofjustice/mtp-api
# Generated by Django 2.2.16 on 2020-10-07 13:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django; migrations are frozen history — do not edit logic.

    dependencies = [
        ('credit', '0038_auto_20200804_1139'),
    ]

    operations = [
        # Metadata-only changes (ordering/permissions/latest-by): no schema change.
        migrations.AlterModelOptions(
            name='credit',
            options={'get_latest_by': 'received_at', 'ordering': ('received_at', 'id'), 'permissions': (('view_any_credit', 'Can view any credit'), ('review_credit', 'Can review credit'), ('credit_credit', 'Can credit credit'))},
        ),
        migrations.AlterModelOptions(
            name='privateestatebatch',
            options={'get_latest_by': 'date', 'ordering': ('date',), 'verbose_name_plural': 'private estate batches'},
        ),
        # Rebuild the owner/reconciled/resolution index under the same name.
        migrations.RemoveIndex(
            model_name='credit',
            name='credit_cred_owner_i_cac17f_idx',
        ),
        migrations.AddIndex(
            model_name='credit',
            index=models.Index(fields=['owner', 'reconciled', 'resolution'], name='credit_cred_owner_i_cac17f_idx'),
        ),
    ]
|
<reponame>sander-adhese/prebid-server-java
package org.prebid.server.bidder.adocean;
import com.fasterxml.jackson.core.type.TypeReference;
import com.iab.openrtb.request.Banner;
import com.iab.openrtb.request.BidRequest;
import com.iab.openrtb.request.Device;
import com.iab.openrtb.request.Format;
import com.iab.openrtb.request.Imp;
import com.iab.openrtb.request.Site;
import com.iab.openrtb.request.User;
import com.iab.openrtb.response.Bid;
import io.vertx.core.MultiMap;
import io.vertx.core.http.HttpMethod;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URIBuilder;
import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.message.BasicNameValuePair;
import org.prebid.server.bidder.Bidder;
import org.prebid.server.bidder.adocean.model.AdoceanResponseAdUnit;
import org.prebid.server.bidder.model.BidderBid;
import org.prebid.server.bidder.model.BidderError;
import org.prebid.server.bidder.model.HttpCall;
import org.prebid.server.bidder.model.HttpRequest;
import org.prebid.server.bidder.model.Result;
import org.prebid.server.exception.PreBidException;
import org.prebid.server.json.JacksonMapper;
import org.prebid.server.proto.openrtb.ext.ExtPrebid;
import org.prebid.server.proto.openrtb.ext.request.ExtUser;
import org.prebid.server.proto.openrtb.ext.request.adocean.ExtImpAdocean;
import org.prebid.server.proto.openrtb.ext.response.BidType;
import org.prebid.server.util.HttpUtil;
import java.io.IOException;
import java.math.BigDecimal;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
/**
* Adocean {@link Bidder} implementation.
*/
public class AdoceanBidder implements Bidder<Void> {

    private static final TypeReference<ExtPrebid<?, ExtImpAdocean>> ADOCEAN_EXT_TYPE_REFERENCE =
            new TypeReference<ExtPrebid<?, ExtImpAdocean>>() {
            };

    // Adapter version reported to the endpoint via the "pbsrv_v" query parameter.
    private static final String VERSION = "1.1.0";
    // An imp is only treated as mergeable into an existing request while the
    // encoded URL stays below this length.
    private static final int MAX_URI_LENGTH = 8000;
    private static final String DEFAULT_BID_CURRENCY = "USD";
    // Win/stats beacon snippet prepended to every creative's markup; the two
    // %s slots receive the win URL and the stats URL.
    private static final String MEASUREMENT_CODE_TEMPLATE = " <script> +function() { "
            + "var wu = \"%s\"; "
            + "var su = \"%s\".replace(/\\[TIMESTAMP\\]/, Date.now()); "
            + "if (wu && !(navigator.sendBeacon && navigator.sendBeacon(wu))) { (new Image(1,1)).src = wu } "
            + "if (su && !(navigator.sendBeacon && navigator.sendBeacon(su))) { (new Image(1,1)).src = su } }(); "
            + "</script> ";

    private final String endpointUrl;
    private final JacksonMapper mapper;

    public AdoceanBidder(String endpointUrl, JacksonMapper mapper) {
        this.endpointUrl = HttpUtil.validateUrl(Objects.requireNonNull(endpointUrl));
        this.mapper = Objects.requireNonNull(mapper);
    }

    /**
     * Builds one GET request per impression; an impression sharing a master id
     * with an already-built request may be skipped as a duplicate (see
     * addRequestAndCheckIfDuplicates). Any malformed imp ext aborts the whole
     * call with a bad-input error.
     */
    @Override
    public Result<List<HttpRequest<Void>>> makeHttpRequests(BidRequest request) {
        final User user = request.getUser();
        final ExtUser extUser = user != null ? user.getExt() : null;
        final String consent = extUser != null ? extUser.getConsent() : null;
        final String consentString = StringUtils.isNotBlank(consent) ? consent : "";

        final List<HttpRequest<Void>> httpRequests = new ArrayList<>();
        for (Imp imp : request.getImp()) {
            try {
                final ExtImpAdocean extImpAdocean = parseImpExt(imp);
                final Map<String, String> slaveSizes = new HashMap<>();
                slaveSizes.put(extImpAdocean.getSlaveId(), getImpSizes(imp));
                if (addRequestAndCheckIfDuplicates(httpRequests, extImpAdocean, imp.getId(), slaveSizes,
                        request.getTest())) {
                    continue;
                }
                httpRequests.add(createSingleRequest(request, imp, extImpAdocean, consentString, slaveSizes));
            } catch (PreBidException e) {
                return Result.withError(BidderError.badInput(e.getMessage()));
            }
        }
        return Result.withValues(httpRequests);
    }

    // Extracts the bidder-specific ext; wraps Jackson conversion failures.
    private ExtImpAdocean parseImpExt(Imp imp) {
        try {
            return mapper.mapper().convertValue(imp.getExt(), ADOCEAN_EXT_TYPE_REFERENCE).getBidder();
        } catch (IllegalArgumentException e) {
            throw new PreBidException(e.getMessage(), e);
        }
    }

    /**
     * Returns true when the imp matches an existing request for the same
     * master id (with a distinct slave id) and the resulting URL would stay
     * within MAX_URI_LENGTH, i.e. the imp can be considered covered.
     */
    private boolean addRequestAndCheckIfDuplicates(List<HttpRequest<Void>> httpRequests, ExtImpAdocean extImpAdocean,
                                                   String impid, Map<String, String> slaveSizes, Integer test) {
        for (HttpRequest<Void> request : httpRequests) {
            try {
                final URIBuilder uriBuilder = new URIBuilder(request.getUri());
                final List<NameValuePair> queryParams = uriBuilder.getQueryParams();
                final String masterId = queryParams.stream()
                        .filter(param -> param.getName().equals("id"))
                        .findFirst()
                        .map(NameValuePair::getValue)
                        .orElse(null);

                if (masterId != null && masterId.equals(extImpAdocean.getMasterId())) {
                    // A request already carrying this slave id cannot absorb the imp.
                    final String newSlaveId = queryParams.stream()
                            .filter(param -> param.getName().equals("aid"))
                            .map(param -> param.getValue().split(":")[0])
                            .filter(slaveId -> slaveId.equals(extImpAdocean.getSlaveId()))
                            .findFirst()
                            .orElse(null);
                    if (StringUtils.isNotBlank(newSlaveId)) {
                        continue;
                    }

                    // NOTE(review): these params are added to a local copy only; the
                    // stored request URI is not rewritten — confirm this is intended.
                    queryParams.add(new BasicNameValuePair("aid", extImpAdocean.getSlaveId() + ":" + impid));
                    final List<String> sizeValues = test != null
                            ? setSlaveSizesParam(slaveSizes, test == 1)
                            : setSlaveSizesParam(slaveSizes, false);
                    if (CollectionUtils.isNotEmpty(sizeValues)) {
                        queryParams.add(new BasicNameValuePair("aosspsizes", String.join("-", sizeValues)));
                    }
                    final String url = HttpUtil.encodeUrl(String.valueOf(queryParams));
                    if (url.length() < MAX_URI_LENGTH) {
                        return true;
                    }
                }
            } catch (URISyntaxException e) {
                throw new PreBidException(e.getMessage());
            }
        }
        return false;
    }

    private HttpRequest<Void> createSingleRequest(BidRequest request, Imp imp, ExtImpAdocean extImpAdocean,
                                                  String consentString, Map<String, String> slaveSizes) {
        return HttpRequest.<Void>builder()
                .method(HttpMethod.GET)
                .uri(buildUrl(imp.getId(), extImpAdocean, consentString, request.getTest(), request.getUser(),
                        slaveSizes))
                .headers(getHeaders(request))
                .build();
    }

    // "WxH[_WxH...]" from banner.format, else banner's own w/h, else "".
    private String getImpSizes(Imp imp) {
        final Banner banner = imp.getBanner();
        if (banner == null) {
            return "";
        }

        final List<Format> format = banner.getFormat();
        if (CollectionUtils.isNotEmpty(format)) {
            final List<String> sizes = new ArrayList<>();
            format.forEach(singleFormat -> sizes.add(String.format("%sx%s", singleFormat.getW(), singleFormat.getH())));
            return String.join("_", sizes);
        }

        final Integer w = banner.getW();
        final Integer h = banner.getH();
        if (w != null && h != null) {
            return String.format("%sx%s", w, h);
        }
        return "";
    }

    // Assembles the ad.json GET URL for a single impression.
    private String buildUrl(String impid, ExtImpAdocean extImpAdocean, String consentString, Integer test, User user,
                            Map<String, String> slaveSizes) {
        final String url = endpointUrl.replace("{{Host}}", extImpAdocean.getEmitterDomain());
        // Fixed value in test mode so test-request URLs are reproducible.
        final int randomizedPart = test != null && test == 1 ? 10000000 : 10000000 + (int) (Math.random() * 89999999);
        final String updateUrl = String.format("%s/_%s/ad.json", url, randomizedPart);
        final URIBuilder uriBuilder = new URIBuilder()
                .setPath(updateUrl)
                .addParameter("pbsrv_v", VERSION)
                .addParameter("id", extImpAdocean.getMasterId())
                .addParameter("nc", "1")
                .addParameter("nosecure", "1")
                .addParameter("aid", extImpAdocean.getSlaveId() + ":" + impid);

        if (StringUtils.isNotBlank(consentString)) {
            uriBuilder.addParameter("gdpr_consent", consentString);
            uriBuilder.addParameter("gdpr", "1");
        }

        if (user != null && StringUtils.isNotBlank(user.getBuyeruid())) {
            uriBuilder.addParameter("hcuserid", user.getBuyeruid());
        }

        // In test mode sizes are emitted in sorted key order for determinism.
        final List<String> sizeValues = test != null
                ? setSlaveSizesParam(slaveSizes, test == 1)
                : setSlaveSizesParam(slaveSizes, false);

        if (CollectionUtils.isNotEmpty(sizeValues)) {
            uriBuilder.addParameter("aosspsizes", String.join("-", sizeValues));
        }
        return uriBuilder.toString();
    }

    // Builds "slaveIdSuffix~sizes" entries, skipping slaves with blank sizes;
    // the leading "adocean" is stripped from each slave id.
    private List<String> setSlaveSizesParam(Map<String, String> slaveSizes, boolean orderByKey) {
        final Set<String> slaveIDs = orderByKey ? new TreeSet<>(slaveSizes.keySet()) : slaveSizes.keySet();
        return slaveIDs.stream()
                .filter(slaveId -> StringUtils.isNotBlank(slaveSizes.get(slaveId)))
                .map(rawSlaveID -> String.format("%s~%s", rawSlaveID.replaceFirst("adocean", ""),
                        slaveSizes.get(rawSlaveID)))
                .collect(Collectors.toList());
    }

    private static MultiMap getHeaders(BidRequest request) {
        final MultiMap headers = HttpUtil.headers();
        final Device device = request.getDevice();
        if (device != null) {
            HttpUtil.addHeaderIfValueIsNotEmpty(headers, HttpUtil.USER_AGENT_HEADER, device.getUa());
            // Both IPv4 and IPv6 are forwarded under the same header when present.
            HttpUtil.addHeaderIfValueIsNotEmpty(headers, HttpUtil.X_FORWARDED_FOR_HEADER, device.getIp());
            HttpUtil.addHeaderIfValueIsNotEmpty(headers, HttpUtil.X_FORWARDED_FOR_HEADER, device.getIpv6());
        }

        final Site site = request.getSite();
        if (site != null) {
            HttpUtil.addHeaderIfValueIsNotEmpty(headers, HttpUtil.REFERER_HEADER, site.getPage());
        }
        return headers;
    }

    /**
     * Maps the JSON array of ad units back to bids. Imp ids are recovered from
     * the "aid" params of the original request URL; error ad units and ad
     * units with no matching imp are dropped.
     */
    @Override
    public Result<List<BidderBid>> makeBids(HttpCall<Void> httpCall, BidRequest bidRequest) {
        final List<NameValuePair> params;
        try {
            params = URLEncodedUtils.parse(new URI(httpCall.getRequest().getUri()), StandardCharsets.UTF_8);
        } catch (URISyntaxException e) {
            return Result.withError(BidderError.badInput(e.getMessage()));
        }

        // aid is "slaveId:impId" -> map slaveId to impId.
        final Map<String, String> auctionIds = params != null ? params.stream()
                .filter(param -> param.getName().equals("aid"))
                .map(param -> param.getValue().split(":"))
                .collect(Collectors.toMap(name -> name[0], value -> value[1])) : null;

        final List<AdoceanResponseAdUnit> adoceanResponses;
        try {
            adoceanResponses = getAdoceanResponseAdUnitList(httpCall.getResponse().getBody());
        } catch (PreBidException e) {
            return Result.withError(BidderError
                    .badServerResponse("Failed to decode: No content to map due to end-of-input"));
        }

        final List<BidderBid> bidderBids = adoceanResponses.stream()
                .filter(adoceanResponse -> !adoceanResponse.getError().equals("true"))
                .filter(adoceanResponse -> auctionIds != null
                        && StringUtils.isNotBlank(auctionIds.get(adoceanResponse.getId())))
                .map(adoceanResponse -> BidderBid.of(createBid(auctionIds, adoceanResponse), BidType.banner,
                        getBidCurrency(adoceanResponse)))
                .collect(Collectors.toList());
        return Result.withValues(bidderBids);
    }

    private static Bid createBid(Map<String, String> auctionIds, AdoceanResponseAdUnit adoceanResponse) {
        // Creative markup = tracking beacon snippet + URL-decoded ad code.
        final String adm = String.format(MEASUREMENT_CODE_TEMPLATE, adoceanResponse.getWinUrl(),
                adoceanResponse.getStatsUrl()) + HttpUtil.decodeUrl(adoceanResponse.getCode());
        return Bid.builder()
                .id(adoceanResponse.getId())
                .impid(auctionIds.get(adoceanResponse.getId()))
                .adm(adm)
                .price(new BigDecimal(adoceanResponse.getPrice()))
                .w(Integer.valueOf(adoceanResponse.getWidth()))
                .h(Integer.valueOf(adoceanResponse.getHeight()))
                .crid(adoceanResponse.getCrid())
                .build();
    }

    private static String getBidCurrency(AdoceanResponseAdUnit adoceanResponse) {
        return adoceanResponse.getCurrency() != null
                ? adoceanResponse.getCurrency()
                : DEFAULT_BID_CURRENCY;
    }

    // Deserializes the response body as List<AdoceanResponseAdUnit>.
    private List<AdoceanResponseAdUnit> getAdoceanResponseAdUnitList(String responseBody) {
        try {
            return mapper.mapper().readValue(
                    responseBody,
                    mapper.mapper().getTypeFactory().constructCollectionType(List.class, AdoceanResponseAdUnit.class));
        } catch (IOException ex) {
            throw new PreBidException(ex.getMessage());
        }
    }
}
|
#!/bin/sh
# Package the prebuilt binary as the o2dash test executable.
set -e  # fail fast if the binary is missing instead of copying nothing

chmod +x src/bin/output
cp src/bin/output o2dash-test
#!/bin/sh
# Install build prerequisites and golangci-lint (Alpine base image).
# GOLANGCI_VERSION is expected in the environment — TODO confirm it is exported.
set -e

apk add --no-cache curl g++ git make mercurial
# -b: install into GOPATH/bin; quote expansions so odd paths survive word splitting.
curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b "$(go env GOPATH)/bin" "${GOLANGCI_VERSION}"
|
#!/bin/bash
# Convert enhanced DICOMs with dcm2niix, validate the JSON sidecars, and diff
# the results against the reference conversions shipped with this repository.

# Fail if anything not planed to go wrong, goes wrong
set -eu

# Test if command exists.
exists() {
    test -x "$(command -v "$1")"
}

# exenam is the dcm2niix executable.
# We assume it is in the user's PATH; however, this could be set explicitly, e.g.
#   exenam="/Users/rorden/Documents/cocoa/dcm2niix/console/dcm2niix" batch.sh
# FIX: previously this read the misspelled variable "examnam", so the
# documented exenam=... override never took effect.
exenam=${exenam:-dcm2niix}

# basedir is the folder with "Ref" and "In" subfolders.
# We assume it is the same folder as the script; however, this could be set
# explicitly, e.g. basedir="/Users/rorden/dcm_qa" batch.sh
if [ -z "${basedir:-}" ]; then
    basedir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
fi

#### no need to edit subsequent lines

# folder paths
indir=${basedir}/In/Enhanced
outdir=${basedir}/Out
refdir=${basedir}/Ref

# Inputs may ship gzipped; "|| true" keeps set -e happy when nothing matches.
gzip -d -q ${indir}/*.gz || true
gzip -d -q ${refdir}/*.gz || true

# Check inputs.
exists "$exenam" ||
    {
        echo >&2 "I require $exenam but it's not installed. Aborting."
        exit 1
    }
if [ ! -d "$indir" ]; then
    echo "Error: Unable to find $indir"
    exit 1
fi
if [ ! -d "$refdir" ]; then
    echo "Error: Unable to find $refdir"
    exit 1
fi
if [ ! -d "$outdir" ]; then
    mkdir "$outdir"
fi
if [ ! -z "$(ls "$outdir")" ]; then
    echo "Cleaning output directory: $outdir"
    rm "$outdir"/*
fi

# detect big endian https://github.com/rordenlab/dcm2niix/issues/333
littleEndian=$(echo I | tr -d [:space:] | od -to2 | head -n1 | awk '{print $2}' | cut -c6)
if [[ $littleEndian == "1" ]]; then
    #echo "little-endian hardware: retaining little-endian"
    #return blank so we are compatible with earlier versions of dcm2niix
    endian=""
else
    echo "big-endian hardware: forcing little-endian NIfTIs"
    endian="--big-endian n"
fi

# Convert enhanced images.
cmd="$exenam $endian -b y -z n -f %s_%p_enhanced -o $outdir $indir"
echo "Running command:"
echo $cmd
$cmd

# Validate JSON.
exists python &&
    {
        printf "\n\n\nValidating JSON files.\n\n\n"
        for file in $outdir/*.json; do
            echo -n "$file "
            ! python -m json.tool "$file" > /dev/null || echo " -- Valid."
        done
        printf "\n\n\n"
    }

# remove macOS hidden files if they exist
dsstore=${refdir}/.DS_Store
[ -e "$dsstore" ] && rm "$dsstore"
dsstore=${outdir}/.DS_Store
[ -e "$dsstore" ] && rm "$dsstore"

# check differences, ignoring hidden files and the converter version line
cmd="diff -x '.*' -br $refdir $outdir -I ConversionSoftwareVersion"
echo "Running command:"
echo $cmd
$cmd
|
/*
* Copyright © 2012-2016 VMware, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an “AS IS” BASIS, without
* warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
'use strict';

// Controller backing the lockout-policy dialog: pushes the edited policy to
// the tenant directory configuration and surfaces success/error feedback.
var module = angular.module('lightwave.ui.sso');
module.controller('LockoutPolicyCntrl', [ '$scope', '$rootScope', 'TenantService',
    function($scope, $rootScope, TenantService) {

        $scope.updateLockoutPolicy = updateLockoutPolicy;
        init();

        // Clear any stale error state left over from a previous dialog.
        function init() {
            $rootScope.globals.errors = null;
            $rootScope.globals.popup_errors = null;
        }

        // Persist the lockout policy; on success refresh the parent view and
        // close the dialog, otherwise show the server response in the popup.
        function updateLockoutPolicy(lockoutPolicy) {
            $rootScope.globals.errors = null;
            var policy = {
                lockoutPolicy: lockoutPolicy
            };
            TenantService
                .UpdateDirConfiguration($rootScope.globals.currentUser, policy)
                .then(function (res) {
                    // strict equality: res.status is numeric (was loose ==)
                    if (res.status === 200) {
                        $rootScope.globals.errors = {details: 'Lockout policy updated successfully', success:true};
                        $scope.refresh();
                        $scope.closeThisDialog('save');
                    }
                    else {
                        $rootScope.globals.popup_errors = res.data;
                    }
                });
        }
    }]);
#!/bin/bash
# this script creates all the users/keys on the external cluster
# those keys will be injected via the import-external-cluster.sh once this one is done running
# so you can run import-external-cluster.sh right after this script
set -Eeuo pipefail

#############
# VARIABLES #
#############
# Both are overridable from the environment; defaults below are used otherwise.
: "${CLIENT_CHECKER_NAME:=client.healthchecker}"
: "${RGW_POOL_PREFIX:=default}"

#############
# FUNCTIONS #
#############
# True when the given command resolves in PATH.
function is_available {
  command -v "$@" &>/dev/null
}

# Verify the ceph CLI is installed and the cluster is reachable.
function checkEnv() {
  if ! is_available ceph; then
    echo "'ceph' binary is expected'"
    exit 1
  fi

  if ! ceph -s 1>/dev/null; then
    echo "cannot connect to the ceph cluster"
    exit 1
  fi
}

# Health-checker user: read-only mon access plus the RGW pools it probes.
# Each function prints "export VAR=..." lines for import-external-cluster.sh.
function createCheckerKey() {
  checkerKey=$(ceph auth get-or-create "$CLIENT_CHECKER_NAME" mon 'allow r, allow command quorum_status' osd 'allow rwx pool='"$RGW_POOL_PREFIX"'.rgw.meta, allow r pool=.rgw.root, allow rw pool='"$RGW_POOL_PREFIX"'.rgw.control, allow x pool='"$RGW_POOL_PREFIX"'.rgw.buckets.index, allow x pool='"$RGW_POOL_PREFIX"'.rgw.log'|awk '/key =/ { print $3}')
  echo "export ROOK_EXTERNAL_USER_SECRET=$checkerKey"
  echo "export ROOK_EXTERNAL_USERNAME=$CLIENT_CHECKER_NAME"
}

# CSI RBD node plugin credentials.
function createCephCSIKeyringRBDNode() {
  cephCSIKeyringRBDNodeKey=$(ceph auth get-or-create client.csi-rbd-node mon 'profile rbd' osd 'profile rbd'|awk '/key =/ { print $3}')
  echo "export CSI_RBD_NODE_SECRET_SECRET=$cephCSIKeyringRBDNodeKey"
}

# CSI RBD provisioner credentials.
function createCephCSIKeyringRBDProvisioner() {
  cephCSIKeyringRBDProvisionerKey=$(ceph auth get-or-create client.csi-rbd-provisioner mon 'profile rbd' mgr 'allow rw' osd 'profile rbd'|awk '/key =/ { print $3}')
  echo "export CSI_RBD_PROVISIONER_SECRET=$cephCSIKeyringRBDProvisionerKey"
}

# CSI CephFS node plugin credentials.
function createCephCSIKeyringCephFSNode() {
  cephCSIKeyringCephFSNodeKey=$(ceph auth get-or-create client.csi-cephfs-node mon 'allow r' mgr 'allow rw' osd 'allow rw tag cephfs *=*' mds 'allow rw'|awk '/key =/ { print $3}')
  echo "export CSI_CEPHFS_NODE_SECRET=$cephCSIKeyringCephFSNodeKey"
}

# CSI CephFS provisioner credentials.
function createCephCSIKeyringCephFSProvisioner() {
  cephCSIKeyringCephFSProvisionerKey=$(ceph auth get-or-create client.csi-cephfs-provisioner mon 'allow r' mgr 'allow rw' osd 'allow rw tag cephfs metadata=*'|awk '/key =/ { print $3}')
  echo "export CSI_CEPHFS_PROVISIONER_SECRET=$cephCSIKeyringCephFSProvisionerKey"
}

########
# MAIN #
########
checkEnv
createCheckerKey
createCephCSIKeyringRBDNode
createCephCSIKeyringRBDProvisioner
createCephCSIKeyringCephFSNode
createCephCSIKeyringCephFSProvisioner

echo -e "successfully created users and keys, execute the above commands and run import-external-cluster.sh to inject them in your Kubernetes cluster."
|
package com.wpisen.trace.agent.core;
import com.wpisen.trace.agent.bootstrap.AgentApplication;
import com.wpisen.trace.agent.bootstrap.TraceSessionInfo;
import com.wpisen.trace.agent.common.util.JarUtil;
import com.wpisen.trace.agent.common.util.StringUtils;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.lang.instrument.ClassFileTransformer;
import java.lang.instrument.IllegalClassFormatException;
import java.lang.instrument.Instrumentation;
import java.net.HttpURLConnection;
import java.net.URL;
import java.security.ProtectionDomain;
import java.util.Properties;
import java.util.Set;
/**
* Created by wpisen on 16/11/2.
*/
public class DefaultApplication implements AgentApplication, ClassFileTransformer, AgentFinal {

    private AgentLoader agenLoader;
    private AgentConfigServer configServer;
    // Lazily created process-wide instance; see getInstance().
    static DefaultApplication instance;
    // App id resolved from the first matching ClassLoader; null until affirmed.
    private String currentAppId;
    private TraceSessionInfo sessionInfo; // session info of the current application

    // TODO: all entry points should obtain the instance through this method
    public static DefaultApplication getInstance() {
        // NOTE(review): lazy init is not synchronized — confirm single-threaded use.
        if (instance == null) {
            instance = new DefaultApplication();
        }
        return instance;
    }

    /**
     * Initializes the agent: creates the config server, loads collector
     * components from the supplied paths plus the bundled default jars, and
     * publishes session data into the static configuration.
     */
    @Override
    public void init(TraceSessionInfo session, Properties localProperties, String[] collectPaths, Instrumentation inst) throws Exception {
        configServer = new AgentConfigServer(session.getConfigs(), localProperties);
        // initial load of the agent loader
        agenLoader = new AgentLoader();
        this.sessionInfo = session;
        // load collector components from the explicitly supplied paths
        if (collectPaths != null) {
            for (String coolect : collectPaths) {
                File f = new File(coolect);
                if (f.exists() && f.isFile()) {
                    agenLoader.loadItemByJar(f.toURI().toURL());
                } else if (f.exists() && f.isDirectory()) {
                    agenLoader.loadItemByDirectory(f);
                }
            }
        }
        // load the default collector jars found on the classpath
        Set<URL> collectJarUrls = JarUtil.findSources(getClass()
                .getClassLoader(), "Trace_collects_lib", ".jar");
        for (URL collectJarUrl : collectJarUrls) {
            agenLoader.loadItemByJar(collectJarUrl);
        }
        // publish into the static configuration
        AgentFinal.LOCAL_CONFIG.putAll(localProperties);
        AgentFinal.LOCAL_CONFIG.put(AgentFinal.CLIENT_SESSION_KEY, session);
        //startEchos(session);
    }

    /**
     * Sends heartbeats to the remote API at a fixed frequency from a daemon
     * thread. Currently disabled (the call in init is commented out).
     *
     * @param session current trace session
     */
    @SuppressWarnings("unused")
    private void startEchos(TraceSessionInfo session) {
        final String url = configServer.getRemoteApiAddress() + "?method=client.sendEcho&sessionId=" + session.getSessionId();
        Thread thread = new Thread("trace-echo-Thread-" + configServer.getEchoFrequency() + "_Sec") {
            @Override
            public void run() {
                while (true) {
                    try {
                        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
                        conn.setConnectTimeout(3000);
                        conn.setReadTimeout(3000);
                        InputStream i = conn.getInputStream();
                        i.read(new byte[1024]);
                        i.close();
                        conn.disconnect();
                        Thread.sleep(configServer.getEchoFrequency() * 1000);
                    } catch (IOException e) {
                        // heartbeat delivery failed; log and keep the loop alive
                        new Exception("agent心跳服务发送失败", e).printStackTrace();
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        };
        thread.setDaemon(true);
        thread.start();
    }

    @Override
    public ClassFileTransformer getTransformer() {
        return this;
    }

    /**
     * Instrumentation hook. Returns modified class bytes when a collector
     * rewrites the class, otherwise null (meaning "no transformation");
     * errors are swallowed so class loading never fails because of the agent.
     */
    @Override
    public byte[] transform(ClassLoader loader, String className, Class<?> classBeingRedefined, ProtectionDomain protectionDomain, byte[] classfileBuffer) throws IllegalClassFormatException {
        byte[] result = null;
        try {
            if (currentAppId == null && loader != null) {
                // determine the current application via its ClassLoader
                affirmCurrentApp(loader);
            }
            if (agenLoader != null && className != null && loader != null) {
                // JVM-internal names use '/', config uses '.'
                String cname = className.replaceAll("[/]", ".");
                byte[] agentResult = agenLoader.loadClassByte(cname, loader);
                if (agentResult != null)
                    result = agentResult;
            }
        } catch (Throwable e) {
            e.printStackTrace();
        }
        return result;
    }

    // Match the loader against each configured app's namespace resource to
    // decide which application this JVM is running.
    private void affirmCurrentApp(ClassLoader loader) {
        if (loader == null)
            return;
        for (String id : configServer.getAppIds()) {
            if (StringUtils.hasText(configServer.getNamespace(id))
                    && loader.getResource(configServer.getNamespace(id)) != null) {
                currentAppId = id;
                break;
            }
        }
        if (currentAppId != null) {
            configServer.initAppInfo(currentAppId);
        }
    }

    public AgentConfigServer getConfigServer() {
        return configServer;
    }

    public TraceSessionInfo getSessionInfo() {
        return sessionInfo;
    }

    // Placeholder AppInfo when the application has not been identified yet.
    public AppInfo getAppInfo() {
        if (configServer.getCurrentAppInfo() == null)
            return new AppInfo("-1", "undefined");
        return configServer.getCurrentAppInfo();
    }

    public void setConfigServer(AgentConfigServer configServer) {
        this.configServer = configServer;
    }
}
|
package repository
import (
"database/sql"
"fmt"
"strings"
"time"
"github.com/doug-martin/goqu/v9"
"github.com/lib/pq"
)
// Clock abstracts time.Now so repository code can be tested with a fixed time.
type Clock interface {
	Now() time.Time
}

// DefaultClock is the production Clock backed by the system time.
type DefaultClock struct{}

func (c *DefaultClock) Now() time.Time { return time.Now() }
// insert executes a positional-parameter INSERT of values into table/fields.
func insert(db *goqu.Database, table string, fields []string, values []interface{}) (sql.Result, error) {
	query := createInsertQuery(table, fields, values)
	return db.Exec(query, values...)
}
// upsert performs an upsert keyed on a single column; it simply wraps the
// combined-key variant.
func upsert(db *goqu.Database, table string, key string, fields []string, values []interface{}) (sql.Result, error) {
	keys := []string{key}
	return upsertCombinedKey(db, table, keys, fields, values)
}
// upsertCombinedKey executes an INSERT ... ON CONFLICT (key...) DO UPDATE,
// overwriting every non-key field with the excluded (incoming) value.
func upsertCombinedKey(db *goqu.Database, table string, key []string, fields []string, values []interface{}) (sql.Result, error) {
	query := createInsertQuery(table, append(key, fields...), values)
	assignments := make([]string, len(fields))
	for i, f := range fields {
		assignments[i] = f + " = EXCLUDED." + f
	}
	query += " ON CONFLICT (" + strings.Join(key, ",") + ") DO UPDATE SET " + strings.Join(assignments, ",")
	return db.Exec(query, values...)
}
// createInsertQuery renders "INSERT INTO table(f1,f2) VALUES ($1,$2),($3,$4),..."
// with one placeholder group per row, i.e. len(values)/len(fields) rows.
// Uses strings.Builder instead of repeated string concatenation (the old
// version was quadratic) and drops the non-idiomatic `for k, _ := range`.
func createInsertQuery(table string, fields []string, values []interface{}) string {
	var sb strings.Builder
	sb.WriteString("INSERT INTO ")
	sb.WriteString(table)
	sb.WriteString("(")
	sb.WriteString(strings.Join(fields, ","))
	sb.WriteString(") VALUES ")
	for i := 0; i < len(values); i += len(fields) {
		if i != 0 {
			sb.WriteString(",")
		}
		sb.WriteString("(")
		for k := range fields {
			if k != 0 {
				sb.WriteString(",")
			}
			fmt.Fprintf(&sb, "$%d", i+k+1)
		}
		sb.WriteString(")")
	}
	return sb.String()
}
func ParseNullString(nullString sql.NullString) string {
if !nullString.Valid {
return ""
}
return nullString.String
}
func ParseNullBool(nullBool sql.NullBool) bool {
if !nullBool.Valid {
return false
}
return nullBool.Bool
}
func ParseNullInt(nullInt sql.NullInt64) int64 {
if !nullInt.Valid {
return 0
}
return nullInt.Int64
}
func ParseNullFloat(nullFloat sql.NullFloat64) float64 {
if !nullFloat.Valid {
return 0
}
return nullFloat.Float64
}
// ParseNullTime unwraps a pq.NullTime, mapping NULL to nil.
func ParseNullTime(nullTime pq.NullTime) *time.Time {
	if nullTime.Valid {
		return &nullTime.Time
	}
	return nil
}
// ParseNullTimeDefault unwraps a pq.NullTime, mapping NULL to the zero time.
func ParseNullTimeDefault(nullTime pq.NullTime) time.Time {
	if nullTime.Valid {
		return nullTime.Time
	}
	return time.Time{}
}
|
#!/bin/bash
echo '
-------------------------------------------------------------------------
| Time : Wed Apr 29 21:50:24 CST 2020
| Author: Melo.DC
| Email : melo.dachar@gmail.com
| Func. : install mutt
| Param.: None
| Exam. :
------------------------------------------------------------------------
'
echo install mutt
# NOTE(review): despite the banner, the Ubuntu branch installs deepin-wine
# WeChat/WXWork rather than mutt — confirm intent. $dtl is assumed to be an
# exported download/work directory — TODO confirm the caller sets it.
if [ -n "$(uname -a | grep -i ubuntu)" ]; then
    # sudo apt-get update && sudo apt-get upgrade
    git clone https://gitee.com/wszqkzqk/deepin-wine-for-ubuntu.git $dtl/git
    cd $dtl/git/deepin-wine-for-ubuntu
    ./install.sh
    wget -P $dtl/wget http://mirrors.aliyun.com/deepin/pool/non-free/d/deepin.com.weixin.work/deepin.com.weixin.work_2.4.16.1347deepin0_i386.deb
    wget -P $dtl/wget http://mirrors.aliyun.com/deepin/pool/non-free/d/deepin.com.wechat/deepin.com.wechat_2.6.8.65deepin0_i386.deb
    wget -P $dtl/wget http://mirrors.aliyun.com/deepin/pool/non-free/d/deepin.com.wechat/deepin.com.wechat_2.6.2.31deepin0_i386.deb
    # FIX: removed the stray trailing slash after the .deb path (dpkg rejects it).
    sudo dpkg -i $dtl/wget/deepin.com.weixin.work_2.4.16.1347deepin0_i386.deb
    # FIX: the filename was misspelled "ddeepin...", so the downloaded package
    # could never be found by dpkg.
    sudo dpkg -i $dtl/wget/deepin.com.wechat_2.6.8.65deepin0_i386.deb
    unzip -d ~/.deepinwine/Deepin-WXWork/drive_c/windows/Fonts $dtl/download-small/simsun.zip
    unzip -d ~/.deepinwine/Deepin-WeChat/drive_c/windows/Fonts $dtl/download-small/simsun.zip
elif [ -n "$(uname -a | grep -i centos)" ]; then
    yum update
elif [ -n "$(uname -a | grep -i darwin)" ]; then
    brew install mutt
else
    echo Unknown system
fi
echo copy .mutt to home folder
|
<gh_stars>0
// Variables used by Scriptable.
// These must be at the very top of the file. Do not edit.
// icon-color: deep-green; icon-glyph: magic;
// Get Team ID from https://www.thesportsdb.com and add it as widget parameter
const TEAM_ID = args.widgetParameter || 135252  // falls back to a default team id
const DARK_MODE = true
const widgetSize = config.widgetFamily || 'small'  // family is unset when run in-app
// Layout constants (points).
const textSize = 9.5
const logoSize = 38
const logoSmallSize = 22
const spacing = { normal: 8, smaller: 6, vs: 5, widget: 10 }
console.log(TEAM_ID)
// Fetch a URL via Scriptable's Request using the given loader method
// ('loadJSON' by default, e.g. 'loadImage' for binary badges).
const fetchData = async (url, type = 'loadJSON') => {
  const request = new Request(url)
  return request[type]()
}
// Resolve a team id to its badge image and stadium name via thesportsdb.
const getTeamData = async id => {
  const url = 'https://www.thesportsdb.com/api/v1/json/1/lookupteam.php?id='
  const teamUrl = url + id
  let fullData = await fetchData(teamUrl);
  console.log(fullData);  // NOTE(review): leftover debug logging — consider removing
  const data = fullData.teams[0]
  return {
    // '/preview' requests the smaller badge variant
    image: await fetchData(`${data.strTeamBadge}/preview`, 'loadImage'),
    stadium: data.strStadium
  }
}
// Load the next scheduled fixtures for the configured team.
const getTeamEvents = async () => {
  const endpoint = `https://www.thesportsdb.com/api/v1/json/1/eventsnext.php?id=${TEAM_ID}`
  const { events } = await fetchData(endpoint)
  return events
}
// Expand the hero fixture: both badges are fetched, the stadium always comes
// from the home side.
const getUpcomingEventData = async event => {
  const homeData = await getTeamData(event.idHomeTeam)
  const awayData = await getTeamData(event.idAwayTeam)
  return {
    competition: event.strLeague,
    homeLogo: homeData.image,
    awayLogo: awayData.image,
    homeTeam: event.strHomeTeam,
    awayTeam: event.strAwayTeam,
    date: event.strTimestamp,
    stadium: homeData.stadium,
  }
}
const getRestEventsData = async events => {
const output = []
for (const event of events) {
const isHomeTeam = event.idHomeTeam ==TEAM_ID
const team = await getTeamData(event[isHomeTeam ? 'idAwayTeam' : 'idHomeTeam'])
output.push({
competition: event.strLeague,
logo: team.image,
team: event[isHomeTeam ? 'strAwayTeam' : 'strHomeTeam'],
date: event.strTimestamp,
stadium: 'stadium',
text: isHomeTeam ? 'vs' : 'at',
})
}
return output
}
// Format a match timestamp, substituting "Today"/"Tomorrow" phrasing when
// useToday is set and the match falls within the next two days.
const getFormattedDate = (timestamp, useToday = true) => {
  const millisPerDay = 24 * 60 * 60 * 1000
  const formats = [
    "MMM d, yyyy 'at' h:mm a",
    "'Tomorrow at' h:mm a",
    "'Today at' h:mm a",
  ]
  const date = new Date(timestamp)
  // Compare at midnight so the day difference comes out as a whole number.
  const matchDay = (new Date(date)).setHours(0, 0, 0, 0)
  const today = (new Date()).setHours(0, 0, 0, 0)
  const diff = (matchDay - today) / millisPerDay
  // index 2 = today, 1 = tomorrow, 0 = absolute date
  const format = useToday ? (diff < 1 ? 2 : diff < 2 ? 1 : 0) : 0
  const dateFormatter = new DateFormatter()
  dateFormatter.dateFormat = formats[format]
  return dateFormatter.string(date)
}
// --- low-level layout helpers --------------------------------------------

// Add a text element; 'bold' enlarges and bolds it, 'small' dims it to 50%.
const addText = (el, string, type) => {
  const text = el.addText(string)
  text.font = type === 'bold' ?
    Font.boldSystemFont(textSize * 1.2) :
    Font.regularSystemFont(textSize)
  text.textColor = new Color(DARK_MODE ? '#ffffff' : '#000000', 1)
  text.lineLimit = 1
  text.textOpacity = type === 'small' ? 0.5 : 1
  text.centerAlignText()
}

// Add a square image with the given edge length (defaults to the big badge size).
const addImage = (el, src, size = logoSize) => {
  const image = el.addImage(src)
  image.imageSize = new Size(size, size)
}

// Spacer sized from the spacing table. An unknown/missing type passes
// undefined through — presumably producing a flexible spacer; confirm.
const addSpacer = (el, type) => {
  el.addSpacer(spacing[type])
}

// Create a child stack; horizontal by default, optionally centered and sized.
const addStack = (el, type = 'horizontal', centered = false, size) => {
  const stack = el.addStack()
  if (type === 'vertical') stack.layoutVertically()
  else stack.layoutHorizontally()
  if (centered) stack.centerAlignContent()
  if (size) stack.size = size
  return stack
}

// Home badge, "vs" label, away badge — centered by flexible outer spacers.
const addLogos = (el, homeLogo, awayLogo) => {
  const s = addStack(el, 'horizontal', true)
  addSpacer(s)
  addImage(s, homeLogo)
  addSpacer(s, 'vs')
  addText(s, 'vs')
  addSpacer(s, 'vs')
  addImage(s, awayLogo)
  addSpacer(s)
}

// Base widget with theme background and uniform padding.
const initWidget = () => {
  const w = new ListWidget()
  w.backgroundColor = new Color(DARK_MODE ? '#1B1B1B' : '#FFFFFF', 1)
  w.setPadding(
    spacing.widget, spacing.widget,
    spacing.widget, spacing.widget,
  )
  return w
}

// A single line of text, horizontally centered by flexible spacers.
const addCenteredText = (el, text, type) => {
  const s = addStack(el, 'horizontal', true)
  addSpacer(s)
  addText(s, text, type)
  addSpacer(s)
}
// Fill `el` with the hero layout for the next fixture: league, both badges,
// team names, kickoff time and stadium. The typeless addSpacer calls at the
// edges act as flexible padding.
const initUpcomingEvent = (el, event) => {
  addSpacer(el)
  addCenteredText(el, event.competition)
  addSpacer(el, 'normal')
  addLogos(el, event.homeLogo, event.awayLogo)
  addSpacer(el, 'normal')
  addCenteredText(el, event.homeTeam.toUpperCase(), 'bold')
  addCenteredText(el, event.awayTeam.toUpperCase(), 'bold')
  addSpacer(el, 'smaller')
  addCenteredText(el, getFormattedDate(event.date))
  addCenteredText(el, event.stadium)
  addSpacer(el)
}
// Render up to four compact rows: "vs|at <logo> TEAM / date".
const initRestEvents = (el, events) => {
  events.forEach((data, idx) => {
    const hs = addStack(el, 'horizontal', true)
    addText(hs, data.text, 'small')
    addSpacer(hs, 'vs')
    addImage(hs, data.logo, logoSmallSize)
    addSpacer(hs, 'vs')
    const vs = addStack(hs, 'vertical')
    addText(vs, data.team.toUpperCase(), 'bold')
    addText(vs, getFormattedDate(data.date, false), 'small')
    // FIX: 'smaller' is the key defined in `spacing`; the previous 'small'
    // resolved to undefined and yielded a flexible spacer instead of 6pt.
    if (idx < 3) addSpacer(el, 'smaller')
  })
}
// Assemble the widget: 'small' shows only the hero fixture; 'medium' shows
// the hero plus up to four further fixtures side by side. Any other family
// yields an empty themed widget.
const createNextMatchWidget = async () => {
  const events = await getTeamEvents()
  const widget = initWidget()
  if (widgetSize === 'small') {
    const upcomingEventData = await getUpcomingEventData(events[0])
    initUpcomingEvent(widget, upcomingEventData)
  } else if (widgetSize === 'medium') {
    const upcomingEventData = await getUpcomingEventData(events[0])
    const restEventData = await getRestEventsData(events.slice(1, 5))
    const s = addStack(widget, 'horizontal', true)
    initUpcomingEvent(addStack(s, 'vertical', true, new Size(130, 135)), upcomingEventData)
    addSpacer(s, 'normal')
    initRestEvents(addStack(s, 'vertical', true, new Size(160, 135)), restEventData)
  }
  return widget
}
// Entry point: build the widget, register it for the home screen, then
// preview it when run inside the Scriptable app.
const widget = await createNextMatchWidget()
Script.setWidget(widget)
Script.complete()
await widget.presentMedium()
<filename>13/13a.py
#!/usr/bin/env python3
import argparse
import numpy as np
from collections import Counter
import sys
np.set_printoptions(threshold=sys.maxsize)
class ship:
    """Unused stub — never instantiated in this solution.

    Presumably left over from a previous day's puzzle (TODO: remove or
    confirm it is needed).
    """
    def __init__(self, directions):
        pass
    def chugchug(self):
        pass
def closest(n, m):
    """Return the largest multiple of ``m`` that does not exceed ``n``.

    The original computed ``int(n / m) * m``: true division goes through a
    float, which silently loses precision once ``n`` exceeds 2**53, and
    ``int()`` truncates toward zero rather than flooring. Floor division
    is exact for arbitrarily large integers.

    Args:
        n: Target integer (a timestamp in this puzzle).
        m: Positive step (a bus line interval).

    Returns:
        ``m * (n // m)``.
    """
    return m * (n // m)
def main(args):
    """Solve Advent of Code 2020 day 13, part 1.

    Reads the puzzle input (line 1: earliest departure timestamp; line 2:
    comma-separated bus ids with ``x`` placeholders), finds the first bus
    departing at or after the timestamp, and prints
    ``wait_minutes * bus_id``.
    """
    with open(args.input, 'r') as fh:
        earliest_departure = int(fh.readline())
        raw_ids = fh.readline().split(',')

    # Drop the 'x' placeholders; everything else is a bus id.
    bus_lines = [int(tok) for tok in raw_ids if tok != 'x']

    # For each bus, compute its first departure at or after our arrival.
    time_table = {}
    for bus in bus_lines:
        depart = closest(earliest_departure, bus)
        if depart < earliest_departure:
            depart += bus
        time_table[bus] = depart

    # Pick the bus with the earliest such departure (ties keep the last
    # candidate in insertion order, matching the original <= comparison).
    smallest = earliest_departure * 2
    smallest_bus = None
    for bus, depart in time_table.items():
        if depart <= smallest:
            smallest, smallest_bus = depart, bus

    print((smallest - earliest_departure) * smallest_bus)
if __name__ == '__main__':
    # CLI entry point: parse --input (path to the puzzle file) and solve.
    desc = 'Advent 13a'
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('--input', type=str, help='Puzzle Input')
    args = parser.parse_args()
    main(args)
|
<gh_stars>100-1000
YUI.add('aui-modal-tests', function(Y) {
//--------------------------------------------------------------------------
// Modal Tests
//--------------------------------------------------------------------------
var suite = new Y.Test.Suite('aui-modal'),
modal,
boundingBox,
CSS_MODAL_OPEN = Y.getClassName('modal-open'),
ERROR_PLUGIN_AVAILABLE = '{0} plugin should not be available',
ERROR_PLUGIN_MISSING = '{0} plugin was not plugged',
ERROR_PLUGIN_OVERRIDEN = '{0} attribute should not be overriden',
ERROR_PLUGIN_PLUGGED = '{0} plugin should not be already plugged',
TOUCH_ENABLED = Y.UA.touchEnabled;
//--------------------------------------------------------------------------
// Test Case for Plug/Unplug
//--------------------------------------------------------------------------
suite.add(new Y.Test.Case({
name: 'Plug/Unplug',
setUp: function() {
if (modal) {
modal.destroy();
}
modal = new Y.Modal().render('#modal');
boundingBox = modal.get('boundingBox');
},
tearDown: function() {
modal.destroy();
modal = null;
boundingBox = null;
},
//----------------------------------------------------------------------
// Tests
//----------------------------------------------------------------------
'toggle drag functionality': function() {
if (!TOUCH_ENABLED) {
Y.Assert.isUndefined(
modal.dd,
Y.Lang.sub(ERROR_PLUGIN_OVERRIDEN, ['dd']));
Y.Assert.isUndefined(
modal.hasPlugin('dd'),
Y.Lang.sub(ERROR_PLUGIN_PLUGGED, ['dd']));
boundingBox.simulate('click');
}
Y.Assert.isNotUndefined(
modal.hasPlugin('dd'),
Y.Lang.sub(ERROR_PLUGIN_MISSING, ['dd']));
modal.set('draggable', false);
Y.Assert.isUndefined(
modal.hasPlugin('dd'),
Y.Lang.sub(ERROR_PLUGIN_AVAILABLE, ['dd']));
modal.set('draggable', true);
if (!TOUCH_ENABLED) {
Y.Assert.isUndefined(
modal.hasPlugin('dd'),
Y.Lang.sub(ERROR_PLUGIN_PLUGGED, ['dd']));
boundingBox.simulate('click');
}
Y.Assert.isNotUndefined(
modal.hasPlugin('dd'),
Y.Lang.sub(ERROR_PLUGIN_MISSING, ['dd']));
}
}));
//--------------------------------------------------------------------------
// Test Case for Events
//--------------------------------------------------------------------------
suite.add(new Y.Test.Case({
name: 'Events',
setUp: function() {
if (modal) {
modal.destroy();
}
modal = new Y.Modal().render('#modal');
boundingBox = modal.get('boundingBox');
},
tearDown: function() {
modal.destroy();
modal = null;
boundingBox = null;
},
//----------------------------------------------------------------------
// Tests
//----------------------------------------------------------------------
'listen after visibleChange with destroyOnHide enabled': function() {
var mock = new Y.Mock();
Y.Mock.expect(
mock, {
args: [YUITest.Mock.Value.Object],
method: 'afterVisibleChange'
}
);
modal.after('visibleChange', mock.afterVisibleChange);
modal.set('destroyOnHide', true);
modal.hide();
Y.Mock.verify(mock);
}
}));
//--------------------------------------------------------------------------
// Test Case for Scroll
//--------------------------------------------------------------------------
suite.add(new Y.Test.Case({
name: 'Scroll',
setUp: function() {
if (modal) {
modal.destroy();
}
modal = new Y.Modal().render('#modal');
boundingBox = modal.get('boundingBox');
},
tearDown: function() {
modal.destroy();
modal = null;
boundingBox = null;
},
//----------------------------------------------------------------------
// Tests
//----------------------------------------------------------------------
'check modal-open class after visibleChange': function() {
var elements = Y.all('body,html');
modal.show();
var modalOpen = elements.hasClass(CSS_MODAL_OPEN);
Y.Assert.isTrue(modalOpen[0]);
Y.Assert.isTrue(modalOpen[1]);
modal.hide();
modalOpen = elements.hasClass(CSS_MODAL_OPEN);
Y.Assert.isFalse(modalOpen[0]);
Y.Assert.isFalse(modalOpen[1]);
}
}));
Y.Test.Runner.add(suite);
}, '', {
requires: ['aui-modal', 'aui-node-base', 'node-event-simulate', 'test']
});
|
<filename>android_app_lms/DeliveryApp/app/src/main/java/edu/vesit/deliveryapp/RegisterActivity.java
package edu.vesit.deliveryapp;

import android.content.Context;
import android.os.Bundle;
import android.provider.Settings;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;

import java.util.Random;

/**
 * Registration screen for delivery personnel.
 *
 * Collects name, email, password (entered twice) and phone number, performs
 * basic client-side validation, and submits the registration via
 * {@link RegisterUserTask} together with a device-derived identifier.
 */
public class RegisterActivity extends AppCompatActivity
{
    private Context context;

    // Form inputs bound from activity_register.xml.
    EditText register_name, register_email, register_password, register_password_reenter, register_phone;
    Button register_delivery_boy;

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_register);

        context = this;

        register_name = (EditText) findViewById(R.id.register_name);
        register_email = (EditText) findViewById(R.id.register_email);
        register_password = (EditText) findViewById(R.id.register_password);
        register_password_reenter = (EditText) findViewById(R.id.register_password_reenter);
        register_phone = (EditText) findViewById(R.id.register_phone);

        register_delivery_boy = (Button) findViewById(R.id.register_delivery_boy);

        register_delivery_boy.setOnClickListener(new View.OnClickListener()
        {
            @Override
            public void onClick(View v)
            {
                String enteredName = register_name.getText().toString();
                String enteredEmail = register_email.getText().toString();
                String enteredPassword = register_password.getText().toString();
                String enteredPasswordRe = register_password_reenter.getText().toString();
                String enteredPhone = register_phone.getText().toString();

                // NOTE(review): "valid email/phone" toasts only guard against
                // blank input; no format validation is performed — confirm
                // whether the server validates, or add Patterns.EMAIL_ADDRESS.
                if (enteredName.trim().equals(""))
                    Toast.makeText(context, "Please enter a name", Toast.LENGTH_SHORT).show();
                else if (enteredEmail.trim().equals(""))
                    Toast.makeText(context, "Please enter a valid email", Toast.LENGTH_SHORT).show();
                else if (enteredPassword.trim().equals(""))
                    Toast.makeText(context, "Please enter a valid password", Toast.LENGTH_SHORT).show();
                else if (enteredPhone.trim().equals(""))
                    Toast.makeText(context, "Please enter a valid phone number", Toast.LENGTH_SHORT).show();
                else if (!enteredPassword.equals(enteredPasswordRe))
                    Toast.makeText(context, "The entered passwords do not match", Toast.LENGTH_SHORT).show();
                else
                {
                    // Device id plus a random suffix; presumably to keep ids
                    // unique across reinstalls — confirm with the backend.
                    String id = Settings.Secure.getString(getContentResolver(), Settings.Secure.ANDROID_ID);
                    id += ("_" + new Random().nextInt(1000));

                    Log.e("Registered id : ", id);

                    new RegisterUserTask(context).execute(enteredName, enteredEmail, enteredPassword, enteredPhone, id);
                }
            }
        });
    }
}
<filename>blog/src/resolvers/mutation.js
const bcrypt = require("bcryptjs");
const jwt = require("jsonwebtoken");
const { APP_SECRET, getUserId } = require("../utils");
/**
 * GraphQL `signup` mutation: create a user with a bcrypt-hashed password
 * and return an auth payload `{ token, user }`.
 */
async function signup(parent, args, context, info) {
  // Never persist the plaintext password; hash it with a cost factor of 10.
  const hashed = await bcrypt.hash(args.password, 10);

  const user = await context.prisma.user.create({
    data: { ...args, password: hashed }
  });

  // The signed token carries only the user id; APP_SECRET verifies it later.
  const token = jwt.sign({ userId: user.id }, APP_SECRET);

  return { token, user };
}
/**
 * GraphQL `login` mutation: verify credentials and return `{ token, user }`.
 * Throws when the email is unknown or the password does not match.
 */
async function login(parent, args, context, info) {
  const user = await context.prisma.user.findUnique({
    where: { email: args.email }
  });
  if (!user) {
    throw new Error("No such user found");
  }

  // Compare the supplied password against the stored bcrypt hash.
  const valid = await bcrypt.compare(args.password, user.password);
  if (!valid) {
    throw new Error("Invalid password");
  }

  const token = jwt.sign({ userId: user.id }, APP_SECRET);

  return { token, user };
}
async function updateUser(parent, args, context, info) {
const updateUser = await context.prisma.user.update({
where: {
id: parseInt(args.id)
},
data: {
email: args.email,
name: args.name
}
});
return updateUser;
}
async function deleteUser(parent, args, context, info) {
const deleteUser = await context.prisma.user.delete({
where: {
id: parseInt(args.id)
}
});
return deleteUser;
}
async function createPost(parent, args, context, info) {
const { userId } = context;
return await context.prisma.post.create({
data: {
title: args.title,
author: { connect: { id: userId } }
}
});
}
async function updatePost(parent, args, context, info) {
const updatePost = await context.prisma.post.update({
where: {
id: parseInt(args.id)
},
data: {
title: args.title,
content: args.content,
published: args.published
}
});
return updatePost;
}
async function deletePost(parent, args, context, info) {
const deletePost = await context.prisma.post.delete({
where: {
id: parseInt(args.id)
}
});
return deletePost;
}
async function follow(parent, args, context, info) {
const { userId } = context;
if (!userId) throw Error("Not authenticated");
if (userId === parseInt(args.id)) {
throw Error("Can't follow himself");
}
try {
await context.prisma.user.update({
data: {
following: {
connect: { id: parseInt(args.id) }
}
},
where: {
id: userId
}
});
return true;
} catch {
return false;
}
}
async function unfollow(parent, args, context, info) {
const { userId } = context;
if (!userId) throw Error("Not authenticated");
if (userId === parseInt(args.id)) {
throw Error("Can't unfollow himself");
}
try {
await context.prisma.user.update({
where: {
id: userId
},
data: {
following: {
disconnect: { id: parseInt(args.id) }
}
}
});
return true;
} catch {
return false;
}
}
async function like(parent, args, context, info) {
const { userId } = context;
if (!userId) throw Error("You need to be authenticated");
const [isLikeExist] = await context.prisma.like.findMany({
where: {
AND: [{ user: { id: userId } }, { post: { id: parseInt(args.id) } }]
},
select: {
id: true
}
});
if (isLikeExist) {
throw Error([isLikeExist]);
context.prisma.like.delete({
where: {
AND: [{ user: { id: userId } }, { post: { id: parseInt(args.id) } }]
}
});
return true;
}
if (!isLikeExist) {
// throw Error("Gonna be liked");
try {
await context.prisma.like.create({
data: {
user: { connect: { id: userId } },
post: { connect: { id: parseInt(args.id) } }
}
});
return true;
} catch {
return false;
}
}
}
async function unlike(parent, args, context, info) {
try {
context.prisma.like.delete({
where: {
post: { id: parseInt(args.id) }
}
});
return true;
} catch {
return false;
}
}
async function createComment(parent, args, context, info) {
const { userId } = context;
if (!userId) throw Error("You need to be authenticated");
return await context.prisma.comment.create({
data: {
text: args.text,
user: { connect: { id: userId } },
post: { connect: { id: parseInt(args.id) } }
}
});
}
async function createCommentToComment(parent, args, context, info) {
const { userId } = context;
if (!userId) throw Error("You need to be authenticated");
return await context.prisma.commentToComment.create({
data: {
text: args.text,
user: { connect: { id: userId } },
comment: { connect: { id: parseInt(args.id) } }
}
});
}
// Resolver map wired into the GraphQL `Mutation` type.
module.exports = {
  signup,
  login,
  updateUser,
  deleteUser,
  createPost,
  updatePost,
  deletePost,
  follow,
  unfollow,
  like,
  createComment,
  createCommentToComment,
  unlike
};
|
"use strict";

// Generated icon-data module (transpiled ES-module output): describes the
// "magnifying glass / remove" SVG as a nested attribute tree. Do not edit
// by hand — regenerate from the source SVG instead.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.magnifying_glass_remove = void 0;

var magnifying_glass_remove = {
  "viewBox": "0 0 64 64",
  "children": [{
    "name": "g",
    "attribs": {
      "id": "MAGNIFYING_GLASS__x2F__REMOVE_1_",
      "enable-background": "new    "
    },
    "children": [{
      "name": "g",
      "attribs": {
        "id": "MAGNIFYING_GLASS__x2F__REMOVE"
      },
      "children": [{
        "name": "g",
        "attribs": {
          "id": "MAGNIFYING_GLASS__x2F__REMOVE"
        },
        "children": [{
          "name": "g",
          "attribs": {},
          "children": [{
            "name": "g",
            "attribs": {},
            "children": [{
              "name": "path",
              "attribs": {
                "d": "M62.243,53.758L44.658,36.173C46.768,32.602,48,28.449,48,24C48,10.745,37.255,0,24,0S0,10.745,0,24s10.745,24,24,24\r\n\t\t\t\tc4.449,0,8.602-1.232,12.173-3.342l17.585,17.584C54.843,63.329,56.343,64,58,64c3.314,0,6-2.686,6-6\r\n\t\t\t\tC64,56.343,63.328,54.843,62.243,53.758z M24,42c-9.941,0-18-8.059-18-18c0-9.941,8.059-18,18-18c9.941,0,18,8.059,18,18\r\n\t\t\t\tC42,33.941,33.941,42,24,42z M34,21H14c-1.657,0-3,1.343-3,3s1.343,3,3,3h20c1.657,0,3-1.343,3-3C37,22.344,35.657,21,34,21z"
              },
              // NOTE(review): the inner path repeats the identical "d" value —
              // presumably a quirk of the SVG-to-JS generator; confirm before
              // deduplicating.
              "children": [{
                "name": "path",
                "attribs": {
                  "d": "M62.243,53.758L44.658,36.173C46.768,32.602,48,28.449,48,24C48,10.745,37.255,0,24,0S0,10.745,0,24s10.745,24,24,24\r\n\t\t\t\tc4.449,0,8.602-1.232,12.173-3.342l17.585,17.584C54.843,63.329,56.343,64,58,64c3.314,0,6-2.686,6-6\r\n\t\t\t\tC64,56.343,63.328,54.843,62.243,53.758z M24,42c-9.941,0-18-8.059-18-18c0-9.941,8.059-18,18-18c9.941,0,18,8.059,18,18\r\n\t\t\t\tC42,33.941,33.941,42,24,42z M34,21H14c-1.657,0-3,1.343-3,3s1.343,3,3,3h20c1.657,0,3-1.343,3-3C37,22.344,35.657,21,34,21z"
                },
                "children": []
              }]
            }]
          }]
        }]
      }]
    }]
  }]
};
exports.magnifying_glass_remove = magnifying_glass_remove;
import type { RequestHandler } from './request-handler';
/**
* Request processing capability.
*
* Modifies request processing context in a certain way when delegates to handler.
*
* Request processing capabilities could be {@link RequestCapability.combine combined}.
*
* @typeParam TInput - A type of request processing means required in order to apply this capability.
* @typeParam TExt - A type of extension to request processing means this capability applies.
*/
export abstract class RequestCapability<TInput, TExt = object> {

  /**
   * Builds request capability by the given `provider`.
   *
   * @typeParam TInput - A type of request processing means required by this provider.
   * @typeParam TExt - A type of extension to request processing means this provider applies.
   * @param provider - Request processing capability provider.
   *
   * @returns Request processing capability that call the given `provider` in order to apply.
   */
  static of<TInput, TExt>(
      this: void,
      provider: RequestCapability.Provider<TInput, TExt>,
  ): RequestCapability<TInput, TExt> {
    // An object literal satisfies the abstract class structurally; the
    // `and` closure refers back to `capability` so chaining works without
    // a class instance.
    const capability: RequestCapability<TInput, TExt> = {
      for: provider,
      and<TNext>(next: RequestCapability<TInput & TExt, TNext>): RequestCapability<TInput, TExt & TNext> {
        return RequestCapability.combine(capability, next);
      },
    };

    return capability;
  }

  /**
   * Combines two request processing capabilities.
   *
   * @typeParam TInput - A type of request processing means expected by the `first` capability.
   * @typeParam TExt - A type of request processing means extension applied by the `first` capability.
   * @typeParam TNext - A type of request processing means extension applied by the `second` capability.
   * @param first - First capability to combine.
   * @param second - Second capability to combine. Receives requests modified by the `first` one.
   *
   * @returns Combined request processing capability that applies modifications to request by the `first` capability,
   * and then - by the `second` one.
   */
  static combine<TInput, TExt, TNext>(
      this: void,
      first: RequestCapability<TInput, TExt>,
      second: RequestCapability<TInput & TExt, TNext>,
  ): RequestCapability<TInput, TExt & TNext> {
    // Handlers wrap outside-in: `first` sees the request before `second`.
    const chain: RequestCapability<TInput, TExt & TNext> = {

      for<TMeans extends TInput>(delegate: RequestHandler<TMeans & TExt & TNext>): RequestHandler<TMeans> {
        return first.for(second.for(delegate));
      },

      and<T>(next: RequestCapability<TInput & TExt & TNext, T>): RequestCapability<TInput, TExt & TNext & T> {
        return RequestCapability.combine<TInput, TExt & TNext, T>(chain, next);
      },

    };

    return chain;
  }

  /**
   * Provides request processing capability to the given handler.
   *
   * Builds request processing handler that modifies request and delegates to target `handler`.
   *
   * @typeParam TMeans - A type of request processing means expected by constructed handler.
   * @param handler - Request processing handler that will receive modified request context.
   *
   * @returns New request processing handler.
   */
  abstract for<TMeans extends TInput>(handler: RequestHandler<TMeans & TExt>): RequestHandler<TMeans>;

  /**
   * Combines this capability with the `next` one.
   *
   * @typeParam TNext - A type of extension to request processing means applied by `next` capability.
   * @param next - Next capability that receives requests modified by this capability.
   *
   * @returns New request processing capability that applies modifications to request by this capability first,
   * and then - by the `next` one.
   *
   * @see RequestCapability.combine
   */
  and<TNext>(next: RequestCapability<TInput & TExt, TNext>): RequestCapability<TInput, TExt & TNext> {
    return RequestCapability.combine<TInput, TExt, TNext>(this, next);
  }

}
// Declaration-merged namespace: hosts the `Provider` type under the
// `RequestCapability` name alongside the class above.
export namespace RequestCapability {

  /**
   * Request processing capability provider signature.
   *
   * Builds a request processing handler that modifies request and delegates to another one.
   *
   * @typeParam TInput - A type of request processing means required by this provider.
   * @typeParam TExt - A type of extension to request processing means this provider applies.
   */
  export type Provider<TInput, TExt = object> =
  /**
   * @typeParam TMeans - A type of request processing means expected by constructed handler.
   *
   * @param handler - Request processing handler that will receive modified request context.
   *
   * @returns New request processing handler.
   */
      <TMeans extends TInput>(
          this: void,
          handler: RequestHandler<TMeans & TExt>,
      ) => RequestHandler<TMeans>;

}
|
import { BooksService } from '../../services/books.service';
import { asyncData } from './async-observable-helper';
import { Book } from '../../beans/book';
import { SpyObject } from './spy.obj';
/**
 * Jasmine-based stand-in for `BooksService` used in component unit tests.
 *
 * Exposes the raw spy object (`mockService`) plus individual spies so each
 * test scenario can stub return values independently.
 */
export class MockBooksService {
  getBooksSpy: any;
  getBookSpy: any;
  getRatingAverageSpy: any;
  convertFromRatingSpy: any;
  getRatingClassSpy: any;
  fakeResponse: any;
  public mockService: any;

  // Canned fixtures returned by the stubbed service methods.
  fakeBook = { id : 12, name: 'Devenez un ninja avec Angular', author: 'Ninja-Squad',
    price: 1, description: 'Devenir un Ninja avec Angular',
    category: 'book', isNew: false,
    comments: [ { rate: 5, user: '<NAME>', comment: 'En Français.' } ]
  };
  fakeBook3 = { id: 3, name: 'Instant AngularJS Starter', author: '<NAME>',
    price: 16.26, description: 'Description...',
    category: 'book', isNew: false,
    comments: null
  };
  fakeBooks: Book[];

  constructor() {
    this.fakeResponse = null;
    // BUG FIX: the original used `new Array<Book>(2)` and then pushed both
    // fixtures, producing a sparse 4-element array with two empty slots at
    // the front. Start from an empty array instead.
    this.fakeBooks = [];
    this.fakeBooks.push(this.fakeBook);
    this.fakeBooks.push(this.fakeBook3);
    this.mockService = this.createSpyObj();
  }

  // Builds the jasmine spy object covering the BooksService surface.
  private createSpyObj(): any {
    return jasmine.createSpyObj('bookService', ['getBooks',
      'getBook',
      'getRatingAverage',
      'convertFromRating',
      'getRatingClass']);
  }

  // Stubs every spy with a successful (HTTP 200-like) async response.
  public createAsyncDataSet200() {
    this.getBooksSpy = this.mockService.getBooks.and.returnValue(asyncData(this.fakeBooks));
    this.getBookSpy = this.mockService.getBook.and.returnValue(asyncData(this.fakeBook));
    this.getRatingAverageSpy = this.mockService.getRatingAverage.and.returnValue(this);
    this.convertFromRatingSpy = this.mockService.convertFromRating.and.returnValue(this);
    this.getRatingClassSpy = this.mockService.getRatingClass.and.returnValue(this);
  }

  // Minimal Observable-like hook: immediately invokes `callback` with the
  // response configured via setResponse().
  subscribe(callback: any) {
    callback(this.fakeResponse);
  }

  setResponse(json: any): void {
    this.fakeResponse = json;
  }

  // Angular TestBed provider entry substituting this mock for BooksService.
  // NOTE(review): tests presumably interact through `mockService`'s spies
  // rather than this instance — confirm `useValue: this` is intended.
  getProviders(): Array < any > {
    return [{ provide: BooksService, useValue: this }];
  }
}
|
<gh_stars>1-10
package de.rieckpil.blog;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Minimal REST endpoint: GET /hello returns the configured {@code message}
 * property (injected from application configuration) with HTTP 200.
 */
@RestController
@RequestMapping("/hello")
public class HelloWorldController {

    // Resolved from the `message` property at startup.
    @Value("${message}")
    private String message;

    @GetMapping
    public ResponseEntity<String> getMessage() {
        return ResponseEntity.ok(message);
    }
}
|
import LRU from "lru-cache";
import redis, { ClientOpts as RedisOpts } from "redis";
import { AsyncRedis } from "./asyncRedis";
import { Lock, LockError, LockExpiredError, LockTakenError } from "./lock";
import log from "./log";
import omit from "./omit";
import {
CacheKey,
serializeKey,
serializeValue,
serializeError,
deserializeValueOrError,
} from "./serialization";
// Re-export things our users might want.
export { CacheKey, LockError, LockExpiredError, LockTakenError };

/**
 * A user-supplied function that computes a value to cache.
 *
 * This function may have an optional `displayName` property, which may be used
 * as part of the cache key if this function is passed to `wrap`.
 *
 * By default, `WorkFn` functions do not accept arguments, but if you're using
 * the `wrap` feature, you may want to supply an optional argument list.
 */
type WorkFn<T, Args extends CacheKey[] = []> = ((
  ...args: Args
) => Promise<T>) & {
  displayName?: string;
};

/** Default TTL for cached values, in seconds (10 minutes). */
const DEFAULT_TTL = 600;

/**
 * How long should we cache errors, in seconds?
 *
 * This may be too short.
 */
const ERROR_TTL = 1;
/**
 * The Redis URL to use by default: `REDIS_URL` from the environment when set
 * (and non-empty), otherwise a local Redis on the standard port.
 */
export function defaultRedisUrl(): string {
  const fromEnv = process.env.REDIS_URL;
  return fromEnv ? fromEnv : "redis://localhost:6379";
}
/** Default options for configuring our Redis lock client. */
export const DEFAULT_REDIS_LOCK_OPTS: RedisOpts = {
  // Linear reconnect backoff, capped at 3 seconds between attempts.
  retry_strategy(options) {
    return Math.min(options.attempt * 100, 3000);
  },
  // Locks can live on a dedicated Redis (LOCK_URL); fall back to the cache URL.
  url: process.env.LOCK_URL || defaultRedisUrl(),
};
/** Create a Redis client with `DEFAULT_REDIS_LOCK_OPTS`. */
function defaultRedisLockClient(): redis.RedisClient {
  log.trace("creating Redis lock client");
  return redis.createClient(DEFAULT_REDIS_LOCK_OPTS);
}
/** Default options for configuring our Redis cache client. */
export const DEFAULT_REDIS_CACHE_OPTS: RedisOpts = {
  // Linear reconnect backoff, capped at 3 seconds between attempts.
  retry_strategy(options) {
    return Math.min(options.attempt * 100, 3000);
  },
  // Cached values can live on a dedicated Redis (CACHE_URL) if configured.
  url: process.env.CACHE_URL || defaultRedisUrl(),
};
/** Create a Redis client with `DEFAULT_REDIS_LOCK_OPTIONS`. */
function defaultRedisCacheClient(): redis.RedisClient {
log.trace("creating Redis cache client");
return redis.createClient(DEFAULT_REDIS_LOCK_OPTS);
}
/** Internal error thrown when we can't find a key in any of our caches. */
class KeyNotFoundError extends Error {
  constructor(key: string) {
    super(`could not find key ${key}`);
    // Report a useful name in logs/stack traces (Error subclasses say
    // "Error" by default), and repair the prototype chain so `instanceof`
    // keeps working even if the compile target downlevels classes.
    this.name = "KeyNotFoundError";
    Object.setPrototypeOf(this, KeyNotFoundError.prototype);
  }
}
/** Options that can be passed to `LockAndCache.get` and `LockAndCache.wrap`. */
export type GetOptions = {
  /**
   * "Time to live." The time to cache a value, in seconds.
   *
   * Defaults to `DEFAULT_TTL`.
   */
  ttl?: number;
};
/**
 * Cache expensive-to-compute values. Unlike typical caches, we take a Redis
 * lock to ensure that we only perform expensive computations once.
 *
 * Cached values may be stored locally or in Redis, but they are stored in
 * serialized form, and they are deserialized when returned from the cache.
 */
export class LockAndCache {
  /** The Redis client we use for locking. */
  private _lockClient: AsyncRedis;

  /** The Redis client we use for caching. */
  private _cacheClient: AsyncRedis;

  /**
   * An in-memory cache that deletes the "least recently used" (LRU) items.
   *
   * We use this to implement a fast caching layer that doesn't require hitting
   * Redis in the simple case. We could use an all-in-one solution like
   * `cache-manager`, but most of those libraries are huge and contain tons of
   * callback-era code.
   */
  private _lruCache: LRU<string, string>;

  /**
   * Create a new `LockAndCache`.
   *
   * ```
   * const cache = LockAndCache()
   * try {
   *   const val = cache.get('key', async function () { return 'value' })
   * } finally {
   *   cache.close()
   * }
   * ```
   *
   * Note that if you don't call `close()` on all your caches, your program will
   * never never exit, because the Node `redis` module is like that.
   *
   * @param lockClient Redis client to use for locking. Defaults to using
   * `DEFAULT_REDIS_LOCK_OPTS`.
   * @param cacheClient Redis client to use for caching. Defaults to using
   * `DEFAULT_REDIS_CACHE_OPTS`.
   * @param memoryCacheSize How many items should we also cache in local memory?
   */
  constructor({
    lockClient = defaultRedisLockClient(),
    cacheClient = defaultRedisCacheClient(),
    memoryCacheSize = 100,
  } = {}) {
    log.trace("creating LockAndCache");
    this._lockClient = new AsyncRedis(lockClient);
    this._cacheClient = new AsyncRedis(cacheClient);
    this._lruCache = new LRU<string, string>({ max: memoryCacheSize });
  }

  /**
   * Try to fetch an item from our cache.
   *
   * Checks the in-memory LRU first, then Redis (re-populating the LRU on a
   * Redis hit, preserving the remaining Redis TTL).
   *
   * Throws `KeyNotFoundError` if we don't have that key cached.
   */
  private async _cacheGet<T>(key: CacheKey): Promise<T> {
    const keyStr = serializeKey(key);

    // Check our caches. `_lruCache` returns `null` on a cache miss, and
    // `_cacheClient` returns `undefined`. `== null` is the idiomatic way to
    // check for either.
    let serializedValue: string | undefined | null = this._lruCache.get(keyStr);

    // It wasn't in our LRU, so let's try the network.
    if (serializedValue == null) {
      serializedValue = await this._cacheClient.get(keyStr);
      if (serializedValue != null) {
        // We found it in Redis but not in RAM, so let's re-cache it here.
        const redisTtl = await this._cacheClient.ttl(keyStr);
        if (redisTtl > 0) {
          log.trace(
            `re-caching ${keyStr} from Redis into RAM with TTL ${redisTtl}`
          );
          // LRU TTLs are in milliseconds; Redis reports seconds.
          this._lruCache.set(keyStr, serializedValue, redisTtl * 1000);
        }
      }
    }

    // If we didn't find anything, raise an error. If we did, deserialize it
    // (and `throw` it if it was an error).
    if (serializedValue == null) {
      throw new KeyNotFoundError(keyStr);
    }
    return deserializeValueOrError<T>(serializedValue);
  }

  /** Store serialized data in our cache. `ttl` is in seconds. */
  private async _cacheSetSerialized(
    key: CacheKey,
    serializedData: string,
    ttl: number
  ): Promise<void> {
    const keyStr = serializeKey(key);
    // LRU TTL is milliseconds; the Redis "ex" option takes seconds.
    this._lruCache.set(keyStr, serializedData, ttl * 1000);
    const r = await this._cacheClient.set(keyStr, serializedData, "ex", ttl);
    if (r !== "OK")
      throw new Error(`error caching at ${keyStr}: ${JSON.stringify(r)}`);
  }

  /** Store a value in our cache. */
  private async _cacheSetValue(
    key: CacheKey,
    value: unknown,
    ttl: number
  ): Promise<void> {
    await this._cacheSetSerialized(key, serializeValue(value), ttl);
  }

  /** Store an error in our cache. */
  private async _cacheSetError(
    key: CacheKey,
    err: Error,
    ttl: number
  ): Promise<void> {
    await this._cacheSetSerialized(key, serializeError(err), ttl);
  }

  /**
   * Shut down this cache manager. No other functions may be called after this.
   *
   * Quits are fire-and-forget: failures are logged, not raised.
   */
  close(): void {
    log.debug("closing cache connections");
    this._lockClient
      .quit()
      .catch((err) => log.error("error quitting lock client", err));
    this._cacheClient
      .quit()
      .catch((err) => log.error("error quitting cache client", err));
  }

  /**
   * Delete a key from our cache.
   *
   * NOTE(review): this only evicts the Redis entry; a copy may survive in the
   * in-memory LRU until its TTL expires — confirm whether that's acceptable.
   */
  async delete(key: CacheKey): Promise<void> {
    await this._cacheClient.del(serializeKey(key));
  }

  /**
   * Either fetch a value from our cache, or compute it, cache it and return it.
   *
   * Flow: lock-free cache probe → take the Redis lock → re-probe (another
   * process may have computed it while we waited) → run `work`, caching its
   * value (TTL `ttl`) or its error (TTL `ERROR_TTL`) → release the lock.
   *
   * @param key The cache key to use.
   * @param options Cache options. `ttl` is in seconds.
   * @param work A function which performs an expensive caculation that we want
   * to cache.
   */
  async get<T>(
    key: CacheKey,
    options: GetOptions,
    work: WorkFn<T>
  ): Promise<T> {
    log.debug("get", key);
    const ttl = options.ttl != null ? options.ttl : DEFAULT_TTL;

    // See if we can find something in the cache without the overhead of taking
    // a lock.
    try {
      return await this._cacheGet<T>(key);
    } catch (err) {
      if (!(err instanceof KeyNotFoundError)) throw err;
    }

    // It looks like we need to take a lock.
    const lockKey = serializeKey(key);
    const lock = new Lock(this._lockClient, `lock:${lockKey}`);
    await lock.lock();
    try {
      // Now we have the lock. See if the last lock holder left us anything.
      try {
        return await this._cacheGet<T>(key);
      } catch (err) {
        if (!(err instanceof KeyNotFoundError)) throw err;
      }

      // Nope, nothing in the cache. We'll have to compute it, so keep this lock
      // for longer time.
      lock.extendIndefinitely();

      // Try running `work` and cache what we get.
      try {
        log.debug("calling work to compute value");
        const result = await work();
        await this._cacheSetValue(key, result, ttl);
        return result;
      } catch (err) {
        // Cache the failure briefly so a hot key doesn't hammer `work`.
        await this._cacheSetError(key, err, ERROR_TTL);
        throw err;
      }
    } finally {
      // Let go of our lock. This may throw an error if our lock extensions
      // failed.
      await lock.release();
    }
  }

  /**
   * Given a work function, wrap it in a `cache.get`, using the function's
   * arguments as part of our cache key.
   *
   * @param options Cache options. `name` is the base name of our cache key.
   * `ttl` is in seconds, and it defaults to `DEFAULT_TTL`.
   * @param work The work function to wrap. If `options.name` is not specified,
   * either `work.displayName` or `work.name` must be a non-empty string.
   */
  wrap<T, Args extends CacheKey[] = []>(
    options: GetOptions & { name?: string },
    work: WorkFn<T, Args>
  ): WorkFn<T, Args> {
    // Parse our options.
    let name: string;
    if (options.name != null) {
      name = options.name;
    } else {
      name = work.displayName || work.name;
    }
    if (name === "") {
      throw new Error(
        "cannot wrap an anonymous function without specifying `name`"
      );
    }
    const getOptions = omit(options, "name");
    log.debug("wrap", name);

    // Wrap our function. The cache key is [name, ...call arguments].
    const wrappedFn = (...args: Args): Promise<T> => {
      log.debug("call wrapped", name, ...args);
      const key: CacheKey[] = [name];
      key.push(...args);
      return this.get(key, getOptions, () => work(...args));
    };
    wrappedFn.displayName = name;
    return wrappedFn;
  }
}
}
|
/**
 * Collect the vowels of `str`, in order of appearance.
 * Matching is case-insensitive, but each character keeps its original case.
 */
function extractVowels(str) {
  const isVowel = (ch) => 'aeiou'.includes(ch.toLowerCase());
  return [...str].filter(isVowel).join('');
}
// Demo: log the vowels found in the sample string.
const output = extractVowels("Hello, World!");
console.log(output); // eoo
#!/bin/sh
# SUMMARY: Test the tpm example
# LABELS: amd64

# Fail the test on the first command that errors.
set -e

# Source libraries. Uncomment if needed/defined
#. "${RT_LIB}"
. "${RT_PROJECT_ROOT}/_lib/lib.sh"

NAME=tpm

# Remove build artifacts (tpm*) produced by `linuxkit build` below.
clean_up() {
	rm -f ${NAME}*
}
trap clean_up EXIT

# Test code goes here
# The test passes if the example image builds successfully.
linuxkit build -docker "${LINUXKIT_EXAMPLES_DIR}/${NAME}.yml"

exit 0
|
-- List the names of all employees in a given department.
-- 'specified department' is a placeholder: substitute the real department
-- name, or bind it as a parameter in application code.
SELECT Employee_Name
FROM Employees
WHERE Department = 'specified department';
def find_max_pair_sum(x):
    """Return the maximum sum of any two distinct elements of ``x``.

    Fixes two defects in the original O(n^2) version:

    * ``max_sum`` was initialised to 0, so any input whose best pair sums to
      a negative number (e.g. an all-negative list) wrongly returned 0.
    * The nested loops were quadratic; a single scan tracking the two
      largest values is O(n).

    Inputs with fewer than two elements return 0, matching the original
    behaviour (its loops never executed).

    Args:
        x: A sequence of numbers.

    Returns:
        The largest ``x[i] + x[j]`` over all pairs ``i != j``, or 0 when the
        sequence has fewer than two elements.
    """
    if len(x) < 2:
        return 0
    # Track the largest and second-largest values seen so far.
    first, second = (x[0], x[1]) if x[0] >= x[1] else (x[1], x[0])
    for value in x[2:]:
        if value > first:
            first, second = value, first
        elif value > second:
            second = value
    return first + second
<gh_stars>10-100
alter table couchdb_progress alter column seq type varchar;
|
import React from 'react';
import Box from '@mui/material/Box';
import { useTheme } from '@mui/material/styles';
import useMediaQuery from '@mui/material/useMediaQuery';
import Main from 'layouts/Main';
import { ImageWithDescription } from 'blocks';
// "About" landing page: an ImageWithDescription block inside the shared Main
// layout. The logo colour switches at the `md` breakpoint.
const About = (): JSX.Element => {
  const theme = useTheme();
  const isMd = useMediaQuery(theme.breakpoints.up('md'), {
    defaultMatches: true,
  });

  // White logo on desktop widths, brown on narrower viewports.
  const logoColor = isMd ? 'white' : 'brown';

  return (
    <Box sx={{ overflowX: 'hidden' }}>
      <Main
        colorInvert={false}
        isContent={false}
        menuColor={'text.primary'}
        logoColor={logoColor}
        isParentPage={true}
      >
        <ImageWithDescription imagePosition={'right'} />
      </Main>
    </Box>
  );
};

export default About;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.