text stringlengths 1 1.05M |
|---|
#!/bin/bash
# post-link script: download the rgu34cprobe data tarball from one of
# several mirrors, verify its md5 checksum, and install it into the conda
# R library.
FN="rgu34cprobe_2.18.0.tar.gz"
URLS=(
  "https://bioconductor.org/packages/3.14/data/annotation/src/contrib/rgu34cprobe_2.18.0.tar.gz"
  "https://bioarchive.galaxyproject.org/rgu34cprobe_2.18.0.tar.gz"
  "https://depot.galaxyproject.org/software/bioconductor-rgu34cprobe/bioconductor-rgu34cprobe_2.18.0_src_all.tar.gz"
)
MD5="d1a6c433acd30b95fa7be89147105b74"
# Use a staging area in the conda dir rather than temp dirs, both to avoid
# permission issues as well as to have things downloaded in a predictable
# manner.
STAGING="$PREFIX/share/$PKG_NAME-$PKG_VERSION-$PKG_BUILDNUM"
mkdir -p "$STAGING"
TARBALL="$STAGING/$FN"
SUCCESS=0
for URL in "${URLS[@]}"; do
  # -f: treat HTTP errors as failures; -L: follow redirects (mirrors redirect).
  curl -fL "$URL" > "$TARBALL" || continue
  # Platform-specific md5sum checks.
  if [[ $(uname -s) == "Linux" ]]; then
    # NOTE: md5sum -c requires TWO spaces (or " *") between digest and
    # filename; a single space is rejected as a malformed checksum line.
    if md5sum -c <<<"$MD5  $TARBALL"; then
      SUCCESS=1
      break
    fi
  elif [[ $(uname -s) == "Darwin" ]]; then
    if [[ $(md5 "$TARBALL" | cut -f4 -d " ") == "$MD5" ]]; then
      SUCCESS=1
      break
    fi
  fi
done
if [[ $SUCCESS != 1 ]]; then
  echo "ERROR: post-link.sh was unable to download any of the following URLs with the md5sum $MD5:"
  printf '%s\n' "${URLS[@]}"
  exit 1
fi
# Install and clean up
R CMD INSTALL --library="$PREFIX/lib/R/library" "$TARBALL"
rm "$TARBALL"
rmdir "$STAGING"
|
package com.huaiangg.icibademo;
import io.reactivex.Observable;
import retrofit2.http.GET;
import retrofit2.http.Query;
/**
 * @description: Retrofit service interface for the iCIBA translation endpoint.
 * @author: HuaiAngg
 * @create: 2018-12-08 19:36
 */
public interface IGetRequest {
    @GET("ajax.php?a=fy&f=auto")
    Observable<TransalteWord> getCall(@Query("t") String target, @Query("w") String word);
    // The annotation carries the *partial* request URL.
    // Retrofit splits the request URL in two: the base part lives on the Retrofit
    // instance, the rest is declared on this interface.
    // If the URL in the interface is a complete address, the base URL configured
    // on the Retrofit instance is ignored.
    // Uses the RxJava Observable<...> style of interface.
    // getCall() is the method that receives the network response data.
    // @GET("ajax.php?a=fy&f=auto")
    // Call<TransalteWord> getCall(@Query("t") String target, @Query("w") String word);
    // (Alternative) Same endpoint using Retrofit's plain Call<...> style.
    // Retrofit splits the request URL in two: base URL on the Retrofit instance,
    // relative URL on the interface.
    // A complete URL declared here overrides the instance's base URL.
    // getCall() is the method that receives the network response data.
    // @GET("ajax.php?a=fy&f=auto&t=auto&w=hello%20world")
    // Call<Transaltion> getCall();
    // (Alternative) Fully hard-coded query: no parameters needed.
    // Retrofit splits the request URL in two: base URL on the Retrofit instance,
    // relative URL on the interface.
    // A complete URL declared here overrides the instance's base URL.
    // getCall() is the method that receives the network response data.
}
|
import * as path from "path";
import * as fs from "fs";
import * as dayjs from "dayjs";
import { DependencyTypes } from "../visitors/visitor";
// Fallback dependency section used when the caller does not specify one.
export const defaultDependencyType: DependencyTypes = "dependencies";
/**
 * Type guard: narrows an unknown value to {@link DependencyTypes} when it is
 * one of the two recognised package.json dependency section names.
 */
export function isValidDependencyType(type: unknown): type is DependencyTypes {
  return typeof type === "string" && (type === "dependencies" || type === "devDependencies");
}
/**
 * Reads the package version from the package.json three directories above
 * this module. Any failure (missing file, bad JSON) yields the sentinel
 * string "version parse error!" rather than throwing.
 */
export function getVersion(): string {
  try {
    const packageJsonPath = path.join(__dirname, "./../../../package.json");
    const contents = fs.readFileSync(packageJsonPath, "utf8");
    return JSON.parse(contents).version;
  } catch {
    return "version parse error!";
  }
}
/**
 * Formats how many whole days have elapsed since `date`,
 * e.g. "(3 days ago)".
 */
export function daysAgo(date: string | number | Date): string {
  const elapsedDays = dayjs(new Date()).diff(date, "day");
  return `(${elapsedDays} days ago)`;
}
|
#!/bin/bash
# Launch an interactive shell in the gnmt_tf container with GPU access.
#   --shm-size/--ulimit memlock/stack : enlarge shared memory and lock limits
#   -v $PWD:/workspace/gnmt           : mount the current directory into the container
#   --rm                              : remove the container on exit
nvidia-docker run -it --rm --shm-size=1g --ulimit memlock=-1 --ulimit stack=67108864 -v $PWD:/workspace/gnmt gnmt_tf bash
|
#!/bin/bash -eu
# (Re)mount a read-only FAT virtual SD-card image under /mnt/virtual_sd.
# Requires root because mount/umount do.
if [ 0 -ne "${EUID:-${UID}}" ]
then
  echo "You need to be root to perform this command."
  exit 1
fi
VSD_BASE_DIR=/mnt/virtual_sd
VSD_IMG_PATH=/home/pi/virtual_sd.img
# Temporarily tolerate failures: umount is best-effort here.
set +e
# If the image is already mounted, unmount it first so the fresh mount
# picks up any changes to the image file.
if mount | grep -q "${VSD_BASE_DIR}"
then
  umount "${VSD_BASE_DIR}"
fi
mount -t vfat -o loop,sync,ro,noatime,dmask=000,fmask=111,iocharset=utf8,noauto "${VSD_IMG_PATH}" "${VSD_BASE_DIR}"
set -e
|
<reponame>OSWeDev/oswedev<filename>src/shared/modules/NFCConnect/ModuleNFCConnect.ts
import AccessPolicyTools from '../../tools/AccessPolicyTools';
import UserLogVO from '../AccessPolicy/vos/UserLogVO';
import UserVO from '../AccessPolicy/vos/UserVO';
import APIControllerWrapper from '../API/APIControllerWrapper';
import String2ParamVO, { String2ParamVOStatic } from '../API/vos/apis/String2ParamVO';
import StringParamVO, { StringParamVOStatic } from '../API/vos/apis/StringParamVO';
import GetAPIDefinition from '../API/vos/GetAPIDefinition';
import PostAPIDefinition from '../API/vos/PostAPIDefinition';
import APIDAOParamVO, { APIDAOParamVOStatic } from '../DAO/vos/APIDAOParamVO';
import Module from '../Module';
import ModuleTable from '../ModuleTable';
import ModuleTableField from '../ModuleTableField';
import VOsTypesManager from '../VOsTypesManager';
import NFCTagUserVO from './vos/NFCTagUserVO';
import NFCTagVO from './vos/NFCTagVO';
/**
 * NFC tag authentication module.
 *
 * Declares the client-side API stubs, the GET/POST API definitions and the
 * datastore tables (NFCTagVO / NFCTagUserVO) used to associate NFC tags
 * with users and log them in by tag serial number.
 */
export default class ModuleNFCConnect extends Module {
    public static MODULE_NAME: string = 'NFCConnect';
    // Access-policy identifiers for the back-office / front-office checks.
    public static POLICY_GROUP: string = AccessPolicyTools.POLICY_GROUP_UID_PREFIX + ModuleNFCConnect.MODULE_NAME;
    public static POLICY_BO_ACCESS: string = AccessPolicyTools.POLICY_UID_PREFIX + ModuleNFCConnect.MODULE_NAME + '.BO_ACCESS';
    public static POLICY_FO_ACCESS: string = AccessPolicyTools.POLICY_UID_PREFIX + ModuleNFCConnect.MODULE_NAME + '.FO_ACCESS';
    // API endpoint names shared between client stubs and server handlers.
    public static APINAME_connect = "connect";
    public static APINAME_connect_and_redirect = "nfco";
    public static APINAME_checktag_user = "checktag_user";
    public static APINAME_add_tag = "add_tag";
    public static APINAME_remove_user_tag = "remove_user_tag";
    public static APINAME_get_own_tags = "get_own_tags";
    /** Lazily-created singleton accessor. */
    public static getInstance(): ModuleNFCConnect {
        if (!ModuleNFCConnect.instance) {
            ModuleNFCConnect.instance = new ModuleNFCConnect();
        }
        return ModuleNFCConnect.instance;
    }
    private static instance: ModuleNFCConnect = null;
    // Client-side API handler stubs; the actual implementations are resolved
    // by APIControllerWrapper.sah() against the registered API definitions.
    public connect: (serial_number: string) => Promise<boolean> = APIControllerWrapper.sah(ModuleNFCConnect.APINAME_connect);
    public connect_and_redirect: (serial_number: string) => Promise<boolean> = APIControllerWrapper.sah(ModuleNFCConnect.APINAME_connect_and_redirect);
    public checktag_user: (serial_number: string, user_id: number) => Promise<boolean> = APIControllerWrapper.sah(ModuleNFCConnect.APINAME_checktag_user);
    public add_tag: (serial_number: string) => Promise<boolean> = APIControllerWrapper.sah(ModuleNFCConnect.APINAME_add_tag);
    public remove_user_tag: (serial_number: string) => Promise<boolean> = APIControllerWrapper.sah(ModuleNFCConnect.APINAME_remove_user_tag);
    public get_own_tags: () => Promise<NFCTagVO[]> = APIControllerWrapper.sah(ModuleNFCConnect.APINAME_get_own_tags);
    private constructor() {
        super("nfcconnect", ModuleNFCConnect.MODULE_NAME);
    }
    /**
     * Registers every NFC API with the controller wrapper. The VO type-id
     * arrays declare which datastore types each endpoint may touch.
     */
    public registerApis() {
        APIControllerWrapper.getInstance().registerApi(new GetAPIDefinition<StringParamVO, boolean>(
            null,
            ModuleNFCConnect.APINAME_connect,
            [NFCTagVO.API_TYPE_ID, NFCTagUserVO.API_TYPE_ID],
            StringParamVOStatic
        ));
        APIControllerWrapper.getInstance().registerApi(new GetAPIDefinition<StringParamVO, boolean>(
            null,
            ModuleNFCConnect.APINAME_connect_and_redirect,
            [NFCTagVO.API_TYPE_ID, NFCTagUserVO.API_TYPE_ID],
            StringParamVOStatic
        ));
        APIControllerWrapper.getInstance().registerApi(new PostAPIDefinition<APIDAOParamVO, boolean>(
            null,
            ModuleNFCConnect.APINAME_checktag_user,
            [],
            APIDAOParamVOStatic
        ));
        APIControllerWrapper.getInstance().registerApi(new PostAPIDefinition<StringParamVO, boolean>(
            null,
            ModuleNFCConnect.APINAME_add_tag,
            [NFCTagVO.API_TYPE_ID, NFCTagUserVO.API_TYPE_ID],
            StringParamVOStatic
        ));
        APIControllerWrapper.getInstance().registerApi(new PostAPIDefinition<StringParamVO, boolean>(
            null,
            ModuleNFCConnect.APINAME_remove_user_tag,
            [NFCTagVO.API_TYPE_ID, NFCTagUserVO.API_TYPE_ID],
            StringParamVOStatic
        ));
        APIControllerWrapper.getInstance().registerApi(new GetAPIDefinition<void, NFCTagVO[]>(
            null,
            ModuleNFCConnect.APINAME_get_own_tags,
            [NFCTagVO.API_TYPE_ID, NFCTagUserVO.API_TYPE_ID]
        ));
    }
    /**
     * Declares the module's datastore tables:
     *  - NFCTagVO: one row per physical tag (serial number + active flag);
     *  - NFCTagUserVO: link table mapping tags to users (many-to-one on both sides).
     */
    public initialize() {
        this.fields = [];
        this.datatables = [];
        let label = new ModuleTableField('name', ModuleTableField.FIELD_TYPE_string, 'Numéro de série', true);
        let datatable_fields = [
            label,
            new ModuleTableField('activated', ModuleTableField.FIELD_TYPE_boolean, 'Actif', true, true, true),
        ];
        let datatable = new ModuleTable(this, NFCTagVO.API_TYPE_ID, () => new NFCTagVO(), datatable_fields, label, "NFC Tags");
        this.datatables.push(datatable);
        let nfc_tag_id = new ModuleTableField('nfc_tag_id', ModuleTableField.FIELD_TYPE_foreign_key, 'NFC Tag', true);
        let user_id = new ModuleTableField('user_id', ModuleTableField.FIELD_TYPE_foreign_key, 'Utilisateur', true);
        let datatable_fields_line = [
            nfc_tag_id,
            user_id
        ];
        let datatable_user = new ModuleTable(this, NFCTagUserVO.API_TYPE_ID, () => new NFCTagUserVO(), datatable_fields_line, null, "NFC Tag User");
        user_id.addManyToOneRelation(VOsTypesManager.getInstance().moduleTables_by_voType[UserVO.API_TYPE_ID]);
        nfc_tag_id.addManyToOneRelation(datatable);
        this.datatables.push(datatable_user);
    }
}
<filename>truffle-config.js
// This is the main configuration file for our Truffle project
module.exports = {
  // See <http://truffleframework.com/docs/advanced/configuration>
  // for more about customizing your Truffle configuration!
  networks: {
    // Local development chain (Ganache GUI default port 7545).
    development: {
      host: "127.0.0.1",
      port: 7545,
      network_id: "*" // Match any network id
    }
  },
  compilers: {
    solc: {
      version: "0.5.6", // ex: "0.4.20". (Default: Truffle's installed solc)
      // Optimizer reduces deployed bytecode size and runtime gas cost.
      optimizer: {
        enabled: true,
        runs: 200
      }
    }
  }
};
|
<reponame>gromver/rjv-react-antd
import React, { useRef } from 'react'
import Form from '../src/components/Form'
import CheckboxGroupField from '../src/components/CheckboxGroupField'
import { FieldApi } from 'rjv-react'
import { Button, Checkbox } from 'antd'
// Storybook CSF default export: groups the stories below under
// "Components / CheckboxGroupField".
export default {
  title: 'Components / CheckboxGroupField',
  component: CheckboxGroupField
}
// Basic story: three checkboxes, schema only accepts "a" and "b" so
// selecting "c" should fail validation on change.
export const Overview = () => {
  const schema = {
    default: [],
    items: {
      type: 'string',
      enum: ['a', 'b']
    }
  }
  return (
    <Form data={{}}>
      <CheckboxGroupField
        schema={schema}
        path="field"
        label="Checkbox group"
        help='Only "a" and "b" are valid'
        itemProps={{ hasFeedback: true }}
        validateTrigger="onChange"
      >
        <Checkbox value="a">A</Checkbox>
        <Checkbox value="b">B</Checkbox>
        <Checkbox value="c">C</Checkbox>
      </CheckboxGroupField>
    </Form>
  )
}
// Readonly story: schema marks the field readonly, so the group renders
// but cannot be changed.
export const Readonly = () => {
  const readonlySchema = {
    default: [],
    readonly: true
  }
  return (
    <Form data={{}}>
      <CheckboxGroupField
        schema={readonlySchema}
        path="field"
        label="Checkbox readonly group"
        itemProps={{ hasFeedback: true }}
        validateTrigger="onChange"
      >
        <Checkbox value="a">A</Checkbox>
        <Checkbox value="b">B</Checkbox>
        <Checkbox value="c">C</Checkbox>
      </CheckboxGroupField>
    </Form>
  )
}
// Ref-forwarding story: validation is triggered imperatively through the
// field ref instead of on change.
export const RefForwarding = () => {
  const fieldRef = useRef<FieldApi>(null)
  const schema = {
    default: [],
    items: {
      type: 'string',
      enum: ['a', 'b']
    }
  }
  return (
    <Form data={{}} layout="vertical">
      <CheckboxGroupField
        ref={fieldRef}
        schema={schema}
        path="field"
        label="Checkbox group"
        help='Only "a" and "b" are valid'
        itemProps={{ hasFeedback: true }}
        validateTrigger="none"
      >
        <Checkbox value="a">A</Checkbox>
        <Checkbox value="b">B</Checkbox>
        <Checkbox value="c">C</Checkbox>
      </CheckboxGroupField>
      <Button onClick={() => fieldRef.current?.validate()}>Trigger validate</Button>
    </Form>
  )
}
|
#!/bin/sh
# Workspace cleanup for LONESTAR: removes NGBW job directories that have
# been idle for 7+ days, then prunes old scheduler droppings from $HOME.
WORK=/work/01205/cipres
HOME=/home/01205/cipres
rootdir=$WORK/ngbw/workspace
basedirs="$rootdir $rootdir/ARCHIVE $rootdir/FAILED"
echo "$(date) : Running ws_cleanup on LONESTAR."
echo "Removing job dirs where nothing has been modified in 7 days."
for basedir in $basedirs; do
    for jobdir in "$basedir"/NGBW-*; do
        if [ -d "$jobdir" ]; then
            # Find any files modified, or accessed recently, skipping "." itself.
            # new_file_count=`find $jobdir -mindepth 1 -atime -7 -or -ctime -7 | wc -l`
            new_file_count=$(find "$jobdir" -mindepth 1 -ctime -7 | wc -l)
            # If no young files delete the directory.
            # Quoting $jobdir matters: it feeds rm -rf.
            if [ "$new_file_count" -eq 0 ]; then
                echo "Deleting $jobdir"
                rm -rf "$jobdir"
            fi
        fi
    done
done
echo
# Remove old .lsf* and gram_job_mgr files from home directory.
# -maxdepth is placed before the tests: GNU find warns (and may behave
# unexpectedly) when a global option follows -name.
find "$HOME" -maxdepth 1 -name '.lsf*' -ctime +15 -exec rm {} \;
find "$HOME" -maxdepth 1 -name 'gram_job_mgr*' -ctime +15 -exec rm {} \;
find "$HOME"/.globus/job/*lonestar* -maxdepth 1 -ctime +15 -exec rm -rf {} \;
|
<filename>src/test/java/com/labkit/test/personapi/PersonAPITestRunner.java
/*
* The MIT License (MIT)
*
* Copyright (c) 2017 <NAME> < <EMAIL> >
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
/*package com.labkit.test.personapi;
import com.intuit.karate.junit4.Karate;
import cucumber.api.CucumberOptions;
import org.junit.runner.RunWith;
/**
* Test runner for Person API
*
* @author vidhya (<EMAIL>)
*/
/*@RunWith(Karate.class)
@CucumberOptions(features = "classpath:com/labkit/test/personapi/PersonApi.feature")
public class PersonAPITestRunner {
}
*/ |
// Classic 0/1 subset-sum decision: returns true when some subset of `wt`
// sums to exactly `Sum`.
//
// Fixes vs. the original:
//  * replaces the variable-length 2-D array `bool t[n+1][Sum+1]` (a
//    non-standard VLA that can blow the stack for large Sum) with a
//    heap-allocated 1-D DP table — O(Sum) space instead of O(n*Sum);
//  * guards negative weights and a negative target, which previously
//    indexed the table out of bounds (undefined behavior).
bool subsetSumExists(vector<int> wt, int Sum) {
    if (Sum < 0) return false;          // non-negative weights cannot reach a negative target
    // dp[j] == true  <=>  some subset of the weights seen so far sums to j.
    vector<char> dp(static_cast<size_t>(Sum) + 1, 0);
    dp[0] = 1;                          // the empty subset sums to 0
    for (int w : wt) {
        if (w < 0 || w > Sum) continue; // cannot contribute to a sum in [0, Sum]
        // Iterate j downward so each weight is used at most once (0/1 semantics).
        for (int j = Sum; j >= w; --j) {
            if (dp[j - w]) dp[j] = 1;
        }
    }
    return dp[Sum] != 0;
}
<gh_stars>1-10
//
// GoodsDetailConfig.h
// allrichstore
//
// Created by zhaozhe on 16/11/1.
// Copyright © 2016年 allrich88. All rights reserved.
//
// Layout constants (in points) for the goods-detail screen's subviews.
#ifndef GoodsDetailConfig_h
#define GoodsDetailConfig_h
#define GD_HeaderViewH 360.0f //Overall height of GoodsDetailHeaderView
#define GD_ActiveViewH 50.0f //Height of one promotion row in GoodsDetailActiveView
#define GD_StoreViewH 60.0f //Height of GoodsDetailStoreView
#define GD_SC_BottomH 60.0f // Height of the bottom pull-up area
#define GD_Margin 5.0f //Spacing between modules (5pt)
#define GD_BottomH 45.0f// Height of the bottom toolbar
#define GD_BottomBuyW 100.0f// Width of the two large buttons in the bottom toolbar
#define GD_DRAG_SHOW_HEIGHT 45.0f // Height shown when the drag reaches its maximum
/* GoodsDetailEvaluativeView (review section) */
#define GD_EvaluativeHeaderH 45.0f // Header
#define GD_EvaluativeIconH 40.0f // Avatar
#define GD_EvaluativeStarH 25.0f //Star rating row
#define GD_EvaluativePicH 80.0f// Review image height
#define GD_EvaluativeColorH 20.0f// Color/variant row height
#endif /* GoodsDetailConfig_h */
|
#!/bin/bash
#shellcheck disable=SC2034
# Integration-test definition for the "security" suite: deploys a full
# Chef Automate (with Chef Server and Workflow enabled) and runs the
# shared SSL/TLS scan helpers against it. The test_* variables are read
# by the surrounding test harness, hence the SC2034 suppression.
test_name="security"
test_deploy_inspec_profiles=() # no extra InSpec profiles for this suite
test_skip_diagnostics=true
# Deploy Automate with a fixed admin password and the optional components
# this suite needs enabled.
do_deploy() {
    #shellcheck disable=SC2154
    chef-automate deploy "$test_config_path" \
        --hartifacts "$test_hartifacts_path" \
        --override-origin "$HAB_ORIGIN" \
        --manifest-dir "$test_manifest_path" \
        --admin-password chefautomate \
        --enable-chef-server \
        --enable-workflow \
        --accept-terms-and-mlsa \
        --debug
}
# Run the harness's default post-deploy checks, then the SSL cipher scan.
do_test_deploy() {
    do_test_deploy_default
    #shellcheck disable=SC2154
    #shellcheck source=integration/helpers/ssl_tests.sh
    source "${source_dir}/helpers/ssl_tests.sh"
    run_ssl_scan
}
|
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build phase script: copies
# vendored/built frameworks (and their dSYMs / bcsymbolmaps) into the app
# bundle, strips architectures the current build does not target, and
# re-signs when required.
# NOTE(review): declared as /bin/sh but uses `set -o pipefail`, `function`,
# arrays and an ERR trap, which are bash features — confirm the invoking
# shell is actually bash.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
install_framework()
{
  # Resolve the framework source: built products dir (full path, then basename),
  # falling back to the literal argument.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      # Background the signing so multiple frameworks sign in parallel;
      # the `wait` at the bottom of the script joins them.
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Embed the pod frameworks for the active configuration.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/RNCryptor/RNCryptor.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/RNCryptor/RNCryptor.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
<filename>Observer/Observer.cpp
//
// Created by wolf on 7/9/18.
//
#include "Observer.h" |
# This script is meant to be invoked only inside Circle CI environment.
set -euo pipefail
CHANGED=$(node -r ts-node/register ./scripts/check_dependency_graph_changed.ts)
if [ "$CHANGED" = true ] ; then
  # BUG FIX: the original message contained unescaped backticks inside double
  # quotes, so the shell EXECUTED `ts-node scripts/update_dependency_graph.ts`
  # as a command substitution instead of printing it.
  echo "Generated dependency graph doesn't match what has been committed. Please verify and update it by running 'ts-node scripts/update_dependency_graph.ts'"
  exit 1
fi
echo "Generated dependency graph matches, all clear!"
|
package config
import (
"github.com/kelseyhightower/envconfig"
"github.com/stretchr/testify/assert"
"os"
"testing"
)
// Test_getProxyRequestURL verifies EnvConfig.GetProxyHost: requests must be
// routed to in-cluster services (http://<service>:8080) when no external
// Keptn API endpoint is configured, and to the external endpoint (keeping
// any path prefix) when KeptnAPIEndpoint is set.
func Test_getProxyRequestURL(t *testing.T) {
	type args struct {
		endpoint string
		path     string
	}
	tests := []struct {
		name             string
		args             args
		wantScheme       string
		wantHost         string
		wantPath         string
		externalEndpoint string
	}{
		{
			name: "Get internal Datastore",
			args: args{
				endpoint: "",
				path:     "/mongodb-datastore/event/type/sh.keptn.event.evaluation.finished",
			},
			wantScheme: "http",
			wantHost:   "mongodb-datastore:8080",
			wantPath:   "event/type/sh.keptn.event.evaluation.finished",
		},
		{
			name: "Get internal configuration service",
			args: args{
				endpoint: "",
				path:     "/configuration-service",
			},
			wantScheme: "http",
			wantHost:   "configuration-service:8080",
		},
		{
			name: "Get configuration service",
			args: args{
				endpoint: "",
				path:     "/configuration-service",
			},
			wantScheme: "http",
			wantHost:   "configuration-service:8080",
		},
		{
			name: "Get configuration service via public API",
			args: args{
				endpoint: "",
				path:     "/configuration-service",
			},
			wantScheme:       "http",
			wantHost:         "external-api.com",
			wantPath:         "/api/configuration-service/",
			externalEndpoint: "http://external-api.com/api",
		},
		{
			name: "Get configuration service via public API with API prefix",
			args: args{
				endpoint: "",
				path:     "/configuration-service",
			},
			wantScheme:       "http",
			wantHost:         "external-api.com",
			wantPath:         "/my/path/prefix/api/configuration-service/",
			externalEndpoint: "http://external-api.com/my/path/prefix/api",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			env := EnvConfig{
				KeptnAPIEndpoint: tt.externalEndpoint,
			}
			scheme, host, path := env.GetProxyHost(tt.args.path)
			// BUG FIX: failure messages previously labelled the scheme as
			// "host" and the host as "path", making failures misleading.
			if scheme != tt.wantScheme {
				t.Errorf("GetProxyHost(); scheme = %v, want %v", scheme, tt.wantScheme)
			}
			if host != tt.wantHost {
				t.Errorf("GetProxyHost(); host = %v, want %v", host, tt.wantHost)
			}
			if path != tt.wantPath {
				t.Errorf("GetProxyHost(); path = %v, want %v", path, tt.wantPath)
			}
		})
	}
}
// Test_getHTTPPollingEndpoint checks that GetHTTPPollingEndpoint returns the
// in-cluster shipyard-controller URL when no external API endpoint is set,
// and the external control-plane URL otherwise.
func Test_getHTTPPollingEndpoint(t *testing.T) {
	tests := []struct {
		name              string
		apiEndpointEnvVar string
		want              string
	}{
		{
			name:              "get internal endpoint",
			apiEndpointEnvVar: "",
			want:              "http://shipyard-controller:8080/v1/event/triggered",
		},
		{
			name:              "get external endpoint",
			apiEndpointEnvVar: "https://my-keptn.com/api",
			want:              "https://my-keptn.com/api/controlPlane/v1/event/triggered",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			env := EnvConfig{
				KeptnAPIEndpoint: tt.apiEndpointEnvVar,
			}
			if got := env.GetHTTPPollingEndpoint(); got != tt.want {
				t.Errorf("GetHTTPPollingEndpoint() = %v, want %v", got, tt.want)
			}
		})
	}
}
// Test_getPubSubRecipientURL checks URL construction for the pub/sub event
// recipient from the PUBSUB_RECIPIENT* environment variables: default scheme
// http, default port 8080, optional path (with or without leading slash).
// NOTE(review): the env vars set below are not cleaned up after the test;
// consider t.Setenv if the Go version allows.
func Test_getPubSubRecipientURL(t *testing.T) {
	type args struct {
		recipientService string
		port             string
		path             string
	}
	tests := []struct {
		name    string
		args    args
		want    string
		wantErr bool
	}{
		{
			name: "simple service name",
			args: args{
				recipientService: "lighthouse-service",
				port:             "",
				path:             "",
			},
			want:    "http://lighthouse-service:8080",
			wantErr: false,
		},
		{
			name: "simple service name with path (prepending slash)",
			args: args{
				recipientService: "lighthouse-service",
				port:             "",
				path:             "/event",
			},
			want:    "http://lighthouse-service:8080/event",
			wantErr: false,
		},
		{
			name: "simple service name with path (without prepending slash)",
			args: args{
				recipientService: "lighthouse-service",
				port:             "",
				path:             "event",
			},
			want:    "http://lighthouse-service:8080/event",
			wantErr: false,
		},
		{
			name: "simple service name with port",
			args: args{
				recipientService: "lighthouse-service",
				port:             "666",
				path:             "",
			},
			want:    "http://lighthouse-service:666",
			wantErr: false,
		},
		{
			name: "empty recipient name",
			args: args{
				recipientService: "",
				port:             "666",
				path:             "",
			},
			want:    "http://127.0.0.1:666",
			wantErr: true,
		},
		{
			name: "HTTPS recipient",
			args: args{
				recipientService: "https://lighthouse-service",
				port:             "",
				path:             "",
			},
			want: "https://lighthouse-service:8080",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			if tt.args.recipientService != "" {
				os.Setenv("PUBSUB_RECIPIENT", tt.args.recipientService)
			} else {
				os.Unsetenv("PUBSUB_RECIPIENT")
			}
			if tt.args.port != "" {
				os.Setenv("PUBSUB_RECIPIENT_PORT", tt.args.port)
			} else {
				os.Unsetenv("PUBSUB_RECIPIENT_PORT")
			}
			if tt.args.path != "" {
				os.Setenv("PUBSUB_RECIPIENT_PATH", tt.args.path)
			} else {
				os.Unsetenv("PUBSUB_RECIPIENT_PATH")
			}
			env := EnvConfig{}
			_ = envconfig.Process("", &env)
			got := env.GetPubSubRecipientURL()
			// BUG FIX: the failure message said "want1" (typo) and used the
			// wrong casing for the method name.
			if got != tt.want {
				t.Errorf("GetPubSubRecipientURL() got = %v, want %v", got, tt.want)
			}
		})
	}
}
// Test_ValidateKeptnAPIEndpointURL checks that ValidateKeptnAPIEndpointURL
// accepts a parseable endpoint, rejects garbage, and treats an empty
// endpoint (not configured) as valid.
func Test_ValidateKeptnAPIEndpointURL(t *testing.T) {
	// valid
	config := EnvConfig{KeptnAPIEndpoint: "http:1.2.3.4.nip.io/some-path"}
	assert.Nil(t, config.ValidateKeptnAPIEndpointURL())
	// not valid
	config = EnvConfig{KeptnAPIEndpoint: "d"}
	assert.NotNil(t, config.ValidateKeptnAPIEndpointURL())
	// not given
	config = EnvConfig{KeptnAPIEndpoint: ""}
	assert.Nil(t, config.ValidateKeptnAPIEndpointURL())
}
// Test_GetPubSubTopics checks that GetPubSubTopics splits the comma-separated
// PubSubTopic value into one entry per topic and returns an empty slice when
// no topics are configured.
func Test_GetPubSubTopics(t *testing.T) {
	// multiple topics
	config := EnvConfig{PubSubTopic: "a,b,c"}
	assert.Equal(t, 3, len(config.GetPubSubTopics()))
	// zero topics
	config = EnvConfig{}
	assert.Equal(t, 0, len(config.GetPubSubTopics()))
}
|
"""Module for handling IATI standard reference data."""
import requests
import io
import os
from zipfile import ZipFile
from django.core.files.base import ContentFile
from django.core.files.storage import default_storage
from django.core.management import call_command
from django.conf import settings
from django.utils.text import slugify
from iati_standard.models import ReferenceData, ActivityStandardPage, IATIStandardPage, ReferenceMenu, StandardGuidanceIndexPage, StandardGuidancePage
from iati_standard.inlines import StandardGuidanceTypes
from iati_standard.edit_handlers import GithubAPI
from guidance_and_support.models import GuidanceAndSupportPage
# Canonical display order for IATI activity-standard elements, keyed by the
# last two segments ("parent/child") of an element's ssot_path. Consumed by
# iati_order(); unknown tags sort last (999).
SORT_ORDER = {
    "iati-activities/iati-activity": 0,
    "iati-activity/iati-identifier": 1,
    "iati-activity/reporting-org": 2,
    "reporting-org/narrative": 3,
    "iati-activity/title": 4,
    "title/narrative": 5,
    "iati-activity/description": 6,
    "description/narrative": 7,
    "iati-activity/participating-org": 8,
    "participating-org/narrative": 9,
    "iati-activity/other-identifier": 10,
    "other-identifier/owner-org": 11,
    "owner-org/narrative": 12,
    "iati-activity/activity-status": 13,
    "iati-activity/activity-date": 14,
    "activity-date/narrative": 15,
    "iati-activity/contact-info": 16,
    "contact-info/organisation": 17,
    "organisation/narrative": 18,
    "contact-info/department": 19,
    "department/narrative": 20,
    "contact-info/person-name": 21,
    "person-name/narrative": 22,
    "contact-info/job-title": 23,
    "job-title/narrative": 24,
    "contact-info/telephone": 25,
    "contact-info/email": 26,
    "contact-info/website": 27,
    "contact-info/mailing-address": 28,
    "mailing-address/narrative": 29,
    "iati-activity/activity-scope": 30,
    "iati-activity/recipient-country": 31,
    "recipient-country/narrative": 32,
    "iati-activity/recipient-region": 33,
    "recipient-region/narrative": 34,
    "iati-activity/location": 35,
    "location/location-reach": 36,
    "location/location-id": 37,
    "location/name": 38,
    "name/narrative": 39,
    "location/description": 40,
    "location/activity-description": 41,
    "activity-description/narrative": 42,
    "location/administrative": 43,
    "location/point": 44,
    "point/pos": 45,
    "location/exactness": 46,
    "location/location-class": 47,
    "location/feature-designation": 48,
    "iati-activity/sector": 49,
    "sector/narrative": 50,
    "iati-activity/tag": 51,
    "tag/narrative": 52,
    "iati-activity/country-budget-items": 53,
    "country-budget-items/budget-item": 54,
    "budget-item/description": 55,
    "iati-activity/humanitarian-scope": 56,
    "humanitarian-scope/narrative": 57,
    "iati-activity/policy-marker": 58,
    "policy-marker/narrative": 59,
    "iati-activity/collaboration-type": 60,
    "iati-activity/default-flow-type": 61,
    "iati-activity/default-finance-type": 62,
    "iati-activity/default-aid-type": 63,
    "iati-activity/default-tied-status": 64,
    "iati-activity/budget": 65,
    "budget/period-start": 66,
    "budget/period-end": 67,
    "budget/value": 68,
    "iati-activity/planned-disbursement": 69,
    "planned-disbursement/period-start": 70,
    "planned-disbursement/period-end": 71,
    "planned-disbursement/value": 72,
    "iati-activity/capital-spend": 73,
    "iati-activity/transaction": 74,
    "transaction/transaction-type": 75,
    "transaction/transaction-date": 76,
    "transaction/value": 77,
    "transaction/description": 78,
    "transaction/provider-org": 79,
    "provider-org/narrative": 80,
    "transaction/receiver-org": 81,
    "receiver-org/narrative": 82,
    "transaction/disbursement-channel": 83,
    "transaction/sector": 84,
    "transaction/recipient-country": 85,
    "transaction/recipient-region": 86,
    "transaction/flow-type": 87,
    "transaction/finance-type": 88,
    "transaction/aid-type": 89,
    "transaction/tied-status": 90,
    "iati-activity/document-link": 91,
    "document-link/title": 92,
    "document-link/category": 93,
    "document-link/language": 94,
    "document-link/document-date": 95,
    "iati-activity/related-activity": 96,
    "iati-activity/legacy-data": 97,
    "iati-activity/conditions": 98,
    "conditions/condition": 99,
    "condition/narrative": 100,
    "iati-activity/result": 101,
    "result/title": 102,
    "result/description": 103,
    "result/document-link": 104,
    "result/reference": 105,
    "result/indicator": 106,
    "indicator/title": 107,
    "indicator/description": 108,
    "indicator/document-link": 109,
    "indicator/reference": 110,
    "indicator/baseline": 111,
    "baseline/comment": 112,
    "comment/narrative": 113,
    "indicator/period": 114,
    "period/period-start": 115,
    "period/period-end": 116,
    "period/target": 117,
    "target/comment": 118,
    "period/actual": 119,
    "actual/comment": 120,
    "iati-activity/crs-add": 121,
    "crs-add/other-flags": 122,
    "crs-add/loan-terms": 123,
    "loan-terms/repayment-type": 124,
    "loan-terms/repayment-plan": 125,
    "loan-terms/commitment-date": 126,
    "loan-terms/repayment-first-date": 127,
    "loan-terms/repayment-final-date": 128,
    "crs-add/loan-status": 129,
    "loan-status/interest-received": 130,
    "loan-status/principal-outstanding": 131,
    "loan-status/principal-arrears": 132,
    "loan-status/interest-arrears": 133,
    "iati-activity/fss": 134,
    "fss/forecast": 135
}
def iati_order(json_page_obj):
    """Sort key: the element's position in the IATI Standard schema.

    The key is the last two path segments ("parent/element"); elements not
    present in SORT_ORDER sort last (999).
    """
    family_tag = "/".join(json_page_obj["ssot_path"].split("/")[-2:])
    return SORT_ORDER.get(family_tag, 999)
def default_order(json_page_obj):
    """Sort key: (meta_order, title) taken from the page's metadata."""
    meta_order = json_page_obj["meta_order"]
    title = json_page_obj["title"]
    return meta_order, title
def download_zip(url):
    """Download a ZIP asset from GitHub and return it as an in-memory ZipFile.

    Raises ``requests.HTTPError`` when the server responds with an error
    status instead of silently handing a broken payload to ZipFile, and
    uses a timeout so a hung connection cannot block the task forever.
    """
    headers = {
        'Authorization': 'token %s' % settings.GITHUB_TOKEN,
        'Accept': 'application/octet-stream',
    }
    response = requests.get(url, headers=headers, timeout=60)
    response.raise_for_status()
    return ZipFile(io.BytesIO(response.content))
def extract_zip(zipfile):
    """Yield an open file-like object for each regular file in the ZIP.

    Skips hidden entries, macOS ``__MACOSX`` resource entries and
    directories.  Note: only the file object is yielded (not a
    (filename, file) pair); the entry name is available as ``thefile.name``.
    The ZIP is closed when iteration finishes (``with zipfile``).
    """
    with zipfile as thezip:
        for zipinfo in thezip.infolist():
            with thezip.open(zipinfo) as thefile:
                if not zipinfo.filename.startswith('.') and not zipinfo.filename.startswith('__MACOSX') and not zipinfo.is_dir():
                    yield thefile
def update_or_create_tags(observer, repo, tag=None, type_to_update=None):
    """Fetch reference data/media for ``tag`` from GitHub and rebuild pages.

    observer: task-like object whose ``update_state`` reports progress.
    repo: GitHub repository name handed to ``GithubAPI``.
    tag: release tag to import; when falsy only the index is repopulated.
    type_to_update: "ssot", "guidance" or other; forwarded to populate_index.
    """
    observer.update_state(
        state='PROGRESS',
        meta='Retrieving data and media from Github'
    )
    git = GithubAPI(repo)
    if tag:
        data, media = git.get_data(tag)
        # Media downloads only apply to the SSOT reference import.
        if type_to_update == "ssot":
            populate_media(observer, media, tag)
        populate_data(observer, data, tag)
    # NOTE(review): indentation reconstructed; populate_index assumed to run
    # regardless of whether a tag was supplied -- confirm against callers.
    populate_index(observer, tag, type_to_update)
    observer.update_state(
        state='SUCCESS',
        meta='All tasks complete'
    )
    return True
def populate_media(observer, media, tag):
    """Download the media ZIP and store each file as a reference download.

    Files are written through Django's ``default_storage`` under
    ``REFERENCE_DOWNLOAD_ROOT``; existing files are replaced.
    Raises ValueError when no media asset exists for ``tag``.
    """
    observer.update_state(
        state='PROGRESS',
        meta='Creating reference downloads'
    )
    if media:
        for item in extract_zip(download_zip(media.url)):
            output_path = os.path.join(settings.REFERENCE_DOWNLOAD_ROOT, item.name)
            # Local filesystem storage needs the directory tree to exist;
            # Azure blob storage has no real directories, so skip there.
            if not settings.AZURE_ACCOUNT_NAME:
                output_dir = os.path.dirname(output_path)
                if not os.path.isdir(output_dir):
                    os.makedirs(output_dir)
            # Delete first so default_storage.save does not rename the new
            # file to avoid a collision with the existing one.
            if default_storage.exists(output_path):
                default_storage.delete(output_path)
            default_storage.save(output_path, ContentFile(item.read()))
    else:
        raise ValueError('No data available for tag: %s' % tag)
def populate_data(observer, data, tag):
    """Store each HTML file from the data ZIP as a ReferenceData row.

    The file's directory becomes the ``ssot_path`` key; rows are upserted
    per (ssot_path, tag).  Non-HTML entries are ignored.
    Raises ValueError when no data asset exists for ``tag``.
    """
    observer.update_state(
        state='PROGRESS',
        meta='Data retrieved, updating database'
    )
    if data:
        for item in extract_zip(download_zip(data.url)):
            # Only .html payloads carry page content.
            if os.path.splitext(item.name)[1] == ".html":
                ssot_path = os.path.dirname(item.name)
                ReferenceData.objects.update_or_create(
                    ssot_path=ssot_path,
                    tag=tag,
                    defaults={'data': item.read().decode('utf-8')},
                )
    else:
        raise ValueError('No data available for tag: %s' % tag)
def child_equals_object(child_page, object):
    """Return True when ``child_page`` already matches the ReferenceData
    object -- same tag and same data for the object's language -- meaning
    no re-publish is needed.
    """
    # `and` keeps the original short-circuit: the data attribute is only
    # read when the tags already match.
    return (
        child_page.tag == object.tag
        and getattr(child_page, "data_{}".format(object.language)) == object.data
    )
def create_or_update_from_object(parent_page, page_model, object):
    """Create or update a ``page_model`` page from a ReferenceData object.

    Existing pages (matched on ssot_path) are only re-published when their
    tag or data differ; missing pages are created under ``parent_page``.
    Returns the page instance either way.
    """
    try:
        child_page = page_model.objects.get(
            ssot_path=object.ssot_path
        )
        if not child_equals_object(child_page, object):
            setattr(child_page, "data_{}".format(object.language), object.data)
            child_page.tag = object.tag
            # Lock imported pages so editors cannot hand-edit generated
            # content in the CMS.
            child_page.locked = True
            child_page.locked_by = None
            child_page.save_revision().publish()
    except page_model.DoesNotExist:
        child_page = page_model(
            ssot_path=object.ssot_path,
            title=object.name,
            heading=object.name,
            slug=slugify(object.name),
            tag=object.tag
        )
        setattr(child_page, "data_{}".format(object.language), object.data)
        child_page.locked = True
        child_page.locked_by = None
        parent_page.add_child(instance=child_page)
        child_page.save_revision().publish()
    return child_page
def recursive_create(page_model, object_pool, parent_page, parent_path, recursed_page_paths):
    """Create/update a page for every object whose parent_path matches,
    then recurse into each created page's children.

    ``recursed_page_paths`` (shared, mutated in place) prevents visiting
    the same ssot_path twice.
    """
    objects = object_pool.filter(parent_path=parent_path)
    for object in objects:
        child_page = create_or_update_from_object(parent_page, page_model, object)
        if child_page.ssot_path not in recursed_page_paths:
            recursed_page_paths.append(child_page.ssot_path)
            recursive_create(page_model, object_pool, child_page, child_page.ssot_path, recursed_page_paths)
    return True
def recursive_create_menu(parent_page):
    """Build a nested dict describing the reference menu below a page.

    Children are sorted by IATI schema order when the page's ssot_path ends
    with a known SORT_ORDER key, otherwise by (meta_order, title).
    """
    page_obj = {
        "depth": parent_page.depth,
        "title": parent_page.title,
        "pk": parent_page.pk,
        "ssot_path": parent_page.specific.ssot_path,
        "meta_order": parent_page.specific.meta_order,
        "children": list()
    }
    page_children = parent_page.get_children()
    if len(page_children) == 0:
        return page_obj
    for page_child in page_children:
        page_obj["children"].append(
            recursive_create_menu(page_child)
        )
    # endswith-match against every known family tag decides which sort
    # applies at this level.
    if list(filter(page_obj["ssot_path"].endswith, SORT_ORDER.keys())):
        page_obj["children"].sort(key=iati_order)
    else:
        page_obj["children"].sort(key=default_order)
    return page_obj
def populate_index(observer, tag, type_to_update):
    """Rebuild the Wagtail page tree (and menus) from ReferenceData rows.

    Pages whose ssot_path exists only under older tags are deleted, then
    the tree for ``tag`` is (re)created under the appropriate root page.
    ``type_to_update`` selects guidance pages, SSOT reference pages, or
    (default branch) the developer documentation tree.  Finishes by
    refreshing the search index.  NOTE(review): indentation reconstructed
    from flattened source -- verify nesting against the original file.
    """
    observer.update_state(
        state='PROGRESS',
        meta='Populating index'
    )
    recursed_page_paths = []
    # Paths present for the new tag vs. paths that exist only for older
    # tags; the difference is what must be removed from the index.
    new_object_paths = set(ReferenceData.objects.filter(tag=tag).order_by().values_list('ssot_path'))
    old_object_paths = set(ReferenceData.objects.exclude(tag=tag).order_by().values_list('ssot_path'))
    to_delete = [item[0] for item in (old_object_paths - new_object_paths)]
    if type_to_update == "guidance":
        StandardGuidanceTypes.objects.all().delete()
        StandardGuidancePage.objects.filter(ssot_path__in=list(to_delete)).delete()
        standard_page = GuidanceAndSupportPage.objects.live().first()
        ssot_root_page = StandardGuidanceIndexPage.objects.live().descendant_of(standard_page).first()
        # Create the index page on first import only.
        if not ssot_root_page:
            ssot_root_page = StandardGuidanceIndexPage(
                title="Standard Guidance",
                heading="Standard Guidance",
                slug="standard-guidance",
                section_summary="Use this section to find guidance on how to publish and interpret data on specific topics according to the IATI Standard.",
                button_link_text="Search the Standard guidance pages"
            )
            standard_page.add_child(instance=ssot_root_page)
            ssot_root_page.save_revision().publish()
        recursive_create(StandardGuidancePage, ReferenceData.objects.filter(tag=tag), ssot_root_page, "guidance", recursed_page_paths)
    elif type_to_update == "ssot":
        menu_json = []
        ActivityStandardPage.objects.filter(ssot_path__in=list(to_delete)).exclude(ssot_root_slug="developer").delete()
        standard_page = IATIStandardPage.objects.live().first()
        # Distinct root slugs for this tag, minus the trees handled by the
        # other branches.
        ssot_roots = [roots[0] for roots in ReferenceData.objects.filter(tag=tag).order_by().values_list('ssot_root_slug').distinct() if roots[0] not in ["guidance", "developer"]]
        for ssot_root in ssot_roots:
            objects = ReferenceData.objects.filter(tag=tag, ssot_path=ssot_root)
            for object in objects:
                ssot_root_page = create_or_update_from_object(standard_page, ActivityStandardPage, object)
                ssot_root_page.title = ssot_root
                ssot_root_page.slug = slugify(ssot_root)
                ssot_root_page.save_revision().publish()
                recursive_create(ActivityStandardPage, ReferenceData.objects.filter(tag=tag), ssot_root_page, ssot_root_page.ssot_path, recursed_page_paths)
                menu_json.append(recursive_create_menu(ssot_root_page))
        ReferenceMenu.objects.update_or_create(
            tag=tag,
            menu_type="ssot",
            defaults={'menu_json': menu_json},
        )
    else:
        menu_json = []
        ActivityStandardPage.objects.filter(ssot_path__in=list(to_delete), ssot_root_slug="developer").delete()
        standard_page = GuidanceAndSupportPage.objects.live().first()
        ssot_root = "developer"
        objects = ReferenceData.objects.filter(tag=tag, ssot_path=ssot_root)
        for object in objects:
            ssot_root_page = create_or_update_from_object(standard_page, ActivityStandardPage, object)
            ssot_root_page.title = ssot_root
            ssot_root_page.slug = slugify(ssot_root)
            ssot_root_page.save_revision().publish()
            recursive_create(ActivityStandardPage, ReferenceData.objects.filter(tag=tag), ssot_root_page, ssot_root_page.ssot_path, recursed_page_paths)
            menu_json.append(recursive_create_menu(ssot_root_page))
        ReferenceMenu.objects.update_or_create(
            tag=tag,
            menu_type="developer",
            defaults={'menu_json': menu_json},
        )
    # Refresh the Wagtail search index to reflect the new pages.
    call_command('update_index')
|
#!/usr/bin/env bash
set -e -x
backup_dir="/etc/kubernetes/manifests-backup"
public::common::log() {
    # Print a timestamped log line.  Quoting keeps the message intact
    # (the original unquoted $1 word-split multi-word messages).
    echo "$(date +"[%Y%m%d %H:%M:%S]: ")" "$1"
}
public::evict::gpu-device-plugin() {
    # Move the nvidia-device-plugin static-pod manifest out of the kubelet
    # manifests directory (stopping the pod) while keeping a backup copy.
    # The unused `dir=` variable from the original was removed; paths are
    # quoted to survive unusual characters.
    local manifest=/etc/kubernetes/manifests/nvidia-device-plugin.yml
    if [ -f "$manifest" ]; then
        backup_dir="/etc/kubernetes/manifests-backup/"
        mkdir -p "$backup_dir"
        mv "$manifest" "$backup_dir"
    else
        public::common::log "Skip removing nvidia-device-plugin.yml, because it doesn't exist."
    fi
}
main() {
    public::evict::gpu-device-plugin
    # Marker file signalling completion (e.g. for a readiness probe).
    touch /ready
}

main "$@"
|
$(document).ready(function () {
    // AJAX-ify the login form: submit via XHR, render per-field errors
    // inline, shake them for attention, and follow any server redirect.
    const login_form = $("#login_form");
    login_form.on("submit", function (e) {
        e.preventDefault();
        e.stopPropagation();
        // Target URL and HTTP method come from the form's own attributes.
        const url = $(this).attr("action");
        const method = $(this).attr("method");
        const data = $(this).serializeArray();
        $.ajax({
            method: method,
            url: url,
            dataType: "json",
            data: data
        }).done(function (response) {
            // Clear error messages from any previous attempt.
            $(".remove-on-response").remove();
            // Response keys are treated as input element ids mapped to
            // error strings -- TODO confirm against the server-side view.
            $.each(response, function (k, v) {
                $("#" + k).parents(".input-group").after(`<p class='remove-on-response mb-0 text-danger'>${v}</p>`);
            });
            // Horizontal shake on the freshly inserted errors (anime.js).
            anime({
                targets: '.remove-on-response',
                translateX: [-5, 5, -5, 5],
                easing: 'easeInOutQuad',
                direction: 'alternate',
                delay: 100,
                duration: 250
            });
            if(response.hasOwnProperty("redirect")) {
                window.location.href = response.redirect;
            }
        });
    });
    // Typing in a field slides away (then removes) its error message.
    $.each(login_form.find("input"), function () {
        $(this).on("keyup", function () {
            $(this).parents(".input-group").next(".remove-on-response").slideUp(function () {
                $(this).remove();
            });
        });
    });
});
|
<reponame>rpatil524/COLID-Data-Marketplace-Frontend
/**
 * DTO carrying message scheduling configuration: the interval at which
 * messages are sent and the interval after which they are deleted.
 */
export class MessageConfigDto {
  constructor(
    public sendInterval: string,
    public deleteInterval: string,
  ) {}
}
|
import asyncio
class AsyncEventHandler:
    """Demo handlers that sleep forever and report when interrupted."""

    async def handle_keyboard_interrupt(self):
        """Sleep until a KeyboardInterrupt unwinds through this coroutine.

        NOTE(review): KeyboardInterrupt is normally delivered to the event
        loop rather than raised inside a coroutine, so this except clause
        may never fire in practice -- confirm the intended delivery path.
        """
        try:
            while True:
                await asyncio.sleep(1)
        except KeyboardInterrupt:
            print("Keyboard interrupt handled")

    async def handle_eof(self):
        """Sleep until an IncompleteReadError (EOF) is raised into us."""
        try:
            while True:
                await asyncio.sleep(1)
        # asyncio.IncompleteReadError is the public name; the original
        # asyncio.streams.* path is an implementation detail.
        except asyncio.IncompleteReadError:
            print("EOF handled")
# Test the implementation
async def test_async_event_handler():
    # WARNING: both handlers loop forever awaiting sleep(1), so this
    # coroutine never completes unless an exception is injected externally.
    event_handler = AsyncEventHandler()
    await event_handler.handle_keyboard_interrupt()
    await event_handler.handle_eof()
# Blocks indefinitely: the handlers loop forever until interrupted.
asyncio.run(test_async_event_handler())
document.addEventListener('turbolinks:load', function () {
    var timer = document.querySelector('.timer');
    if (timer) {
        // Remaining time in seconds, supplied via the data-timer attribute.
        var timerTime = timer.dataset.timer;
        setInterval(function () {
            if (timerTime > 0) {
                timerTime--;
            }
            else {
                // Time is up ("Время вышло!" = "Time is up!"): notify and
                // auto-submit the test form.  NOTE(review): the interval is
                // never cleared, so this branch re-fires every second until
                // the form submission navigates away -- confirm intended.
                alert('Время вышло!');
                document.querySelector('.test_passage_form').submit();
            }
            // Render as M:SS.  NOTE(review): seconds 1-9 display unpadded
            // (e.g. "1:5") and exact minutes render as ":00" -- confirm.
            timer.innerHTML = parseInt(timerTime / 60) + ':' + (timerTime % 60 ? timerTime % 60 : '00');
        }, 1000);
    }
});
|
import React, { useContext } from 'react';
import PropTypes from 'prop-types';
import Card from 'react-bootstrap/Card';
import Button from 'react-bootstrap/Button';
import { ModalContext } from '../contexts/ModalContext';
import { UserContext } from '../contexts/UserContext';
import { getPage, removeUser } from '../utils/api';
import { calcAgeFromDate } from '../utils/date';
const civilStates = ['Solteiro(a)', 'Casado(a)', 'Divorciado(a)', 'Viúvo(a)', 'Separado(a)'];
// Table row presenting one user record with edit and remove actions.
function UserCard({ user }) {
    const [state, setUsers] = useContext(UserContext);
    const [, setState] = useContext(ModalContext);

    // Remove the user server-side, then re-fetch the current page so the
    // listing stays in sync with the backend.
    async function deleteUser(id) {
        await removeUser(id);
        getPage(state.page).then((res) => setUsers({
            action: 'set',
            page: state.page,
            users: res.users,
            pages: res.pages,
        }));
    }

    // Open the edit modal pre-filled with this user.
    const showModel = async () => {
        setState({ action: 'show', user });
    };

    return (
        <tr>
            <td>{user.fullname}</td>
            <td>{calcAgeFromDate(new Date(user.birthday))}</td>
            <td>{civilStates[user.civil_state]}</td>
            <td>{user.cpf}</td>
            <td>{user.city}</td>
            <td>{user.state}</td>
            <td>
                <Button variant="primary" onClick={showModel} className="mr-4">Editar</Button>
            </td>
            <td>
                <Card.Link style={{ cursor: 'pointer' }} onClick={() => deleteUser(user.id)}>Remover</Card.Link>
            </td>
        </tr>
    );
}
// Prop contract: shape mirrors the API user record rendered by the row.
UserCard.propTypes = {
    user: PropTypes.shape({
        id: PropTypes.number,
        fullname: PropTypes.string,
        birthday: PropTypes.string,
        civil_state: PropTypes.number,
        cpf: PropTypes.string,
        city: PropTypes.string,
        state: PropTypes.string,
    }).isRequired,
};

export default UserCard;
|
from random import shuffle
import time
import pyqrcode
import png
import os
def generate_qr_codes(num_codes):
    """Write ``num_codes`` QR-code PNGs named ``qr_code_<id>.png``.

    Identifiers are simply "1" .. "num_codes" (already unique); the
    shuffle only randomises the order in which files are written.
    """
    identifiers = [str(index) for index in range(1, num_codes + 1)]
    shuffle(identifiers)
    for identifier in identifiers:
        code = pyqrcode.create(identifier)
        code.png(f'qr_code_{identifier}.png', scale=6)
    print(f'{num_codes} QR codes generated and saved successfully.')
# Example usage: writes ten PNG files into the current directory.
generate_qr_codes(10)
package scanner.sub;
import org.jooby.Env;
import org.jooby.Jooby;
import com.google.inject.Binder;
import com.typesafe.config.Config;
public class Submod implements Jooby.Module {
  /**
   * Intentionally empty: the module contributes no bindings.
   * NOTE(review): given the {@code scanner.sub} package, this class
   * presumably exists to be discovered by module scanning -- confirm.
   */
  @Override
  public void configure(final Env env, final Config conf, final Binder binder) {
  }
}
|
import React from 'react'
import classNames from 'classnames'
// WeUI-styled button (legacy React.createClass API).
var Button = React.createClass({
    // Navigate when an href prop is given, then delegate to any onClick.
    clickEv(e){
        const { href, onClick } = this.props;
        !!href && ( window.location.href = href );
        !!onClick && onClick(e);
    },
    render(){
        let {className, type, children, text, disabled, actionType, mini, plain, loading, ...others} = this.props;
        // Compose WeUI class names from the style-related props.
        const cls = classNames('weui-btn', {
            'weui-btn_disabled': disabled,
            'weui-btn_mini': mini,
            // Themed background only for solid (non-plain) primary buttons.
            'packagemainbackground': type === 'primary' && !plain,
            'maincolor mainbordercolor': plain && type==='primary',
            [`weui-btn_${type}`]: true,
            [`weui-btn_plain-${type}`]: plain,
            'weui-btn_loading': loading,
            [className]: className
        });
        return(
            <button className={ cls } disabled={ disabled } { ...others } type={ actionType } onClick={ this.clickEv }>
                { loading && <i className="weui-loading"></i> }
                { text || children }
            </button>
        )
    }
})
// Prop contracts (legacy React.PropTypes API).
Button.propTypes = {
    disabled: React.PropTypes.bool,
    type: React.PropTypes.string,
    mini: React.PropTypes.bool,
    plain: React.PropTypes.bool,
    text: React.PropTypes.string,
    loading: React.PropTypes.bool,
    href: React.PropTypes.string,
    actionType: React.PropTypes.string,
};
Button.defaultProps = {
    disabled: false, // whether the button is disabled
    type: 'default', // style theme: default / primary / warn
    mini: false, // whether to render the mini size
    plain: false, // whether to render as an outlined (plain) button
    text: '', // button label text
    href: '', // URL to navigate to on click
    loading: false, // whether to show the loading spinner
    actionType:'button', // underlying <button> type: button / reset
};
export default Button;
|
package br.com.digidev.messenger4j.setup;
/**
* @author <NAME>
*/
public enum CallToActionType {
    // Button that opens a web URL when tapped.
    WEB_URL,
    // Button that sends a postback payload back to the bot's webhook.
    POSTBACK
}
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2020-2022 F4PGA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
import lxml.etree as ET
from enum import Enum
def clog2(x):
    """Ceiling log 2 of x.
    >>> clog2(0), clog2(1), clog2(2), clog2(3), clog2(4)
    (0, 0, 1, 2, 2)
    >>> clog2(5), clog2(6), clog2(7), clog2(8), clog2(9)
    (3, 3, 3, 3, 4)
    >>> clog2(1 << 31)
    31
    >>> clog2(1 << 63)
    63
    >>> clog2(1 << 11)
    11
    """
    # For x <= 1 the original shift loop never iterates and yields 0.
    if x <= 1:
        return 0
    # int.bit_length() is floor(log2(v)) + 1 for v >= 1, so
    # (x - 1).bit_length() equals ceil(log2(x)) for x >= 2.
    return (x - 1).bit_length()
def add_metadata(tag, mtype, msubtype):
    """Attach a <metadata> child with 'type' and 'subtype' <meta> entries.

    Returns the created <metadata> element so callers can append further
    <meta> children.
    """
    metadata = ET.SubElement(tag, 'metadata')
    for name, text in (('type', mtype), ('subtype', msubtype)):
        meta = ET.SubElement(metadata, 'meta', {'name': name})
        meta.text = text
    return metadata
class MuxType(Enum):
    """Kind of mux being generated (value is the BEL name prefix)."""
    LOGIC = 'BEL_MX'
    ROUTING = 'BEL_RX'
class MuxPinType(Enum):
    """Role of a mux pin: data input, select line, or output.

    str() of a member yields its single-letter code ('i', 's', 'o').
    """
    INPUT = 'i'
    SELECT = 's'
    OUTPUT = 'o'

    def verilog(self):
        """Verilog port declaration keywords for this pin role."""
        if self is MuxPinType.OUTPUT:
            return "output wire"
        if self in (MuxPinType.INPUT, MuxPinType.SELECT):
            return "input wire"
        raise TypeError(
            "Can't convert {} into verilog definition.".format(self))

    def direction(self):
        """Port direction keyword ('input' / 'output') for this pin role."""
        if self is MuxPinType.OUTPUT:
            return "output"
        if self in (MuxPinType.INPUT, MuxPinType.SELECT):
            return "input"
        raise TypeError(
            "Can't convert {} into verilog definition.".format(self))

    def __str__(self):
        return self.value
class ModulePort(object):
    """A single port on a generated Verilog module.

    ``pin_type`` must provide ``verilog()`` (e.g. MuxPinType); ``width``
    is the bus width, ``index`` the position/range string, ``data_width``
    the per-pin data width (only used when width == 1).
    """

    def __init__(self, pin_type, name, width, index, data_width=1):
        self.name = name
        self.pin_type = pin_type
        self.width = width
        self.index = index
        self.data_width = data_width

    def getDefinition(self):
        """Return the Verilog declaration line for this port."""
        keyword = self.pin_type.verilog()
        if self.width != 1:
            # Multi-bit port: the stored index carries the range text.
            return '\t%s %s %s;\n' % (keyword, self.index, self.name)
        if self.data_width is not None and self.data_width > 1:
            # Single pin carrying a data bus, e.g. "input wire [7:0] d".
            return '\t%s [%d:0] %s;\n' % (
                keyword, self.data_width - 1, self.name)
        return '\t%s %s;\n' % (keyword, self.name)
def pb_type_xml(mux_type, mux_name, pins, subckt=None, num_pb=1, comment=""):
    """Generate <pb_type> XML for a mux.
    Parameters
    ----------
    mux_type: MuxType
        Type of mux to create.
    mux_name: str
        Name of the mux.
    pins: [(MuxPinType, str, int, int),]
        List of tuples which contain (pin type, pin name, port width, index)
    subckt: str
        Name of the blif_model for the mux. Only valid when mux_type ==
        MuxType.LOGIC.
    num_pb: int
        Value for the num_pb value. Defaults to 1.
    comment: str
        Optional comment for the mux.
    Returns
    -------
    xml.etree.ElementTree
        pb_type.xml for requested mux
    """
    assert isinstance(comment, str), "{} {}".format(
        type(comment), repr(comment))
    if mux_type not in (MuxType.LOGIC, MuxType.ROUTING):
        assert False, "Unknown type {}".format(mux_type)
    pb_type_xml = ET.Element(
        'pb_type', {
            'name': mux_name,
            'num_pb': str(num_pb),
        })
    if mux_type == MuxType.LOGIC:
        add_metadata(pb_type_xml, 'bel', 'mux')
    else:
        add_metadata(pb_type_xml, 'bel', 'routing')
    # Logic muxes are backed by a blif model; routing muxes must not be.
    if mux_type == MuxType.LOGIC:
        model = ET.SubElement(pb_type_xml, "blif_model")
        model.text = '.subckt {}'.format(subckt)
    else:
        assert not subckt, "Provided subckt={} for non-logic mux!".format(
            subckt)
    if comment is not None:
        pb_type_xml.append(ET.Comment(comment))
    # Emit one port element per pin.
    for port in pins:
        # assert port.index < port.width, (
        #     "Pin index {} >= width {} for pin {} {}".format(
        #         port.index, port.width, port.name, port.pin_type
        #     )
        # )
        # Select pins of routing muxes are driven by the router, not
        # exposed as pb_type ports.
        if mux_type == MuxType.ROUTING and port.pin_type == MuxPinType.SELECT:
            continue
        assert port.width == 1 or port.data_width == 1, (
            'Only one of width(%d) or data_width(%d) may > 1 for pin %s' %
            (port.width, port.data_width, port.name))
        if port.width == 1 and port.data_width > 1:
            num_pins = port.data_width
        else:
            num_pins = port.width
        mux = ET.SubElement(
            pb_type_xml,
            port.pin_type.direction(),
            {
                'name': port.name,
                'num_pins': str(num_pins)
            },
        )
    if mux_type == MuxType.LOGIC:
        # Per-path delay constants: every input/select to every output.
        for inport in pins:
            if inport.pin_type not in (MuxPinType.INPUT, MuxPinType.SELECT):
                continue
            for outport in pins:
                if outport.pin_type not in (MuxPinType.OUTPUT, ):
                    continue
                if inport.name.startswith('I'):
                    delay_inport = inport.name[1]
                else:
                    # if it is not IX it must be S
                    delay_inport = "S0"
                # XXX: temporary workaroud
                if mux_name == "F6MUX":
                    maxdel = "10e-12"
                else:
                    maxdel = "{{iopath_{}_OUT}}".format(delay_inport)
                ET.SubElement(
                    pb_type_xml,
                    'delay_constant',
                    {
                        'max': maxdel,
                        'in_port': "%s" % inport.name,
                        'out_port': "%s" % outport.name,
                    },
                )
    elif mux_type == MuxType.ROUTING:
        # Routing muxes become an <interconnect><mux> with fasm_mux
        # metadata mapping each input to its FASM feature.
        interconnect = ET.SubElement(pb_type_xml, 'interconnect')
        inputs = [
            "{}.{}".format(mux_name, port.name)
            for port in pins
            if port.pin_type in (MuxPinType.INPUT, )
        ]
        outputs = [
            "{}.{}".format(mux_name, port.name)
            for port in pins
            if port.pin_type in (MuxPinType.OUTPUT, )
        ]
        assert len(outputs) == 1
        mux = ET.SubElement(
            interconnect,
            'mux',
            {
                'name': '%s' % mux_name,
                'input': " ".join(inputs),
                'output': outputs[0],
            },
        )
        meta_root = add_metadata(mux, 'bel', 'routing')
        meta_fasm_mux = ET.SubElement(meta_root, 'meta', {'key': 'fasm_mux'})
        meta_fasm_mux.text = "\n".join(
            [""] + ["{0} = {0}".format(i) for i in inputs] + [""])
    return pb_type_xml
if __name__ == "__main__":
    # Run the doctests embedded above; fail loudly on any failure and
    # guard against the docstrings being stripped (test_count must be > 0).
    import doctest
    failure_count, test_count = doctest.testmod()
    assert test_count > 0
    assert failure_count == 0, "Doctests failed!"
|
<filename>client/llb/state_test.go
package llb
import (
"context"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestStateMeta checks that AddEnv/Dir record state metadata, and that
// Reset replaces the receiver's metadata with the argument state's.
func TestStateMeta(t *testing.T) {
	t.Parallel()
	s := Image("foo")
	s = s.AddEnv("BAR", "abc").Dir("/foo/bar")
	v, ok := getEnvHelper(t, s, "BAR")
	assert.True(t, ok)
	assert.Equal(t, "abc", v)
	assert.Equal(t, "/foo/bar", getDirHelper(t, s))
	s2 := Image("foo2")
	s2 = s2.AddEnv("BAZ", "def").Reset(s)
	// After Reset(s): BAZ (set on s2) is gone, BAR (from s) is visible.
	_, ok = getEnvHelper(t, s2, "BAZ")
	assert.False(t, ok)
	v, ok = getEnvHelper(t, s2, "BAR")
	assert.True(t, ok)
	assert.Equal(t, "abc", v)
}
// TestFormattingPatterns verifies that plain AddEnv/Dir treat '%' verbs as
// literal text, while the *f variants (AddEnvf/Dirf) apply formatting.
func TestFormattingPatterns(t *testing.T) {
	t.Parallel()
	s := Image("foo")
	s = s.AddEnv("FOO", "ab%sc").Dir("/foo/bar%d")
	v, ok := getEnvHelper(t, s, "FOO")
	assert.True(t, ok)
	assert.Equal(t, "ab%sc", v)
	assert.Equal(t, "/foo/bar%d", getDirHelper(t, s))
	s2 := Image("foo")
	s2 = s2.AddEnvf("FOO", "ab%sc", "__").Dirf("/foo/bar%d", 1)
	v, ok = getEnvHelper(t, s2, "FOO")
	assert.True(t, ok)
	assert.Equal(t, "ab__c", v)
	assert.Equal(t, "/foo/bar1", getDirHelper(t, s2))
}
// getEnvHelper resolves environment variable k on state s, failing the
// test immediately if GetEnv returns an error.
func getEnvHelper(t *testing.T, s State, k string) (string, bool) {
	t.Helper()
	v, ok, err := s.GetEnv(context.TODO(), k)
	require.NoError(t, err)
	return v, ok
}
|
<gh_stars>0
package org.glamey.training.codes.leetcode;
/**
 * Given the string abc\bd\bghi, delete the character immediately before
 * each \b; when \b occurs multiple times, apply the same rule to every
 * occurrence. <p>
 * <p>
 * Approaches:<p>
 * <p>
 * 1. Work on a char[] with a write pointer: normal characters are written
 *    and the pointer advances; a \b steps the pointer back so later
 *    characters overwrite the deleted one.<p>
 * 2. Use StringBuffer/StringBuilder append and deleteCharAt.<p>
 * 3. Solve recursively.
 *
 * @author zhouyang.zhou. 2017.08.30.17.
 */
public class RemoveSpecialCharacter {

    // '\b' marks "delete the preceding character" (backspace semantics).
    private static final char removeStr = '\b';

    /**
     * Array-based approach: copy characters into a buffer, stepping the
     * write position back by one whenever a backspace is seen so the next
     * character overwrites the deleted one.
     */
    public static String remove_1(String target) {
        if (target == null || "".equals(target)) {
            return target;
        }
        int position = 0;
        char[] result = new char[target.length()];
        for (int i = 0; i < target.length(); i++) {
            char current = target.charAt(i);
            if (current != removeStr) {
                result[position++] = current;
            } else {
                if (position > 0) {
                    position--;
                }
            }
        }
        return String.valueOf(result, 0, position);
    }

    /**
     * StringBuilder-based approach: append normal characters, delete the
     * most recently appended character on backspace.
     */
    public static String remove_2(String target) {
        if (target == null || "".equals(target)) {
            return target;
        }
        StringBuilder builder = new StringBuilder(target.length());
        int count = 0;
        for (int i = 0; i < target.length(); i++) {
            char current = target.charAt(i);
            if (current != removeStr) {
                builder.append(current);
                count++;
            } else {
                if (count > 0) {
                    builder.deleteCharAt(count - 1);
                    count--;
                }
            }
        }
        return builder.toString();
    }

    /**
     * Recursive approach (previously an unimplemented stub returning null):
     * remove the first backspace together with the character before it,
     * then recurse until no backspace remains.
     */
    public static String remove_3(String target) {
        if (target == null || "".equals(target)) {
            return target;
        }
        int idx = target.indexOf(removeStr);
        if (idx < 0) {
            return target;
        }
        // Drop the backspace and (when one exists) the character before it.
        String head = idx > 0 ? target.substring(0, idx - 1) : "";
        return remove_3(head + target.substring(idx + 1));
    }

    public static void main(String[] args) {
        //String target = "abc\b\bd\b\bghi";
        String target = "abc\bd\bghi";
        System.out.println(remove_1(target));
        System.out.println(remove_2(target));
        System.out.println(remove_3(target));
    }
}
|
def replace_string(old, new, s):
    """Return ``s`` with every occurrence of ``old`` replaced by ``new``."""
    replaced = s.replace(old, new)
    return replaced
# Demo: swap one language name for another in a sentence.
input_string = "I love Python programming language"
new_string = replace_string("Python", "JavaScript", input_string)
print(new_string)
# Output: I love JavaScript programming language
<filename>components/application-specific/LanguageSwitcher.js
import PropTypes from 'prop-types'
import React from 'react'
import withLang from '../../services/lang/withLang'
// English/Welsh language toggle: the active language renders as plain
// text, the inactive one as a link that calls setLang.
const LanguageSwitcher = ({ lang, setLang }) => {
  return (
    <nav className="govuk-language-select" aria-label="Language switcher">
      <ul className="govuk-language-select__list">
        {lang === 'en' && <li className="govuk-language-select__list-item">
          <span aria-current="true">English</span>
        </li>}
        {lang !== 'en' && <li className="govuk-language-select__list-item">
          <a href="#lang-en" hrefLang="en" lang="en" rel="alternate" className="govuk-link"
            data-journey-click="link - click:lang-select:English" onClick={() => setLang('en')}>
            <span className="govuk-visually-hidden">Change the language to English</span>
            <span aria-hidden="true">English</span>
          </a>
        </li>}
        {lang === 'cy' && <li className="govuk-language-select__list-item">
          <span aria-current="true">Cymraeg</span>
        </li>}
        {lang !== 'cy' && <li className="govuk-language-select__list-item">
          <a href="#lang-cy" hrefLang="cy" lang="cy" rel="alternate" className="govuk-link"
            data-journey-click="link - click:lang-select:Cymraeg" onClick={() => setLang('cy')}>
            <span className="govuk-visually-hidden">Newid yr iaith ir Gymraeg</span>
            <span aria-hidden="true">Cymraeg</span>
          </a>
        </li>}
      </ul>
    </nav>
  )
}
// Wrapped with the lang HOC, requesting the setter as well as the value.
export default withLang(LanguageSwitcher, { withSetter: true })

LanguageSwitcher.propTypes = {
  lang: PropTypes.string.isRequired,
  setLang: PropTypes.func.isRequired
}
|
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.sdk.runners;
import com.google.cloud.dataflow.sdk.Pipeline;
import com.google.cloud.dataflow.sdk.PipelineResult.State;
import com.google.cloud.dataflow.sdk.annotations.Experimental;
import com.google.cloud.dataflow.sdk.options.BlockingDataflowPipelineOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.options.PipelineOptionsValidator;
import com.google.cloud.dataflow.sdk.transforms.PTransform;
import com.google.cloud.dataflow.sdk.util.MonitoringUtil;
import com.google.cloud.dataflow.sdk.values.PInput;
import com.google.cloud.dataflow.sdk.values.POutput;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
/**
* A PipelineRunner that's like {@link DataflowPipelineRunner}
* but that waits for the launched job to finish.
*
* <p> Prints out job status updates and console messages while it waits.
*
* <p> Returns the final job state, or throws an exception if the job
* fails or cannot be monitored.
*/
public class BlockingDataflowPipelineRunner extends
    PipelineRunner<DataflowPipelineJob> {
  private static final Logger LOG = LoggerFactory.getLogger(BlockingDataflowPipelineRunner.class);

  // Defaults to an infinite wait period.
  // TODO: make this configurable after removal of option map.
  private static final long BUILTIN_JOB_TIMEOUT_SEC = -1L;

  // Delegate that performs the actual (non-blocking) job submission.
  private DataflowPipelineRunner dataflowPipelineRunner = null;
  private MonitoringUtil.JobMessagesHandler jobMessagesHandler;

  protected BlockingDataflowPipelineRunner(
      DataflowPipelineRunner internalRunner,
      MonitoringUtil.JobMessagesHandler jobMessagesHandler) {
    this.dataflowPipelineRunner = internalRunner;
    this.jobMessagesHandler = jobMessagesHandler;
  }

  /**
   * Constructs a runner from the provided options.
   */
  public static BlockingDataflowPipelineRunner fromOptions(
      PipelineOptions options) {
    BlockingDataflowPipelineOptions dataflowOptions =
        PipelineOptionsValidator.validate(BlockingDataflowPipelineOptions.class, options);
    DataflowPipelineRunner dataflowPipelineRunner =
        DataflowPipelineRunner.fromOptions(dataflowOptions);
    return new BlockingDataflowPipelineRunner(dataflowPipelineRunner,
        new MonitoringUtil.PrintHandler(dataflowOptions.getJobMessageOutput()));
  }

  /**
   * Submits the pipeline, then blocks until it reaches a terminal state.
   * Returns the job on termination; throws RuntimeException when the wait
   * is interrupted, the status cannot be fetched, or the job never
   * terminates.
   */
  @Override
  public DataflowPipelineJob run(Pipeline p) {
    DataflowPipelineJob job = dataflowPipelineRunner.run(p);
    @Nullable
    State result;
    try {
      result = job.waitToFinish(
          BUILTIN_JOB_TIMEOUT_SEC, TimeUnit.SECONDS, jobMessagesHandler);
    } catch (IOException | InterruptedException ex) {
      throw new RuntimeException("Exception caught during job execution", ex);
    }
    if (result == null) {
      throw new RuntimeException("No result provided: "
          + "possible error requesting job status.");
    }
    LOG.info("Job finished with status {}", result);
    if (result.isTerminal()) {
      return job;
    }
    // TODO: introduce an exception that can wrap a JobState,
    // so that detailed error information can be retrieved.
    throw new RuntimeException(
        "Failed to wait for the job to finish. Returned result: " + result);
  }

  // Delegates transform application to the wrapped runner.
  @Override
  public <Output extends POutput, Input extends PInput> Output apply(
      PTransform<Input, Output> transform, Input input) {
    return dataflowPipelineRunner.apply(transform, input);
  }

  /**
   * Sets callbacks to invoke during execution see {@code DataflowPipelineRunnerHooks}.
   */
  @Experimental
  public void setHooks(DataflowPipelineRunnerHooks hooks) {
    this.dataflowPipelineRunner.setHooks(hooks);
  }
}
|
<filename>api/http/router.go
// Copyright 2021 Northern.tech AS
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package http
import (
"net/http"
"os"
"github.com/gin-gonic/gin"
"github.com/mendersoftware/deviceconfig/app"
"github.com/mendersoftware/go-lib-micro/accesslog"
"github.com/mendersoftware/go-lib-micro/identity"
"github.com/mendersoftware/go-lib-micro/requestid"
)
// API URL used by the HTTP router
const (
	// Base paths for the three API audiences.
	URIDevices    = "/api/devices/v1/deviceconfig"
	URIInternal   = "/api/internal/v1/deviceconfig"
	URIManagement = "/api/management/v1/deviceconfig"

	// Internal, tenant-scoped sub-paths.
	URITenants       = "/tenants"
	URITenantDevices = "/tenants/:tenant_id/devices"
	URITenantDevice  = "/tenants/:tenant_id/devices/:device_id"

	// Management sub-paths.
	URIConfiguration       = "/configurations/device/:device_id"
	URIDeployConfiguration = "/configurations/device/:device_id/deploy"

	// Device-facing sub-path.
	URIDeviceConfiguration = "/configuration"

	// Liveness and health probe endpoints.
	URIAlive  = "/alive"
	URIHealth = "/health"
)
func init() {
	// Honour GIN_MODE when set; otherwise default to release mode so
	// debug logging is opt-in.
	mode := os.Getenv(gin.EnvGinMode)
	if mode == "" {
		mode = gin.ReleaseMode
	}
	gin.SetMode(mode)
	gin.DisableConsoleColor()
}
// NewRouter initializes a new gin.Engine as a http.Handler
func NewRouter(app app.App) http.Handler {
	router := gin.New()
	// accesslog provides logging of http responses and recovery on panic.
	router.Use(accesslog.Middleware())
	// requestid attaches X-Men-Requestid header to context
	router.Use(requestid.Middleware())

	// Internal API: probes plus tenant/device provisioning. No identity
	// middleware is attached to this group.
	intrnlAPI := NewInternalAPI(app)
	intrnlGrp := router.Group(URIInternal)
	intrnlGrp.GET(URIAlive, intrnlAPI.Alive)
	intrnlGrp.GET(URIHealth, intrnlAPI.Health)
	intrnlGrp.POST(URITenants, intrnlAPI.ProvisionTenant)
	intrnlGrp.POST(URITenantDevices, intrnlAPI.ProvisionDevice)
	intrnlGrp.DELETE(URITenantDevice, intrnlAPI.DecommissionDevice)

	// Management API: configuration endpoints, JWT identity required.
	mgmtAPI := NewManagementAPI(app)
	mgmtGrp := router.Group(URIManagement)
	// identity middleware for collecting JWT claims into request Context.
	mgmtGrp.Use(identity.Middleware())
	mgmtGrp.GET(URIConfiguration, mgmtAPI.GetConfiguration)
	mgmtGrp.PUT(URIConfiguration, mgmtAPI.SetConfiguration)
	mgmtGrp.POST(URIDeployConfiguration, mgmtAPI.DeployConfiguration)

	// Devices API: endpoints called by the devices, also JWT-identified.
	devAPI := NewDevicesAPI(app)
	devGrp := router.Group(URIDevices)
	devGrp.Use(identity.Middleware())
	devGrp.GET(URIDeviceConfiguration, devAPI.GetConfiguration)
	devGrp.PUT(URIDeviceConfiguration, devAPI.SetConfiguration)
	return router
}
|
<reponame>alcatrazEscapee/WorleyCaves
package fluke.worleycaves.proxy;
/**
 * Client-side proxy for the WorleyCaves mod.
 *
 * Currently adds nothing beyond {@link CommonProxy}; presumably a
 * placeholder for client-only setup (standard Forge sided-proxy
 * pattern) — confirm against how the mod instantiates its proxies.
 */
public class ClientProxy extends CommonProxy
{
}
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-SS-N/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-SS-N/7-512+512+512-common-1 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_common_words_first_third_full --eval_function last_element_eval |
# Copyright 2021 The cert-manager Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# generated by ./hack/latest-kind-images.sh
KIND_IMAGE_SHA_K8S_118=sha256:f4bcc97a0ad6e7abaf3f643d890add7efe6ee4ab90baeb374b4f41a4c95567eb
KIND_IMAGE_SHA_K8S_119=sha256:a70639454e97a4b733f9d9b67e12c01f6b0297449d5b9cbbef87473458e26dca
KIND_IMAGE_SHA_K8S_120=sha256:cbeaf907fc78ac97ce7b625e4bf0de16e3ea725daf6b04f930bd14c67c671ff9
KIND_IMAGE_SHA_K8S_121=sha256:0fda882e43d425622f045b492f8bd83c2e0b4984fc03e2e05ec101ca1a685fb7
KIND_IMAGE_SHA_K8S_122=sha256:9af3ab3e36fb59890b2fb3a18000930b4792d62d10d2060e0ca701e2c392d487
|
<reponame>PhuongLe/clean-starter
import { Key } from "../../domain/entities/Key";
import { User } from "../../domain/entities/User";
export interface IUserRepository {
updateUserChallenge(user: User, challenge: string): Promise<void>;
loadByEmail(email: string): Promise<User>;
addKeyToUser(user: User, key: Key): Promise<void>;
loadByChallenge(challenge: string): Promise<User>;
loadByEmailAndPassword(email: string, password: string): Promise<User>
loadById(id: string): Promise<User>
create(email: string, challenge: string): Promise<User>
} |
# Generate lambda zip file: dependencies first, then application code.
cd backend
mkdir tmp
pip install --target tmp -r requirements/production.txt
cd tmp
zip -r9 ../../aws-deployment/payload.zip .
cd ..
zip -r9 ../aws-deployment/payload.zip app main.py
rm -rf tmp
# Build Angular for production
cd ../frontend
# Set Angular environment file from environment variables.
# Fixed: the script has already cd'd into frontend/, so the file must be
# addressed as "src/..." — the previous "frontend/src/..." resolved to a
# nonexistent frontend/frontend/src/ path from here.
# NOTE(review): `sed -i ''` is the BSD/macOS form; GNU sed takes `sed -i`
# without the empty suffix — confirm the target build platform.
sed -i '' "s|apiUrl.*|apiUrl: '$API_URL'|" src/environments/environment.prod.ts
ng build
# Terraform update
cd ../aws-deployment
terraform apply -var-file="var.tfvars"
|
<reponame>dmb-2222/we-are-the-future<gh_stars>0
const Sequelize = require("sequelize");
require("dotenv").config();
// Sequelize connection configured entirely from environment variables.
const sequelize = new Sequelize({
  host: process.env.HOST, // host of the database
  database: process.env.DATABASE, // name of the DB to connect
  dialect: "postgres", // DB dialect, one of 'mysql' | 'mariadb' | 'postgres' | 'mssql'
  // NOTE(review): username is read from DATABASE, duplicating the database
  // name above — looks like it should be a dedicated DB_USER variable;
  // kept as-is to preserve behavior, confirm against the deployment env.
  username: process.env.DATABASE, // DB user which will be used for connection to DB
  // Fixed: this was a redacted "<PASSWORD>" placeholder, which is a
  // syntax error; read the secret from the environment instead of
  // hard-coding it.
  password: process.env.PASSWORD, // DB user's password
});
module.exports = sequelize;
<reponame>Annabelle1024/WYNewws<filename>WYNews/WYNews/Tools/Additions/NSArray+YJAddition.h
//
// NSArray+YJAddition.h
//
// Created by Annabelle on 16/5/28.
// Copyright © 2016年 annabelle. All rights reserved.
//
#import <Foundation/Foundation.h>
@interface NSArray (YJAddition)
/// Builds an array of model objects of class `clsName` from a plist file.
/// (Presumably the plist is loaded from the main bundle — confirm in the
/// implementation file.)
///
/// @param plistName name of the plist file
/// @param clsName   name of the model class to instantiate for each entry
///
/// @return array of `clsName` instances
+ (NSArray *)yj_objectListWithPlistName:(NSString *)plistName clsName:(NSString *)clsName;
@end
|
// generated by genversion
export const version = '3.8.12';
|
# -*- coding: utf-8 -*-
import unittest
import json
import logging
from mock import patch
with patch('os.path.exists') as mock_exists:
mock_exists.return_value = True
import app as service_alti
from tests import create_json
from tests.unit_tests import ENDPOINT_FOR_JSON_PROFILE, ENDPOINT_FOR_CSV_PROFILE,\
LINESTRING_VALID_LV03, POINT_1_LV03, POINT_2_LV03, POINT_3_LV03, LINESTRING_WRONG_SHAPE,\
LINESTRING_SMALL_LINE_LV03, LINESTRING_MISSPELLED_SHAPE, LINESTRING_VALID_LV95,\
LINESTRING_SMALL_LINE_LV95, DEFAULT_HEADERS, prepare_mock
from app.helpers.profile_helpers import PROFILE_MAX_AMOUNT_POINTS, PROFILE_DEFAULT_AMOUNT_POINTS
logger = logging.getLogger(__name__)
class TestProfile(unittest.TestCase):
    # pylint: disable=too-many-public-methods
    """Tests for the profile endpoints, JSON and CSV flavours.

    Raster access is mocked per test by patching
    ``app.routes.georaster_utils`` (configured through ``prepare_mock``),
    so no real elevation data is read.
    """
    def setUp(self) -> None:
        # Flask test client in TESTING mode; headers shared by all requests.
        service_alti.app.config['TESTING'] = True
        self.test_instance = service_alti.app.test_client()
        self.headers = DEFAULT_HEADERS
    def __check_response(self, response, expected_status=200):
        """Assert that a response was received with the expected HTTP status."""
        self.assertIsNotNone(response)
        self.assertEqual(
            response.status_code, expected_status, msg="%s" % response.get_data(as_text=True)
        )
    def prepare_mock_and_test_post(self, mock_georaster_utils, body, expected_status):
        """POST ``body`` to the JSON profile endpoint with the raster mock installed."""
        prepare_mock(mock_georaster_utils)
        response = self.post_with_body(body)
        self.__check_response(response, expected_status)
        return response
    def prepare_mock_and_test_csv_profile(self, mock_georaster_utils, params, expected_status):
        """GET the CSV profile endpoint with the raster mock installed."""
        prepare_mock(mock_georaster_utils)
        response = self.get_csv_with_params(params)
        self.__check_response(response, expected_status)
        return response
    def get_json_profile(self, params, expected_status=200):
        """GET the JSON profile endpoint, failing the test on any exception."""
        # pylint: disable=broad-except
        try:
            response = self.test_instance.get(
                ENDPOINT_FOR_JSON_PROFILE, query_string=params, headers=self.headers
            )
            self.__check_response(response, expected_status)
            return response
        except Exception as e:
            logger.exception(e)
            self.fail('Call to test_instance failed')
    def get_csv_with_params(self, params):
        """GET the CSV profile endpoint with the shared headers."""
        return self.test_instance.get(
            ENDPOINT_FOR_CSV_PROFILE, query_string=params, headers=self.headers
        )
    def post_with_body(self, body):
        """POST raw ``body`` to the JSON profile endpoint with the shared headers."""
        return self.test_instance.post(ENDPOINT_FOR_JSON_PROFILE, data=body, headers=self.headers)
    def prepare_mock_and_test_json_profile(self, mock_georaster_utils, params, expected_status):
        """GET the JSON profile endpoint with the raster mock installed."""
        prepare_mock(mock_georaster_utils)
        return self.get_json_profile(params=params, expected_status=expected_status)
    def verify_point_is_present(self, response, point, msg="point not present"):
        """Fail unless ``point`` ([easting, northing]) occurs in the JSON profile."""
        self.assertEqual(response.content_type, "application/json")
        if len(point) != 2:
            self.fail("Point must be a [x,y] point")
        present = False
        for profile_point in response.json:
            if point[0] == profile_point['easting'] and point[1] == profile_point['northing']:
                present = True
        if not present:
            self.fail(msg)
    def assert_response_contains(self, response, content):
        """Fail unless ``content`` occurs in the response body text."""
        self.assertTrue(
            content in response.get_data(as_text=True),
            msg="Response doesn't contain '%s' : '%s'" % (content, response.get_data(as_text=True))
        )
    @patch('app.routes.georaster_utils')
    def test_do_not_fail_when_no_origin(self, mock_georaster_utils):
        self.headers = {}
        self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'sr': 2056, 'geom': create_json(4, 2056)
            },
            expected_status=200
        )
    @patch('app.routes.georaster_utils')
    def test_profile_invalid_sr_json_valid(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'sr': 666, 'geom': create_json(3, 21781)
            },
            expected_status=400
        )
        self.assert_response_contains(
            resp,
            "Please provide a valid number for the spatial reference "
            "system model 21781 or 2056"
        )
    @patch('app.routes.georaster_utils')
    def test_profile_lv95_json_valid(self, mock_georaster_utils):
        self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'sr': 2056, 'geom': create_json(4, 2056)
            },
            expected_status=200
        )
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_valid(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': LINESTRING_VALID_LV03, 'smart_filling': True, 'offset': 0
            },
            expected_status=200
        )
        self.assertEqual(resp.content_type, 'application/json')
        first_point = resp.json[0]
        self.assertEqual(first_point['dist'], 0)
        self.assertEqual(first_point['alts']['COMB'], 104.0)
        self.assertEqual(first_point['easting'], 630000)
        self.assertEqual(first_point['northing'], 170000)
        second_point = resp.json[1]
        self.assertEqual(second_point['dist'], 40)
        self.assertEqual(second_point['alts']['COMB'], 123.5)
        self.assertEqual(second_point['easting'], 630032.0)
        self.assertEqual(second_point['northing'], 170024.0)
        self.verify_point_is_present(resp, POINT_1_LV03)
        self.verify_point_is_present(resp, POINT_2_LV03)
        self.verify_point_is_present(resp, POINT_3_LV03)
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_layers_post(self, mock_georaster_utils):
        params = create_json(4, 21781)
        self.headers['Content-Type'] = 'application/json'
        resp = self.prepare_mock_and_test_post(
            mock_georaster_utils=mock_georaster_utils, body=json.dumps(params), expected_status=200
        )
        self.assertEqual(resp.content_type, 'application/json')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_layers_none(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': '{"type":"LineString","coordinates":[[0,0],[0,0],[0,0]]}'},
            expected_status=400
        )
        self.assert_response_contains(resp, "No 'sr' given and cannot be guessed from 'geom'")
    def test_profile_lv03_layers_none2(self):
        resp = self.get_json_profile(
            params={
                'geom':
                    '{"type":"LineString","coordinates":[[550050,-206550],[556950,204150],'
                    '[561050,207950]]}'
            },
            expected_status=400
        )
        self.assert_response_contains(resp, "No 'sr' given and cannot be guessed from 'geom'")
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_with_callback_valid(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': create_json(4, 21781), 'callback': 'cb_'
            },
            expected_status=200
        )
        self.assertEqual(resp.content_type, 'application/javascript')
        self.assert_response_contains(resp, 'cb_([')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_missing_geom(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'sr': 21781, 'geom': None
            },
            expected_status=400
        )
        self.assert_response_contains(resp, 'No \'geom\' given')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_wrong_geom(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils, params={'geom': 'toto'}, expected_status=400
        )
        self.assert_response_contains(resp, 'Error loading geometry in JSON string')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_wrong_shape(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': LINESTRING_WRONG_SHAPE},
            expected_status=400
        )
        self.assert_response_contains(resp, 'Error converting JSON to Shape')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_nb_points(self, mock_georaster_utils):
        # as 150 is too much for this profile (distance between points will be smaller than 2m
        # resolution of the altitude model), the service will return 203 and a smaller amount of
        # points
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': LINESTRING_SMALL_LINE_LV03, 'smart_filling': True, 'nb_points': '150'
            },
            expected_status=203
        )
        self.assertEqual(resp.content_type, 'application/json')
        self.assertNotEqual(len(resp.json), 150)
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_simplify_linestring(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': create_json(4, 21781), 'nb_points': '2'
            },
            expected_status=200
        )
        self.assertEqual(resp.content_type, 'application/json')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_nb_points_smart_filling(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': LINESTRING_SMALL_LINE_LV03, 'smart_filling': True, 'nbPoints': '150'
            },
            expected_status=203
        )
        self.assertEqual(resp.content_type, 'application/json')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_nb_points_wrong(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': create_json(4, 21781), 'nb_points': 'toto'
            },
            expected_status=400
        )
        self.assert_response_contains(
            resp, "Please provide a numerical value for the parameter "
            "'NbPoints'/'nb_points'"
        )
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_nb_points_too_much(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': create_json(4, 21781), 'nb_points': PROFILE_MAX_AMOUNT_POINTS + 1
            },
            expected_status=400
        )
        self.assert_response_contains(
            resp, "Please provide a numerical value for the parameter "
            "'NbPoints'/'nb_points'"
        )
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_default_nb_points(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': LINESTRING_VALID_LV03},
            expected_status=200
        )
        self.assertGreaterEqual(len(resp.json), PROFILE_DEFAULT_AMOUNT_POINTS)
        self.assertGreaterEqual(PROFILE_MAX_AMOUNT_POINTS, len(resp.json))
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_csv_valid(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_csv_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': create_json(4, 21781)},
            expected_status=200
        )
        self.assertEqual(resp.content_type, 'text/csv')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_cvs_wrong_geom(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_csv_profile(
            mock_georaster_utils=mock_georaster_utils, params={'geom': 'toto'}, expected_status=400
        )
        self.assert_response_contains(resp, 'Error loading geometry in JSON string')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_csv_misspelled_shape(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_csv_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': LINESTRING_MISSPELLED_SHAPE},
            expected_status=400
        )
        self.assert_response_contains(resp, 'Error loading geometry in JSON string')
        resp = self.prepare_mock_and_test_csv_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': LINESTRING_WRONG_SHAPE},
            expected_status=400
        )
        self.assert_response_contains(resp, 'Error converting JSON to Shape')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_invalid_linestring(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': '{"type":"LineString","coordinates":[[550050,206550]]}'},
            expected_status=400
        )
        self.assert_response_contains(resp, 'Error converting JSON to Shape')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_offset(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': LINESTRING_VALID_LV03, 'offset': '1'
            },
            expected_status=200
        )
        self.assertEqual(resp.content_type, 'application/json')
    @patch('app.routes.georaster_utils')
    def test_profile_lv03_json_invalid_offset(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': LINESTRING_VALID_LV03, 'offset': 'asdf'
            },
            expected_status=400
        )
        self.assert_response_contains(
            resp, "Please provide a numerical value for the parameter 'offset'"
        )
    @patch('app.routes.georaster_utils')
    def test_profile_entity_too_large(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': create_json(7000), 'sr': '2056'
            },
            expected_status=413
        )
        self.assert_response_contains(resp, 'Geometry contains too many points')
    @patch('app.routes.georaster_utils')
    def test_profile_entity_too_large_post(self, mock_georaster_utils):
        params = create_json(7000)
        self.headers['Content-Type'] = 'application/json'
        resp = self.prepare_mock_and_test_post(
            mock_georaster_utils=mock_georaster_utils, body=json.dumps(params), expected_status=413
        )
        self.assert_response_contains(resp, 'Geometry contains too many points')
    @patch('app.routes.georaster_utils')
    def test_profile_lv95(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': LINESTRING_VALID_LV95},
            expected_status=200
        )
        self.assertEqual(resp.content_type, 'application/json')
    @patch('app.routes.georaster_utils')
    def test_profile_lv95_nb_points_exceeds_resolution_meshing(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={
                'geom': LINESTRING_SMALL_LINE_LV95, 'smart_filling': True, 'nb_points': 150
            },
            expected_status=203
        )
        self.assertEqual(resp.content_type, 'application/json')
        self.assertNotEqual(len(resp.json), 150)
    @patch('app.routes.georaster_utils')
    def test_profile_points_given_in_geom_are_in_profile(self, mock_georaster_utils):
        point1, point2, point3, point4, point5, point6, point7 = [2631599.9, 1171895.0], \
            [2631960.5, 1171939.7], \
            [2632384.3, 1171798.3], \
            [2632600.9, 1171525.6], \
            [2632633.5, 1171204.0], \
            [2632622.1, 1171025.3], \
            [2632820.8, 1170741.8]
        multipoint_geom = f'{{"type":"LineString","coordinates":[{point1},{point2},{point3},' \
            f'{point4},{point5},{point6},{point7}]}}'
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': multipoint_geom},
            expected_status=200
        )
        self.verify_point_is_present(resp, point1, msg="point1 not present")
        self.verify_point_is_present(resp, point2, msg="point2 not present")
        self.verify_point_is_present(resp, point3, msg="point3 not present")
        self.verify_point_is_present(resp, point4, msg="point4 not present")
        self.verify_point_is_present(resp, point5, msg="point5 not present")
        self.verify_point_is_present(resp, point6, msg="point6 not present")
        self.verify_point_is_present(resp, point7, msg="point7 not present")
    @patch('app.routes.georaster_utils')
    def test_profile_all_old_elevation_models_are_returned(self, mock_georaster_utils):
        resp = self.prepare_mock_and_test_json_profile(
            mock_georaster_utils=mock_georaster_utils,
            params={'geom': LINESTRING_VALID_LV95},
            expected_status=200
        )
        self.assertEqual(resp.content_type, 'application/json')
        altitudes = resp.json[0]['alts']
        comb_value = altitudes['COMB']
        if not altitudes.get('DTM2'):
            self.fail("All old elevation_models must be returned in alt for compatibility issue")
        if altitudes['DTM2'] != comb_value:
            self.fail("All values from all models should be taken from the new COMB layer")
        if not altitudes.get('DTM25'):
            self.fail("All old elevation_models must be returned in alt for compatibility issue")
        if altitudes['DTM25'] != comb_value:
            self.fail("All values from all models should be taken from the new COMB layer")
|
#!/bin/bash
# Regenerate the Python3 ANTLR parser with a visitor (no listener)
# from the Lustre grammar.
antlr Lustre.g4 -visitor -no-listener -Dlanguage=Python3
|
import nltk
from nltk.stem import WordNetLemmatizer
lemmatizer = WordNetLemmatizer()
def tokenize(sentence):
    """Tokenize ``sentence`` and lemmatize each lower-cased token.

    Uses the module-level WordNetLemmatizer; requires the NLTK 'punkt'
    and 'wordnet' resources to be downloaded — TODO confirm setup.
    """
    tokens = nltk.word_tokenize(sentence)
    lemmatized_tokens = [lemmatizer.lemmatize(word.lower()) for word in tokens]
    return lemmatized_tokens
def response(user_input):
    """Build a chatbot reply for ``user_input``.

    Fixed: the original body was ``return response``, which returned the
    function object itself (the name ``response`` resolves to this very
    function) instead of any text.  Until NLP-based response
    construction is implemented, return a fixed placeholder string so
    callers always receive text.
    """
    # Code to construct user response based on Natural Language
    # Processing goes here; fall back to a placeholder for now.
    return "I'm sorry, I don't understand that yet."
#!/bin/bash
# Archived program command-line for experiment
# Copyright 2017 Xiang Zhang
#
# Usage: bash {this_file} [additional_options]
set -x;
set -e;
# Output directory for the models and the pre-split character-token data.
LOCATION=models/rakutenfull/charunigram_evaluation;
TRAIN_DATA=data/rakuten/sentiment/full_train_chartoken_shuffle_split_0.txt;
TEST_DATA=data/rakuten/sentiment/full_train_chartoken_shuffle_split_1.txt;
# Train and evaluate character-unigram fastText classifiers with
# identical hyper-parameters except the epoch count (2, 5, 10).
fasttext supervised -input $TRAIN_DATA -output $LOCATION/model_2 -dim 10 -lr 0.1 -wordNgrams 1 -minCount 1 -bucket 10000000 -epoch 2 -thread 10;
fasttext test $LOCATION/model_2.bin $TEST_DATA;
fasttext supervised -input $TRAIN_DATA -output $LOCATION/model_5 -dim 10 -lr 0.1 -wordNgrams 1 -minCount 1 -bucket 10000000 -epoch 5 -thread 10;
fasttext test $LOCATION/model_5.bin $TEST_DATA;
fasttext supervised -input $TRAIN_DATA -output $LOCATION/model_10 -dim 10 -lr 0.1 -wordNgrams 1 -minCount 1 -bucket 10000000 -epoch 10 -thread 10;
fasttext test $LOCATION/model_10.bin $TEST_DATA;
|
import {
FETCH_SUCCESS,
FETCH_FAILURE,
UPDATE_FAILED,
UPDATE_SUCCESS,
} from './constants';
// Seed data shown until the first successful fetch replaces the list.
const initialState = {
  list: [
    { id: 5, body: 'Aang' },
    { id: 4, body: 'Katara' },
    { id: 3, body: 'Toph' },
    { id: 2, body: 'Sokka' },
    { id: 1, body: 'Appa' },
  ],
};
/**
 * Reducer for the list state.  FETCH_SUCCESS replaces the list,
 * UPDATE_SUCCESS records the updated item under `added`, and both
 * failure actions store the error; anything else returns the state
 * unchanged.
 */
export default function(state = initialState, action) {
  if (action.type === FETCH_SUCCESS) {
    return { ...state, list: action.list };
  }
  if (action.type === FETCH_FAILURE) {
    return { ...state, error: action.error };
  }
  if (action.type === UPDATE_SUCCESS) {
    return { ...state, added: action.updated };
  }
  if (action.type === UPDATE_FAILED) {
    return { ...state, error: action.error };
  }
  return state;
}
|
import './App.css';
import { Link } from 'react-router-dom';
import Chatbox from './Chatbox';
import FriendList from './FriendList';
// Root component: a header with a link to the friend-list route and a
// content area hosting the chat box.  Routing itself is configured
// elsewhere (presumably where <App/> is mounted) — the Link only
// navigates.
function App() {
  return (
    <div className="App">
      <div className="App-header">
        <Link to="/friendList">
          <button>
            Go FriendList
          </button>
        </Link>
      </div>
      <div className="App-content">
        <Chatbox/>
      </div>
    </div>
  );
}
export default App;
|
<reponame>liubailing/omi
!function() {
'use strict';
function obaa(target, arr, callback) {
var eventPropArr = [];
if (isArray(target)) {
if (0 === target.length) target.__o_ = {
__r_: target,
__p_: '#'
};
mock(target, target);
}
for (var prop in target) if (target.hasOwnProperty(prop)) if (callback) {
if (isArray(arr) && isInArray(arr, prop)) {
eventPropArr.push(prop);
watch(target, prop, null, target);
} else if (isString(arr) && prop === arr) {
eventPropArr.push(prop);
watch(target, prop, null, target);
}
} else {
eventPropArr.push(prop);
watch(target, prop, null, target);
}
if (!target.__c_) target.__c_ = [];
var propChanged = callback ? callback : arr;
target.__c_.push({
all: !callback,
propChanged: propChanged,
eventPropArr: eventPropArr
});
}
function mock(target, root) {
methods.forEach(function(item) {
target[item] = function() {
var old = Array.prototype.slice.call(this, 0);
var result = Array.prototype[item].apply(this, Array.prototype.slice.call(arguments));
if (new RegExp('\\b' + item + '\\b').test(triggerStr)) {
for (var cprop in this) if (this.hasOwnProperty(cprop) && !isFunction(this[cprop])) watch(this, cprop, this.__o_.__p_, root);
onPropertyChanged('Array-' + item, this, old, this, this.__o_.__p_, root);
}
return result;
};
target['pure' + item.substring(0, 1).toUpperCase() + item.substring(1)] = function() {
return Array.prototype[item].apply(this, Array.prototype.slice.call(arguments));
};
});
}
function watch(target, prop, path, root) {
if ('__o_' !== prop) if (!isFunction(target[prop])) {
if (!target.__o_) target.__o_ = {
__r_: root
};
if (void 0 !== path && null !== path) target.__o_.__p_ = path; else target.__o_.__p_ = '#';
var currentValue = target.__o_[prop] = target[prop];
Object.defineProperty(target, prop, {
get: function() {
return this.__o_[prop];
},
set: function(value) {
var old = this.__o_[prop];
this.__o_[prop] = value;
onPropertyChanged(prop, value, old, this, target.__o_.__p_, root);
},
configurable: !0,
enumerable: !0
});
if ('object' == typeof currentValue) {
if (isArray(currentValue)) {
mock(currentValue, root);
if (0 === currentValue.length) {
if (!currentValue.__o_) currentValue.__o_ = {};
if (void 0 !== path && null !== path) currentValue.__o_.__p_ = path + '-' + prop; else currentValue.__o_.__p_ = '#-' + prop;
}
}
for (var cprop in currentValue) if (currentValue.hasOwnProperty(cprop)) watch(currentValue, cprop, target.__o_.__p_ + '-' + prop, root);
}
}
}
function onPropertyChanged(prop, value, oldValue, target, path, root) {
if (value !== oldValue && (!nan(value) || !nan(oldValue)) && root.__c_) {
var rootName = getRootName(prop, path);
for (var i = 0, len = root.__c_.length; i < len; i++) {
var handler = root.__c_[i];
if (handler.all || isInArray(handler.eventPropArr, rootName) || 0 === rootName.indexOf('Array-')) handler.propChanged.call(target, prop, value, oldValue, path);
}
}
if (0 !== prop.indexOf('Array-') && 'object' == typeof value) watch(target, prop, target.__o_.__p_, root);
}
function isFunction(obj) {
return '[object Function]' === Object.prototype.toString.call(obj);
}
function nan(value) {
return 'number' == typeof value && isNaN(value);
}
function isArray(obj) {
return '[object Array]' === Object.prototype.toString.call(obj);
}
function isString(obj) {
return 'string' == typeof obj;
}
function isInArray(arr, item) {
for (var i = arr.length; --i > -1; ) if (item === arr[i]) return !0;
return !1;
}
function getRootName(prop, path) {
if ('#' === path) return prop; else return path.split('-')[1];
}
function getPath(obj, out, name) {
var result = {};
obj.forEach(function(item) {
if ('string' == typeof item) result[item] = !0; else {
var tempPath = item[Object.keys(item)[0]];
if ('string' == typeof tempPath) result[tempPath] = !0; else if ('string' == typeof tempPath[0]) result[tempPath[0]] = !0; else tempPath[0].forEach(function(path) {
return result[path] = !0;
});
}
});
out && (out[name] = result);
return result;
}
function needUpdate(diffResult, updatePath) {
for (var keyA in diffResult) {
if (updatePath[keyA]) return !0;
for (var keyB in updatePath) if (includePath(keyA, keyB)) return !0;
}
return !1;
}
function includePath(pathA, pathB) {
if (0 === pathA.indexOf(pathB)) {
var next = pathA.substr(pathB.length, 1);
if ('[' === next || '.' === next) return !0;
}
return !1;
}
function fixPath(path) {
var mpPath = '';
var arr = path.replace('#-', '').split('-');
arr.forEach(function(item, index) {
if (index) if (isNaN(Number(item))) mpPath += '.' + item; else mpPath += '[' + item + ']'; else mpPath += item;
});
return mpPath;
}
function $(options) {
var beforeCreate = options.beforeCreate;
var destroyed = options.destroyed;
var use = options.use;
var useSelf = options.useSelf;
options.computed = options.computed || {};
if (options.store) reset(options.store);
options.beforeCreate = function() {
this.$store = store;
if (isMultiStore) {
if (use) {
var updatePath = {};
for (var storeName in use) {
getPath(use[storeName], updatePath, storeName);
store[storeName].components.push(this);
}
this.__$updatePath_ = updatePath;
}
if (useSelf) {
var updateSelfPath = {};
for (var _storeName in useSelf) {
getPath(useSelf[_storeName], updateSelfPath, _storeName);
store[_storeName].updateSelfComponents.push(this);
}
this.__$updateSelfPath_ = updateSelfPath;
}
} else {
if (use) {
this.__$updatePath_ = getPath(use);
store.components.push(this);
}
if (useSelf) {
this.__$updateSelfPath_ = getPath(useSelf);
store.updateSelfComponents.push(this);
}
}
beforeCreate && beforeCreate.apply(this, arguments);
};
options.destroyed = function() {
if (isMultiStore) {
for (var key in store) if ('replaceState' !== key) {
removeItem(this, store[key].components);
removeItem(this, store[key].updateSelfComponents);
}
} else {
removeItem(this, store.updateSelfComponents);
removeItem(this, store.components);
}
destroyed && destroyed.apply(this, arguments);
};
options.computed.state = function() {
if (isMultiStore) {
var state = {};
Object.keys(store).forEach(function(k) {
state[k] = store[k].data;
});
return state;
}
return store.data;
};
options.computed.store = function() {
return store;
};
return options;
}
function recUpdate(root) {
root.$forceUpdate();
root.$children.forEach(function(child) {
recUpdate(child);
});
}
function observe(store, storeName) {
store.components = [];
store.updateSelfComponents = [];
if ('undefined' != typeof window) obaa(store.data, function(prop, val, old, path) {
var patch = {};
patch[fixPath(path + '-' + prop)] = !0;
store.components.forEach(function(component) {
var p = component.__$updatePath_;
if (storeName) {
if (p && p[storeName] && needUpdate(patch, p[storeName])) recUpdate(component);
} else if (p && needUpdate(patch, p)) recUpdate(component);
});
store.updateSelfComponents.forEach(function(component) {
var sp = component.__$updateSelfPath_;
if (storeName) {
if (sp && sp[storeName] && needUpdate(patch, sp[storeName])) component.$forceUpdate();
} else if (sp && needUpdate(patch, sp)) component.$forceUpdate();
});
});
}
function removeItem(item, arr) {
for (var i = 0, len = arr.length; i < len; i++) if (arr[i] === item) {
arr.splice(i, 1);
break;
}
}
function render(app, renderTo, initStore, options) {
if (Vue) {
initStore = initStore || store;
reset(initStore);
return new Vue(Object.assign({
render: function(h) {
return h(app);
}
}, options, initStore ? {
store: initStore
} : {})).$mount(renderTo);
} else if ('production' !== process.env.NODE_ENV) console.error('[Omiv] has not been installed yet. Vue.use(Omiv) should be called first.');
}
function reset(s) {
if (s) {
store = s;
if (store.data) {
isMultiStore = !1;
observe(store);
} else {
isMultiStore = !0;
for (var key in store) if (store[key].data) observe(store[key], key);
}
} else store = void 0;
}
function install(_Vue) {
Vue = _Vue;
applyMixin(Vue);
}
function applyMixin(Vue) {
function omivInit() {
var _this = this;
var options = this.$options;
var use = options.use;
var useSelf = options.useSelf;
if (options.store) this.$store = 'function' == typeof options.store ? options.store() : options.store; else if (options.parent && options.parent.$store) this.$store = options.parent.$store;
if (this.$store && !this.$store.replaceState) this.$store.replaceState = function() {
var store = arguments.length > 0 && void 0 !== arguments[0] ? arguments[0] : {};
Object.keys(store).forEach(function(key) {
if (!key.startsWith('_')) _this.$store.data[key] = store[key];
});
};
if (this.$store && !store) reset(this.$store);
if (isMultiStore) {
if (use) {
var updatePath = {};
for (var storeName in use) {
getPath(use[storeName], updatePath, storeName);
this.$store[storeName].components.push(this);
}
this.__$updatePath_ = updatePath;
}
if (useSelf) {
var updateSelfPath = {};
for (var _storeName2 in useSelf) {
getPath(useSelf[_storeName2], updateSelfPath, _storeName2);
this.$store[_storeName2].updateSelfComponents.push(this);
}
this.__$updateSelfPath_ = updateSelfPath;
}
} else {
if (use) {
this.__$updatePath_ = getPath(use);
this.$store.components.push(this);
}
if (useSelf) {
this.__$updateSelfPath_ = getPath(useSelf);
this.$store.updateSelfComponents.push(this);
}
}
}
function omivDestroyed() {
if (isMultiStore) {
for (var key in this.$store) if ('replaceState' !== key) {
removeItem(this, this.$store[key].components);
removeItem(this, this.$store[key].updateSelfComponents);
}
} else {
removeItem(this, this.$store.updateSelfComponents);
removeItem(this, this.$store.components);
}
}
var omivComputed = {
$state: function() {
if (isMultiStore) {
var state = {};
Object.keys(this.$store).forEach(function(k) {
state[k] = store[k].data;
});
return state;
}
return this.$store.data;
}
};
Vue.mixin({
beforeCreate: omivInit,
computed: omivComputed,
destroyed: omivDestroyed
});
}
var triggerStr = [ 'concat', 'copyWithin', 'fill', 'pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift', 'size' ].join(',');
var methods = [ 'concat', 'copyWithin', 'entries', 'every', 'fill', 'filter', 'find', 'findIndex', 'forEach', 'includes', 'indexOf', 'join', 'keys', 'lastIndexOf', 'map', 'pop', 'push', 'reduce', 'reduceRight', 'reverse', 'shift', 'slice', 'some', 'sort', 'splice', 'toLocaleString', 'toString', 'unshift', 'values', 'size' ];
obaa.add = function(obj, prop) {
watch(obj, prop, obj.__o_.__p_, obj.__o_.__r_);
};
obaa.set = function(obj, prop, value) {
watch(obj, prop, obj.__o_.__p_, obj.__o_.__r_);
obj[prop] = value;
};
Array.prototype.size = function(length) {
this.length = length;
};
// Vue constructor captured by install().
var Vue;
// The store (or map of named stores) passed to render()/install().
var store;
// !1 === false; flipped to true when `store` is a plain map of named stores.
var isMultiStore = !1;
// Public API surface of the library.
var Omiv = {
  $: $,
  render: render,
  reset: reset,
  install: install
};
// CommonJS export when a module system is present, otherwise attach to the
// global scope (`self` covers both window and worker contexts).
if ('undefined' != typeof module) module.exports = Omiv; else self.Omiv = Omiv;
}();
//# sourceMappingURL=omiv.js.map |
public static String reverse(String str)
{
// Converting string to character array
char []arr = str.toCharArray();
// Find the size of array
int n = arr.length;
// Swap the characters of the string
for (int i = 0; i < n / 2; i++)
{
char temp = arr[i];
arr[i] = arr[n - i - 1];
arr[n - i - 1] = temp;
}
// Return the reversed string
return String.valueOf(arr);
} |
-- Relational division: students who scored above 80 on every test.
-- NOTE(review): assumes `students` holds one row per student per test taken
-- (student_name, score) — confirm against the schema.
SELECT student_name
FROM students
WHERE score > 80
GROUP BY student_name
-- A student qualifies only when their count of >80 rows equals the total
-- number of tests, i.e. they beat 80 on all of them.
HAVING COUNT(*) = (SELECT COUNT(*) FROM tests)
#!/bin/bash
#
# Installs the Python modules for the KenLM language model implementation.
#
# Run with sudo to install system-wide; otherwise installs for the current
# user only.
#
# Dependencies:
#   g++
#   pip (for system-wide install)
# uid 0 means we are running as root (e.g. via sudo).
if [ $(id -u) -eq 0 ] ; then
    # Root: install straight from the GitHub archive, system-wide.
    echo Installing KenLM system-wide...
    pip install https://github.com/kpu/kenlm/archive/master.zip
else
    # Non-root: build from the upstream tarball and install per-user.
    echo Installing KenLM for user: $USER
    wget http://kheafield.com/code/kenlm.tar.gz
    tar -xzf kenlm.tar.gz
    cd kenlm
    # Both interpreter paths are tried; on most systems only one exists and
    # the other invocation fails. Presumably intentional best-effort to cover
    # Homebrew vs. system Python — TODO confirm.
    /usr/local/bin/python setup.py install --user
    /usr/bin/python setup.py install --user
    cd ../
    # rm -rf kenlm/ kenlm.tar.gz
fi
exit 0
<filename>core/environment.rb
# -*- coding: utf-8 -*-
#
# Environment
#
# Immutable settings:
# values fixed by the core,
# and the CHI configuration.
miquire :core, 'config'
# Read-only runtime environment: mirrors the CHIConfig values so the rest of
# the application never touches CHIConfig directly.
module Environment
  # The application's name.
  NAME = CHIConfig::NAME
  # Abbreviated name.
  ACRO = CHIConfig::ACRO
  # Twitter OAuth consumer credentials (original note: "the next two lines
  # look foolish").
  TWITTER_CONSUMER_KEY = CHIConfig::TWITTER_CONSUMER_KEY
  TWITTER_CONSUMER_SECRET = CHIConfig::TWITTER_CONSUMER_SECRET
  TWITTER_AUTHENTICATE_REVISION = CHIConfig::TWITTER_AUTHENTICATE_REVISION
  # pid file
  PIDFILE = CHIConfig::PIDFILE
  # Configuration file directory.
  CONFROOT = CHIConfig::CONFROOT
  # Temporary directory.
  TMPDIR = CHIConfig::TMPDIR
  # Log directory.
  LOGDIR = CHIConfig::LOGDIR
  # Settings directory.
  SETTINGDIR = CHIConfig::SETTINGDIR
  # Cache directory.
  CACHE = CHIConfig::CACHE
  # Plugin directory.
  PLUGIN_PATH = CHIConfig::PLUGIN_PATH
  # Whether AutoTag is enabled.
  AutoTag = CHIConfig::AutoTag
  # After a restart, do not re-fetch posts already retrieved last session.
  NeverRetrieveOverlappedMumble = CHIConfig::NeverRetrieveOverlappedMumble
# Comparable semantic-version value object (major.minor.debug.devel).
# NOTE: the major attribute is historically spelled "mejor"; it is part of
# the public interface and is kept for backward compatibility.
class Version
  include Comparable
  attr_reader :mejor, :minor, :debug, :devel

  # devel defaults to 0; devel == 9999 marks a release build (devel is then
  # omitted from to_s).
  def initialize(mejor, minor, debug, devel=0)
    @mejor = mejor
    @minor = minor
    @debug = debug
    @devel = devel
  end

  # The four components as an array; also drives <=> below.
  def to_a
    [@mejor, @minor, @debug, @devel]
  end

  # "major.minor.debug" for release builds (devel == 9999),
  # otherwise "major.minor.debug.devel".
  def to_s
    if 9999 == @devel
      [@mejor, @minor, @debug].join('.')
    else
      [@mejor, @minor, @debug, @devel].join('.')
    end
  end

  # Major component only.
  def to_i
    @mejor
  end

  # major.minor as a Float, e.g. Version(1, 50, 0) => 1.5.
  # BUGFIX: `@minor/100` was Integer division, which always truncated the
  # fraction to 0 and returned a bare Integer; divide by 100.0 instead.
  def to_f
    @mejor + @minor / 100.0
  end

  def inspect
    "#{Environment::NAME} ver.#{self.to_s}"
  end

  # Number of version components (always 4).
  def size
    to_a.size
  end

  # Lexicographic comparison over [mejor, minor, debug, devel].
  def <=>(other)
    self.to_a <=> other.to_a
  end
end
# This software's version, expanded from CHIConfig::VERSION's component array.
VERSION = Version.new(*CHIConfig::VERSION)
end
|
#!/bin/bash
# Overwrite the Surround SCM client's file context-menu layout with a fixed
# item ordering (the semicolon-separated IDs; 0 entries are separators —
# presumably, TODO confirm against the client's docs).
# NOTE(review): the home directory and user "seapine" are hard-coded.
sed -i 's/^ContextMenuDefaultItems_File:.*/ContextMenuDefaultItems_File:3;5;4;26;6;12;0;10;25;11;22;28;29;31;0;13;14;15;16;24;21;0;17;0;18;19;0;7;9;8;/' /home/seapine/.seapine/spscmclient.conf
#import <Foundation/Foundation.h>
#import <UIKit/UIKitDefines.h>
#import <UIKit/UIApplication.h>
@class UIView;
@class UIBarButtonItem, UITabBarItem;
@class UISearchDisplayController;
// Modal presentation transition animations available at this SDK level.
typedef enum {
    UIModalTransitionStyleCoverVertical = 0,
    UIModalTransitionStyleFlipHorizontal,
    UIModalTransitionStyleCrossDissolve
} UIModalTransitionStyle;

// Declaration of UIKit's base controller: owns a view hierarchy and receives
// its lifecycle callbacks. (Header only — no implementation in this file.)
UIKIT_EXTERN_CLASS @interface UIViewController : UIResponder <NSCoding> {
    struct {
    } _uiviewControllerFlags;   // private flag bitfield; contents not visible here
}

// Designated initializer; both arguments may be nil.
- (id)initWithNibName:(NSString *)nibNameOrNil bundle:(NSBundle *)nibBundleOrNil;

// View loading / unloading lifecycle.
- (void)loadView;
- (void)viewDidLoad;
- (void)viewDidUnload;
- (BOOL)isViewLoaded;

// Appearance transition callbacks.
- (void)viewWillAppear:(BOOL)animated;
- (void)viewDidAppear:(BOOL)animated;
- (void)viewWillDisappear:(BOOL)animated;
- (void)viewDidDisappear:(BOOL)animated;

// Sent when the system is low on memory.
- (void)didReceiveMemoryWarning;

// Modal presentation / dismissal.
- (void)presentModalViewController:(UIViewController *)modalViewController animated:(BOOL)animated;
- (void)dismissModalViewControllerAnimated:(BOOL)animated;

#ifdef OBJC2
@property(nonatomic,retain) UIView *view;
@property(nonatomic, readonly, copy) NSString *nibName;
@property(nonatomic, readonly, retain) NSBundle *nibBundle;
@property(nonatomic,copy) NSString *title;
@property(nonatomic,readonly) UIViewController *modalViewController;
@property(nonatomic,assign) UIModalTransitionStyle modalTransitionStyle ;
@property(nonatomic,assign) BOOL wantsFullScreenLayout;
@property(nonatomic,readonly) UIViewController *parentViewController;
#else /* OBJC2 */
// TODO
#endif /* OBJC2 */
@end

// Interface-orientation / rotation callbacks.
@interface UIViewController (UIViewControllerRotation)
- (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation;
- (UIView *)rotatingHeaderView;
- (UIView *)rotatingFooterView;
- (void)willRotateToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration;
- (void)didRotateFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation;
- (void)willAnimateRotationToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration;
// Two-step rotation variants (first/second half of the animation).
- (void)willAnimateFirstHalfOfRotationToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation duration:(NSTimeInterval)duration;
- (void)didAnimateFirstHalfOfRotationToInterfaceOrientation:(UIInterfaceOrientation)toInterfaceOrientation;
- (void)willAnimateSecondHalfOfRotationFromInterfaceOrientation:(UIInterfaceOrientation)fromInterfaceOrientation duration:(NSTimeInterval)duration;
#ifdef OBJC2
@property(nonatomic,readonly) UIInterfaceOrientation interfaceOrientation;
#else /* OBJC2 */
// TODO
#endif /* OBJC2 */
@end

// Edit-mode support (e.g. table-view editing).
@interface UIViewController (UIViewControllerEditing)
- (void)setEditing:(BOOL)editing animated:(BOOL)animated;
- (UIBarButtonItem *)editButtonItem;
#ifdef OBJC2
@property(nonatomic,getter=isEditing) BOOL editing;
#else /* OBJC2 */
// TODO
#endif /* OBJC2 */
@end

// Search display controller integration.
@interface UIViewController (UISearchDisplayControllerSupport)
#ifdef OBJC2
@property(nonatomic, readonly, retain) UISearchDisplayController *searchDisplayController;
#else /* OBJC2 */
// TODO
#endif /* OBJC2 */
@end
|
#!/bin/sh
###
#
# Name: Reset Individual Spotlight Index Entry.sh
# Description: Resets Spotlight index entry for target path.
# Created: 2017-06-29
# Last Modified: 2020-09-09
# Version: 1.2.2
#
#
# Copyright 2017 Palantir Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
###
########## variable-ing ##########
# Jamf Pro script parameter: "Target Path"
# Use full path to target file in the variable.
resetPath="$4"
# Per-volume Spotlight configuration (Exclusions live here).
spotlightPlist="/.Spotlight-V100/VolumeConfiguration.plist"
# e.g. "10" and "14" from "10.14.6".
macOSVersionMajor=$(/usr/bin/sw_vers -productVersion | /usr/bin/awk -F . '{print $1}')
macOSVersionMinor=$(/usr/bin/sw_vers -productVersion | /usr/bin/awk -F . '{print $2}')
########## function-ing ##########
# Exits with error if any required Jamf Pro arguments are undefined.
check_jamf_pro_arguments () {
  if [ -z "$resetPath" ]; then
    echo "❌ ERROR: Undefined Jamf Pro argument, unable to proceed."
    exit 74
  fi
}
# Exits with error if running an unsupported version of macOS.
# Only macOS 10.0–10.14 passes: any major > 10 (Big Sur+) or minor > 14
# (Catalina) aborts here.
check_macos_version () {
  if [ "$macOSVersionMajor" -gt 10 ] || [ "$macOSVersionMinor" -gt 14 ]; then
    /bin/echo "❌ ERROR: macOS version ($(/usr/bin/sw_vers -productVersion)) unrecognized or incompatible, unable to proceed."
    exit 1
  fi
}
# Restarts the Spotlight service so it re-reads the exclusion list.
metadata_reset () {
  /bin/launchctl stop com.apple.metadata.mds
  /bin/launchctl start com.apple.metadata.mds
}
########## main process ##########
# Verify script prerequisites.
check_jamf_pro_arguments
check_macos_version
# Verify $resetPath exists on the system.
if [ ! -e "$resetPath" ]; then
  echo "Target path $resetPath does not exist, unable to proceed. Please check Target Path parameter in Jamf Pro policy."
  exit 74
fi
# Add target path to Spotlight exclusions; restarting the service then drops
# its index entry.
/usr/bin/defaults write "$spotlightPlist" Exclusions -array-add "$resetPath"
metadata_reset
echo "Added $resetPath to Spotlight exclusions."
# Remove target path from Spotlight exclusions so it gets re-indexed.
# NOTE(review): `defaults delete` removes the ENTIRE Exclusions array, which
# also wipes any pre-existing exclusions on the volume — confirm acceptable.
/usr/bin/defaults delete "$spotlightPlist" Exclusions
metadata_reset
echo "Removed $resetPath from Spotlight exclusions. Target path should appear in Spotlight search results shortly."
exit 0
|
#!/bin/sh
# Install the pinned Redis client, then run the worker with unbuffered output
# (-u) so its logs appear immediately (e.g. in container logs).
# BUGFIX: the shebang was "#!bin/sh" — a relative path that only resolves when
# the script is launched from the filesystem root; it must be absolute.
pip install redis==2.10.5
python -u worker.py
let arr = [1, 2, 2, 3, 3, 4, 4, 4];

// Return the most frequent element of `values`. On a tie, the element whose
// count reached the maximum first wins; an empty input yields 0.
const mode = values => {
  const counts = new Map();
  let winner = 0;
  let highest = 0;
  for (const value of values) {
    const tally = (counts.get(value) || 0) + 1;
    counts.set(value, tally);
    if (tally > highest) {
      highest = tally;
      winner = value;
    }
  }
  return winner;
};

mode(arr); // 4
<reponame>Jarunik/sm-team-finder
package org.slos.battle;
import org.slos.battle.monster.BattleAttributeType;
import org.slos.battle.monster.MonsterBattleStats;
/**
 * Immutable value object describing one stat change in battle: which monster
 * caused it, which monster receives it, which attribute is affected, and by
 * how much.
 */
public class StatChangeContext {
    // Set once in the constructor; `final` documents and enforces that a
    // context never mutates after creation.
    private final MonsterBattleStats initiator;
    private final MonsterBattleStats target;
    private final BattleAttributeType battleAttributeType;
    private final Integer amount;

    public StatChangeContext(MonsterBattleStats initiator, MonsterBattleStats target, BattleAttributeType battleAttributeType, Integer amount) {
        this.initiator = initiator;
        this.target = target;
        this.battleAttributeType = battleAttributeType;
        this.amount = amount;
    }

    /** The monster that caused the stat change. */
    public MonsterBattleStats getInitiator() {
        return initiator;
    }

    /** The monster whose stat is being changed. */
    public MonsterBattleStats getTarget() {
        return target;
    }

    /** The magnitude of the change (sign convention not visible here). */
    public Integer getAmount() {
        return amount;
    }

    /** The attribute being changed. */
    public BattleAttributeType getBattleAttributeType() {
        return battleAttributeType;
    }

    @Override
    public String toString() {
        return "StatChangeContext{" +
            "initiator=" + initiator +
            ", target=" + target +
            ", battleAttributeType=" + battleAttributeType +
            ", amount=" + amount +
            '}';
    }
}
|
<filename>src/components/source-code/js/index.js
/*
*************************************
* <!-- Source Code View -->
*************************************
*/
import {
templateUrl,
homeUrl,
ajaxUrl,
browser,
UixModuleInstance,
UixGUID,
UixMath,
UixCssProperty,
UixDebounce,
UixThrottle
} from '@uixkit/core/_global/js';
import '@uixkit/plugins/Miscellaneous/scrollLock';
import '../scss/_style.scss';
export const SOURCE_CODE_VIEW = ( ( module, $, window, document ) => {
    if ( window.SOURCE_CODE_VIEW === null ) return false;

    module.SOURCE_CODE_VIEW = module.SOURCE_CODE_VIEW || {};
    module.SOURCE_CODE_VIEW.version = '0.0.2';

    // Builds the "view page source" feature: a floating toggle button plus an
    // overlay that shows the current page's highlighted HTML source.
    module.SOURCE_CODE_VIEW.documentReady = function( $ ) {

        //Add view source code to body
        $( 'body' ).prepend( '<a href="#uix-source-code" id="uix-view-source"><i class="fa fa-code" aria-hidden="true"></i></a><div id="uix-source-code"><a href="javascript:void(0);" id="uix-source-code__close"></a></div>' );

        //View source button event
        $( '#uix-view-source' ).on( 'click', function() {
            // Locks the page
            $.scrollLock( true );

            //Add class for body
            //When scrollLock is used, scrollTop value will change
            $( 'body' ).addClass( 'scrollLock' );

            $( '#uix-source-code' ).show();
        });

        //Close button event
        $( '#uix-source-code > #uix-source-code__close' ).on( 'click', function() {
            // Unlocks the page
            $.scrollLock( false );

            //Remove class for body
            //When scrollLock is used, scrollTop value will change
            $( 'body' ).removeClass( 'scrollLock' );

            // Strip the "#uix-source-code" fragment from the address bar.
            const uri = window.location.toString();
            if ( uri.indexOf( '#' ) > 0 ) {
                const clean_uri = uri.substring(0, uri.indexOf( '#' ) );
                window.history.replaceState({}, document.title, clean_uri );
            }

            $( '#uix-source-code' ).hide();
        });

        // Remove every element matching `selector` from an HTML string.
        const removeElements = function( text, selector ) {
            const wrapped = $( "<div>" + text + "</div>" );
            wrapped.find( selector ).remove();
            return wrapped.html();
        };

        //Source code init
        const sourceCodeBodyClass     = $( 'body' ).attr( 'class' ),
              sourceCodeBodyClassCode = ( typeof sourceCodeBodyClass != typeof undefined ) ? 'body class="'+sourceCodeBodyClass+'"' : 'body';

        // Re-fetch the current page and render its source into the overlay.
        $.get( window.location.toString(), function( data ) {
            let pageBodyCode   = data.split("<body")[1].split(">").slice(1).join(">").split("</body>")[0],
                pageHeaderCode = data.split("</head>")[0];

            // Drop the injected toggle/overlay markup from the captured source.
            pageBodyCode = removeElements( pageBodyCode, '#uix-view-source, #uix-source-code' );

            // BUGFIX: escape angle brackets so the markup is *displayed*
            // inside the <pre> instead of being parsed by the browser. The
            // previous map ('<' -> '<', '>' -> '>') was an identity transform
            // and escaped nothing.
            pageBodyCode = pageBodyCode.replace(/[<>]/g, function(m) { return {'<':'&lt;','>':'&gt;'}[m]; });
            pageHeaderCode = pageHeaderCode.replace(/[<>]/g, function(m) { return {'<':'&lt;','>':'&gt;'}[m]; });

            // Reassemble the escaped document (the connective tags are
            // entity-escaped for the same reason as above).
            $("<pre />", {
                "html": pageHeaderCode + '&lt;/head&gt;\n&lt;'+sourceCodeBodyClassCode+'&gt;\n' + pageBodyCode + '\n&lt;/body&gt;\n&lt;/html&gt;',
                "class": 'highlightBlock-print html'
            }).appendTo( '#uix-source-code' );

            // Syntax-highlight the rendered source (hljs is a global here).
            $( 'pre.highlightBlock-print' ).each( function( i, block ) {
                hljs.highlightBlock( block );
            });
        });

        //highlighter written
    };

    module.components.documentReady.push( module.SOURCE_CODE_VIEW.documentReady );

    return class SOURCE_CODE_VIEW {
        constructor() {
            this.module = module;
        }
    };

})( UixModuleInstance, jQuery, window, document );
|
const Command = require('../../structures/Command');
//floor is made of floor that is made of neon what kekwhat :TP_kekgobrr:
module.exports = class extends Command {
constructor(...args) {
super(...args, {
name: 'google',
description: 'Google something',
category: 'Misc',
usage: ["<thing to google>"],
argsLength: 1,
});
}
async run(message, args) {
const url = `https://www.google.com/search?q=${args.map(a => a).join('+')}`
if (url.length > 2000){
return message.channel.send("Too many characters to send.")
}
message.channel.send(url)
}
};
|
import { configureStore } from '../../store/index';
import WvStore from '../../models/WvStore';
// Compiled Stencil component: renders grouped feed records from a local store.
export class WvPostList {
    constructor() {
        // Attribute-backed props arrive as strings (HTML attributes).
        this.records = "{}";
        this.isDebug = "false";
        this.reloadPostIndex = 1;
    }

    // Seed the store from the string props before first render.
    componentWillLoad() {
        var initStore = new WvStore();
        // NOTE(review): JSON.parse throws on a malformed `records` attribute —
        // confirm upstream always supplies valid JSON.
        initStore.records = JSON.parse(this.records);
        initStore.reloadPostIndex = 1;
        if (this.isDebug.toLowerCase() === "true") {
            initStore.isDebug = true;
        }
        else {
            initStore.isDebug = false;
        }
        this.store.setStore(configureStore(initStore));
        // Re-render whenever reloadPostIndex changes in the store.
        this.store.mapStateToProps(this, (state) => {
            return {
                reloadPostIndex: state.reloadPostIndex
            };
        });
    }

    render() {
        let scope = this;
        let storeState = scope.store.getState();
        // records is an object keyed by group name, each value a record array.
        let recordsList = storeState.records;
        return (h("div", { class: "pc-post-list" },
            Object.keys(recordsList).map(function (group) {
                return (h("div", null,
                    h("div", { class: "group" }, group),
                    recordsList[group].map(function (record) {
                        return (h("wv-feed", { key: record["id"], record: record }));
                    })));
            }),
            // BUGFIX: recordsList is a plain object, so `recordsList.length`
            // was always undefined and the empty-state alert could never be
            // shown; count the keys instead.
            h("div", { class: "alert alert-info " + (Object.keys(recordsList).length === 0 ? "" : "d-none") }, "No feeds found")));
    }

    static get is() { return "wv-feed-list"; }
    static get properties() { return {
        "isDebug": {
            "type": String,
            "attr": "is-debug"
        },
        "records": {
            "type": String,
            "attr": "records"
        },
        "reloadPostIndex": {
            "state": true
        },
        "store": {
            "context": "store"
        }
    }; }
}
|
#!/usr/bin/env bash
# ~/.macos — https://mths.be/macos
# Close any open System Preferences panes, to prevent them from overriding
# settings we’re about to change
osascript -e 'tell application "System Preferences" to quit'
# Ask for the administrator password upfront
sudo -v
# Keep-alive: update existing `sudo` time stamp until `.macos` has finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
###############################################################################
# General UI/UX #
###############################################################################
# Set computer name (as done via System Preferences → Sharing)
#sudo scutil --set ComputerName "0x6D746873"
#sudo scutil --set HostName "0x6D746873"
#sudo scutil --set LocalHostName "0x6D746873"
#sudo defaults write /Library/Preferences/SystemConfiguration/com.apple.smb.server NetBIOSName -string "0x6D746873"
# Disable the sound effects on boot
sudo nvram SystemAudioVolume=" "
# Disable transparency in the menu bar and elsewhere on Yosemite
defaults write com.apple.universalaccess reduceTransparency -bool true
# Set highlight color to green
defaults write NSGlobalDomain AppleHighlightColor -string "0.764700 0.976500 0.568600"
# Set sidebar icon size to medium
defaults write NSGlobalDomain NSTableViewDefaultSizeMode -int 2
# Always show scrollbars
defaults write NSGlobalDomain AppleShowScrollBars -string "Always"
# Possible values: `WhenScrolling`, `Automatic` and `Always`
# Disable the over-the-top focus ring animation
defaults write NSGlobalDomain NSUseAnimatedFocusRing -bool false
# Disable smooth scrolling
# (Uncomment if you’re on an older Mac that messes up the animation)
#defaults write NSGlobalDomain NSScrollAnimationEnabled -bool false
# Increase window resize speed for Cocoa applications
defaults write NSGlobalDomain NSWindowResizeTime -float 0.001
# Expand save panel by default
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode -bool true
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode2 -bool true
# Expand print panel by default
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint -bool true
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint2 -bool true
# Save to disk (not to iCloud) by default
defaults write NSGlobalDomain NSDocumentSaveNewDocumentsToCloud -bool false
# Automatically quit printer app once the print jobs complete
defaults write com.apple.print.PrintingPrefs "Quit When Finished" -bool true
# Disable the “Are you sure you want to open this application?” dialog
defaults write com.apple.LaunchServices LSQuarantine -bool false
# Remove duplicates in the “Open With” menu (also see `lscleanup` alias)
/System/Library/Frameworks/CoreServices.framework/Frameworks/LaunchServices.framework/Support/lsregister -kill -r -domain local -domain system -domain user
# Display ASCII control characters using caret notation in standard text views
# Try e.g. `cd /tmp; unidecode "\x{0000}" > cc.txt; open -e cc.txt`
defaults write NSGlobalDomain NSTextShowsControlCharacters -bool true
# Disable Resume system-wide
defaults write com.apple.systempreferences NSQuitAlwaysKeepsWindows -bool false
# Disable automatic termination of inactive apps
defaults write NSGlobalDomain NSDisableAutomaticTermination -bool true
# Disable the crash reporter
#defaults write com.apple.CrashReporter DialogType -string "none"
# Set Help Viewer windows to non-floating mode
defaults write com.apple.helpviewer DevMode -bool true
# Fix for the ancient UTF-8 bug in QuickLook (https://mths.be/bbo)
# Commented out, as this is known to cause problems in various Adobe apps :(
# See https://github.com/mathiasbynens/dotfiles/issues/237
#echo "0x08000100:0" > ~/.CFUserTextEncoding
# Reveal IP address, hostname, OS version, etc. when clicking the clock
# in the login window
sudo defaults write /Library/Preferences/com.apple.loginwindow AdminHostInfo HostName
# Disable Notification Center and remove the menu bar icon
launchctl unload -w /System/Library/LaunchAgents/com.apple.notificationcenterui.plist 2> /dev/null
# Disable automatic capitalization as it’s annoying when typing code
defaults write NSGlobalDomain NSAutomaticCapitalizationEnabled -bool false
# Disable smart dashes as they’re annoying when typing code
defaults write NSGlobalDomain NSAutomaticDashSubstitutionEnabled -bool false
# Disable automatic period substitution as it’s annoying when typing code
defaults write NSGlobalDomain NSAutomaticPeriodSubstitutionEnabled -bool false
# Disable smart quotes as they’re annoying when typing code
defaults write NSGlobalDomain NSAutomaticQuoteSubstitutionEnabled -bool false
# Disable auto-correct
defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool false
# Set a custom wallpaper image. `DefaultDesktop.jpg` is already a symlink, and
# all wallpapers are in `/Library/Desktop Pictures/`. The default is `Wave.jpg`.
#rm -rf ~/Library/Application Support/Dock/desktoppicture.db
#sudo rm -rf /System/Library/CoreServices/DefaultDesktop.jpg
#sudo ln -s /path/to/your/image /System/Library/CoreServices/DefaultDesktop.jpg
###############################################################################
# Trackpad, mouse, keyboard, Bluetooth accessories, and input #
###############################################################################
# Trackpad: enable tap to click for this user and for the login screen
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad Clicking -bool true
defaults -currentHost write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
defaults write NSGlobalDomain com.apple.mouse.tapBehavior -int 1
# Trackpad: map bottom right corner to right-click
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad TrackpadCornerSecondaryClick -int 2
defaults write com.apple.driver.AppleBluetoothMultitouch.trackpad TrackpadRightClick -bool true
defaults -currentHost write NSGlobalDomain com.apple.trackpad.trackpadCornerClickBehavior -int 1
defaults -currentHost write NSGlobalDomain com.apple.trackpad.enableSecondaryClick -bool true
# Disable “natural” (Lion-style) scrolling
defaults write NSGlobalDomain com.apple.swipescrolldirection -bool false
# Increase sound quality for Bluetooth headphones/headsets
defaults write com.apple.BluetoothAudioAgent "Apple Bitpool Min (editable)" -int 40
# Enable full keyboard access for all controls
# (e.g. enable Tab in modal dialogs)
defaults write NSGlobalDomain AppleKeyboardUIMode -int 3
# Use scroll gesture with the Ctrl (^) modifier key to zoom
defaults write com.apple.universalaccess closeViewScrollWheelToggle -bool true
defaults write com.apple.universalaccess HIDScrollZoomModifierMask -int 262144
# Follow the keyboard focus while zoomed in
defaults write com.apple.universalaccess closeViewZoomFollowsFocus -bool true
# Disable press-and-hold for keys in favor of key repeat
defaults write NSGlobalDomain ApplePressAndHoldEnabled -bool false
# Set a blazingly fast keyboard repeat rate
defaults write NSGlobalDomain KeyRepeat -int 1
defaults write NSGlobalDomain InitialKeyRepeat -int 10
# Set language and text formats
# Note: if you’re in the US, replace `EUR` with `USD`, `Centimeters` with
# `Inches`, `en_GB` with `en_US`, and `true` with `false`.
defaults write NSGlobalDomain AppleLanguages -array "en" "nl"
defaults write NSGlobalDomain AppleLocale -string "en_GB@currency=EUR"
defaults write NSGlobalDomain AppleMeasurementUnits -string "Centimeters"
defaults write NSGlobalDomain AppleMetricUnits -bool true
# Hide language menu in the top right corner of the boot screen
sudo defaults write /Library/Preferences/com.apple.loginwindow showInputMenu -bool false
# Set the timezone; see `sudo systemsetup -listtimezones` for other values
sudo systemsetup -settimezone "Europe/London" > /dev/null
# Stop iTunes from responding to the keyboard media keys
#launchctl unload -w /System/Library/LaunchAgents/com.apple.rcd.plist 2> /dev/null
###############################################################################
# Energy saving #
###############################################################################
# Enable lid wakeup
sudo pmset -a lidwake 1
# Restart automatically on power loss
sudo pmset -a autorestart 1
# Restart automatically if the computer freezes
sudo systemsetup -setrestartfreeze on
# Sleep the display after 15 minutes
sudo pmset -a displaysleep 15
# Disable machine sleep while charging
sudo pmset -c sleep 0
# Set machine sleep to 5 minutes on battery
sudo pmset -b sleep 5
# Set standby delay to 24 hours (default is 1 hour)
sudo pmset -a standbydelay 86400
# Never go into computer sleep mode
sudo systemsetup -setcomputersleep Off > /dev/null
# Hibernation mode
# 0: Disable hibernation (speeds up entering sleep mode)
# 3: Copy RAM to disk so the system state can still be restored in case of a
# power failure.
sudo pmset -a hibernatemode 0
# Remove the sleep image file to save disk space
sudo rm /private/var/vm/sleepimage
# Create a zero-byte file instead…
sudo touch /private/var/vm/sleepimage
# …and make sure it can’t be rewritten
sudo chflags uchg /private/var/vm/sleepimage
###############################################################################
# Screen #
###############################################################################
# Require password immediately after sleep or screen saver begins
defaults write com.apple.screensaver askForPassword -int 1
defaults write com.apple.screensaver askForPasswordDelay -int 0
# Save screenshots to the specified directory
defaults write com.apple.screencapture location -string "${HOME}/Documents/Screenshots"
# Save screenshots in PNG format (other options: BMP, GIF, JPG, PDF, TIFF)
defaults write com.apple.screencapture type -string "png"
# Disable shadow in screenshots
defaults write com.apple.screencapture disable-shadow -bool true
# Enable subpixel font rendering on non-Apple LCDs
# Reference: https://github.com/kevinSuttle/macOS-Defaults/issues/17#issuecomment-266633501
defaults write NSGlobalDomain AppleFontSmoothing -int 1
# Enable HiDPI display modes (requires restart)
sudo defaults write /Library/Preferences/com.apple.windowserver DisplayResolutionEnabled -bool true
###############################################################################
# Finder #
###############################################################################
# Finder: allow quitting via ⌘ + Q; doing so will also hide desktop icons
defaults write com.apple.finder QuitMenuItem -bool true
# Finder: disable window animations and Get Info animations
defaults write com.apple.finder DisableAllAnimations -bool true
# Set Desktop as the default location for new Finder windows
# For other paths, use `PfLo` and `file:///full/path/here/`
defaults write com.apple.finder NewWindowTarget -string "PfDe"
defaults write com.apple.finder NewWindowTargetPath -string "file://${HOME}/Desktop/"
# Show icons for hard drives, servers, and removable media on the desktop
defaults write com.apple.finder ShowExternalHardDrivesOnDesktop -bool true
defaults write com.apple.finder ShowHardDrivesOnDesktop -bool true
defaults write com.apple.finder ShowMountedServersOnDesktop -bool true
defaults write com.apple.finder ShowRemovableMediaOnDesktop -bool true
# Finder: show hidden files by default
#defaults write com.apple.finder AppleShowAllFiles -bool true
# Finder: show all filename extensions
defaults write NSGlobalDomain AppleShowAllExtensions -bool true
# Finder: show status bar
defaults write com.apple.finder ShowStatusBar -bool true
# Finder: show path bar
defaults write com.apple.finder ShowPathbar -bool true
# Display full POSIX path as Finder window title
defaults write com.apple.finder _FXShowPosixPathInTitle -bool true
# Keep folders on top when sorting by name
defaults write com.apple.finder _FXSortFoldersFirst -bool true
# When performing a search, search the current folder by default
defaults write com.apple.finder FXDefaultSearchScope -string "SCcf"
# Disable the warning when changing a file extension
defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false
# Enable spring loading for directories
defaults write NSGlobalDomain com.apple.springing.enabled -bool true
# Remove the spring loading delay for directories
defaults write NSGlobalDomain com.apple.springing.delay -float 0
# Avoid creating .DS_Store files on network or USB volumes
defaults write com.apple.desktopservices DSDontWriteNetworkStores -bool true
defaults write com.apple.desktopservices DSDontWriteUSBStores -bool true
# Disable disk image verification
defaults write com.apple.frameworks.diskimages skip-verify -bool true
defaults write com.apple.frameworks.diskimages skip-verify-locked -bool true
defaults write com.apple.frameworks.diskimages skip-verify-remote -bool true
# Automatically open a new Finder window when a volume is mounted
defaults write com.apple.frameworks.diskimages auto-open-ro-root -bool true
defaults write com.apple.frameworks.diskimages auto-open-rw-root -bool true
defaults write com.apple.finder OpenWindowForNewRemovableDisk -bool true
# Show item info near icons on the desktop and in other icon views
#/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
#/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
#/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:showItemInfo true" ~/Library/Preferences/com.apple.finder.plist
# Show item info to the right of the icons on the desktop
#/usr/libexec/PlistBuddy -c "Set DesktopViewSettings:IconViewSettings:labelOnBottom false" ~/Library/Preferences/com.apple.finder.plist
# Enable snap-to-grid for icons on the desktop and in other icon views
#/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist
#/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist
#/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:arrangeBy grid" ~/Library/Preferences/com.apple.finder.plist
# Increase grid spacing for icons on the desktop and in other icon views
#/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:gridSpacing 100" ~/Library/Preferences/com.apple.finder.plist
#/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:gridSpacing 100" ~/Library/Preferences/com.apple.finder.plist
#/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:gridSpacing 100" ~/Library/Preferences/com.apple.finder.plist
# Increase the size of icons on the desktop and in other icon views
#/usr/libexec/PlistBuddy -c "Set :DesktopViewSettings:IconViewSettings:iconSize 80" ~/Library/Preferences/com.apple.finder.plist
# Icon-size tweaks kept disabled; uncomment to force 80px icons in Finder views.
#/usr/libexec/PlistBuddy -c "Set :FK_StandardViewSettings:IconViewSettings:iconSize 80" ~/Library/Preferences/com.apple.finder.plist
#/usr/libexec/PlistBuddy -c "Set :StandardViewSettings:IconViewSettings:iconSize 80" ~/Library/Preferences/com.apple.finder.plist
# Use list view in all Finder windows by default
# Four-letter codes for the other view modes: `icnv`, `clmv`, `glyv`
defaults write com.apple.finder FXPreferredViewStyle -string "Nlsv"
# Disable the warning before emptying the Trash
#defaults write com.apple.finder WarnOnEmptyTrash -bool false
# Enable AirDrop over Ethernet and on unsupported Macs running Lion
defaults write com.apple.NetworkBrowser BrowseAllInterfaces -bool true
# Show the ~/Library folder
chflags nohidden ~/Library
# Show the /Volumes folder
sudo chflags nohidden /Volumes
# Expand the following File Info panes:
# “General”, “Open with”, and “Sharing & Permissions”
defaults write com.apple.finder FXInfoPanesExpanded -dict \
	General -bool true \
	OpenWith -bool true \
	Privileges -bool true
###############################################################################
# Dock, Dashboard, and hot corners                                            #
###############################################################################
# Enable highlight hover effect for the grid view of a stack (Dock)
defaults write com.apple.dock mouse-over-hilite-stack -bool true
# Set the icon size of Dock items to 36 pixels
defaults write com.apple.dock tilesize -int 36
# Change minimize/maximize window effect
defaults write com.apple.dock mineffect -string "scale"
# Minimize windows into their application’s icon
defaults write com.apple.dock minimize-to-application -bool true
# Enable spring loading for all Dock items
defaults write com.apple.dock enable-spring-load-actions-on-all-items -bool true
# Hide indicator lights for open applications in the Dock
defaults write com.apple.dock show-process-indicators -bool false
# Wipe all (default) app icons from the Dock
# This is only really useful when setting up a new Mac, or if you don’t use
# the Dock to launch apps.
#defaults write com.apple.dock persistent-apps -array
# Show only open applications in the Dock
defaults write com.apple.dock static-only -bool true
# Don’t animate opening applications from the Dock
defaults write com.apple.dock launchanim -bool false
# Speed up Mission Control animations
defaults write com.apple.dock expose-animation-duration -float 0.1
# Don’t group windows by application in Mission Control
# (i.e. use the old Exposé behavior instead)
defaults write com.apple.dock expose-group-by-app -bool false
# Disable Dashboard
defaults write com.apple.dashboard mcx-disabled -bool true
# Don’t show Dashboard as a Space
defaults write com.apple.dock dashboard-in-overlay -bool true
# Don’t automatically rearrange Spaces based on most recent use
defaults write com.apple.dock mru-spaces -bool false
# Remove the auto-hiding Dock delay
#defaults write com.apple.dock autohide-delay -float 0
# Remove the animation when hiding/showing the Dock
#defaults write com.apple.dock autohide-time-modifier -float 0
# Automatically hide and show the Dock
defaults write com.apple.dock autohide -bool true
# Make Dock icons of hidden applications translucent
defaults write com.apple.dock showhidden -bool true
# Disable the Launchpad gesture (pinch with thumb and three fingers)
#defaults write com.apple.dock showLaunchpadGestureEnabled -int 0
# Reset Launchpad, but keep the desktop wallpaper intact
# (deletes Launchpad's layout databases; macOS regenerates them on next use)
find "${HOME}/Library/Application Support/Dock" -name "*-*.db" -maxdepth 1 -delete
# Add a spacer to the left side of the Dock (where the applications are)
#defaults write com.apple.dock persistent-apps -array-add '{tile-data={}; tile-type="spacer-tile";}'
# Add a spacer to the right side of the Dock (where the Trash is)
#defaults write com.apple.dock persistent-others -array-add '{tile-data={}; tile-type="spacer-tile";}'
# Hot corners
# Possible values:
#  0: no-op
#  2: Mission Control
#  3: Show application windows
#  4: Desktop
#  5: Start screen saver
#  6: Disable screen saver
#  7: Dashboard
# 10: Put display to sleep
# 11: Launchpad
# 12: Notification Center
# Top left screen corner → Mission Control
#defaults write com.apple.dock wvous-tl-corner -int 2
#defaults write com.apple.dock wvous-tl-modifier -int 0
# Top right screen corner → Desktop
#defaults write com.apple.dock wvous-tr-corner -int 4
#defaults write com.apple.dock wvous-tr-modifier -int 0
# Bottom left screen corner → Put display to sleep
# (comment corrected: value 10 is “Put display to sleep” per the legend above,
# not “Start screen saver”)
defaults write com.apple.dock wvous-bl-corner -int 10
defaults write com.apple.dock wvous-bl-modifier -int 0
###############################################################################
# Safari & WebKit                                                             #
###############################################################################
# Privacy: don’t send search queries to Apple
defaults write com.apple.Safari UniversalSearchEnabled -bool false
defaults write com.apple.Safari SuppressSearchSuggestions -bool true
# Press Tab to highlight each item on a web page
defaults write com.apple.Safari WebKitTabToLinksPreferenceKey -bool true
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2TabsToLinks -bool true
# Show the full URL in the address bar (note: this still hides the scheme)
defaults write com.apple.Safari ShowFullURLInSmartSearchField -bool true
# Set Safari’s home page to `about:blank` for faster loading
defaults write com.apple.Safari HomePage -string "about:blank"
# Prevent Safari from opening ‘safe’ files automatically after downloading
defaults write com.apple.Safari AutoOpenSafeDownloads -bool false
# Allow hitting the Backspace key to go to the previous page in history
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2BackspaceKeyNavigationEnabled -bool true
# Hide Safari’s bookmarks bar by default
defaults write com.apple.Safari ShowFavoritesBar -bool false
# Hide Safari’s sidebar in Top Sites
defaults write com.apple.Safari ShowSidebarInTopSites -bool false
# Disable Safari’s thumbnail cache for History and Top Sites
defaults write com.apple.Safari DebugSnapshotsUpdatePolicy -int 2
# Enable Safari’s debug menu
defaults write com.apple.Safari IncludeInternalDebugMenu -bool true
# Make Safari’s search banners default to Contains instead of Starts With
defaults write com.apple.Safari FindOnPageMatchesWordStartsOnly -bool false
# Remove useless icons from Safari’s bookmarks bar
defaults write com.apple.Safari ProxiesInBookmarksBar "()"
# Enable the Develop menu and the Web Inspector in Safari
defaults write com.apple.Safari IncludeDevelopMenu -bool true
defaults write com.apple.Safari WebKitDeveloperExtrasEnabledPreferenceKey -bool true
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2DeveloperExtrasEnabled -bool true
# Add a context menu item for showing the Web Inspector in web views
defaults write NSGlobalDomain WebKitDeveloperExtras -bool true
# Enable continuous spellchecking
defaults write com.apple.Safari WebContinuousSpellCheckingEnabled -bool true
# Disable auto-correct
defaults write com.apple.Safari WebAutomaticSpellingCorrectionEnabled -bool false
# Disable AutoFill
defaults write com.apple.Safari AutoFillFromAddressBook -bool false
defaults write com.apple.Safari AutoFillPasswords -bool false
defaults write com.apple.Safari AutoFillCreditCardData -bool false
defaults write com.apple.Safari AutoFillMiscellaneousForms -bool false
# Warn about fraudulent websites
defaults write com.apple.Safari WarnAboutFraudulentWebsites -bool true
# Disable plug-ins
defaults write com.apple.Safari WebKitPluginsEnabled -bool false
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2PluginsEnabled -bool false
# Disable Java
defaults write com.apple.Safari WebKitJavaEnabled -bool false
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2JavaEnabled -bool false
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2JavaEnabledForLocalFiles -bool false
# Block pop-up windows
defaults write com.apple.Safari WebKitJavaScriptCanOpenWindowsAutomatically -bool false
defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2JavaScriptCanOpenWindowsAutomatically -bool false
# Disable auto-playing video
#defaults write com.apple.Safari WebKitMediaPlaybackAllowsInline -bool false
#defaults write com.apple.SafariTechnologyPreview WebKitMediaPlaybackAllowsInline -bool false
#defaults write com.apple.Safari com.apple.Safari.ContentPageGroupIdentifier.WebKit2AllowsInlineMediaPlayback -bool false
#defaults write com.apple.SafariTechnologyPreview com.apple.Safari.ContentPageGroupIdentifier.WebKit2AllowsInlineMediaPlayback -bool false
# Enable “Do Not Track”
defaults write com.apple.Safari SendDoNotTrackHTTPHeader -bool true
# Update extensions automatically
defaults write com.apple.Safari InstallExtensionUpdatesAutomatically -bool true
###############################################################################
# Mail                                                                        #
###############################################################################
# Disable send and reply animations in Mail.app
defaults write com.apple.mail DisableReplyAnimations -bool true
defaults write com.apple.mail DisableSendAnimations -bool true
# Copy email addresses as `foo@example.com` instead of `Foo Bar <foo@example.com>` in Mail.app
defaults write com.apple.mail AddressesIncludeNameOnPasteboard -bool false
# Add the keyboard shortcut ⌘ + Enter to send an email in Mail.app
# (in the key-equivalent string, "@" is the Command key and \U21a9 is Return ↩)
defaults write com.apple.mail NSUserKeyEquivalents -dict-add "Send" "@\U21a9"
# Display emails in threaded mode, sorted by date (oldest at the top)
defaults write com.apple.mail DraftsViewerAttributes -dict-add "DisplayInThreadedMode" -string "yes"
defaults write com.apple.mail DraftsViewerAttributes -dict-add "SortedDescending" -string "yes"
defaults write com.apple.mail DraftsViewerAttributes -dict-add "SortOrder" -string "received-date"
# Disable inline attachments (just show the icons)
defaults write com.apple.mail DisableInlineAttachmentViewing -bool true
###############################################################################
# Spotlight                                                                   #
###############################################################################
# NOTE(review): several commands in this section require sudo and a writable
# /.Spotlight-V100; they will fail silently on volumes protected by SIP.
# Hide Spotlight tray-icon (and subsequent helper)
#sudo chmod 600 /System/Library/CoreServices/Search.bundle/Contents/MacOS/Search
# Disable Spotlight indexing for any volume that gets mounted and has not yet
# been indexed before.
# Use `sudo mdutil -i off "/Volumes/foo"` to stop indexing any volume.
sudo defaults write /.Spotlight-V100/VolumeConfiguration Exclusions -array "/Volumes"
# Change indexing order and disable some search results
# Yosemite-specific search results (remove them if you are using macOS 10.9 or older):
# 	MENU_DEFINITION
# 	MENU_CONVERSION
# 	MENU_EXPRESSION
# 	MENU_SPOTLIGHT_SUGGESTIONS (send search queries to Apple)
# 	MENU_WEBSEARCH             (send search queries to Apple)
# 	MENU_OTHER
defaults write com.apple.spotlight orderedItems -array \
	'{"enabled" = 1;"name" = "APPLICATIONS";}' \
	'{"enabled" = 1;"name" = "SYSTEM_PREFS";}' \
	'{"enabled" = 1;"name" = "DIRECTORIES";}' \
	'{"enabled" = 1;"name" = "PDF";}' \
	'{"enabled" = 1;"name" = "FONTS";}' \
	'{"enabled" = 0;"name" = "DOCUMENTS";}' \
	'{"enabled" = 0;"name" = "MESSAGES";}' \
	'{"enabled" = 0;"name" = "CONTACT";}' \
	'{"enabled" = 0;"name" = "EVENT_TODO";}' \
	'{"enabled" = 0;"name" = "IMAGES";}' \
	'{"enabled" = 0;"name" = "BOOKMARKS";}' \
	'{"enabled" = 0;"name" = "MUSIC";}' \
	'{"enabled" = 0;"name" = "MOVIES";}' \
	'{"enabled" = 0;"name" = "PRESENTATIONS";}' \
	'{"enabled" = 0;"name" = "SPREADSHEETS";}' \
	'{"enabled" = 0;"name" = "SOURCE";}' \
	'{"enabled" = 0;"name" = "MENU_DEFINITION";}' \
	'{"enabled" = 0;"name" = "MENU_OTHER";}' \
	'{"enabled" = 0;"name" = "MENU_CONVERSION";}' \
	'{"enabled" = 0;"name" = "MENU_EXPRESSION";}' \
	'{"enabled" = 0;"name" = "MENU_WEBSEARCH";}' \
	'{"enabled" = 0;"name" = "MENU_SPOTLIGHT_SUGGESTIONS";}'
# Load new settings before rebuilding the index
killall mds > /dev/null 2>&1
# Make sure indexing is enabled for the main volume
sudo mdutil -i on / > /dev/null
# Rebuild the index from scratch
sudo mdutil -E / > /dev/null
###############################################################################
# Terminal & iTerm 2                                                          #
###############################################################################
# Only use UTF-8 in Terminal.app (encoding 4 = UTF-8)
defaults write com.apple.terminal StringEncodings -array 4
# Enable “focus follows mouse” for Terminal.app and all X11 apps
# i.e. hover over a window and start typing in it without clicking first
#defaults write com.apple.terminal FocusFollowsMouse -bool true
#defaults write org.x.X11 wm_ffm -bool true
# Enable Secure Keyboard Entry in Terminal.app
# See: https://security.stackexchange.com/a/47786/8918
defaults write com.apple.terminal SecureKeyboardEntry -bool true
# Disable the annoying line marks
defaults write com.apple.Terminal ShowLineMarks -int 0
# Don’t display the annoying prompt when quitting iTerm
defaults write com.googlecode.iterm2 PromptOnQuit -bool false
###############################################################################
# Time Machine                                                                #
###############################################################################
# Prevent Time Machine from prompting to use new hard drives as backup volume
defaults write com.apple.TimeMachine DoNotOfferNewDisksForBackup -bool true
# Disable local Time Machine backups
#hash tmutil &> /dev/null && sudo tmutil disablelocal
###############################################################################
# Activity Monitor                                                            #
###############################################################################
# Show the main window when launching Activity Monitor
defaults write com.apple.ActivityMonitor OpenMainWindow -bool true
# Visualize CPU usage in the Activity Monitor Dock icon
defaults write com.apple.ActivityMonitor IconType -int 5
# Show all processes in Activity Monitor
defaults write com.apple.ActivityMonitor ShowCategory -int 0
# Sort Activity Monitor results by CPU usage
# (SortDirection 0 presumably means descending — TODO confirm)
defaults write com.apple.ActivityMonitor SortColumn -string "CPUUsage"
defaults write com.apple.ActivityMonitor SortDirection -int 0
###############################################################################
# Address Book, Dashboard, iCal, TextEdit, and Disk Utility                   #
###############################################################################
# Enable the debug menu in Address Book
defaults write com.apple.addressbook ABShowDebugMenu -bool true
# Enable Dashboard dev mode (allows keeping widgets on the desktop)
defaults write com.apple.dashboard devmode -bool true
# Enable the debug menu in iCal (pre-10.8)
defaults write com.apple.iCal IncludeDebugMenu -bool true
# Use plain text mode for new TextEdit documents
defaults write com.apple.TextEdit RichText -int 0
# Open and save files as UTF-8 in TextEdit (encoding 4 = UTF-8)
defaults write com.apple.TextEdit PlainTextEncoding -int 4
defaults write com.apple.TextEdit PlainTextEncodingForWrite -int 4
# Enable the debug menu in Disk Utility
defaults write com.apple.DiskUtility DUDebugMenuEnabled -bool true
defaults write com.apple.DiskUtility advanced-image-options -bool true
# Auto-play videos when opened with QuickTime Player
defaults write com.apple.QuickTimePlayerX MGPlayMovieOnOpen -bool true
###############################################################################
# Mac App Store                                                               #
###############################################################################
# Enable the WebKit Developer Tools in the Mac App Store
defaults write com.apple.appstore WebKitDeveloperExtras -bool true
# Enable Debug Menu in the Mac App Store
defaults write com.apple.appstore ShowDebugMenu -bool true
# Enable the automatic update check
defaults write com.apple.SoftwareUpdate AutomaticCheckEnabled -bool true
# Check for software updates daily, not just once per week
defaults write com.apple.SoftwareUpdate ScheduleFrequency -int 1
# Download newly available updates in background
defaults write com.apple.SoftwareUpdate AutomaticDownload -int 1
# Install critical/security updates
defaults write com.apple.SoftwareUpdate CriticalUpdateInstall -int 1
# Install system data files
# (comment corrected: original said “Automatically download apps purchased on
# other Macs”, but ConfigDataInstall controls system data file installs —
# TODO confirm against Apple's SoftwareUpdate documentation)
defaults write com.apple.SoftwareUpdate ConfigDataInstall -int 1
# Turn on app auto-update
defaults write com.apple.commerce AutoUpdate -bool true
# Allow the App Store to reboot machine on macOS updates
defaults write com.apple.commerce AutoUpdateRestartRequired -bool true
###############################################################################
# Photos                                                                      #
###############################################################################
# Prevent Photos from opening automatically when devices are plugged in
defaults -currentHost write com.apple.ImageCapture disableHotPlug -bool true
###############################################################################
# Messages                                                                    #
###############################################################################
# Disable automatic emoji substitution (i.e. use plain text smileys)
#defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "automaticEmojiSubstitutionEnablediMessage" -bool false
# Disable smart quotes as it’s annoying for messages that contain code
defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "automaticQuoteSubstitutionEnabled" -bool false
# Disable continuous spell checking
#defaults write com.apple.messageshelper.MessageController SOInputLineSettings -dict-add "continuousSpellCheckingEnabled" -bool false
###############################################################################
# Kill affected applications                                                  #
###############################################################################
# Restart every app whose preferences were modified above so the new settings
# take effect. cfprefsd is included so cached preference values are dropped.
affected_apps=(
	"Activity Monitor"
	"Address Book"
	"Calendar"
	"cfprefsd"
	"Contacts"
	"Dock"
	"Finder"
	"Google Chrome Canary"
	"Google Chrome"
	"Mail"
	"Messages"
	"Opera"
	"Photos"
	"Safari"
	"SizeUp"
	"Spectacle"
	"SystemUIServer"
	"Terminal"
	"Transmission"
	"Tweetbot"
	"Twitter"
	"iCal"
)
for app in "${affected_apps[@]}"; do
	# Silently ignore apps that are not running.
	killall "${app}" &> /dev/null
done
echo "Done. Note that some of these changes require a logout/restart to take effect."
|
# reponame: SpiderEvgn/campo
# Admin CRUD interface for User records.
class Admin::UsersController < Admin::BaseController
  # Loads @user for all member actions.
  before_action :set_user, only: [:show, :edit, :update, :destroy]

  # GET /admin/users — newest users first, paginated via .page (Kaminari-style).
  def index
    @users = User.order(id: :desc).page(params[:page])
  end

  # GET /admin/users/:id — @user provided by set_user.
  def show
  end

  # GET /admin/users/new
  def new
    @user = User.new
  end

  # POST /admin/users
  def create
    @user = User.new user_params
    if @user.save
      redirect_to admin_user_url(@user), notice: t('flash.user_is_successfully_created')
    else
      # NOTE(review): both create and update re-render the shared
      # 'update_form' template on validation failure — confirm that the
      # template handles unsaved (new) records correctly.
      render 'update_form'
    end
  end

  # GET /admin/users/:id/edit — @user provided by set_user.
  def edit
  end

  # PATCH/PUT /admin/users/:id
  def update
    if @user.update user_params
      redirect_to admin_user_url(@user), notice: t('flash.user_is_successfully_updated')
    else
      render 'update_form'
    end
  end

  # DELETE /admin/users/:id
  # Bug fix: the action previously loaded @user (via set_user) but never
  # deleted it, so the destroy route silently did nothing.
  def destroy
    @user.destroy
    redirect_to admin_users_url
  end

  private

  # Strong-parameter whitelist for mass assignment.
  def user_params
    params.require(:user).permit(:name, :slug, :description, :role)
  end

  def set_user
    @user = User.find params[:id]
  end
end
|
#!/usr/bin/env python3
# Required packages
from yfinance import Ticker
from sys import stdout, argv, exit
from datetime import date, timedelta
# Neater colour clear function
def clr_code():
    """Reset all terminal colour/attribute state (ANSI SGR reset, ESC[m)."""
    print("\033[m", end="")
def main(symbol):
    """Print a colourised open/close table for `symbol` covering the past week.

    NOTE(review): relies on yfinance's Ticker.history(); network access is
    required, and the exact date-window semantics follow yfinance conventions.
    """
    # Create one week data frame of daily price information
    stock_frame = Ticker(symbol).history(period = '1d',
                                         start = date.today() - timedelta(7),
                                         end = date.today())
    # Reset colours, print heading information (may require tuning for neatness)
    clr_code()
    print("$", symbol, "\t\t Open\t Close")
    # Walk the frame row by row; each row is one available trading day.
    for timestamp, row in stock_frame.iterrows():
        day_open = row['Open']
        day_close = row['Close']
        # Up day → black on green (reverse video); otherwise white on dark red.
        # 0 - non bold, 38;5 - 256color fg, 2 - green, 7 - reverse fg/bg
        # 0 - non bold, 48;5 - 256color bg, 52 - dark red bg
        if day_open < day_close:
            stdout.write("\033[0;38;5;2;7m")
        else:
            stdout.write("\033[0;48;5;52m")
        # Formatted date string, tab, open price, arrow, close price
        print(timestamp.strftime("%Y-%m-%d"),
              "\t", "%.2f" % day_open,
              " -> ", "%.2f" % day_close)
        # Clear colour codes at end of each line
        clr_code()
if __name__ == "__main__":
    # Check that a ticker is given at the command line
    # (arguments beyond the first are silently ignored)
    if len(argv) == 1:
        print("Please provide a stock ticker to inspect, e.g. SPY")
        exit(1)
    main(argv[1])
|
<!-- Minimal two-column example table: one header row plus one data row. -->
<table>
  <thead>
    <tr>
      <th>Column 1</th>
      <th>Column 2</th>
    </tr>
  </thead>
  <tbody>
    <tr>
      <td>Data 1</td>
      <td>Data 2</td>
    </tr>
  </tbody>
</table>
// gh_stars: 0
package com.horowitz.mickey;
import java.awt.Color;
import java.awt.Point;
import java.awt.image.BufferedImage;
import java.util.List;
import java.util.Map;
public interface ImageComparator {
/**
* Compares two images with allowed roughness.
*
* @param image1
* an image to compare.
* @param image2
* an image to compare.
*
* @return true if images have the same sizes and number of unmatching pixels less or equal to hmm
*/
public boolean compare(BufferedImage image1, BufferedImage image2);
public boolean compareOld(BufferedImage image1, BufferedImage image2, Pixel[] indices);
public boolean compare(BufferedImage image1, BufferedImage image2, Map<Integer, Color[]> colors, Pixel[] indices);
public List<Pixel> findSimilarities(BufferedImage image1, BufferedImage image2, Pixel[] indices);
public boolean compare(BufferedImage image, Pixel[] mask, Color[] colors);
public boolean compareInt(BufferedImage image, Pixel[] mask, Color[] colors);
public Point findPoint(BufferedImage image, Pixel[] mask, Color[] colors);
public Pixel findImage(BufferedImage image, BufferedImage area);
public abstract void setErrors(int errors);
public abstract int getErrors();
public abstract void setPrecision(int precision);
public abstract int getPrecision();
public abstract void setRoughness(double roughness);
public abstract double getRoughness();
} |
package mainclient.fieldNowFinal;
import main.fieldNowFinal.FieldNowFinal;
/**
 * Exercises reads and writes of {@code FieldNowFinal.fieldFinal} through every
 * combination of (assignment vs. read-only) x (explicit {@code super.} access
 * vs. unqualified access).
 *
 * NOTE(review): the class name pattern ("fieldNowFinal") suggests this is a
 * binary-compatibility test fixture for a field that became final in a newer
 * version of the superclass — the exact access forms are deliberate, so this
 * code must not be "simplified". TODO confirm against the test harness.
 */
public class FieldNowFinalExt extends FieldNowFinal {
    // Write then read, both via an explicit 'super.' qualifier.
    public int fieldNowFinalAssignmentSuperKey() {
        super.fieldFinal = 3;
        return super.fieldFinal;
    }
    // Read-only, via an explicit 'super.' qualifier.
    public int fieldNowFinalNoAssignmentSuperKey() {
        return super.fieldFinal;
    }
    // Write then read, via unqualified (inherited) access.
    public int fieldNowFinalAssignmentNoSuperKey() {
        fieldFinal = 3;
        return fieldFinal;
    }
    // Read-only, via unqualified (inherited) access.
    public int fieldNowFinalNoAssignmentNoSuperKey() {
        return fieldFinal;
    }
}
|
#!/usr/bin/env bash
# Initially authored by Franziska Schwab
# Modularization and current development by Peter Eisner
# Copyright 2021 Technische Informationsbibliothek (TIB)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# script conducts pre-ingest analysis and checks:
# 1.1 Check file names (fileNamePolicy)
# 1.2 Check directory names (dirNamePolicy)
# 2) Check for operating system specific files (hiddenSystemFiles)
# 3) Check for invisible files and directories (hiddenFilesDirs)
# 4) check for empty files and directories (emptyFilesDirs)
# 5) check against SIP-specification (sipStructure)
# Identifier-MASTER/MODIFIED_MASTER/DERIVATIVE_COPY
# 6) check for files >2GB (bigFiles)
# 7) check for duplicates (dubCheck)
# 8) look for (possibly compressed) archive files (archiveFiles)
# [ Cleanup ]
# Dependencies:
# Using Cygwin64Bit in version 3.0.7 or higher, all neccessary packages
# should be installed by default.
# Works fine on real computers, too.
# get the location of this script
# (absolute directory of this file, independent of the caller's CWD)
piaDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Loads pia.config, prepares the time-stamped output directory, derives the
# identifier-directory depth, and verifies that required external commands
# (currently: unzip) are available. Sets the globals: output,
# configured_output, id_depth, SIP_mindepth, SIP_maxdepth, peek_in_archives.
function piaConfig {
	################
	# Configuration:
	# config options are sourced from PIA's config file. At the moment, the
	# file pia.config needs to be edited manually.
	source "$piaDir"/pia.config
	# create output folder safely and recursively
	# (fix: quote $output — it may contain spaces, the very thing PIA checks for)
	mkdir -p "$output"
	# add time stamped subdirectory to output
	configured_output=$output
	output=$configured_output/$(date +%Y-%m-%d_%H-%M-%S)
	mkdir "$output"
	# define depth limits for identifier directories
	if [ -z "$id_depth" ]
	then
		# determine depth as difference from "input" to "MASTER"
		input_depth=$(echo "$input" | awk -F '/' '{print NF}')
		master_depth=$(find "$input" -type d -name "MASTER" | awk -F '/' '{print NF; exit}')
		# fix: shell arithmetic instead of spawning the archaic external 'expr'
		id_depth=$(( master_depth - input_depth - 1 ))
	fi
	SIP_mindepth=$id_depth
	SIP_maxdepth=$id_depth
	# check if needed commands are available
	# currently, this checks a single command: unzip. later we can add more
	# packers like rar or 7zip to this array, like this: ("unzip" "unrar" ...)
	declare -a mandatory_cmds=("unzip")
	peek_in_archives=true
	for cmd in "${mandatory_cmds[@]}"
	do
		if ! command -v "$cmd" &> /dev/null
		then
			echo "WARNING: Could not find mandatory command $cmd."
			peek_in_archives=false
		fi
	done
	if [ "$peek_in_archives" = false ]
	then
		echo "Will not analyze contents of archive files due to missing command(s)."
	fi
}
function fileNamePolicy {
	#####################################
	# 1.1 Check file names against policy
	# There are different sets of characters. Some are explicitly forbidden according
	# to our policy; others are known to be harmless, so we allow them. While both sets
	# are mutually exclusive, both added together do not cover all possible characters.
	# This routine does two checks:
	# 1. It looks for explicitly forbidden characters in file names (FORBIDDEN).
	# 2. Then it looks for filenames containing characters that are not on the list of
	# explicitly allowed characters (NOT ALLOWED).
	# The results from FORBIDDEN are then subtracted from NOT ALLOWED results.
	#
	# You may think of it like this: the first check finds the really bad ones, the
	# second check goes for the strange and weird ones which might still be okay.
	echo
	echo "-----------------Looking for forbidden characters in file names-----------------"
	echo
	# generate a file list, format as <path><delimiter><filename>
	# (the delimiter is an arbitrary string assumed to never occur in real paths)
	find "$input" -type f -fprintf "$output"/tmp_files_delimited.txt '%hpr3ttY_un1que_delimiTer%f\n'
	# FIND FORBIDDEN CHARACTERS IN FILE NAMES
	# boil down the file list, keep lines with forbidden characters
	grep -e "[]\[<>„“&*#,;@|$\\ ]" \
		"$output"/tmp_files_delimited.txt \
		> "$output"/tmp_files_forbidden_characters_delimited.txt
	# detect spaces in filenames ($2 is the bare file name after the delimiter)
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ / / { print $1, $2 }' \
		"$output"/tmp_files_forbidden_characters_delimited.txt \
		> "$output"/filenames_with_spaces.txt
	piaStatsSpaces=$(cat "$output"/filenames_with_spaces.txt | wc -l)
	# detect punctuation characters (except ".")
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[„“,;]/ { print $1, $2 }' \
		"$output"/tmp_files_forbidden_characters_delimited.txt \
		> "$output"/filenames_with_punctuation_characters.txt
	piaStatsPunctuations=$(cat "$output"/filenames_with_punctuation_characters.txt | wc -l)
	# detect symbol characters
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[&*#@|$]/ { print $1, $2 }' \
		"$output"/tmp_files_forbidden_characters_delimited.txt \
		> "$output"/filenames_with_symbol_characters.txt
	piaStatsSymbols=$(cat "$output"/filenames_with_symbol_characters.txt | wc -l)
	# detect bracket characters
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[]\[<>]/ { print $1, $2 }' \
		"$output"/tmp_files_forbidden_characters_delimited.txt \
		> "$output"/filenames_with_bracket_characters.txt
	piaStatsBrackets=$(cat "$output"/filenames_with_bracket_characters.txt | wc -l)
	# detect backslash character
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[\\]/ { print $1, $2 }' \
		"$output"/tmp_files_forbidden_characters_delimited.txt \
		> "$output"/filenames_with_backslash_character.txt
	piaStatsBackslash=$(cat "$output"/filenames_with_backslash_character.txt | wc -l)
	# detect leading, trailing or more than one dot in file name
	# (note: scans the FULL file list, not just the forbidden-character subset)
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /^\.|\..*\.|\.$/ { print $1, $2 }' \
		"$output"/tmp_files_delimited.txt \
		> "$output"/filenames_with_dot_characters.txt
	piaStatsDots=$(cat "$output"/filenames_with_dot_characters.txt | wc -l)
	# compile positives in one list
	cat "$output"/filenames_with_*.txt > "$output"/tmp_filenames_with_forbidden_characters.txt
	# report results
	if [ -s "$output"/tmp_filenames_with_forbidden_characters.txt ]
	then
		sort -u "$output"/tmp_filenames_with_forbidden_characters.txt \
			> "$output"/tmp_filenames_with_forbidden_characters_sorted_unique.txt
		piaStatsForbidden=$(cat "$output"/tmp_filenames_with_forbidden_characters_sorted_unique.txt | wc -l)
		echo "Number of file names containing"
		echo "Spaces: $piaStatsSpaces"
		echo "Punctuations: $piaStatsPunctuations"
		echo "Symbols: $piaStatsSymbols"
		echo "Brackets: $piaStatsBrackets"
		echo "Backslashes: $piaStatsBackslash"
		echo "Dots: $piaStatsDots"
		echo "Detected $piaStatsForbidden file names with forbidden characters."
	else
		echo "No file names with forbidden characters detected."
	fi
	# FIND NOT ALLOWED CHARACTERS
	# detect file names containing characters not in the list of allowed characters
	# this regex is quite tricky and prone to failure when tampered with
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[^a-zA-Z0-9_ÄäÖöÜüß\.\?\+!~–=:'"'"'\)\(%§-]/ { print $1, $2 }' \
		"$output"/tmp_files_delimited.txt \
		> "$output"/tmp_filenames_with_not_allowed_characters.txt
	# remove FORBIDDEN results from NOT ALLOWED
	# (comm -13 keeps only the lines unique to the second, sorted input)
	# NOTE(review): if no forbidden names were found above, the
	# *_sorted_unique.txt file does not exist and comm will print a warning;
	# consider creating that file unconditionally.
	if [ -s "$output"/tmp_filenames_with_not_allowed_characters.txt ]
	then
		sort "$output"/tmp_filenames_with_not_allowed_characters.txt \
			> "$output"/tmp_filenames_with_not_allowed_characters_sorted.txt
		comm -13 "$output"/tmp_filenames_with_forbidden_characters_sorted_unique.txt \
			"$output"/tmp_filenames_with_not_allowed_characters_sorted.txt \
			> "$output"/filenames_with_not_allowed_characters.txt
	else
		echo "No file names with not allowed characters found."
		return
	fi
	# report results
	if [ -s "$output"/filenames_with_not_allowed_characters.txt ]
	then
		piaStatsNotAllowed=$(cat "$output"/filenames_with_not_allowed_characters.txt| wc -l)
		echo "Detected $piaStatsNotAllowed file names with not allowed characters."
	else
		echo "No file names with not allowed characters found."
	fi
}
function dirNamePolicy {
	##########################################
	# 1.2 Check directory names against policy
	# This differs from the file name check in only one regard: it generates a
	# (recursive) list of folder names instead of file names in the beginning.
	echo
	echo "-----------------Looking for forbidden characters in folder names---------------"
	echo
	# generate a folder list, format as <path><delimiter><foldername>
	find "$input" -type d -fprintf "$output"/tmp_folders_delimited.txt '%hpr3ttY_un1que_delimiTer%f\n'
	# FIND FORBIDDEN CHARACTERS IN FOLDER NAMES
	# boil down the folder list, keep lines with forbidden characters
	grep -e "[]\[<>„“&*#,;@|$\\ ]" \
		"$output"/tmp_folders_delimited.txt \
		> "$output"/tmp_folders_forbidden_characters_delimited.txt
	# detect spaces in foldernames ($2 is the bare folder name after the delimiter)
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ / / { print $1, $2 }' \
		"$output"/tmp_folders_forbidden_characters_delimited.txt \
		> "$output"/foldernames_with_spaces.txt
	piaStatsSpaces=$(cat "$output"/foldernames_with_spaces.txt | wc -l)
	# detect punctuation characters (except ".")
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[„“,;]/ { print $1, $2 }' \
		"$output"/tmp_folders_forbidden_characters_delimited.txt \
		> "$output"/foldernames_with_punctuation_characters.txt
	piaStatsPunctuations=$(cat "$output"/foldernames_with_punctuation_characters.txt | wc -l)
	# detect symbol characters
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[&*#@|$]/ { print $1, $2 }' \
		"$output"/tmp_folders_forbidden_characters_delimited.txt \
		> "$output"/foldernames_with_symbol_characters.txt
	piaStatsSymbols=$(cat "$output"/foldernames_with_symbol_characters.txt | wc -l)
	# detect bracket characters
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[]\[<>]/ { print $1, $2 }' \
		"$output"/tmp_folders_forbidden_characters_delimited.txt \
		> "$output"/foldernames_with_bracket_characters.txt
	piaStatsBrackets=$(cat "$output"/foldernames_with_bracket_characters.txt | wc -l)
	# detect backslash character
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[\\]/ { print $1, $2 }' \
		"$output"/tmp_folders_forbidden_characters_delimited.txt \
		> "$output"/foldernames_with_backslash_character.txt
	piaStatsBackslash=$(cat "$output"/foldernames_with_backslash_character.txt | wc -l)
	# detect leading, trailing or more than one dot in folder name
	# (note: scans the FULL folder list, not just the forbidden-character subset)
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /^\.|\..*\.|\.$/ { print $1, $2 }' \
		"$output"/tmp_folders_delimited.txt \
		> "$output"/foldernames_with_dot_characters.txt
	piaStatsDots=$(cat "$output"/foldernames_with_dot_characters.txt | wc -l)
	# compile positives in one list
	cat "$output"/foldernames_with_*.txt > "$output"/tmp_foldernames_with_forbidden_characters.txt
	# report results
	if [ -s "$output"/tmp_foldernames_with_forbidden_characters.txt ]
	then
		sort -u "$output"/tmp_foldernames_with_forbidden_characters.txt \
			> "$output"/tmp_foldernames_with_forbidden_characters_sorted_unique.txt
		piaStatsForbidden=$(cat "$output"/tmp_foldernames_with_forbidden_characters_sorted_unique.txt | wc -l)
		echo "Number of folder names containing"
		echo "Spaces: $piaStatsSpaces"
		echo "Punctuations: $piaStatsPunctuations"
		echo "Symbols: $piaStatsSymbols"
		echo "Brackets: $piaStatsBrackets"
		echo "Backslashes: $piaStatsBackslash"
		echo "Dots: $piaStatsDots"
		echo "Detected $piaStatsForbidden folder names with forbidden characters."
	else
		echo "No folder names with forbidden characters detected."
	fi
	# FIND NOT ALLOWED CHARACTERS
	# detect folder names containing characters not in the list of allowed characters
	# this regex is quite tricky and prone to failure when tampered with
	awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer";OFS="/" } $2 ~ /[^a-zA-Z0-9_ÄäÖöÜüß\.\?\+!~–=:'"'"'\)\(%§-]/ { print $1, $2 }' \
		"$output"/tmp_folders_delimited.txt \
		> "$output"/tmp_foldernames_with_not_allowed_characters.txt
	# remove FORBIDDEN results from NOT ALLOWED
	# (comm -13 keeps only the lines unique to the second, sorted input)
	# NOTE(review): if no forbidden names were found above, the
	# *_sorted_unique.txt file does not exist and comm will print a warning;
	# consider creating that file unconditionally.
	if [ -s "$output"/tmp_foldernames_with_not_allowed_characters.txt ]
	then
		sort "$output"/tmp_foldernames_with_not_allowed_characters.txt \
			> "$output"/tmp_foldernames_with_not_allowed_characters_sorted.txt
		comm -13 "$output"/tmp_foldernames_with_forbidden_characters_sorted_unique.txt \
			"$output"/tmp_foldernames_with_not_allowed_characters_sorted.txt \
			> "$output"/foldernames_with_not_allowed_characters.txt
	else
		echo "No folder names with not allowed characters found."
		return
	fi
	# report results
	if [ -s "$output"/foldernames_with_not_allowed_characters.txt ]
	then
		piaStatsNotAllowed=$(cat "$output"/foldernames_with_not_allowed_characters.txt| wc -l)
		echo "Detected $piaStatsNotAllowed folder names with not allowed characters."
	else
		echo "No folder names with not allowed characters found."
	fi
}
function hiddenSystemFiles {
##############################################
# 2) Check for operating-system-specific clutter files (Windows thumbnail
# caches and macOS Finder metadata) anywhere below the input directory.
# In case of reworking/widening this check: update the readme.
echo
echo "-----------------Looking for hidden system files-------------------------------"
echo
# Typo fixed in the banner above ("Loooking" -> "Looking").
# Group the two name tests explicitly so the implicit -print applies to
# the whole expression even if further predicates are added later.
find "$input" \( -iname Thumbs.db -o -name .DS_Store \) >"$output"/systemdateien.txt
if [ -s "$output"/systemdateien.txt ]
then
	# Non-empty report: offending files were found, keep the report file.
	echo "Panic! Operating system specific files found."
else
	# Nothing found: say so and drop the empty report file.
	echo "No operating system specific files found."
	rm "$output"/systemdateien.txt
fi
}
function hiddenFilesDirs {
############################################
# 3) Report hidden files and directories (dotfiles) below the input tree.
local report="$output"/hidden_files.txt
echo
echo "-----------------Looking for hidden files and directories-----------------------"
echo
# Anything whose basename starts with a dot counts as hidden.
find "$input" -name ".*" -print >"$report"
if [ ! -s "$report" ]
then
	# Empty report: nothing hidden was found, so remove the report file.
	echo "No hidden files or directories found."
	rm "$report"
else
	echo "Panic! Hidden files or directories found."
fi
}
function emptyFilesDirs {
##########################################
# 4) Report empty files and empty directories below the input tree.
local report="$output"/empty.txt
echo
echo "-----------------Looking for empty files and directories------------------------"
echo
find "$input" -empty >"$report"
if [ ! -s "$report" ]
then
	# Nothing empty found: announce and drop the (itself empty) report.
	echo "No empty files or directories found."
	rm "$report"
else
	echo "Panic! Empty files or directories found."
fi
}
function sipStructure {
####################################################################
# 5) Check each representation directory against the SIP specification:
# it must contain MASTER, and may only contain MASTER, MODIFIED_MASTER
# and DERIVATIVE_COPY. Violations are appended to sip_structure_not_ok.txt.
echo
echo "-----------------Checking SIP conformity of representation directories----------"
echo
echo "Check against SIP-specification for representation directories:"
echo "MASTER/MODIFIED_MASTER/DERIVATIVE_COPY"
echo
# Read all directory names between SIP_mindepth and SIP_maxdepth into an
# array. IFS is set to newline so paths containing spaces survive the
# unquoted expansion (paths containing newlines would still break this).
IFS=$'\n' dirs=( $(find "$input" -mindepth "$SIP_mindepth" -maxdepth "$SIP_maxdepth" -type d -printf "%P\\n") )
for i in "${dirs[@]}"
do
# Work inside the representation directory; bail out if it vanished.
cd "$input/$i" || exit
# Every representation directory must contain a MASTER subdirectory.
if [ -d "MASTER" ]
then
# MASTER present: nothing to report.
:
else
echo "Panic! $i doesn't contain a MASTER directory."
printf "%s doesn't contain a MASTER directory.\\n" "$i" >>"$output"/sip_structure_not_ok.txt
fi
# Count direct subdirectories.
# NOTE(review): this counts directories directly under "$input", not under
# "$input/$i", and is therefore invariant across loop iterations —
# presumably "$input/$i" was intended; confirm before changing.
dir_count=$(find "$input" -maxdepth 1 -type d | wc -l)
# Only inspect the contents when there is more than the directory itself.
if [ "$dir_count" -ne 1 ]
then
# Read the direct subdirectory names of this representation directory.
IFS=$'\n' check_dir_names=( $(find "$input/$i" -maxdepth 1 -type d -printf "%P\\n") )
for d in "${check_dir_names[@]}"
do
# Each subdirectory name must be one of the three allowed names.
if [[ "$d" =~ ^(MASTER|MODIFIED_MASTER|DERIVATIVE_COPY)$ ]]
then
# Allowed name: nothing to report.
:
else
echo "Panic! $i contains forbidden directory names: '$d'"
printf "%s contains forbidden directory names: %s\\n" "$i" "$d" >>"$output"/sip_structure_not_ok.txt
fi
# If configured, a SIP whose MASTER holds more than one file and which
# has a DERIVATIVE_COPY must also provide a MODIFIED_MASTER.
# NOTE(review): $check_for_MM is unquoted in the test below; it works for
# the expected yes/no values but would break if empty — confirm.
if [ $check_for_MM = yes ]
then
MASTERfiles_count=$(find "$input/$i/MASTER" -type f | wc -l)
if [ "$MASTERfiles_count" -gt 1 ]; then
if [ -d "DERIVATIVE_COPY" ]; then
if [ -d "MODIFIED_MASTER" ]; then
:
else
echo "Panic! $input/$i is missing MODIFIED_MASTER"
printf "%s/%s is missing MODIFIED_MASTER \\n" "$input" "$i" >>"$output"/sip_structure_not_ok.txt
fi
else
# No DERIVATIVE_COPY: the MODIFIED_MASTER requirement does not apply.
:
fi
fi
fi
done
fi
cd "$input" || exit
done
# Reset working directory to PIA's own location.
# NOTE(review): $piaDir is unquoted here, unlike every other cd in this
# function — would break for a PIA path containing spaces; confirm.
cd $piaDir
}
function bigFiles {
#########################
# 6) Report files larger than 2 GB.
echo
echo "-----------------Looking for files bigger than 2 GB-----------------------------"
echo
# BUG FIX: truncate with '>' instead of appending with '>>'. The cleanup
# step at the end of the script only deletes tmp_*.txt and empty reports,
# so '>>' let a big_files.txt from a previous run in the same output
# directory accumulate stale, duplicate entries.
find "$input" -size +2G >"$output"/big_files.txt
if [ -s "$output"/big_files.txt ]
then
	echo "Panic! Files > 2GB found."
else
	# Nothing found: announce and drop the empty report file.
	echo "No files bigger than 2GB found."
	rm "$output"/big_files.txt
fi
}
function dubCheck {
########################
# 7) Check for duplicate files.
echo
echo "-----------------Looking for duplicate files------------------------------------"
echo
# Duplicates are identified by md5 hashes, not by filenames. Since we expect
# lots of merely renamed files in MODIFIED_MASTER subdirectories, an
# indiscriminate check on all files would produce many false positives.
# Therefore two checks are performed, mutually excluding the MASTER and
# MODIFIED_MASTER subdirectories; the results are merged into one report.
# Generate the list of all files to hash.
find "$input" -type f -fprint "$output"/tmp_files_without_md5sums.txt
# Number of files to process, used only for the progress display.
numberOfFiles=$(cat "$output"/tmp_files_without_md5sums.txt | wc -l)
numberOfFilesProcessed=0
# Iterate over the file list and generate md5 hashes, one line per file
# in the classic "<hash>  <path>" md5sum format.
while read file
do
md5sum "$file" >> "$output"/tmp_files_with_md5sums.txt
numberOfFilesProcessed=$((numberOfFilesProcessed + 1))
# \e[0K\r erases the line and returns the cursor for an in-place counter.
echo -en "\e[0K\r Calculating hash for file $numberOfFilesProcessed of $numberOfFiles..."
done < "$output"/tmp_files_without_md5sums.txt
printf "\n"
# Compile a file list excluding MASTER folders.
grep -v "/MASTER/" "$output"/tmp_files_with_md5sums.txt \
> "$output"/tmp_files_without_master.txt
# Compile a file list excluding MODIFIED_MASTER folders.
grep -v "/MODIFIED_MASTER/" "$output"/tmp_files_with_md5sums.txt \
> "$output"/tmp_files_without_modified_master.txt
# Sort the lists and keep only lines whose first 32 characters (the md5
# hash) occur more than once, i.e. only duplicate files.
sort "$output"/tmp_files_without_master.txt \
| uniq -D -w 32 \
> "$output"/tmp_files_without_master_sorted_duplicates.txt
sort "$output"/tmp_files_without_modified_master.txt \
| uniq -D -w 32 \
> "$output"/tmp_files_without_modified_master_sorted_duplicates.txt
# Join both lists, sorted, only unique lines (option '-u'). The 'grouped'
# output separates runs of identical hashes by a blank line.
sort -u "$output"/tmp_files_without_master_sorted_duplicates.txt \
"$output"/tmp_files_without_modified_master_sorted_duplicates.txt \
| uniq -w 32 --group > "$output"/tmp_duplicates_by_md5_grouped.txt
# If the grouped list is non-empty, there are duplicates.
if [ -s "$output"/tmp_duplicates_by_md5_grouped.txt ]
then
echo
echo "There are duplicates."
echo
else
echo
echo "No duplicates found. You are lucky, or something went wrong."
echo
return
fi
# Sort the report by the number of occurrences per duplicate group:
# split the grouped list at blank lines into one file per group, prefix
# each with its zero-padded line count, then re-concatenate in glob
# (i.e. count) order.
echo "Rearranging duplicate list."
echo
currentDir=$PWD
cd "$output"
csplit tmp_duplicates_by_md5_grouped.txt \
--prefix="tmp_splitted_duplicates_" \
--suffix-format="%06d.txt" \
--suppress-matched \
--silent '/^$/' {*}
for file in tmp_splitted_duplicates*
do
# NOTE(review): $file is unquoted in the two commands below; the split
# files are named tmp_splitted_duplicates_NNNNNN.txt so this is safe in
# practice, but quoting would be more robust — confirm before changing.
len=$(cat $file | wc -l)
mv "$file" $(printf "%03d_%s\n" $len $file)
done
for file in *tmp_splitted_duplicates*
do
cat $file >> duplicates_sorted_by_occurance.txt
echo >> duplicates_sorted_by_occurance.txt
done
# Remove the per-group split files; only the merged report remains.
rm *tmp_splitted_duplicates*
cd "$currentDir"
}
function archiveFiles {
########################
# 8) Look for (possibly compressed) archive files by MIME type and,
# optionally, peek into ZIPs for macOS ressource fork entries.
echo
echo "-----------------Looking for archive files--------------------------------------"
echo
# Make a list of identifier folders at the configured depth.
find "$input" -maxdepth "$id_depth" -mindepth "$id_depth" -type d \
	> "$output"/tmp_id_folders.txt
numberOfFolders=$(cat "$output"/tmp_id_folders.txt | wc -l)
numberOfFoldersProcessed=0
# Find archives by MIME type, folder by folder.
# This loop takes a while. If one were inclined to optimize it, keep in
# mind that the vast majority of the execution time (roughly 96 percent)
# is spent in the "... -exec file ..." part, so unless there is a
# high-performance alternative for determining MIME types there is no
# point in fiddling with this.
while read folder
do
	find "$folder" -type f -exec file -F 'pr3ttY_un1que_delimiTer' -i {} \; \
		| grep -F -f "$piaDir"/search-patterns_archive_mime_types.lst \
		>> "$output"/tmp_archive_files.txt
	numberOfFoldersProcessed=$((numberOfFoldersProcessed +1))
	echo -en "\e[0K\r Processing folder $numberOfFoldersProcessed of $numberOfFolders..."
done < "$output"/tmp_id_folders.txt
printf "\n"
# Get rid of false positives, output as CSV. Files like epub or office
# documents may be reported as MIME type "application/zip"; these get
# purged here based on their file extension.
# Beware: the trailing space in the following FS is a hackish way to get
# rid of padding.
awk 'BEGIN{ FS="pr3ttY_un1que_delimiTer ";OFS="," } tolower($1) !~ /epub$|docx$/ { print "\042"$1"\042", $2 }' \
	"$output"/tmp_archive_files.txt \
	> "$output"/archive_files.csv
# Identify ZIP archives containing macOS ressource fork files.
if [ "$peek_in_archives" = true ]
then
	grep -F "application/zip" "$output"/tmp_archive_files.txt \
		> "$output"/tmp_zip_archive_files_delimited.txt
	awk -F 'pr3ttY_un1que_delimiTer' '{ print $1 }' \
		"$output"/tmp_zip_archive_files_delimited.txt \
		> "$output"/tmp_zip_archive_files.txt
	echo "Scanning ZIP archives for macOS ressource fork files."
	while read zip
	do
		# BUG FIX: "$zip" is now quoted so archive paths containing
		# spaces or glob characters reach unzip and echo intact.
		( unzip -l "$zip" | grep -F -q "._" ) && echo "$zip" >> "$output"/macos_ressource_fork_files.txt
	done < "$output"/tmp_zip_archive_files.txt
fi
# Report stats.
if [ -s "$output"/archive_files.csv ]
then
	piaStatsArchives=$(cat "$output"/archive_files.csv | wc -l)
	echo "Detected $piaStatsArchives archive files according to MIME types."
else
	echo "No archive files found."
fi
if [ -s "$output"/macos_ressource_fork_files.txt ]
then
	piaStatsMacosInZip=$(cat "$output"/macos_ressource_fork_files.txt | wc -l)
	echo "Found $piaStatsMacosInZip ZIPs containing macOS ressource fork files."
elif [ "$peek_in_archives" = false ]
then
	# Typo fixed ("ressurce" -> "ressource") for consistency with the
	# other messages; report filenames keep their historical spelling.
	echo "Could not scan archives for macOS ressource forks."
else
	echo "Found no ZIPs containing macOS ressource fork files."
fi
}
########################
# MAIN PROGRAM STRUCTURE
# Clear the screen and show the PIA banner.
clear
echo
echo " ____ ___ _ "
echo " | _ \ |_ _| / \ "
echo " Pre | |_) | | | / _ \ "
echo " Ingest | __/ | | / ___ \ "
echo " Analyzer |_| |___| /_/ \_\ "
echo
# Load and display PIA's config parameters; piaConfig is defined earlier
# in this script and sets $input, $output, $id_depth and $check_for_MM.
piaConfig
echo
echo "Input directory is:"
echo "$input"
echo
echo "Output will be written to:"
echo "$output"
echo
echo "Depth of identifier directories: $id_depth"
echo
echo "SIP structure contains MODIFIED_MASTER: $check_for_MM"
echo
echo "--------------------------------------------------------------------------------"
echo " OPTIONS"
echo
# Prompt shown by the 'select' builtin below.
PS3='Please type in the number of your choice: '
# Menu options; the array order defines the numbering shown to the user.
option=(
"Perform all checks and exit"
"Check everything but duplicates"
"Check everything but duplicates and archives"
"Find forbidden characters in filenames"
"Find the usual hidden system files"
"Find hidden files and directories"
"Find empty files and directories"
"Check SIP conformity of directories"
"Look for files bigger than 2 GB"
"Find duplicates"
"Find archive files by MIME type"
"Quit"
)
# Interactive menu. Note that only "Perform all checks and exit" and
# "Quit" break out of the loop; every other choice runs its checks and
# then redisplays the menu — NOTE(review): presumably intentional so the
# user can run several checks in one session; confirm.
select choice in "${option[@]}"
do
case $choice in
"Perform all checks and exit")
fileNamePolicy
dirNamePolicy
hiddenSystemFiles
hiddenFilesDirs
emptyFilesDirs
sipStructure
bigFiles
archiveFiles
dubCheck
break
;;
"Check everything but duplicates")
fileNamePolicy
dirNamePolicy
hiddenSystemFiles
hiddenFilesDirs
emptyFilesDirs
sipStructure
bigFiles
archiveFiles
;;
"Check everything but duplicates and archives")
fileNamePolicy
dirNamePolicy
hiddenSystemFiles
hiddenFilesDirs
emptyFilesDirs
sipStructure
bigFiles
;;
"Find forbidden characters in filenames")
fileNamePolicy
dirNamePolicy
;;
"Find the usual hidden system files")
hiddenSystemFiles
;;
"Find hidden files and directories")
hiddenFilesDirs
;;
"Find empty files and directories")
emptyFilesDirs
;;
"Check SIP conformity of directories")
sipStructure
;;
"Look for files bigger than 2 GB")
bigFiles
;;
"Find duplicates")
dubCheck
;;
"Find archive files by MIME type")
archiveFiles
;;
"Quit")
break
;;
esac
done
#########
# Cleanup
# Remove temporary files, if any were produced by the checks above.
if [[ $( ls "$output"/tmp_*.txt 2>/dev/null ) ]]
then
	echo
	echo "Removing temporary files."
	rm "$output"/tmp_*.txt
fi
# Remove empty reports.
# BUG FIX: "$file" is now quoted in the test and the rm so report paths
# containing spaces (e.g. an output dir with a space in its name) do not
# undergo word splitting.
for file in "$output"/*.txt "$output"/*.csv
do
	if [ ! -s "$file" ]
	then
		rm "$file" 2>/dev/null
	fi
done
# Remove the output subdir if it is empty; in that case there is nothing
# to report and PIA exits immediately. ($output quoted for the same reason.)
rmdir "$output" 2>/dev/null && { printf "\nNothing to report. Exiting PIA.\n"; exit 0; }
echo
echo "Exiting PIA. Please check reports in"
echo "$output"
echo
exit 0
|
<gh_stars>0
package main
import (
"embed"
"html/template"
"io/fs"
"net/http"
"github.com/gin-gonic/gin"
)
// embeddedFiles holds the assets/ and templates/ directory trees compiled
// into the binary at build time.
//
//go:embed assets templates
var embeddedFiles embed.FS
// main wires up a Gin router that renders HTML templates and serves static
// assets from the embedded filesystem, then listens on :8080.
func main() {
	router := gin.Default()

	// Parse every embedded template under templates/ and register the set.
	templ := template.Must(template.New("").ParseFS(embeddedFiles, "templates/*"))
	router.SetHTMLTemplate(templ)

	// NOTE(review): this exposes the entire embedded FS (including the
	// templates/ tree) under /public — confirm that is intended rather
	// than only the assets/ subtree.
	router.StaticFS("/public", http.FS(embeddedFiles))

	// Serve the favicon out of the embedded assets (see FaviconFS).
	router.GET("/favicon.png", func(c *gin.Context) {
		c.FileFromFS(".", FaviconFS())
	})

	// Landing page rendered from index.tmpl.
	router.GET("/", func(c *gin.Context) {
		c.HTML(http.StatusOK, "index.tmpl", gin.H{
			"title": "Hello Gin!",
		})
	})

	// Health/demo page rendered from ping.tmpl.
	router.GET("/ping", func(c *gin.Context) {
		c.HTML(http.StatusOK, "ping.tmpl", gin.H{
			"title": "Pong",
		})
	})

	// Blocks serving HTTP on :8080; Run's error return is ignored here.
	router.Run(":8080")
}
// FaviconFS exposes the embedded favicon as an http.FileSystem for use with
// gin's FileFromFS helper; it panics if the embedded path is invalid.
func FaviconFS() http.FileSystem {
	// NOTE(review): fs.Sub is given a file path here, and the handler then
	// requests "." from the result. The usual pattern subs the "assets"
	// directory and requests "favicon.png" — verify this variant actually
	// serves the image.
	sub, err := fs.Sub(embeddedFiles, "assets/favicon.png")
	if err != nil {
		panic(err)
	}
	return http.FS(sub)
}
|
#!/bin/sh
### BEGIN INIT INFO
# Provides: alignment
# Required-Start: mountkernfs
# Required-Stop: mountkernfs
# Default-Start: S
# Default-Stop:
### END INIT INFO
|
package catalog
import (
"encoding/json"
"errors"
"fmt"
"sync"
"time"
log "github.com/sirupsen/logrus"
"k8s.io/api/core/v1"
v1beta1ext "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1beta1"
k8serrors "k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/tools/cache"
catsrcv1alpha1 "github.com/operator-framework/operator-lifecycle-manager/pkg/api/apis/catalogsource/v1alpha1"
csvv1alpha1 "github.com/operator-framework/operator-lifecycle-manager/pkg/api/apis/clusterserviceversion/v1alpha1"
"github.com/operator-framework/operator-lifecycle-manager/pkg/api/apis/installplan/v1alpha1"
subscriptionv1alpha1 "github.com/operator-framework/operator-lifecycle-manager/pkg/api/apis/subscription/v1alpha1"
"github.com/operator-framework/operator-lifecycle-manager/pkg/api/client"
"github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/clientset/versioned"
"github.com/operator-framework/operator-lifecycle-manager/pkg/api/client/informers/externalversions"
"github.com/operator-framework/operator-lifecycle-manager/pkg/controller/registry"
"github.com/operator-framework/operator-lifecycle-manager/pkg/lib/queueinformer"
"k8s.io/client-go/util/workqueue"
)
// Resource kinds the catalog operator creates directly while executing an
// InstallPlan (CSVs are handled through their own typed client instead).
const (
	crdKind    = "CustomResourceDefinition"
	secretKind = "Secret"
)

// timeNow is indirected for test stubbing and to standardize all recorded
// timestamps to UTC.
var timeNow = func() metav1.Time { return metav1.NewTime(time.Now().UTC()) }
// Operator represents a Kubernetes operator that executes InstallPlans by
// resolving dependencies in a catalog.
type Operator struct {
	*queueinformer.Operator

	// client is the typed clientset for OLM custom resources.
	client versioned.Interface
	// namespace is the namespace the operator itself runs in; catalog
	// ConfigMaps and source-level Secrets are looked up here.
	namespace string
	// sources maps CatalogSource name to its in-memory registry; reads and
	// writes must hold sourcesLock. sourcesLastUpdate records the most
	// recent (re)load, in UTC (see timeNow).
	sources           map[string]registry.Source
	sourcesLock       sync.Mutex
	sourcesLastUpdate metav1.Time
}
// NewOperator creates a new Catalog Operator: it builds a client for the
// OLM CRs, sets up per-namespace informers for InstallPlans and
// Subscriptions plus a shared informer for CatalogSources, and registers a
// queueinformer for each so the sync handlers below get driven.
func NewOperator(kubeconfigPath string, wakeupInterval time.Duration, operatorNamespace string, watchedNamespaces ...string) (*Operator, error) {
	// Default to watching all namespaces.
	if watchedNamespaces == nil {
		watchedNamespaces = []string{metav1.NamespaceAll}
	}

	// Create a new client for ALM types (CRs).
	crClient, err := client.NewClient(kubeconfigPath)
	if err != nil {
		return nil, err
	}

	sharedInformerFactory := externalversions.NewSharedInformerFactory(crClient, wakeupInterval)

	// Create InstallPlan and Subscription informers for each watched namespace.
	ipSharedIndexInformers := []cache.SharedIndexInformer{}
	subSharedIndexInformers := []cache.SharedIndexInformer{}
	for _, namespace := range watchedNamespaces {
		nsInformerFactory := externalversions.NewFilteredSharedInformerFactory(crClient, wakeupInterval, namespace, nil)
		ipSharedIndexInformers = append(ipSharedIndexInformers, nsInformerFactory.Installplan().V1alpha1().InstallPlans().Informer())
		subSharedIndexInformers = append(subSharedIndexInformers, nsInformerFactory.Subscription().V1alpha1().Subscriptions().Informer())
	}

	// Create a new queueinformer-based operator.
	queueOperator, err := queueinformer.NewOperator(kubeconfigPath)
	if err != nil {
		return nil, err
	}

	// Allocate the new instance of an Operator.
	op := &Operator{
		Operator:  queueOperator,
		client:    crClient,
		namespace: operatorNamespace,
		sources:   make(map[string]registry.Source),
	}

	// Register CatalogSource informers (single shared informer, all namespaces).
	catsrcQueue := workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "catalogsources")
	catsrcQueueInformer := queueinformer.New(
		catsrcQueue,
		[]cache.SharedIndexInformer{
			sharedInformerFactory.Catalogsource().V1alpha1().CatalogSources().Informer(),
		},
		op.syncCatalogSources,
		nil,
	)
	for _, informer := range catsrcQueueInformer {
		op.RegisterQueueInformer(informer)
	}

	// Register InstallPlan informers.
	ipQueue := workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "installplans")
	ipQueueInformers := queueinformer.New(
		ipQueue,
		ipSharedIndexInformers,
		op.syncInstallPlans,
		nil,
	)
	for _, informer := range ipQueueInformers {
		op.RegisterQueueInformer(informer)
	}

	// Register Subscription informers.
	subscriptionQueue := workqueue.NewNamedRateLimitingQueue(workqueue.DefaultControllerRateLimiter(), "subscriptions")
	subscriptionQueueInformers := queueinformer.New(
		subscriptionQueue,
		subSharedIndexInformers,
		op.syncSubscriptions,
		nil,
	)
	for _, informer := range subscriptionQueueInformers {
		op.RegisterQueueInformer(informer)
	}
	return op, nil
}
// syncCatalogSources handles a CatalogSource event: it (re)loads an
// in-memory registry from the ConfigMap the CatalogSource references and
// stores it in the operator's source map under the CatalogSource's name,
// updating the last-update timestamp.
func (o *Operator) syncCatalogSources(obj interface{}) (syncError error) {
	catsrc, ok := obj.(*catsrcv1alpha1.CatalogSource)
	if !ok {
		log.Debugf("wrong type: %#v", obj)
		return fmt.Errorf("casting CatalogSource failed")
	}

	// Build the in-memory registry from the referenced ConfigMap.
	newSource, loadErr := registry.NewInMemoryFromConfigMap(o.OpClient, o.namespace, catsrc.Spec.ConfigMap)
	if loadErr != nil {
		return fmt.Errorf("failed to create catalog source from ConfigMap %s: %s", catsrc.Spec.ConfigMap, loadErr)
	}

	// Publish the freshly loaded source under the map lock.
	o.sourcesLock.Lock()
	defer o.sourcesLock.Unlock()
	o.sources[catsrc.GetName()] = newSource
	o.sourcesLastUpdate = timeNow()
	return nil
}
// syncSubscriptions handles a Subscription event: it delegates the actual
// transition to syncSubscription and, if that produced an updated object,
// stamps LastUpdated and writes the status back to the API server. A
// failed status write is surfaced as (or appended to) the returned error.
func (o *Operator) syncSubscriptions(obj interface{}) (syncError error) {
	sub, ok := obj.(*subscriptionv1alpha1.Subscription)
	if !ok {
		log.Debugf("wrong type: %#v", obj)
		return fmt.Errorf("casting Subscription failed")
	}

	log.Infof("syncing Subscription with catalog %s: %s on channel %s",
		sub.Spec.CatalogSource, sub.Spec.Package, sub.Spec.Channel)

	var updatedSub *subscriptionv1alpha1.Subscription
	updatedSub, syncError = o.syncSubscription(sub)

	if updatedSub != nil {
		updatedSub.Status.LastUpdated = timeNow()
		// Update Subscription with status of transition. Log errors if we
		// can't write them to the status.
		if _, err := o.client.SubscriptionV1alpha1().Subscriptions(updatedSub.GetNamespace()).Update(updatedSub); err != nil {
			updateErr := errors.New("error updating Subscription status: " + err.Error())
			if syncError == nil {
				log.Info(updateErr)
				return updateErr
			}
			// Both the transition and the status write failed: report both.
			syncError = fmt.Errorf("error transitioning Subscription: %s and error updating Subscription status: %s", syncError, updateErr)
			log.Info(syncError)
		}
	}
	return
}
// syncInstallPlans handles an InstallPlan event: it advances the plan one
// phase via transitionInstallPlanState and then writes the (mutated) plan
// status back to the API server. A failed status write is surfaced as (or
// appended to) the returned error.
func (o *Operator) syncInstallPlans(obj interface{}) (syncError error) {
	plan, ok := obj.(*v1alpha1.InstallPlan)
	if !ok {
		log.Debugf("wrong type: %#v", obj)
		return fmt.Errorf("casting InstallPlan failed")
	}

	log.Infof("syncing InstallPlan: %s", plan.SelfLink)

	syncError = transitionInstallPlanState(o, plan)

	// Update InstallPlan with status of transition. Log errors if we can't
	// write them to the status.
	if _, err := o.client.InstallplanV1alpha1().InstallPlans(plan.GetNamespace()).Update(plan); err != nil {
		updateErr := errors.New("error updating InstallPlan status: " + err.Error())
		if syncError == nil {
			log.Info(updateErr)
			return updateErr
		}
		// Both the transition and the status write failed: report both.
		syncError = fmt.Errorf("error transitioning InstallPlan: %s and error updating InstallPlan status: %s", syncError, updateErr)
		log.Info(syncError)
	}
	return
}
// installPlanTransitioner abstracts the two phase operations an InstallPlan
// undergoes (resolution and execution), so transitionInstallPlanState can
// be exercised with a fake implementation in tests.
type installPlanTransitioner interface {
	ResolvePlan(*v1alpha1.InstallPlan) error
	ExecutePlan(*v1alpha1.InstallPlan) error
}

// Compile-time assertion that *Operator satisfies installPlanTransitioner.
var _ installPlanTransitioner = &Operator{}
// transitionInstallPlanState advances plan by at most one phase per call:
// None -> Planning, Planning -> Installing (via ResolvePlan), and
// Installing -> Complete (via ExecutePlan). On a resolve/execute error the
// plan is moved to Failed with a matching condition and the error is
// returned; phases other than these three are left untouched.
func transitionInstallPlanState(transitioner installPlanTransitioner, plan *v1alpha1.InstallPlan) error {
	switch plan.Status.Phase {
	case v1alpha1.InstallPlanPhaseNone:
		log.Debug("plan phase unrecognized, setting to Planning")
		plan.Status.Phase = v1alpha1.InstallPlanPhasePlanning
		return nil

	case v1alpha1.InstallPlanPhasePlanning:
		log.Debug("plan phase Planning, attempting to resolve")
		if err := transitioner.ResolvePlan(plan); err != nil {
			plan.Status.SetCondition(v1alpha1.ConditionFailed(v1alpha1.InstallPlanResolved,
				v1alpha1.InstallPlanReasonDependencyConflict, err))
			plan.Status.Phase = v1alpha1.InstallPlanPhaseFailed
			return err
		}
		plan.Status.SetCondition(v1alpha1.ConditionMet(v1alpha1.InstallPlanResolved))
		plan.Status.Phase = v1alpha1.InstallPlanPhaseInstalling
		return nil

	case v1alpha1.InstallPlanPhaseInstalling:
		log.Debug("plan phase Installing, attempting to install")
		if err := transitioner.ExecutePlan(plan); err != nil {
			plan.Status.SetCondition(v1alpha1.ConditionFailed(v1alpha1.InstallPlanInstalled,
				v1alpha1.InstallPlanReasonComponentFailed, err))
			plan.Status.Phase = v1alpha1.InstallPlanPhaseFailed
			return err
		}
		plan.Status.SetCondition(v1alpha1.ConditionMet(v1alpha1.InstallPlanInstalled))
		plan.Status.Phase = v1alpha1.InstallPlanPhaseComplete
		return nil

	default:
		// Terminal or unknown phases (Complete, Failed, ...) are left as-is.
		return nil
	}
}
// ResolvePlan modifies an InstallPlan to contain a Plan in its Status field.
// It tries each registered catalog source in turn; the first source that
// resolves every CSV wins, and any Secrets its CatalogSource declares are
// prepended to the plan as steps. If no source resolves the plan, the last
// not-found error is returned. Panics if the plan is not in Planning phase.
func (o *Operator) ResolvePlan(plan *v1alpha1.InstallPlan) error {
	if plan.Status.Phase != v1alpha1.InstallPlanPhasePlanning {
		panic("attempted to create a plan that wasn't in the planning phase")
	}

	// BUG FIX: take sourcesLock before reading o.sources at all. The
	// original checked len(o.sources) before acquiring the lock, racing
	// with the map writes in syncCatalogSources.
	o.sourcesLock.Lock()
	defer o.sourcesLock.Unlock()

	if len(o.sources) == 0 {
		return fmt.Errorf("cannot resolve InstallPlan without any Catalog Sources")
	}

	var notFoundErr error
	for sourceName, source := range o.sources {
		log.Debugf("resolving against source %v", sourceName)
		plan.EnsureCatalogSource(sourceName)
		notFoundErr = resolveInstallPlan(sourceName, source, plan)
		if notFoundErr != nil {
			// This source could not resolve the plan; try the next one.
			continue
		}

		// Look up the CatalogSource to find its declared Secrets.
		catsrc, err := o.client.CatalogsourceV1alpha1().CatalogSources(o.namespace).Get(sourceName, metav1.GetOptions{})
		if err != nil {
			return err
		}
		for _, secretName := range catsrc.Spec.Secrets {
			// Attempt to look up the secret in the plan's namespace to
			// record whether it already exists there.
			_, err := o.OpClient.KubernetesInterface().CoreV1().Secrets(plan.Namespace).Get(secretName, metav1.GetOptions{})
			status := v1alpha1.StepStatusUnknown
			if k8serrors.IsNotFound(err) {
				status = v1alpha1.StepStatusNotPresent
			} else if err == nil {
				status = v1alpha1.StepStatusPresent
			} else {
				return err
			}

			// Prepend any required secrets to the plan for that Catalog Source.
			plan.Status.Plan = append([]v1alpha1.Step{{
				Resolving: "",
				Resource: v1alpha1.StepResource{
					Name:    secretName,
					Kind:    "Secret",
					Group:   "",
					Version: "v1",
				},
				Status: status,
			}}, plan.Status.Plan...)
		}
		return nil
	}
	return notFoundErr
}
// resolveCRDDescription resolves one CRD description against a catalog
// source. For an owned CRD it returns a step resource creating the CRD
// (labeled with the catalog name). For a required-but-not-owned CRD it
// returns no step and instead the name of the latest CSV that owns the
// CRD, so the caller can pull that CSV into the plan.
func resolveCRDDescription(crdDesc csvv1alpha1.CRDDescription, sourceName string, source registry.Source, owned bool) (v1alpha1.StepResource, string, error) {
	log.Debugf("resolving %#v", crdDesc)

	crdKey := registry.CRDKey{
		Kind:    crdDesc.Kind,
		Name:    crdDesc.Name,
		Version: crdDesc.Version,
	}

	crd, err := source.FindCRDByKey(crdKey)
	if err != nil {
		return v1alpha1.StepResource{}, "", err
	}
	log.Debugf("found %#v", crd)

	if owned {
		// Label the CRD with the catalog source it came from.
		labels := crd.GetLabels()
		if labels == nil {
			labels = map[string]string{}
		}
		labels[CatalogLabel] = sourceName
		crd.SetLabels(labels)
		// Add a step that creates the CRD itself.
		step, err := v1alpha1.NewStepResourceFromCRD(crd)
		return step, "", err
	}

	// Not owned: find which CSV in the catalog provides this CRD.
	csvs, err := source.ListLatestCSVsForCRD(crdKey)
	if err != nil {
		return v1alpha1.StepResource{}, "", err
	}
	if len(csvs) == 0 {
		return v1alpha1.StepResource{}, "", fmt.Errorf("Unknown CRD %s", crdKey)
	}

	// TODO: Change to lookup the CSV from the preferred or default channel.
	log.Infof("found %v owner %s", crdKey, csvs[0].CSV.Name)
	return v1alpha1.StepResource{}, csvs[0].CSV.Name, nil
}
// stepResourceMap groups planned step resources by the name of the CSV
// that caused them to be resolved.
type stepResourceMap map[string][]v1alpha1.StepResource

// Plan flattens the map into a slice of InstallPlan steps, each tagged
// with the CSV it is resolving and an initial status of StepStatusUnknown.
func (srm stepResourceMap) Plan() []v1alpha1.Step {
	flattened := make([]v1alpha1.Step, 0)
	for owner, resources := range srm {
		for _, resource := range resources {
			flattened = append(flattened, v1alpha1.Step{
				Resolving: owner,
				Resource:  resource,
				Status:    v1alpha1.StepStatusUnknown,
			})
		}
	}
	return flattened
}
// Combine merges the entries of other into srm in place; CSV names already
// present in srm keep their existing steps (other's are skipped as redundant).
func (srm stepResourceMap) Combine(other stepResourceMap) {
	for csvName, resources := range other {
		if _, alreadyExists := srm[csvName]; !alreadyExists {
			srm[csvName] = resources
		}
	}
}
// resolveCSV resolves a CSV and its transitive CRD dependencies against a
// single catalog source, using a worklist: required CRDs owned by other
// CSVs push those CSVs onto the list. It returns a map from each resolved
// CSV name to the step resources (CRDs plus the CSV itself, namespaced and
// labeled with the catalog name) needed to install it.
func resolveCSV(csvName, namespace, sourceName string, source registry.Source) (stepResourceMap, error) {
	log.Debugf("resolving CSV with name: %s", csvName)

	steps := make(stepResourceMap)
	csvNamesToBeResolved := []string{csvName}

	for len(csvNamesToBeResolved) != 0 {
		// Pop off a CSV name.
		currentName := csvNamesToBeResolved[0]
		csvNamesToBeResolved = csvNamesToBeResolved[1:]

		// If this CSV is already resolved, continue.
		if _, exists := steps[currentName]; exists {
			continue
		}

		// Get the full CSV object for the name.
		csv, err := source.FindCSVByName(currentName)
		if err != nil {
			return nil, err
		}
		log.Debugf("found %#v", csv)

		// Resolve each owned or required CRD for the CSV.
		for _, crdDesc := range csv.GetAllCRDDescriptions() {
			step, owner, err := resolveCRDDescription(crdDesc, sourceName, source, csv.OwnsCRD(crdDesc.Name))
			if err != nil {
				return nil, err
			}

			// If a different owner was resolved, queue it for resolution
			// instead of adding a step here.
			if owner != "" && owner != currentName {
				csvNamesToBeResolved = append(csvNamesToBeResolved, owner)
				continue
			}

			// Add the resolved step to the plan.
			steps[currentName] = append(steps[currentName], step)
		}

		// Manually override the namespace and create the final step for the
		// CSV, which is for the CSV itself.
		csv.SetNamespace(namespace)

		// Add the source name as a label on the CSV, so that we know where
		// it came from.
		labels := csv.GetLabels()
		if labels == nil {
			labels = map[string]string{}
		}
		labels[CatalogLabel] = sourceName
		csv.SetLabels(labels)

		step, err := v1alpha1.NewStepResourceFromCSV(csv)
		if err != nil {
			return nil, err
		}

		// Add the final step for the CSV to the plan.
		log.Infof("finished step: %v", step)
		steps[currentName] = append(steps[currentName], step)
	}
	return steps, nil
}
// resolveInstallPlan resolves every CSV named in the plan's spec against
// one catalog source, merges the per-CSV step maps, and writes the
// flattened step list into the plan's status. The first resolution error
// aborts and is returned.
func resolveInstallPlan(sourceName string, source registry.Source, plan *v1alpha1.InstallPlan) error {
	resolved := make(stepResourceMap)
	for _, csvName := range plan.Spec.ClusterServiceVersionNames {
		stepsForCSV, err := resolveCSV(csvName, plan.Namespace, sourceName, source)
		if err != nil {
			return err
		}
		resolved.Combine(stepsForCSV)
	}
	plan.Status.Plan = resolved.Plan()
	return nil
}
// ExecutePlan applies a planned InstallPlan to a namespace: for every step
// not yet Present/Created it creates the resource (CRD, CSV or Secret),
// marking the step Created on success or Present if it already existed.
// Any other API error aborts the run; unrecognized kinds/statuses yield
// ErrInvalidInstallPlan. Panics if the plan is not in Installing phase.
func (o *Operator) ExecutePlan(plan *v1alpha1.InstallPlan) error {
	if plan.Status.Phase != v1alpha1.InstallPlanPhaseInstalling {
		panic("attempted to install a plan that wasn't in the installing phase")
	}

	for i, step := range plan.Status.Plan {
		switch step.Status {
		case v1alpha1.StepStatusPresent, v1alpha1.StepStatusCreated:
			// Already applied in a previous pass; nothing to do.
			continue

		case v1alpha1.StepStatusUnknown, v1alpha1.StepStatusNotPresent:
			log.Debugf("resource kind: %s", step.Resource.Kind)
			switch step.Resource.Kind {
			case crdKind:
				// Unmarshal the manifest into a CRD instance.
				var crd v1beta1ext.CustomResourceDefinition
				err := json.Unmarshal([]byte(step.Resource.Manifest), &crd)
				if err != nil {
					return err
				}

				// Attempt to create the CRD.
				err = o.OpClient.CreateCustomResourceDefinition(&crd)
				if k8serrors.IsAlreadyExists(err) {
					// If it already existed, mark the step as Present.
					plan.Status.Plan[i].Status = v1alpha1.StepStatusPresent
					continue
				} else if err != nil {
					return err
				} else {
					// If no error occurred, mark the step as Created.
					plan.Status.Plan[i].Status = v1alpha1.StepStatusCreated
					continue
				}

			case csvv1alpha1.ClusterServiceVersionKind:
				// Unmarshal the manifest into a CSV instance.
				var csv csvv1alpha1.ClusterServiceVersion
				err := json.Unmarshal([]byte(step.Resource.Manifest), &csv)
				if err != nil {
					return err
				}

				// Attempt to create the CSV.
				_, err = o.client.ClusterserviceversionV1alpha1().ClusterServiceVersions(csv.GetNamespace()).Create(&csv)
				if k8serrors.IsAlreadyExists(err) {
					// If it already existed, mark the step as Present.
					plan.Status.Plan[i].Status = v1alpha1.StepStatusPresent
				} else if err != nil {
					return err
				} else {
					// If no error occurred, mark the step as Created.
					plan.Status.Plan[i].Status = v1alpha1.StepStatusCreated
				}

			case secretKind:
				// Get the pre-existing secret.
				// NOTE(review): the source secret is read from o.namespace
				// (the operator's namespace), then copied into the plan's
				// namespace — confirm that is the intended source.
				secret, err := o.OpClient.KubernetesInterface().CoreV1().Secrets(o.namespace).Get(step.Resource.Name, metav1.GetOptions{})
				if k8serrors.IsNotFound(err) {
					return fmt.Errorf("secret %s does not exist", step.Resource.Name)
				} else if err != nil {
					return err
				}

				// Set the namespace to the InstallPlan's namespace and attempt
				// to create a new secret.
				secret.Namespace = plan.Namespace
				_, err = o.OpClient.KubernetesInterface().CoreV1().Secrets(plan.Namespace).Create(&v1.Secret{
					ObjectMeta: metav1.ObjectMeta{
						Name:      secret.Name,
						Namespace: plan.Namespace,
					},
					Data: secret.Data,
					Type: secret.Type,
				})
				if k8serrors.IsAlreadyExists(err) {
					// If it already existed, mark the step as Present.
					plan.Status.Plan[i].Status = v1alpha1.StepStatusPresent
				} else if err != nil {
					return err
				} else {
					// If no error occurred, mark the step as Created.
					plan.Status.Plan[i].Status = v1alpha1.StepStatusCreated
				}

			default:
				return v1alpha1.ErrInvalidInstallPlan
			}

		default:
			return v1alpha1.ErrInvalidInstallPlan
		}
	}

	// Loop over one final time to check and see if everything is good.
	for _, step := range plan.Status.Plan {
		switch step.Status {
		case v1alpha1.StepStatusCreated, v1alpha1.StepStatusPresent:
		default:
			return nil
		}
	}

	return nil
}
|
# pyenv是 python版本管理工具
# 可以改变全局的 python版本,安装多版本 python,可以设置目录级别的 python版本
# 还能创建和管理 virtual python environments
# 所有设置都是有用户级别的操作,不需要 sudo
# pyenv通过系统修改环境变量来实现 python不同版本的切换
# virtualenv通过将 python包安装到一个目录来作为 python包虚拟环境,通过切换目录来实现不同包环境间的切换
# pyenv没有采用将不同的 PATH植入不同的 shell这种高耦合的工作方式,而是简单地在 PATH的最前面插入了一个垫片路径(shims)
# 例如: ~/.pyenv/shims:/usr/local/bin:/usr/bin:/bin
# 所有对 python可执行文件的查找会首先被这个 shims路径截获,从而使后方的系统路径失效
# macos安装
brew install pyenv
# 在 .zshrc之类的文件中加入
export PATH="$HOME/.pyenv/bin:$PATH"
# 常用命令
pyenv versions # 查看本机安装版本,星号表示当前正在使用的 python版本
pyenv install -l # 查看可安装版本
pyenv install 2.7.17 # 安装 python
pyenv uninstall 2.7.17 # 卸载 python
# python切换
pyenv global 2.7.17 # 设置全局的 python版本,通过将版本号写入 ~/.pyenv/version文件的方式
pyenv local 2.7.17 # 设置 python本地版本,通过将版本号写入当前目录下的 .python-version文件的方式
# 以local方式设置的版本比 global高
pyenv shell 2.7.17 # 设置面向 shell的 python版本
pyenv shell --unset # 取消当前 shell设定的版本
pyenv rehash # 创建垫片路径(为所有已安装的可执行文件创建 shims)
# 增删 python版本或带有可执行文件的包(如 pip)以后,都需要执行一次命令
# pyenv-virtualenv
# pyenv的插件,通过 pyenv-virtualenv插件可以很好的和 virtualenv结合
# macos安装
brew install pyenv-virtualenv
# 在 .zshrc加入
eval "$(pyenv init -)"
eval "$(pyenv virtualenv-init -)"
# 创建虚拟环境
pyenv virtualenv 2.7.17 env-name
# 不指定 python版本,将使用默认的版本
# 列出当前虚拟环境
pyenv virtualenvs
pyenv activate env-name # 激活虚拟环境
pyenv deactivate # 退出虚拟环境,回到系统环境
pyenv virtualenv-delete [-f|--force] env-name # 删除相应的虚拟环境
pyenv uninstall env-name # 删除相应的虚拟环境
|
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import javax.swing.JButton;
/**
 * A single cell of the tic-tac-toe board. Clicking an empty cell stamps it
 * with "X" or "O" depending on whose turn it is ({@code TicTacToe.turn == 1}
 * means X), notifies the game logic, and flips the turn marker.
 */
public class Grid_button extends JButton {
	/** Serialization id required by JButton's Serializable contract. */
	private static final long serialVersionUID = 1L;

	// Grid coordinates of this cell on the board.
	private final int fX;
	private final int fY;

	/**
	 * Creates the cell at column {@code x}, row {@code y} and installs the
	 * click handler.
	 */
	public Grid_button(final int x, final int y) {
		fX = x;
		fY = y;
		addActionListener(new ActionListener() {
			@Override
			public void actionPerformed(ActionEvent e) {
				// BUG FIX: the original tested getText()=="" which compares
				// String *identity*, not content; use isEmpty() to check
				// whether the cell is still unmarked.
				boolean marked = false;
				if (TicTacToe.turn == 1 && getText().isEmpty()) {
					setText("X");
					marked = true;
				} else if (getText().isEmpty()) {
					setText("O");
					marked = true;
				}
				if (marked) {
					try {
						// Let the game logic evaluate the move.
						TicTacToe.user_vs_user(fX, fY);
					} catch (InterruptedException e1) {
						e1.printStackTrace();
					}
					// Hand the turn to the other player (1 <-> -1).
					TicTacToe.turn = TicTacToe.turn * -1;
				}
			}
		});
	}
}
|
/// Builds the diagnostic for an invalid (or missing) stdlib module name.
/// The wording depends on whether the user supplied the name explicitly.
static func error_stdlib_module_name(
    moduleName: String,
    explicitModuleName: Bool
) -> Diagnostic.Message {
    let suffix = explicitModuleName
        ? " is not a valid identifier"
        : " is missing and not a valid identifier"
    return .error("module name \"\(moduleName)\"\(suffix)")
}
-- Account records; articles (and indirectly comments) hang off this table.
CREATE TABLE users (
    id SERIAL PRIMARY KEY,
    username VARCHAR(255) NOT NULL,
    email VARCHAR(255) NOT NULL,
    password VARCHAR(255) NOT NULL
);
-- Articles authored by a user (user_id -> users.id).
CREATE TABLE articles (
    id SERIAL PRIMARY KEY,
    title VARCHAR(255) NOT NULL,
    body TEXT NOT NULL,
    user_id INTEGER NOT NULL REFERENCES users(id)
);
-- Comments attached to an article. Note the commenter is stored as free
-- text, not as a reference into users.
CREATE TABLE comments (
    id SERIAL PRIMARY KEY,
    commenter VARCHAR(255) NOT NULL,
    body TEXT NOT NULL,
    article_id INTEGER NOT NULL REFERENCES articles(id)
);
set -e
# Run ESLint over every .js file outside node_modules.
# FIX: plain `find | xargs` splits on whitespace, so filenames containing
# spaces or newlines are mangled; -print0 together with xargs -0 passes
# each path through intact.
find . -type f -name "*.js" -not -path "*node_modules*" -print0 \
    | xargs -0 eslint
|
<reponame>CloudsArk/loganalyzer
/*
Copyright © 2020 NAME HERE <EMAIL ADDRESS>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cmd
import (
"github.com/cloudsark/loganalyzer/logalizer"
"github.com/spf13/cobra"
)
// agentCmd implements the "agent" subcommand, which prints the top 10
// User-Agent values found in the log file (see Short below).
var agentCmd = &cobra.Command{
	Use:   "agent",
	Short: "Print top 10 agents",
	Long: `The User-Agent request header is a characteristic string that lets servers and network peers identify
the application, operating system, vendor, and/or version of the requesting user agent.`,
	Run: func(cmd *cobra.Command, args []string) {
		// filename is a package-level variable defined elsewhere in this
		// package (presumably bound to a --filename flag — confirm there).
		logalizer.TopAgentCmd(&filename)
	},
}
// init registers the agent subcommand under the parent "top" command.
func init() {
	topCmd.AddCommand(agentCmd)
}
|
#!/bin/bash
# In the name of God, the Compassionate, the Merciful
# Manix (c) 2021 Mani Jamali; Freedom at all
#
# Copies the calculator example over kernel.c and rebuilds the kernel.
filename=003-calculator.c
# FIX: abort if the directory change fails instead of silently copying and
# building from the wrong location.
cd .. || exit 1
# -i keeps the original interactive overwrite prompt; the path is quoted so
# a filename containing spaces survives word-splitting.
cp -i "examples/$filename" kernel.c
sh build.sh
## Create an EC2 Instance / VM
# NOTE(review): placeholder implementation — it only echoes; the actual
# `aws ec2 run-instances` call still needs to be written.
ec2_create() {
    echo 'hi'
}
# List all EC2 instances, projecting just the fields of interest:
# public IP, key-pair name, first EBS volume id, and security-group names.
# Requires the AWS CLI to be installed and credentials configured.
list_ec2_instances() {
    aws ec2 describe-instances --query 'Reservations[*].Instances[*]. { IP:PublicIpAddress, key_name:KeyName, ebs_volume:BlockDeviceMappings[0].Ebs.VolumeId, sgs: SecurityGroups[*].GroupName}'
}
|
import {
requiredData,
textValidate,
numValidate,
textNumberValidate,
rfcValidate,
rfcValidateClave,
emailValidate,
commonValidate,
comboValidate,
radioButtonValidate,
dateValidateFormat,
validationCheckBox,
validationDateRFC,
textAreSpecialValidate,
specialCharacterInValidate } from './functions.js'
/**
 * Routes a form-field value to the validator matching the `type` code (all
 * validators live in './functions.js') and returns that validator's result.
 *
 * @param {Object} data bundle holding the value under test plus everything
 *        the individual validators need (error holders, ids, titles, regex,
 *        companion date value, etc.).
 * @returns {{status: boolean, error: string}} the selected validator's
 *          result, or the default `{ status: false, error: "error" }` when
 *          the `type` code is unknown.
 */
export const SWITCH_VALIDATIONS = data => {
    let { value, type, varError, expRegular, message, id, valueDate, titleRFC,
        titleDate, varErrorDate, focusDate, title } = data;
    // Fallback result used when no case matches.
    let result = { status: false, error: "error" };
    switch ( type ) {
        case "R": // Required
            result = requiredData( value, varError, id, title );
            break;
        case "T": // Text
            result = textValidate( value, varError, id, title )
            break;
        case "N": // Number
            result = numValidate( value, varError, id, title );
            break;
        case "TN": // Text and number
            result = textNumberValidate( value, varError, id, title );
            break;
        case "RFC": // RFC
            // NOTE(review): `title` is passed twice here, unlike every other
            // case — confirm rfcValidate really takes a fifth argument.
            result = rfcValidate( value, varError, id, title, title );
            break;
        case "RFC_CLAVE": // RFC without key
            result = rfcValidateClave( value, varError, id, title );
            break;
        case "RFC_DATE": // RFC vs. date of birth
            result = validationDateRFC( value, valueDate, titleRFC, titleDate, varError, varErrorDate, id, focusDate );
            break;
        case "EMAIL": // Email
            result = emailValidate( value, varError, id, title );
            break;
        case "COMMON": // Generic, regex-driven validation
            result = commonValidate( value, expRegular, varError, message, id, title );
            break;
        case "C": // Combo boxes
            result = comboValidate( value, varError, id, title );
            break;
        case "RB": // Radio buttons
            result = radioButtonValidate( value, varError, id, title );
            break;
        case "DATE": // Date format
            result = dateValidateFormat( value, varError, id, title );
            break;
        case "CHECK": // Checkbox
            result = validationCheckBox( value, varError, id, title );
            break;
        case "TEXT_AREA_SPECIAL": // Special characters, multi-line
            result = textAreSpecialValidate( value, varError, id, title );
            break;
        case "SPECIAL_CHARACTER": // Special characters
            result = specialCharacterInValidate( value, varError, id, title );
            break;
        default:
            console.log("El tipo de validación no existe :(", type)
            break;
    }
    return result;
}
|
# vim: tabstop=4:softtabstop=4:expandtab:shiftwidth=4
# $Id$
#
# Test that the environment of a crashed process is read correctly
# from core dumps
#
# Verify that the environment of a crashed process is read back correctly
# from its core dump (exercises the python plugin's process.environ()).
function test_core_env()
{
    # The test requires the python plugin; skip silently when it is absent.
    if [ -f "../../plugin/zpython.so" ]
    then
        :
    else
        return 0
    fi
    echo ----- ${FUNCNAME}${1:-$debug} ----- >> $logfile
    rm -rf core.*
    # Small program that crashes via a null-pointer write, producing a core.
    cat > foo.cpp << '---end---'
// foo.cpp
void crash_me()
{
    *(int*)0 = 1;
}
int main()
{
    crash_me();
}
---end---
    # Debugger script: print a backtrace, then quit.
    cat > script << '---end---'
call ( where )
call ( quit )
---end---
    # Python hook: dump the core's environment and assert that the two
    # variables injected below survived intact (one value contains '=').
    cat > auto.py << '---end---'
import zero
def on_process(process, thread):
    print '-----',process.name(),'-----'
    env=process.environ()
    for name in env:
        print name,'=',env[name]
    assert env['FOOZY'] == 'bear'
    assert env['WOOZY'] == 'was=he'
---end---
    #compile
    rm -f a.out core*
    build ${1:-$debug} foo.cpp
    # Make sure a core file is actually produced, then crash on purpose with
    # the marker variables in the environment.
    ulimit -c unlimited
    LS_COLORS= FOOZY=bear WOOZY="was=he" ./a.out foozy
    # Core naming differs per platform (core.PID, a.out.core, ...); normalize.
    mv core* core 2>/dev/null || mv -f a.out.core core 2>/dev/null || true
    rm -f $config
    run_debugger core --py-exec=auto.py
}
################################################################
# Run this test standalone
################################################################
function run()
{
    source common.sh
    # icc only gets the default debug format; other compilers are exercised
    # against each supported debug-info flavor.
    if [ "$compiler" = "icc" ]
    then
        test_core_env -g
    else
        test_core_env -gdwarf-2 $@
        test_core_env -gstabs+ $@
        test_core_env -gstabs $@
    fi
}
source suffix.sh
|
/**
 * Returns a copy of `inputArray` with the elements at indices l..r
 * (inclusive) removed. The input array is not mutated.
 */
function removeArrayPart(inputArray, l, r) {
    const before = inputArray.slice(0, l);
    const after = inputArray.slice(r + 1);
    return before.concat(after);
}
console.log(removeArrayPart([2, 3, 2, 3, 4, 5], 2, 4));
|
// Generated AMD output (note the sourceMappingURL): re-exports the static
// merge operator as the module's `merge` export. Edit the original
// TypeScript source, not this build artifact.
define(["require", "exports", '../operator/merge'], function (require, exports, merge_1) {
    "use strict";
    exports.merge = merge_1.mergeStatic;
});
//# sourceMappingURL=merge.js.map
const mongoose = require('mongoose');
const Schema = mongoose.Schema;

// Join document tracking one player's progress on one mission.
const playerMissionSchema = new Schema({
    player: {
        // FIX: `Schema.ObjectID` is undefined in Mongoose (the exported type
        // is `Schema.Types.ObjectId`), so the schema was declared with an
        // undefined type. Use the real ObjectId type.
        type: Schema.Types.ObjectId,
        ref: "Players"
    },
    mission: {
        type: Schema.Types.ObjectId,
        ref: "Missions"
    },
    status: {
        // Mission lifecycle; new documents start in the backlog.
        type: String,
        enum: ['done', 'backlog', 'in_progress'],
        default: 'backlog'
    }
});

// FIX: mongoose.model() already returns the model constructor — calling it
// with `new` only worked by accident (a constructor returning an object).
module.exports = mongoose.model('PlayerMission', playerMissionSchema);
class Student:
    """Simple record of a student: name, age, and enrolled course."""

    def __init__(self, name, age, course):
        self.name = name
        self.age = age
        self.course = course

    def get_name(self):
        """Return the student's name."""
        return self.name

    def get_age(self):
        """Return the student's age."""
        return self.age

    def get_course(self):
        """Return the course the student is enrolled in."""
        return self.course

    def __repr__(self):
        """Unambiguous representation for debugging and logging.

        Added as a backward-compatible convenience; no existing behavior
        changes.
        """
        return (f"Student(name={self.name!r}, age={self.age!r}, "
                f"course={self.course!r})")
import React from 'react';
import {TextInput, View, Text} from 'react-native';
const Input = ({
label,
value,
onChangeText,
placeholder,
secureTextEntr,
keyboardType,
style,
onBlur,
editable,
}) => {
const { labelStyle, inputStyle, containerStyle, } = styles;
return (
<View style={{...containerStyle, ...style}}>
<TextInput
style={inputStyle}
onChangeText={onChangeText}
onBlur={onBlur}
value={value.toString()}
placeholder={placeholder}
keyboardType={keyboardType}
editable={editable}
/>
</View>
)
};
// Inline style objects for the Input component (plain objects rather than
// StyleSheet.create, so containerStyle can be spread and merged with the
// caller-supplied `style` prop).
// NOTE(review): the component also destructures `labelStyle`, which is not
// defined here and resolves to undefined — confirm whether it was intended.
const styles = {
    inputStyle: {
        flex:1,
        fontSize: 22,
        marginLeft: 5,
        marginRight: 5,
        fontFamily: 'Montserrat-Regular',
    },
    containerStyle: {
        backgroundColor: 'rgba(255,255,255,0.8)',
        borderRadius: 5,
        margin: 3,
    }
}
export {Input}; |
<gh_stars>1-10
/*
* (C) Copyright 2017-2018, by <NAME> and Contributors.
*
* JGraphT : a free Java graph-theory library
*
* This program and the accompanying materials are dual-licensed under
* either
*
* (a) the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation, or (at your option) any
* later version.
*
* or (per the licensee's choosing)
*
* (b) the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation.
*/
package org.jgrapht.alg.tour;
import org.jgrapht.Graph;
import org.jgrapht.GraphPath;
import org.jgrapht.alg.interfaces.TSPAlgorithm;
import org.jgrapht.graph.GraphWalk;
import java.util.*;
/**
* A dynamic programming algorithm for the TSP problem.
*
* <p>
* The travelling salesman problem (TSP) asks the following question: "Given a list of cities and
* the distances between each pair of cities, what is the shortest possible route that visits each
* city exactly once and returns to the origin city?".
*
* <p>
* This is an implementation of the Held-Karp algorithm which returns a optimal, minimum-cost Hamiltonian tour.
* The implementation requires the input graph to contain at least one vertex.
* The running time is $O(2^{|V|} \times |V|^2)$ and it takes $O(2^{|V|} \times |V|)$ extra memory.
*
* <p>
* See <a href="https://en.wikipedia.org/wiki/Travelling_salesman_problem">wikipedia</a> for more
* details about TSP.
*
* <p>
* See <a href="https://en.wikipedia.org/wiki/Held%E2%80%93Karp_algorithm">wikipedia</a> for more
* details about the dynamic programming algorithm.
*
* @param <V> the graph vertex type
* @param <E> the graph edge type
*
* @author <NAME>
*/
public class HeldKarpTSP<V, E>
    implements TSPAlgorithm<V, E> {

    /**
     * Construct a new instance
     */
    public HeldKarpTSP() {
    }

    /**
     * Top-down dynamic programming step of Held-Karp.
     *
     * @param previousNode index of the node the partial tour currently ends at
     * @param state bitmask of visited nodes (bit i set means node i visited;
     *        the start node 0 is always part of the state)
     * @param C memo table; Double.MIN_VALUE (the smallest positive double)
     *        is used as the "not yet computed" sentinel, which cannot clash
     *        with a real non-negative tour cost
     * @param W edge-weight matrix; Double.MAX_VALUE marks a missing edge
     * @return minimum cost to complete the tour from this state, or
     *         Double.MAX_VALUE when no completion exists
     */
    private double memo(int previousNode, int state, double[][] C, double[][] W){
        // have we seen this state before?
        if (C[previousNode][state] != Double.MIN_VALUE)
            return C[previousNode][state];

        // no cycle has been found yet
        double totalCost = Double.MAX_VALUE;

        // check if all nodes have been visited (i.e. state + 1 == 2^n)
        if (state == (1 << W.length) - 1){
            // check if there is a return edge we can use
            if (W[previousNode][0] != Double.MAX_VALUE)
                totalCost = W[previousNode][0];
            else
                totalCost = Double.MAX_VALUE;
        }
        else{
            // try to find the 'best' next (i.e. unvisited and adjacent to previousNode) node in the tour
            for (int i = 0; i < W.length; i++) {
                if (((state >> i) & 1) == 0 && W[previousNode][i] != Double.MAX_VALUE){
                    // state ^ (1 << i) sets bit i here, since it is known to be 0
                    totalCost = Math.min(totalCost, W[previousNode][i] + memo(i, state ^ (1 << i), C, W));
                }
            }
        }

        // memoize and return
        return C[previousNode][state] = totalCost;
    }

    /**
     * Computes a minimum-cost Hamiltonian tour.
     *
     * @param graph the input graph
     * @return a minimum-cost tour if one exists, null otherwise
     * @throws IllegalArgumentException if the graph contains no vertices
     * @throws IllegalArgumentException if the graph contains more than 31 vertices
     */
    @Override
    public GraphPath<V, E> getTour(Graph<V, E> graph) {
        final int n = graph.vertexSet().size(); // number of nodes

        if (n == 0) {
            throw new IllegalArgumentException("Graph contains no vertices");
        }

        // The visited-set bitmask is stored in an int, so at most 31 vertices fit.
        if (n > 31){
            throw new IllegalArgumentException("The internal representation of the dynamic programming state " +
                    "space cannot represent graphs containing more than 31 vertices. " +
                    "The runtime complexity of this implementation, O(2^|V| x |V|^2), makes it unsuitable " +
                    "for graphs with more than 31 vertices.");
        }

        // Trivial tour: a single vertex is its own start and end.
        if (n == 1){
            V startNode = graph.vertexSet().iterator().next();
            return new GraphWalk<>(graph, startNode, startNode, Collections.singletonList(startNode), null, 0);
        }

        // W[u, v] = the cost of the minimum weight between u and v
        double[][] W = new double[n][n];

        for (int i = 0; i < n; i++) {
            Arrays.fill(W[i], Double.MAX_VALUE);
        }

        /*
         * Normalize the graph
         *   map each vertex to an integer (using a HashMap)
         *   keep the reverse mapping (using an ArrayList)
         */
        Map<V, Integer> vertexMap = new HashMap<>();
        List<V> indexList = new ArrayList<>();

        for (E e: graph.edgeSet()){
            V source = graph.getEdgeSource(e);
            V target = graph.getEdgeTarget(e);

            // map 'source' if no mapping exists
            if (!vertexMap.containsKey(source)){
                vertexMap.put(source, vertexMap.size());
                indexList.add(source);
            }

            // map 'target' if no mapping exists
            if (!vertexMap.containsKey(target)){
                vertexMap.put(target, vertexMap.size());
                indexList.add(target);
            }

            int u = vertexMap.get(source);
            int v = vertexMap.get(target);

            // use Math.min in case we deal with a multigraph
            W[u][v] = Math.min(W[u][v], graph.getEdgeWeight(e));

            // If the graph is undirected we need to also consider the reverse edge
            if (graph.getType().isUndirected())
                W[v][u] = Math.min(W[v][u], graph.getEdgeWeight(e));
        }

        // C[prevNode, state] = the minimum cost of a tour that ends in prevNode and contains all
        // nodes in the bitmask state
        double[][] C = new double[n][1 << n];

        for (int i = 0; i < n; i++) {
            Arrays.fill(C[i], Double.MIN_VALUE);
        }

        // start the tour from node 0 (because the tour is a cycle the start vertex does not matter)
        double tourWeight = memo(0, 1, C, W);

        // check if there is no tour
        if (tourWeight == Double.MAX_VALUE)
            return null;

        /*
         * Reconstruct the tour by walking the memo table: at each step pick a
         * node whose sub-solution accounts exactly for the remaining cost.
         */
        List<V> vertexList = new ArrayList<>(n);
        List<E> edgeList = new ArrayList<>(n);

        int lastNode = 0;
        int lastState = 1;
        vertexList.add(indexList.get(lastNode));

        for (int step = 1; step < n; step++) {
            int nextNode = -1;
            for (int node = 0; node < n; node++) {
                if (C[node][lastState ^ (1 << node)] + W[lastNode][node] == C[lastNode][lastState]){
                    nextNode = node;
                    break;
                }
            }

            assert nextNode != -1;
            vertexList.add(indexList.get(nextNode));
            edgeList.add(graph.getEdge(indexList.get(lastNode), indexList.get(nextNode)));
            lastState ^= 1 << nextNode;
            lastNode = nextNode;
        }

        // add start vertex
        vertexList.add(indexList.get(0));
        edgeList.add(graph.getEdge(indexList.get(lastNode), indexList.get(0)));

        return new GraphWalk<>(graph, indexList.get(0), indexList.get(0), vertexList, edgeList, tourWeight);
    }
}
|
from sklearn.neighbors import KNeighborsClassifier

# Load the data
from sklearn.datasets import fetch_openml
# Downloads the MNIST dataset from openml.org on first use (network access,
# large download) and caches it locally.
X, y = fetch_openml('mnist_784', version=1, return_X_y=True)

# Create the model and train it
model = KNeighborsClassifier(n_neighbors=3)
model.fit(X, y)

# Test the model
# NOTE(review): this scores on the same data used for fitting, so the
# accuracy is optimistic — a held-out test split would give an honest
# estimate. Confirm that training-set accuracy is what was intended.
print(model.score(X, y))
// Calls the Boolean() conversion function with no arguments — it returns
// false, and the result is discarded, so this statement has no effect.
Boolean();
|
import React from 'react'
import RaisedButton from 'material-ui/RaisedButton'
import { Card, CardMedia, CardTitle } from 'material-ui/Card'
import Checkbox from 'material-ui/Checkbox'
import Snackbar from 'material-ui/Snackbar'
import DropDownMenu from 'material-ui/DropDownMenu'
import MenuItem from 'material-ui/MenuItem'
import hljs from 'highlight.js'
import TextField from 'material-ui/TextField'
import vs from 'highlight.js/styles/vs.css'
import Clipboard from 'clipboard'
import ReactGA from 'react-ga'
import jsBeautify from 'js-beautify'
import config from './config'
import './HomeView.scss'
const items = []
config.languageItemsValue.forEach((val) => {
items.push(<MenuItem value={val[0]} key={val[1]} primaryText={`${val[2]}`} />)
})
const templateItems = []
config.templateItemsValue.forEach((val) => {
templateItems.push(<MenuItem value={val[1]} key={val[0]} primaryText={`${val[0]}`} />)
})
export default class DropDownMenuLongMenuExample extends React.Component {
constructor (props) {
super(props)
this.state = {
selectedLanguage: 'autodetect',
selectedTemplate: 'vs',
sourceCode: '',
destCode: '',
disabledClipboard: true,
open: false,
autoFormat: true,
disabledAutoFormat: true
}
hljs.initHighlightingOnLoad()
}
handleChangeLanguage = (event, index, value) => {
this.setState({ disabledAutoFormat: true, selectedLanguage: value })
for (let format of config.formatLibrary) {
if (format[0] === value) {
this.setState({ disabledAutoFormat: false, selectedLanguage: value })
break
}
}
ReactGA.event({
category: 'selectedLanguage',
action: value
})
}
handleChangeAutoFormat = (event, isInputChecked) => {
this.setState({ autoFormat: isInputChecked })
ReactGA.event({
category: 'autoFormat',
action: isInputChecked.toString()
})
}
handleChangeTemplate = (event, index, value) => {
this.setState({ selectedTemplate: value })
let url = 'https://cdnjs.cloudflare.com/ajax/libs/highlight.js/9.7.0/styles/' + value + '.min.css'
if (document.createStyleSheet) {
document.createStyleSheet(url)
} else {
var styles = "@import url('" + url + "')"
var newSS = document.createElement('link')
newSS.rel = 'stylesheet'
newSS.href = 'data:text/css,' + escape(styles)
document.getElementsByTagName('head')[0].appendChild(newSS)
}
ReactGA.event({
category: 'selectedTemplate',
action: value
})
}
handleRequestClose = () => {
this.setState({
open: false
})
};
onSourceChange = (ev) => {
this.setState({ sourceCode: ev.target.value })
}
keydownHandler (e) {
if (e.keyCode === 13 && e.ctrlKey) { // Ctrl+Enter
this.onSourceClick(null)
}
}
componentDidMount () {
document.addEventListener('keydown', this.keydownHandler.bind(this))
}
componentWillUnmount () {
document.removeEventListener('keydown', this.keydownHandler.bind(this))
}
onSourceClick = (ev) => {
this.setState({ destCode: this.state.sourceCode, disabledClipboard: false })
if (this.state.autoFormat) {
for (let format of config.formatLibrary) { // Check the format library
if (format[0] === this.state.selectedLanguage) {
if (format[1] === 'js-beautify-js') {
this.setState({
destCode: jsBeautify.js_beautify(this.state.sourceCode, { indent_size: 4 }),
disabledClipboard: false
})
break
} else if (format[1] === 'js-beautify-html') {
this.setState({
destCode: jsBeautify.html_beautify(this.state.sourceCode, { indent_size: 4 }),
disabledClipboard: false
})
break
} else if (format[1] === 'js-beautify-css') {
this.setState({
destCode: jsBeautify.css_beautify(this.state.sourceCode, { indent_size: 4 }),
disabledClipboard: false
})
break
}
}
}
}
var codepreview = document.querySelector('#codepreview')
codepreview.style.display = 'block'
var clipboard = new Clipboard('#copytoclipboard')
let $this = this
clipboard.on('success', function (e) {
$this.setState({
open: true
})
e.clearSelection()
ReactGA.event({
category: 'copyToClipboard',
action: 'copyToClipboard'
})
})
ReactGA.event({
category: 'highLight',
action: 'highLight'
})
var code = document.querySelector('#code') // remove old class first.
code.removeAttribute('class')
code.className = 'hljs'
// set language class to code
if (this.state.selectedLanguage !== 'auto' && this.state.selectedLanguage !== 'autodetect') {
code.classList.add(this.state.selectedLanguage)
}
setTimeout(() => {
var code = document.querySelector('#code')
hljs.highlightBlock(code)
}, 300)
};
render () {
return (
<div>
<Card id='titlecard' style={{ padding:'0px' }}>
<CardMedia id='titlecardmedia'
overlay={<CardTitle title='Online syntax highlight'
subtitle='Source code beautifier for the coder!'
style={{ paddingTop:null }} />}
style={{ paddingTop:null }}
>
<img src='code.jpg' style={{ maxHeight:'200px', minHeight:'80px' }} />
</CardMedia>
</Card>
<div style={{ float: 'left', width: '100%' }}>
<div style={{ float: 'left', minWidth:'220px' }}>
Language:<DropDownMenu maxHeight={300} value={this.state.selectedLanguage}
onChange={this.handleChangeLanguage}>
{items}
</DropDownMenu>
</div>
<div style={{ float: 'left', minWidth:'220px' }}>
Template: <DropDownMenu maxHeight={300} value={this.state.selectedTemplate}
onChange={this.handleChangeTemplate}>
{templateItems}
</DropDownMenu>
</div>
<div style={{ float: 'left', minWidth: '220px', margin:'auto auto' }}>
<Checkbox
label='Auto Format (JavaScript, JSON, CSS, HTML, XML)'
checked={this.state.autoFormat}
disabled={this.state.disabledAutoFormat}
labelStyle={{ width:'100%' }}
style={{ paddingTop:'12px' }}
onCheck={this.handleChangeAutoFormat}
/>
</div>
</div>
<div
style={{ width: 100 + '%', float: 'left', 'minWidth': '320px' }}>
<TextField
hintText=''
floatingLabelStyle={{ color: 'rgb(0,188,212)' }}
floatingLabelText='Source code:'
multiLine
rows={5}
rowsMax={10}
onChange={this.onSourceChange}
fullWidth
/>
<br />
<div style={{ margin: 'auto auto', 'textAlign': 'center' }}>
<RaisedButton label='Highlight' primary
onClick={this.onSourceClick} style={{ 'textAlign': 'center' }} />
<RaisedButton label='Copy to clipboard' id='copytoclipboard' secondary
style={{ 'textAlign': 'center', 'marginLeft': '10px' }} disabled={this.state.disabledClipboard}
data-clipboard-target='#code' />
</div>
</div>
<div id='codepreview'
style={{ width: 100 + '%', paddingTop:'10px', float: 'left', 'minWidth': '320px', display: 'none' }}>
Highlighted code: You can copy the code into Microsoft Word or Email:
<pre><code id='code' className={vs}>{this.state.destCode}
</code></pre>
</div>
<Snackbar
open={this.state.open}
message='Copied!'
autoHideDuration={2000}
onRequestClose={this.handleRequestClose}
/>
</div>
)
}
}
|
import { defaultSettings } from '../../src/config/defaultSettings'
import { prefixTokenName } from '../../src/utilities/prefixTokenName'
// Unit tests for prefixTokenName: prepending the token's exportKey to its
// name (and to alias paths) depending on the keyInName/prefixInName settings.
describe('prefixTokenName', () => {
  // keyInName: the exportKey ('color') is prepended to both the token name
  // and the alias path.
  test('token with alias', () => {
    expect(prefixTokenName([
      // @ts-ignore
      {
        name: 'token/withAlias/red',
        category: 'color',
        values: '#000000',
        extensions: {
          'org.lukasoppermann.figmaDesignTokens': {
            exportKey: 'color',
            styleId: 31,
            alias: 'colors.red'
          }
        }
      }
    ], {
      ...defaultSettings,
      ...{ keyInName: true }
    })).toStrictEqual([{
      name: 'color/token/withAlias/red',
      category: 'color',
      values: '#000000',
      extensions: {
        'org.lukasoppermann.figmaDesignTokens': {
          exportKey: 'color',
          styleId: 31,
          alias: 'color.token.colors.red'
        }
      }
    }])
  })
  // prefixInName disabled: the leading path segment is stripped from the name.
  test('token no prefix', () => {
    expect(prefixTokenName([
      // @ts-ignore
      {
        name: 'token/full',
        category: 'color',
        values: '#000000'
      }
    ], {
      ...defaultSettings,
      ...{ prefixInName: false }
    })).toStrictEqual([{
      name: 'full',
      category: 'color',
      values: '#000000'
    }])
  })
  // Edge case: an empty token list passes through unchanged.
  test('no tokens', () => {
    expect(prefixTokenName([], defaultSettings)).toStrictEqual([])
  })
})
|
// Generated UMD module for a 24px "rotate" icon: exposes a plain-object
// description of the SVG (element name, attributes, children) that icon
// renderers turn into markup. Treat as a build artifact — edit the icon
// source, not this file.
(function (global, factory) {
  typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
  typeof define === 'function' && define.amd ? define(factory) :
  (global.Rotate24 = factory());
}(this, (function () { 'use strict';

  // Declarative SVG tree: one <svg> root with a single <path> child.
  var _24 = {
    elem: 'svg',
    attrs: {
      xmlns: 'http://www.w3.org/2000/svg',
      viewBox: '0 0 32 32',
      width: 24,
      height: 24,
    },
    content: [
      {
        elem: 'path',
        attrs: {
          d:
            'M17.91 26.82l.35 2a12.9 12.9 0 0 0 4.24-1.54l-1-1.73a10.88 10.88 0 0 1-3.59 1.27zm6.51-3.75L26 24.35a13 13 0 0 0 2.24-3.91l-1.87-.68a11 11 0 0 1-1.95 3.31zM9.5 27.25a12.9 12.9 0 0 0 4.24 1.54l.35-2a10.88 10.88 0 0 1-3.59-1.3zm-3.83-7.49l-1.87.68A13 13 0 0 0 6 24.35l.32-.26 1.22-1a11 11 0 0 1-1.91-3.31zM29 16a12.85 12.85 0 0 0-.8-4.44l-1.87.68A11.18 11.18 0 0 1 27 16zm-3-8.35a13 13 0 0 0-20 0V4H4v8h8v-2H6.81a11 11 0 0 1 17.61-1.07z',
        },
      },
    ],
    name: 'rotate',
    size: 24,
  };

  return _24;
})));
|
<filename>modules/__tests__/TestSequences/execSteps.js
/**
 * Drives a history-based test through a list of step callbacks: each history
 * change runs the next step, and `done` is called once after the last step
 * succeeds or as soon as any step throws (with the error).
 */
export default function execSteps(steps, history, done) {
  let stepIndex = 0;
  let stopListening;
  let finished = false;

  // Tear down exactly once: stop listening first, then report the outcome.
  const finish = (...results) => {
    if (finished) return;
    finished = true;
    stopListening();
    done(...results);
  };

  const runNextStep = (...stepArgs) => {
    try {
      const step = steps[stepIndex];
      stepIndex += 1;
      if (!step) throw new Error("Test is missing step " + stepIndex);
      step(...stepArgs);
      if (stepIndex === steps.length) finish();
    } catch (error) {
      finish(error);
    }
  };

  if (!steps.length) {
    done();
    return;
  }
  stopListening = history.listen(runNextStep);
  runNextStep(history.location);
}
|
import sqlite3

# Connect to database (the file is created on first use).
conn = sqlite3.connect("mydatabase.db")
cur = conn.cursor()

# Create table.
# FIX: the original CREATE TABLE raised sqlite3.OperationalError whenever the
# script was run a second time against the same file; IF NOT EXISTS makes the
# script idempotent.
sql = """CREATE TABLE IF NOT EXISTS Table1 (
    column1 INTEGER,
    column2 TEXT,
    column3 TEXT
);"""
cur.execute(sql)

# Persist the DDL and release the connection.
conn.commit()
conn.close()
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.group.ui.lifecycle;
import java.util.List;
import org.olat.basesecurity.GroupRoles;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.components.form.flexible.FormItemContainer;
import org.olat.core.gui.components.form.flexible.elements.SingleSelection;
import org.olat.core.gui.components.form.flexible.impl.FormBasicController;
import org.olat.core.gui.components.form.flexible.impl.FormLayoutContainer;
import org.olat.core.gui.components.util.SelectionValues;
import org.olat.core.gui.components.util.SelectionValues.SelectionValue;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.util.Util;
import org.olat.group.BusinessGroup;
import org.olat.group.BusinessGroupLifecycleManager;
import org.olat.group.BusinessGroupService;
import org.olat.group.BusinessGroupStatusEnum;
import org.olat.group.ui.main.BusinessGroupListController;
import org.springframework.beans.factory.annotation.Autowired;
/**
 * Confirmation dialog shown before restoring business groups: the user picks
 * whether the selected groups come back in the "inactive" or the "active"
 * state, and the chosen lifecycle transition is applied to every group.
 *
 * Initial date: 1 déc. 2021<br>
 * @author srosse, <EMAIL>, http://www.frentix.com
 *
 */
public class ConfirmRestoreController extends FormBasicController {

    // Card-style radio selection: restore to "inactive" or "active".
    private SingleSelection activationEl;

    // The groups the user asked to restore.
    private final List<BusinessGroup> businessGroups;

    @Autowired
    private BusinessGroupService businessGroupService;
    @Autowired
    private BusinessGroupLifecycleManager businessGroupLifecycleManager;

    public ConfirmRestoreController(UserRequest ureq, WindowControl wControl, List<BusinessGroup> businessGroups) {
        super(ureq, wControl, Util.createPackageTranslator(BusinessGroupListController.class, ureq.getLocale()));
        this.businessGroups = businessGroups;
        initForm(ureq);
    }

    @Override
    protected void initForm(FormItemContainer formLayout, Controller listener, UserRequest ureq) {
        // Two selectable cards: restore as inactive, or restore as active.
        SelectionValues modeValues = new SelectionValues();
        modeValues.add(new SelectionValue("inactive", translate("restore.to.inactive.label"), translate("restore.to.inactive.desc")));
        modeValues.add(new SelectionValue("active", translate("restore.to.active.label"), translate("restore.to.active.desc")));
        activationEl = uifactory.addCardSingleSelectHorizontal("restore.mode", formLayout, modeValues.keys(), modeValues.values(), modeValues.descriptions(), null);

        FormLayoutContainer buttonsCont = FormLayoutContainer.createButtonLayout("buttons", getTranslator());
        formLayout.add(buttonsCont);
        uifactory.addFormCancelButton("cancel", buttonsCont, ureq, getWindowControl());
        uifactory.addFormSubmitButton("restore", buttonsCont);
    }

    @Override
    protected boolean validateFormLogic(UserRequest ureq) {
        boolean allOk = super.validateFormLogic(ureq);

        // A restore mode must be chosen before the form can be submitted.
        activationEl.clearError();
        if(!activationEl.isOneSelected()) {
            activationEl.setErrorKey("form.legende.mandatory", null);
            allOk &= false;
        }

        return allOk;
    }

    @Override
    protected void formCancelled(UserRequest ureq) {
        fireEvent(ureq, Event.CANCELLED_EVENT);
    }

    @Override
    protected void formOK(UserRequest ureq) {
        boolean activate = "active".equals(activationEl.getSelectedKey());
        for(BusinessGroup group:businessGroups) {
            // Whether the acting identity coaches this group; passed through
            // to the lifecycle manager with the status change.
            boolean asOwner = businessGroupService.hasRoles(getIdentity(), group, GroupRoles.coach.name());
            if(activate) {
                businessGroupLifecycleManager.reactivateBusinessGroup(group, getIdentity(), asOwner);
            } else {
                businessGroupLifecycleManager.changeBusinessGroupStatus(group, BusinessGroupStatusEnum.inactive, getIdentity(), asOwner);
            }
        }

        // Singular vs. plural confirmation message.
        if(businessGroups.size() == 1) {
            showInfo("group.restored");
        } else {
            showInfo("groups.restored", new String[] { Integer.toString(businessGroups.size()) });
        }
        fireEvent(ureq, Event.DONE_EVENT);
    }
}
|
package com.globalcollect.gateway.sdk.java;
import java.util.List;
import com.globalcollect.gateway.sdk.java.gc.errors.definitions.APIError;
/**
 * Represents an error response from a create payment, payout or refund call.
 */
@SuppressWarnings("serial")
public abstract class GcDeclinedTransactionException extends GcApiException {

    /**
     * @param statusCode   HTTP status code of the error response
     * @param responseBody raw body of the error response
     * @param errorId      identifier of the error reported by the API
     * @param errors       the individual API errors contained in the response
     */
    public GcDeclinedTransactionException(int statusCode, String responseBody, String errorId, List<APIError> errors) {
        super(statusCode, responseBody, errorId, errors);
    }

    /**
     * @param message      detail message for this exception
     * @param statusCode   HTTP status code of the error response
     * @param responseBody raw body of the error response
     * @param errorId      identifier of the error reported by the API
     * @param errors       the individual API errors contained in the response
     */
    public GcDeclinedTransactionException(String message, int statusCode, String responseBody, String errorId, List<APIError> errors) {
        super(message, statusCode, responseBody, errorId, errors);
    }
}
|
# Replace the stock nginx site with the project's config and restart nginx.
sudo rm /etc/nginx/sites-enabled/default
sudo ln -sf /home/box/web/etc/nginx.conf /etc/nginx/sites-enabled/default
echo Configuration done
sudo /etc/init.d/nginx restart
# Register both gunicorn apps (plain WSGI and Django) and restart gunicorn.
sudo ln -sf /home/box/web/etc/gunicorn.conf /etc/gunicorn.d/test_wsgi
sudo ln -sf /home/box/web/etc/gunicorn-django.conf /etc/gunicorn.d/test_django
echo Unicorn done
sudo /etc/init.d/gunicorn restart
# Bring up MySQL and create the application database.
# NOTE(review): `create database ask` fails if the database already exists —
# rerunning this script is not idempotent; confirm whether that matters.
echo setting up mysql
sudo /etc/init.d/mysql start
sudo mysql -u root -e "create database ask"
<reponame>tmontes/python-wires
# ----------------------------------------------------------------------------
# Python Wires
# ----------------------------------------------------------------------------
# Copyright (c) <NAME>.
# See LICENSE for details.
# ----------------------------------------------------------------------------
"""
Python Wires
"""

from __future__ import absolute_import

# Package metadata, exposed as module attributes so packaging tools and
# runtime introspection can read them.
__version__ = '19.2.0'
__title__ = 'wires'
__description__ = 'Python Wires'
__license__ = 'MIT'
__uri__ = 'https://github.com/tmontes/python-wires/'
__author__ = '<NAME>'
__email__ = '<EMAIL>'

# Public API: the Wires class and the shared default instance `w`.
from . _wires import Wires
from . _shared import w

__all__ = ['Wires', 'w']
# ----------------------------------------------------------------------------
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.