text
stringlengths
1
1.05M
<filename>src/utils.js<gh_stars>0 import { connect, Contract, keyStores, WalletConnection } from 'near-api-js' import getConfig from './config' const nearConfig = getConfig('testnet') //const nearConfig = getConfig(process.env.NODE_ENV || 'development') console.log(nearConfig) // Initialize contract & set global variables export async function initContract() { // Initialize connection to the NEAR testnet const near = await connect(Object.assign({ deps: { keyStore: new keyStores.BrowserLocalStorageKeyStore() } }, nearConfig)) // Initializing Wallet based Account. It can work with NEAR testnet wallet that // is hosted at https://wallet.testnet.near.org window.walletConnection = new WalletConnection(near) // Getting the Account ID. If still unauthorized, it's just empty string window.accountId = window.walletConnection.getAccountId() // Initializing our contract APIs by contract name and configuration window.contract = await new Contract(window.walletConnection.account(), nearConfig.contractName, { // View methods are read only. They don't modify the state, but usually return some value. viewMethods: [ 'get_caviar', 'get_caviar_vault', 'get_nemo', 'has_locker', 'get_dori', 'get_captain', 'get_ariel' ], // Change methods can modify the state. But you don't receive the returned value when called. changeMethods: [ 'init_locker', 'get_random', 'harvest_fish', 'stake_caviar', 'unstake_caviar', 'harvest_stake', 'swap_caviar_to_nemo', 'swap_nemo_to_dori', 'swap_dori_to_captain', 'swap_captain_to_ariel' ], }) } export function logout() { window.walletConnection.signOut() // reload page window.location.replace(window.location.origin + window.location.pathname) } export function login() { // Allow the current app to make calls to the specified contract on the // user's behalf. // This works by creating a new access key for the user's account and storing // the private key in localStorage. window.walletConnection.requestSignIn(nearConfig.contractName) }
#!/bin/bash python3 app.py export GIT_SSH_COMMAND="ssh -i `pwd`/.ssh/id_rsa" cp ${HOME}/growlab/app/html/* ${HOME}/growlab/docs/ git add .. git commit -s -m "Update images at `date`" git pull origin master --rebase git push origin master
#!/usr/bin/env bats @test "git binary found in PATH" { run which git [ "$status" -eq 0 ] }
import threading _SparseOperationKitEmbeddingLayerStoreKey = "SparseOperationKitEmbeddingLayerStore" class _EmbeddingLayerStore(threading.local): def __init__(self): super(_EmbeddingLayerStore, self).__init__() self._embedding_layer_container = dict() def _create_embedding(self, name, constructor, **kwargs): if constructor is None: raise ValueError("embedding_layer: '{}' does not exist and " "cannot create it with constructor: " "{}".format(name, constructor)) def _get_embedding(self, name): return self._embedding_layer_container.get(name, None) def test_embedding_store(): def create_and_get_embedding(store, name, constructor, expected_result): store._create_embedding(name, constructor) result = store._get_embedding(name) assert result == expected_result, f"Expected: {expected_result}, Got: {result}" def thread_worker(store, name, constructor, expected_result): create_and_get_embedding(store, name, constructor, expected_result) store = _EmbeddingLayerStore() # Create threads to test thread-local storage threads = [] for i in range(5): name = f"embedding_{i}" constructor = f"constructor_{i}" expected_result = (name, constructor) t = threading.Thread(target=thread_worker, args=(store, name, constructor, expected_result)) threads.append(t) t.start() # Wait for all threads to complete for t in threads: t.join() test_embedding_store()
<gh_stars>0 package org.hiro; import org.hiro.character.Player; import org.hiro.input.InputDevice; import org.hiro.input.KeyboardDevice; import org.hiro.map.Dungeon; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; public class Game { private static final Game instance = new Game(); private boolean goal; private Map<String, Dungeon> dungeons; private List<InputDevice> inputDevices; private Player player; /* * Options */ /** * Follow passages (Option) 通路の角で止まらない */ private boolean passgo; private Game() { this.goal = false; this.passgo = false; this.dungeons = new HashMap<>(); this.inputDevices = new ArrayList<>(); inputDevices.add(new KeyboardDevice()); } public static Game getInstance() { return instance; } public void setOptions(Map<String, Object> options) { } public boolean isGoal() { return this.goal; } public void setGoal(boolean b) { this.goal = b; } public void addDungeons(Dungeon d) { this.dungeons.put("", d); } public void setPlayer(Player player) { this.player = player; } public boolean move(String dungeonName, Player p) { if (!this.dungeons.containsKey(dungeonName)) { return false; } this.dungeons.get(dungeonName).setPlayer(p); return true; } }
# Copyright 2016 OCLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'spec_helper' I18n.locale = :en I18n.default_locale = :en I18n.load_path << Dir[File.join(File.expand_path(File.expand_path(File.dirname(__FILE__)) + '/../config/locales'), '*.yml')] I18n.load_path.flatten! describe "the home page" do describe "before logging in" do before do config = YAML::load(File.read("#{File.expand_path(File.dirname(__FILE__))}/../../config/lbmc.yml")) $app_url = config[$environment]['app_url'] $base_url = config[$environment]['base_url'] $institutions = config[$environment]['institutions'] get '/' @doc = Nokogiri::HTML(last_response.body) end it "should redirect to login" do get '/' @oauth_url = 'https://authn.sd00.worldcat.org/oauth2/authorizeCode?authenticatingInstitutionId=128807&client_id=' + WSKEY.key + '&contextInstitutionId=128807' @oauth_url += '&redirect_uri=' + Rack::Utils.escape($app_url + '/catch_auth_code') + '&response_type=code&scope=WorldCatMetadataAPI' expect(last_response).to be_redirect expect(last_response.location).to eql(@oauth_url) end end describe "after logging in when an access token is present in the user session" do before do access_token = OCLC::Auth::AccessToken.new('grant_type', ['FauxService'], 128807, 128807) access_token.value = 'tk_faux_token' access_token.expires_at = DateTime.parse("9999-01-01 00:00:00Z") get '/', params={}, rack_env={ 'rack.session' => {:token => access_token, :registry_id => 128807} } @doc = Nokogiri::HTML(last_response.body) 
end it "should welcome the user" do xpath = "//h3[text()='Welcome!']" expect(@doc.xpath(xpath)).not_to be_empty end it "should have a link to create a new record" do xpath = "//a[@id='new-record']" expect(@doc.xpath(xpath).size).to eq(1) end it "should have a link to the home page" do xpath = "//a[@id='home']" expect(@doc.xpath(xpath).size).to eq(1) end it "should have a link to create a new record" do xpath = "//a[@id='create']" expect(@doc.xpath(xpath).size).to eq(1) end it "should have a link to logoff" do xpath = "//a[@id='logoff']" expect(@doc.xpath(xpath).size).to eq(1) end it "should have hints for new users" do xpath = "//span[text()='Hints for new users']" expect(@doc.xpath(xpath)).not_to be_empty end end end
<filename>pkg/library/service.go package library import ( "fmt" "github.com/liampulles/banger/pkg/file" ) type Service interface { PipeAllTracks() ([]Track, error) } type ServiceImpl struct { rootPath string } var _ Service = &ServiceImpl{} func NewService(rootPath string) *ServiceImpl { return &ServiceImpl{ rootPath: rootPath, } } func (s *ServiceImpl) PipeAllTracks() ([]Track, error) { paths, err := file.FindAllFilesRecursively(s.rootPath) if err != nil { return nil, fmt.Errorf("could not pipe tracks - find paths error: %w", err) } var tracks []Track for _, path := range paths { track, err := NewTagTrack(path) if err != nil { continue } tracks = append(tracks, track) } return tracks, nil }
<reponame>ritaswc/wechat_app_template function setOnShowScene(t) { getApp().onShowData || (getApp().onShowData = {}), getApp().onShowData.scene = t; } Page({ data: { list: "" }, onLoad: function(t) { getApp().page.onLoad(this, t); var e = this; e.setData({ my: "undefined" != typeof my }), getApp().core.showLoading({ title: "加载中" }), getApp().request({ url: getApp().api.user.member, method: "POST", success: function(t) { getApp().core.hideLoading(), 0 == t.code && (e.setData(t.data), e.setData({ current_key: 0 }), t.data.list && e.setData({ buy_price: t.data.list[0].price })); } }); }, showDialogBtn: function() { this.setData({ showModal: !0 }); }, preventTouchMove: function() {}, hideModal: function() { this.setData({ showModal: !1 }); }, onCancel: function() { this.hideModal(); }, pay: function(t) { var e = t.currentTarget.dataset.key, a = this.data.list[e].id, n = t.currentTarget.dataset.payment; this.hideModal(), getApp().request({ url: getApp().api.user.submit_member, data: { level_id: a, pay_type: n }, method: "POST", success: function(t) { if (0 == t.code) { if (setTimeout(function() { getApp().core.hideLoading(); }, 1e3), "WECHAT_PAY" == n) return setOnShowScene("pay"), void getApp().core.requestPayment({ _res: t, timeStamp: t.data.timeStamp, nonceStr: t.data.nonceStr, package: t.data.package, signType: t.data.signType, paySign: t.data.paySign, complete: function(t) { "requestPayment:fail" != t.errMsg && "requestPayment:fail cancel" != t.errMsg ? 
"requestPayment:ok" == t.errMsg && getApp().core.showModal({ title: "提示", content: "充值成功", showCancel: !1, confirmText: "确认", success: function(t) { getApp().core.navigateBack({ delta: 1 }); } }) : getApp().core.showModal({ title: "提示", content: "订单尚未支付", showCancel: !1, confirmText: "确认" }); } }); "BALANCE_PAY" == n && getApp().core.showModal({ title: "提示", content: "充值成功", showCancel: !1, confirmText: "确认", success: function(t) { getApp().core.navigateBack({ delta: 1 }); } }); } else getApp().core.showModal({ title: "提示", content: t.msg, showCancel: !1 }), getApp().core.hideLoading(); } }); }, changeTabs: function(t) { if ("undefined" == typeof my) var e = t.detail.currentItemId; else e = this.data.list[t.detail.current].id; for (var a = t.detail.current, n = parseFloat(this.data.list[0].price), i = this.data.list, o = 0; o < a; o++) n += parseFloat(i[o + 1].price); this.setData({ current_id: e, current_key: a, buy_price: parseFloat(n) }); }, det: function(t) { var e = t.currentTarget.dataset.index, a = t.currentTarget.dataset.idxs; if (e != this.data.ids) { var n = t.currentTarget.dataset.content; this.setData({ ids: e, cons: !0, idx: a, content: n }); } else this.setData({ ids: -1, cons: !1, idx: a }); } });
#!/bin/sh set -e set -u set -o pipefail function on_error { echo "$(realpath -mq "${0}"):$1: error: Unexpected failure" } trap 'on_error $LINENO' ERR if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy # frameworks to, so exit 0 (signalling the script phase was successful). exit 0 fi echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" BCSYMBOLMAP_DIR="BCSymbolMaps" # This protects against multiple targets copying the same framework dependency at the same time. The solution # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????") # Copies and strips a vendored framework install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi if [ -d "${source}/${BCSYMBOLMAP_DIR}" ]; then # Locate and install any .bcsymbolmaps if present, and remove them from the .framework before the framework is copied find "${source}/${BCSYMBOLMAP_DIR}" -name "*.bcsymbolmap"|while read f; do echo "Installing $f" install_bcsymbolmap "$f" "$destination" rm "$f" done rmdir "${source}/${BCSYMBOLMAP_DIR}" fi # Use filter instead of exclude so missing patterns don't throw errors. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! [ -r "$binary" ]; then binary="${destination}/${basename}" elif [ -L "${binary}" ]; then echo "Destination binary is symlinked..." dirname="$(dirname "${binary}")" binary="${dirname}/$(readlink "${binary}")" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Copies and strips a vendored dSYM install_dsym() { local source="$1" warn_missing_arch=${2:-true} if [ -r "$source" ]; then # Copy the dSYM into the targets temp dir. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}" local basename basename="$(basename -s .dSYM "$source")" binary_name="$(ls "$source/Contents/Resources/DWARF")" binary="${DERIVED_FILES_DIR}/${basename}.dSYM/Contents/Resources/DWARF/${binary_name}" # Strip invalid architectures from the dSYM. if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then strip_invalid_archs "$binary" "$warn_missing_arch" fi if [[ $STRIP_BINARY_RETVAL == 0 ]]; then # Move the stripped file into its final destination. echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --links --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.dSYM" "${DWARF_DSYM_FOLDER_PATH}" else # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing. mkdir -p "${DWARF_DSYM_FOLDER_PATH}" touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.dSYM" fi fi } # Used as a return value for each invocation of `strip_invalid_archs` function. 
STRIP_BINARY_RETVAL=0 # Strip invalid architectures strip_invalid_archs() { binary="$1" warn_missing_arch=${2:-true} # Get architectures for current target binary binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)" # Intersect them with the architectures we are building for intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)" # If there are no archs supported by this binary then warn the user if [[ -z "$intersected_archs" ]]; then if [[ "$warn_missing_arch" == "true" ]]; then echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)." fi STRIP_BINARY_RETVAL=1 return fi stripped="" for arch in $binary_archs; do if ! [[ "${ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi STRIP_BINARY_RETVAL=0 } # Copies the bcsymbolmap files of a vendored framework install_bcsymbolmap() { local bcsymbolmap_path="$1" local destination="${BUILT_PRODUCTS_DIR}" echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}" } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identity echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" local 
code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'" if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then code_sign_cmd="$code_sign_cmd &" fi echo "$code_sign_cmd" eval "$code_sign_cmd" fi } if [[ "$CONFIGURATION" == "Debug" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/Koyomi/Koyomi.framework" fi if [[ "$CONFIGURATION" == "Release" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/Koyomi/Koyomi.framework" fi if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then wait fi
#!/bin/sh set -e set -u set -o pipefail if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy # frameworks to, so exit 0 (signalling the script phase was successful). exit 0 fi echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" # Used as a return value for each invocation of `strip_invalid_archs` function. STRIP_BINARY_RETVAL=0 # This protects against multiple targets copying the same framework dependency at the same time. The solution # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????") # Copies and strips a vendored framework install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi # Use filter instead of exclude so missing patterns don't throw errors. echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! 
[ -r "$binary" ]; then binary="${destination}/${basename}" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]}) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Copies and strips a vendored dSYM install_dsym() { local source="$1" if [ -r "$source" ]; then # Copy the dSYM into a the targets temp dir. echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}" local basename basename="$(basename -s .framework.dSYM "$source")" binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}" # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"Mach-O dSYM companion"* ]]; then strip_invalid_archs "$binary" fi if [[ $STRIP_BINARY_RETVAL == 1 ]]; then # Move the stripped file into its final destination. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}" else # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing. touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM" fi fi } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identitiy echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'" if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then code_sign_cmd="$code_sign_cmd &" fi echo "$code_sign_cmd" eval "$code_sign_cmd" fi } # Strip invalid architectures strip_invalid_archs() { binary="$1" # Get architectures for current target binary binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)" # Intersect them with the architectures we are building for intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)" # If there are no archs supported by this binary then warn the user if [[ -z "$intersected_archs" ]]; then echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current 
build architectures ($ARCHS)." STRIP_BINARY_RETVAL=0 return fi stripped="" for arch in $binary_archs; do if ! [[ "${ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" || exit 1 stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi STRIP_BINARY_RETVAL=1 } if [[ "$CONFIGURATION" == "Debug" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/CaelinCore/CaelinCore.framework" fi if [[ "$CONFIGURATION" == "Release" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/CaelinCore/CaelinCore.framework" fi if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then wait fi
#!/bin/bash # # runs benchmark and reports time to convergence # to use the script: # run_and_time_multi.sh set -x source ./config_${DGXSYSTEM}.sh #source ./config_2xDSS8440x8A100-PCIE-40GB.sh #echo "DGXSYSTEM=${DGXSYSTEM}" # start timing start=$(date +%s) start_fmt=$(date +%Y-%m-%d\ %r) echo "STARTING TIMING RUN AT $start_fmt" # run benchmark readonly global_rank=${SLURM_PROCID:-} readonly local_rank="${LOCAL_RANK:=${SLURM_LOCALID:=${OMPI_COMM_WORLD_LOCAL_RANK:-}}}" SLURM_NTASKS_PER_NODE=${SLURM_NTASKS_PER_NODE:-$DGXNGPU} OPTIMIZER=${OPTIMIZER:-"sgd"} BATCHSIZE=${BATCHSIZE:-1664} INPUT_BATCH_MULTIPLIER=${INPUT_BATCH_MULTIPLIER:-1} KVSTORE=${KVSTORE:-"device"} LR=${LR:-"0.6"} MOM=${MOM:-"0.9"} LRSCHED=${LRSCHED:-"30,60,80"} WARMUP_EPOCHS=${WARMUP_EPOCHS:-5} LARSETA=${LARSETA:-'0.001'} DALI_HW_DECODER_LOAD=${DALI_HW_DECODER_LOAD:-'0.0'} WD=${WD:-'0.0001'} LABELSMOOTHING=${LABELSMOOTHING:-'0.0'} SEED=${SEED:-1} EVAL_OFFSET=${EVAL_OFFSET:-2} EVAL_PERIOD=${EVAL_PERIOD:-4} DALI_PREFETCH_QUEUE=${DALI_PREFETCH_QUEUE:-2} DALI_NVJPEG_MEMPADDING=${DALI_NVJPEG_MEMPADDING:-64} DALI_THREADS=${DALI_THREADS:-3} DALI_CACHE_SIZE=${DALI_CACHE_SIZE:-0} DALI_ROI_DECODE=${DALI_ROI_DECODE:-0} DALI_PREALLOCATE_WIDTH=${DALI_PREALLOCATE_WIDTH:-0} DALI_PREALLOCATE_HEIGHT=${DALI_PREALLOCATE_HEIGHT:-0} DALI_TMP_BUFFER_HINT=${DALI_TMP_BUFFER_HINT:-25273239} DALI_DECODER_BUFFER_HINT=${DALI_DECODER_BUFFER_HINT:-1315942} DALI_CROP_BUFFER_HINT=${DALI_CROP_BUFFER_HINT:-165581} DALI_NORMALIZE_BUFFER_HINT=${DALI_NORMALIZE_BUFFER_HINT:-441549} DALI_DONT_USE_MMAP=${DALI_DONT_USE_MMAP:-0} NUMEPOCHS=${NUMEPOCHS:-90} #echo "NUMEPOCHS=$NUMEPOCHS" #NUMEPOCHS=44 #export EVAL_OFFSET="3" NETWORK=${NETWORK:-"resnet-v1b-fl"} BN_GROUP=${BN_GROUP:-1} PROFILE=${PROFILE:-0} PROFILE_EXCEL=${PROFILE_EXCEL:-0} NODALI=${NODALI:-0} NUMEXAMPLES=${NUMEXAMPLES:-} PROFILE_ALL_LOCAL_RANKS=${PROFILE_ALL_LOCAL_RANKS:-0} THR="0.759" if [[ ${PROFILE} == 1 ]]; then THR="0" fi DATAROOT="/data" echo "running benchmark" export 
NGPUS=$SLURM_NTASKS_PER_NODE export NCCL_DEBUG=${NCCL_DEBUG:-"WARN"} if [[ ${PROFILE} -ge 1 ]]; then export TMPDIR="/result/" fi #GPUS=$(seq 0 $(($NGPUS - 1)) | tr "\n" "," | sed 's/,$//') GPUS=$(seq 0 $(( 8 - 1 )) | tr "\n" "," | sed 's/,$//') #echo "GPU="$GPUS #echo "NGPUS="$NGPUS PARAMS=( --gpus "${GPUS}" --batch-size "${BATCHSIZE}" --kv-store "${KVSTORE}" --lr "${LR}" --mom "${MOM}" --lr-step-epochs "${LRSCHED}" --lars-eta "${LARSETA}" --label-smoothing "${LABELSMOOTHING}" --wd "${WD}" --warmup-epochs "${WARMUP_EPOCHS}" --eval-period "${EVAL_PERIOD}" --eval-offset "${EVAL_OFFSET}" --optimizer "${OPTIMIZER}" --network "${NETWORK}" --num-layers "50" --num-epochs "${NUMEPOCHS}" --accuracy-threshold "${THR}" --seed "${SEED}" --dtype "float16" --disp-batches "20" --image-shape "4,224,224" --fuse-bn-relu "1" --fuse-bn-add-relu "1" --bn-group "${BN_GROUP}" --min-random-area "0.05" --max-random-area "1.0" --conv-algo "1" --force-tensor-core "1" --input-layout "NHWC" --conv-layout "NHWC" --batchnorm-layout "NHWC" --pooling-layout "NHWC" --batchnorm-mom "0.9" --batchnorm-eps "1e-5" --data-train "${DATAROOT}/train.rec" --data-train-idx "${DATAROOT}/train.idx" --data-val "${DATAROOT}/val.rec" --data-val-idx "${DATAROOT}/val.idx" --dali-dont-use-mmap "${DALI_DONT_USE_MMAP}" --dali-hw-decoder-load "${DALI_HW_DECODER_LOAD}" --dali-prefetch-queue "${DALI_PREFETCH_QUEUE}" --dali-nvjpeg-memory-padding "${DALI_NVJPEG_MEMPADDING}" --input-batch-multiplier "${INPUT_BATCH_MULTIPLIER}" --dali-threads "${DALI_THREADS}" --dali-cache-size "${DALI_CACHE_SIZE}" --dali-roi-decode "${DALI_ROI_DECODE}" --dali-preallocate-width "${DALI_PREALLOCATE_WIDTH}" --dali-preallocate-height "${DALI_PREALLOCATE_HEIGHT}" --dali-tmp-buffer-hint "${DALI_TMP_BUFFER_HINT}" --dali-decoder-buffer-hint "${DALI_DECODER_BUFFER_HINT}" --dali-crop-buffer-hint "${DALI_CROP_BUFFER_HINT}" --dali-normalize-buffer-hint "${DALI_NORMALIZE_BUFFER_HINT}" --profile "${PROFILE}" ) if [[ ${NODALI} -lt 1 ]]; then PARAMS+=( 
--use-dali ) fi # If numexamples is set then we will override the numexamples if [[ ${NUMEXAMPLES} -ge 1 ]]; then PARAMS+=( --num-examples "${NUMEXAMPLES}" ) fi echo "PARAMS="${PARAMS[@]} python train_imagenet.py "${PARAMS[@]}"; ret_code=$? sleep 3 if [[ $ret_code != 0 ]]; then exit $ret_code; fi # end timing end=$(date +%s) end_fmt=$(date +%Y-%m-%d\ %r) echo "ENDING TIMING RUN AT $end_fmt" # report result result=$(( $end - $start )) result_name="IMAGE_CLASSIFICATION" echo "RESULT,$result_name,,$result,$USER,$start_fmt" export PROFILE=0
#!/bin/bash ./bin/pong
package Algorithms.Other

/**
 * Created by MikBac on 04.09.2020
 *
 * Builds the transpose of an integer matrix and prints it.
 */
object MatrixTransposition {

  /**
   * Returns a new matrix t such that t(i)(j) == matrix(j)(i).
   * Assumes a rectangular, non-empty input matrix.
   */
  def transposition(matrix: Array[Array[Int]]): Array[Array[Int]] = {
    val rowCount = matrix.length
    val colCount = matrix(0).length
    Array.tabulate(colCount, rowCount) { (i, j) => matrix(j)(i) }
  }

  /** Prints the matrix row by row, each value followed by a space. */
  def printMatrix(matrix: Array[Array[Int]]): Unit = {
    for (i <- matrix.indices) {
      for (j <- matrix(0).indices) {
        print(s"${matrix(i)(j)} ")
      }
      println("")
    }
  }

  def main(args: Array[String]): Unit = {
    val matrix = Array(Array(7, 2, 8, -1), Array(1, 3, 2, 0), Array(-5, 9, 5, -7))
    val transMatrix = transposition(matrix)
    printMatrix(transMatrix)
  }
}
/*********************************************************************************************************************** * OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted provided that the * following conditions are met: * * (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following * disclaimer. * * (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided with the distribution. * * (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products * derived from this software without specific prior written permission from the respective party. * * (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works * may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior * written permission from Alliance for Sustainable Energy, LLC. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED * STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ***********************************************************************************************************************/ #include <gtest/gtest.h> #include "UnitsFixture.hpp" #include "../ScaleFactory.hpp" #include "../Quantity.hpp" #include "../Unit.hpp" #include "../BTUUnit.hpp" #include "../CFMUnit.hpp" #include "../IPUnit.hpp" #include "../SIUnit.hpp" #include "../CelsiusUnit.hpp" #include "../FahrenheitUnit.hpp" #include "../TemperatureUnit.hpp" #include "../../core/Exception.hpp" using namespace openstudio; using std::stringstream; TEST_F(UnitsFixture, Quantity_Constructors) { LOG(Debug, "Quantity_Constructors") Quantity q1; testStreamOutput("0", q1); Quantity q2(1.0); testStreamOutput("1", q2); Unit u3; u3.setBaseUnitExponent("ft", 1); Quantity q3(34.2, u3); testStreamOutput("34.2 ft", q3, 1); q3.setScale(3); testStreamOutput("0.0342 kft", q3, 4); } TEST_F(UnitsFixture, Quantity_ArithmeticOperators) { LOG(Debug, "Quantity_ArithmeticOperators"); Quantity q1; Quantity q2(1.0); Unit u3; u3.setBaseUnitExponent("ft", 1); Quantity q3(34.2, u3); // addition Quantity q4 = q1 + q2; q4 += q4; testNumbersEqual(2.0, q4.value()); testStreamOutput("2", q4); ASSERT_THROW(q3 + q1, Exception); // subtraction; ASSERT_THROW(q3 - q1, Exception); Quantity len(3.5, u3); len -= q3; testNumbersEqual(-30.7, len.value()); testStreamOutput("-30.7 ft", len, 1); // 
multiplication q3 *= q4; testNumbersEqual(68.4, q3.value()); testStreamOutput("68.4 ft", q3, 1); Unit u5; u5.setBaseUnitExponent("s", -2); u5.setBaseUnitExponent("ft", 1); Quantity q5(0.5, u5); Quantity q6 = q5 * q3; testNumbersEqual(34.2, q6.value()); testStreamOutput("34.2 ft^2/s^2", q6, 1); // division len = Quantity(1.1, u3); Quantity a = q6 / len; testNumbersEqual(31.0909090909, a.value()); EXPECT_EQ("ft/s^2", a.standardUnitsString()); // pow a.pow(6); EXPECT_EQ("ft^6/s^12", a.standardUnitsString()); Quantity b = openstudio::pow(a, 1, 3); EXPECT_EQ("ft^2/s^4", b.standardUnitsString()); testNumbersEqual(966.644628099, b.value()); a = b; // multiplication and division with double a *= 1.21; testNumbersEqual(1169.64, a.value()); b = a / 2; testNumbersEqual(584.82, b.value()); EXPECT_EQ("ft^2/s^4", b.standardUnitsString()); EXPECT_EQ("", b.prettyUnitsString()); Quantity c = 32.0 / b; EXPECT_EQ("s^4/ft^2", c.standardUnitsString()); testNumbersEqual(0.054717690913, c.value()); // Operations on Quantities with units of temperature LOG(Debug, "Quantity_FahrenheitUnit_Arithmetic"); FahrenheitUnit fu1(1, 0, ""); Quantity T1(60.0, fu1); FahrenheitUnit fu2(1); Quantity T2(74.0, fu2); Quantity TT1 = pow(T1, 2); Quantity TT2 = pow(T2, 2); EXPECT_TRUE(TT1.isAbsolute()); EXPECT_TRUE(TT2.isAbsolute()); Quantity q; // -, unary q = -T1; testStreamOutput("-60 F", q); q.setAsRelative(); q = -q; testStreamOutput("60 F", q); EXPECT_FALSE(q.isAbsolute()); // +, good units, bad units, absolute + relative = absolute q = T1 + T2; EXPECT_TRUE(q.isAbsolute()); testStreamOutput("134 F", q, 0); EXPECT_THROW(T1 + TT1, Exception); q.setAsRelative(); q += T1; EXPECT_TRUE(q.isAbsolute()); // -, binary, good units, bad units, absolute -> relative when exp == 1, ow same as + q = T2 - T1; testStreamOutput("14 F", q, 0); EXPECT_FALSE(q.isAbsolute()); EXPECT_THROW(q - TT2, Exception); q -= T1; EXPECT_TRUE(q.isAbsolute()); testStreamOutput("-46 F", q, 0); q = TT2 - TT1; EXPECT_TRUE(q.isAbsolute()); 
// *, absolute*relative = absolute Quantity deltaT = T2 - T1; q = deltaT * TT1; EXPECT_TRUE(q.isAbsolute()); testNumbersEqual(14.0 * 3600.0, q.value()); q = TT1 * deltaT; EXPECT_TRUE(q.isAbsolute()); testNumbersEqual(14.0 * 3600.0, q.value()); EXPECT_EQ("F^3", q.standardUnitsString()); // /, absolute/relative = absolute and vice-versa q = TT1 / deltaT; EXPECT_TRUE(q.isAbsolute()); q = deltaT / TT2; EXPECT_TRUE(q.isAbsolute()); testNumbersEqual(14.0 / TT2.value(), q.value()); EXPECT_EQ("1/F", q.standardUnitsString()); // * w/ double, retains absolute v. relative of quantity q = T1 * 3.0; EXPECT_TRUE(q.isAbsolute()); testStreamOutput("180 F", q, 0); q = 2.0 * deltaT; EXPECT_FALSE(q.isAbsolute()); testNumbersEqual(28.0, q.value()); // / w/ double, retains absolute v. relative of quantity q = 20.0 / T1; EXPECT_TRUE(q.isAbsolute()); testStreamOutput("0.333 1/F", q, 3); q = deltaT / 2.0; EXPECT_FALSE(q.isAbsolute()); testStreamOutput("7 F", q, 0); } TEST_F(UnitsFixture, Quantity_MixedTypeOperations) { openstudio::SIUnit siu(SIExpnt(0, 1), 0, ""); Quantity l1(3.0, siu); openstudio::IPUnit ipu(IPExpnt(0, 1), 0, ""); Quantity l2(2.0, ipu); openstudio::BTUUnit btuu(BTUExpnt(0, 1), 0, ""); Quantity l3(0.5, btuu); Quantity r; r = l1 * l2; EXPECT_EQ(UnitSystem(UnitSystem::Mixed), r.system()); r = l1 / l2; EXPECT_EQ(UnitSystem(UnitSystem::Mixed), r.system()); l3 *= l1; EXPECT_EQ(UnitSystem(UnitSystem::Mixed), l3.system()); ASSERT_THROW(l1 + l2, Exception); }
import React, { useState } from 'react'; const CharacterCounter = () => { const [text, setText] = useState(''); return ( <div> <input type="text" onChange={e => setText(e.target.value)} /> <p>Number of characters: {text.length}</p> </div> ); }; export default CharacterCounter;
#!/bin/bash
# setup all Dependencies
# Installs the Ubuntu build toolchain and libraries needed to build the
# coin daemon and its Qt wallet, builds Berkeley DB 4.8 locally, then
# configures and compiles the project.

# Refresh package lists and upgrade the base system first.
sudo apt-get update
sudo apt-get upgrade -y
# Core build toolchain plus Boost development libraries.
sudo apt-get install -y build-essential libtool autotools-dev automake pkg-config libssl-dev libevent-dev bsdmainutils python3 libboost-system-dev libboost-filesystem-dev libboost-chrono-dev libboost-test-dev libboost-thread-dev libboost-all-dev libboost-program-options-dev
# UPnP, ZeroMQ and protobuf support.
sudo apt-get install -y libminiupnpc-dev libzmq3-dev libprotobuf-dev protobuf-compiler unzip software-properties-common
# NOTE(review): libboost-all-dev and libminiupnpc-dev are repeated from
# the lines above — harmless, apt treats them as already installed.
sudo apt-get install -y libboost-all-dev libdb++-dev libminiupnpc-dev
# Qt5 GUI dependencies for the wallet build.
sudo apt-get install -y libqt5gui5 libqt5core5a libqt5dbus5 qttools5-dev qttools5-dev-tools libprotobuf-dev protobuf-compiler
sudo apt-get install -y libqrencode-dev
# Build Berkeley DB 4.8 into the working tree (wallet-compatible version).
./contrib/install_db4.sh `pwd`
# NOTE(review): hard-coded home path — assumes the repo is checked out at
# /home/ubuntu/maximcoin; confirm before reuse on another machine.
export BDB_PREFIX='/home/ubuntu/maximcoin/db4'
chmod 777 autogen.sh
./autogen.sh
# Point configure at the locally built BDB 4.8.
./configure BDB_LIBS="-L${BDB_PREFIX}/lib -ldb_cxx-4.8" BDB_CFLAGS="-I${BDB_PREFIX}/include"
make
# Lay the uppercase alphabet out in a 26x26 grid and print every cell.
# With exactly 26 letters, only the first row receives characters; the
# remaining cells keep their initial 0 fill.
alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
matrix_size = len(alphabet)
col, row = 0, 0
matrix = [[0] * matrix_size for _ in range(matrix_size)]

for index, char in enumerate(alphabet):
    row, col = divmod(index, matrix_size)
    matrix[row][col] = char

# Each printed row is space-separated and ends with a trailing space,
# matching print(..., end=" ") cell-by-cell output.
for cells in matrix:
    print(*cells, end=" \n")
-- For each product, report the largest single units_sold value recorded
-- in sales_table.
SELECT product, MAX(units_sold)
FROM sales_table
GROUP BY product;
# encoding: utf-8
require 'method_doc'
require 'doc_parser'

module RapiDoc
  # ResourceDoc holds the information a resource contains. It parses the class header and also the
  # method documentation, which will be contained in MethodDoc.
  class ResourceDoc
    attr_reader :name, :resource_location, :controller_name, :function_blocks, :class_block

    # Initializes ResourceDoc.
    #
    # name                - resource name (used for the generated <name>.html)
    # resource_location   - location of the resource (currently unused by
    #                       controller_location; see note there)
    # controller_name     - file name of the controller to parse
    # options             - :standard_methods may override the default verb list
    #
    # Raises if the controller file cannot be found.
    def initialize(name, resource_location, controller_name, options = {})
      @name = name
      @standard_methods = options[:standard_methods] || [:put, :post, :get, :delete]
      @resource_location, @controller_name = resource_location, controller_name
      @function_blocks = []
      @method_codes = []
      @header_code = ""
      unless File.exist?(controller_location)
        raise "Unable to find or open controller. Make sure it's set properly in config/rapidoc/config.yml File: #{controller_location}"
      end
    end

    # returns the location of the controller that is to be parsed
    # NOTE(review): @resource_location is ignored here in favor of a
    # Rails-rooted doc/ path — confirm that is intentional.
    def controller_location
      # @resource_location
      "#{::Rails.root.to_s}/doc/#{controller_name}"
    end

    # Expose this instance's binding so ERB templates can evaluate
    # against the resource's state.
    def get_binding
      binding
    end

    # parse the controller
    # Scans the controller file line by line for =begin apidoc ... =end
    # blocks, collecting a class-level block and per-function blocks.
    def parse_apidoc!
      line_no = 0
      parser = DocParser.new
      order = 1
      File.open(controller_location).each do |line|
        case
        when line =~ /=begin apidoc/
          parser.start(order)
        when line =~ /=end/
          if parser.current_api_block.nil?
            # '=end' without a matching '=begin apidoc' is a fatal input error.
            puts "#{controller_location}:#{line_no} - No starttag for '=end' found"
            exit
          else
            case parser.current_scope
            when :class
              @class_block = parser.current_api_block
            when :function
              @function_blocks << parser.current_api_block
            end
            parser.reset_current_scope_and_api_block
            order += 1
          end
        when line =~ /class/
          parser.in_class = true
        when line =~ /::response-end::/, line =~ /::request-end::/, line =~ /::output-end::/
          # End-of-example markers switch the parser into function scope.
          parser.current_scope = :function
        else
          parser.parse(line)
        end
        line_no += 1
      end
      puts "Generated #{name}.html"
    end

    # Renders the resource to <temp_dir>/<name>.html using the layout
    # template, interpolating the parsed class header and method blocks.
    def generate_view!(resources, temp_dir)
      @resources = resources
      @header_code = get_parsed_header unless @class_block.nil?
      i = 1
      function_blocks.each do |mb|
        @method_codes << get_parsed_method(mb, i)
        i += 1
      end
      # write it to a file
      template = ""
      File.open(layout_file(:target)).each { |line| template << line }
      parsed = ERB.new(template).result(binding)
      File.open(File.join(temp_dir, name + ".html"), 'w') { |file| file.write parsed }
    end

    # Renders the class-level apidoc block with the resource-header template.
    def get_parsed_header
      template = ""
      File.open(File.join(File.dirname(__FILE__), '..', 'templates', '_resource_header.html.erb')).each { |line| template << line }
      ERB.new(template).result(@class_block.get_binding)
    end

    # Renders one method's apidoc block with the resource-method template.
    def get_parsed_method(method_block, method_order)
      template = ""
      File.open(File.join(File.dirname(__FILE__), '..', 'templates', '_resource_method.html.erb')).each { |line| template << line }
      return ERB.new(template).result(method_block.get_binding)
    end
  end
end
# Generate C++ protobuf sources (user.pb.cc / user.pb.h) from user.proto
# into the current directory.
# NOTE(review): the protoc path is machine-specific; prefer a protoc
# found on PATH when sharing this command.
/home/fractal/protobuf/src/protoc --cpp_out=. user.proto
#!/bin/bash # ch6/ebpf_stacktrace_eg/runit.sh # *************************************************************** # * This program is part of the source code released for the book # * "Linux Kernel Programming" # * (c) Author: Kaiwan N Billimoria # * Publisher: Packt # * GitHub repository: # * https://github.com/PacktPublishing/Linux-Kernel-Programming # * # * From: Ch 6 : Kernel and Memory Management Internals Essentials # **************************************************************** # * Brief Description: # * Script to demo using the stackcount-bpfcc BCC tool to trace both kernel # * and user-mode stacks of our Hello, world process for the write(s) # * # * For details, please refer the book, Ch 6. # **************************************************************** [ ! -f ./helloworld_dbg ] && { echo "Pl build the helloworld_dbg program first... (with 'make')" exit 1 } pkill helloworld_dbg 2>/dev/null ./helloworld_dbg >/dev/null & PID=$(pgrep helloworld_dbg) #PID=$(ps -e|grep "helloworld_dbg" |tail -n1|awk '{print $1}') [ -z "${PID}" ] && { echo "Oops, could not get PID of the helloworld_dbg process, aborting..." exit 1 } # Ubuntu specific name for BCC tool(s), pl adjust as required for other distros PRG=stackcount-bpfcc which ${PRG} >/dev/null [ $? -ne 0 ] && { echo "Oops, ${PRG} not installed? aborting..." exit 1 } echo "sudo ${PRG} -p ${PID} -r ".*sys_write.*" -v -d" sudo ${PRG} -p ${PID} -r ".*sys_write.*" -v -d exit 0
#!/bin/bash sbt -java-home /opt/zing/zing-jdk11 -no-colors -Dsbt.supershell=false -Dmacro.settings=print-codecs clean 'jsoniter-scala-benchmark/jmh:run -p size=128 -prof gc -rf json -rff zingjdk11.json .*' 2>&1 | tee zingjdk11.txt sbt -java-home /usr/lib/jvm/graalvm-ee-19 -no-colors -Dsbt.supershell=false -Dmacro.settings=print-codecs clean 'jsoniter-scala-benchmark/jmh:run -p size=128 -prof gc -rf json -rff graalvmee19.json .*' 2>&1 | tee graalvmee19.txt sbt -java-home /usr/lib/jvm/graalvm-ce-19 -no-colors -Dsbt.supershell=false -Dmacro.settings=print-codecs clean 'jsoniter-scala-benchmark/jmh:run -p size=128 -prof gc -rf json -rff graalvmce19.json .*' 2>&1 | tee graalvmce19.txt sbt -java-home /usr/lib/jvm/openjdk-11 -no-colors -Dsbt.supershell=false -Dmacro.settings=print-codecs clean 'jsoniter-scala-benchmark/jmh:run -jvm /usr/lib/jvm/openjdk-13/bin/java -jvmArgsAppend "-XX:+UnlockExperimentalVMOptions -XX:+UseJVMCICompiler" -p size=128 -prof gc -rf json -rff openjdk13graal.json .*' 2>&1 | tee openjdk13graal.txt sbt -java-home /usr/lib/jvm/openjdk-11 -no-colors -Dsbt.supershell=false -Dmacro.settings=print-codecs clean 'jsoniter-scala-benchmark/jmh:run -jvm /usr/lib/jvm/openjdk-13/bin/java -p size=128 -prof gc -rf json -rff openjdk13.json .*' 2>&1 | tee openjdk13.txt sbt -java-home /usr/lib/jvm/openjdk-11 -no-colors -Dsbt.supershell=false -Dmacro.settings=print-codecs clean 'jsoniter-scala-benchmark/jmh:run -p size=128 -prof gc -rf json -rff openjdk11.json .*' 2>&1 | tee openjdk11.txt sbt -java-home /usr/lib/jvm/openjdk-8 -no-colors -Dsbt.supershell=false -Dmacro.settings=print-codecs clean 'jsoniter-scala-benchmark/jmh:run -p size=128 -prof gc -rf json -rff openjdk8.json .*' 2>&1 | tee openjdk8.txt sbt -java-home /usr/lib/jvm/openj9-jdk11 -no-colors -Dsbt.supershell=false -Dmacro.settings=print-codecs clean 'jsoniter-scala-benchmark/jmh:run -p size=128 -prof gc -rf json -rff openj9jdk11.json .*' 2>&1 | tee openj9jdk11.txt
<gh_stars>0 import { GetterTree } from 'vuex'; import { RootState } from './types'; const suffix = '.json'; const prefix = './covid-19-gr-'; const getters: GetterTree<RootState, any> = { files(state: RootState): string[] { const results: string[] = []; for (const file in state.data) { results.push(file); } return results; }, availableDates(state: RootState, getters: any): string[] { return getters.files.map((file: string) => { const end = file.indexOf(suffix); return file.substring(prefix.length, end); }).filter((date: string) => date.startsWith('2020')); }, latestDate(state: RootState, getters): string | null { const total = getters.availableDates.length; return total > 0 ? getters.availableDates[total -1] : null; } }; export default getters;
var vows = require('vows'),
    assert = require('assert'),
    dissoc = require('../src/dissoc');

// Spec for dissoc(): removing a present key must be non-destructive to
// the input, and removing an absent key must return the input untouched.
vows.describe('dissoc()').addBatch({
  'Dissociating': {
    topic: function() {
      return { foo: 1, bar: 'baz' };
    },
    'a property results in a new object where all other properties are the same as in the original object': function(obj) {
      var without = dissoc(obj, 'foo');
      // A fresh object comes back, the removed key is gone, and the
      // original object keeps all of its properties.
      assert.notStrictEqual(without, obj);
      assert.isUndefined(without.foo);
      assert.equal(obj.foo, 1);
      assert.strictEqual(without.bar, obj.bar);
    },
    'a non-existing property returns the same unchanged object': function(obj) {
      var untouched = dissoc(obj, 'blah');
      // Identity is preserved when there is nothing to remove.
      assert.strictEqual(untouched, obj);
      assert.deepEqual(untouched, obj);
    },
  }
}).export(module);
import java.util.Scanner;

class Main {
    /**
     * For each of n test cases: reads a class size followed by that many
     * integer scores, then prints (to three decimal places, with a '%'
     * suffix) the percentage of students scoring strictly above the
     * class average.
     */
    public static void main(String[] args) {
        Scanner scan = new Scanner(System.in);
        int n = scan.nextInt();
        for (int i = 0; i < n; i++) {
            int total = scan.nextInt();
            int[] score = new int[total];
            int sum = 0;
            for (int j = 0; j < total; j++) {
                score[j] = scan.nextInt();
                sum += score[j];
            }
            // BUG FIX: `sum / total` was integer division, truncating the
            // average and misclassifying students near the boundary.
            double avg = (double) sum / total;
            int aboveAverage = 0;
            for (int j = 0; j < total; j++) {
                if (avg < score[j]) {
                    aboveAverage++;
                }
            }
            double percent = (double) aboveAverage / total * 100;
            System.out.printf("%.3f%%\n", percent);
        }
    }
}
#!/bin/bash # # Copyright 2019 The TCMalloc Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # This script that can be invoked to test tcmalloc in a hermetic environment # using a Docker image on Linux. You must have Docker installed to use this # script. set -euox pipefail if [ -z ${TCMALLOC_ROOT:-} ]; then TCMALLOC_ROOT="$(realpath $(dirname ${0})/..)" fi if [ -z ${STD:-} ]; then STD="c++17" fi if [ -z ${COMPILATION_MODE:-} ]; then COMPILATION_MODE="fastbuild opt" fi if [ -z ${EXCEPTIONS_MODE:-} ]; then EXCEPTIONS_MODE="-fno-exceptions -fexceptions" fi readonly DOCKER_CONTAINER="gcr.io/google.com/absl-177019/linux_hybrid-latest:20210617" # USE_BAZEL_CACHE=1 only works on Kokoro. # Without access to the credentials this won't work. if [[ ${USE_BAZEL_CACHE:-0} -ne 0 ]]; then DOCKER_EXTRA_ARGS="--volume=${KOKORO_KEYSTORE_DIR}:/keystore:ro ${DOCKER_EXTRA_ARGS:-}" # Bazel doesn't track changes to tools outside of the workspace # (e.g. /usr/bin/gcc), so by appending the docker container to the # remote_http_cache url, we make changes to the container part of # the cache key. Hashing the key is to make it shorter and url-safe. 
container_key=$(echo ${DOCKER_CONTAINER} | sha256sum | head -c 16) BAZEL_EXTRA_ARGS="--remote_http_cache=https://storage.googleapis.com/absl-bazel-remote-cache/${container_key} --google_credentials=/keystore/73103_absl-bazel-remote-cache ${BAZEL_EXTRA_ARGS:-}" fi for std in ${STD}; do for compilation_mode in ${COMPILATION_MODE}; do for exceptions_mode in ${EXCEPTIONS_MODE}; do echo "--------------------------------------------------------------------" time docker run \ --volume="${TCMALLOC_ROOT}:/tcmalloc:ro" \ --workdir=/tcmalloc \ --cap-add=SYS_PTRACE \ --rm \ -e CC="/usr/local/bin/gcc" \ -e BAZEL_CXXOPTS="-std=${std}" \ ${DOCKER_EXTRA_ARGS:-} \ ${DOCKER_CONTAINER} \ /usr/local/bin/bazel test ... \ --compilation_mode="${compilation_mode}" \ --copt="${exceptions_mode}" \ --define="absl=1" \ --distdir="/bazel-distdir" \ --keep_going \ --show_timestamps \ --test_env="GTEST_INSTALL_FAILURE_SIGNAL_HANDLER=1" \ --test_output=errors \ --test_tag_filters=-benchmark \ ${BAZEL_EXTRA_ARGS:-} done done done
# Fine-tune a BERT-base triple-similarity model on the "epidemic" task.
# All paths are resolved relative to the directory the script is run from.
CURRENT_DIR=`pwd`
export MODEL_DIR=$CURRENT_DIR/pretrained_models/bert-base
export DATA_DIR=$CURRENT_DIR/dataset
# NOTE(review): "OUTPUR_DIR" looks like a typo for OUTPUT_DIR, but it is
# used consistently below, so renaming would be a coordinated change.
export OUTPUR_DIR=$CURRENT_DIR/outputs
export TASK_NAME=epidemic
# ------------------ save every epoch --------------
# Trains on GPUs 0 and 1, logging and checkpointing every 220 steps and
# monitoring eval accuracy.
python task_triple_similarity_epidemic.py \
  --model_type=bert \
  --model_path=$MODEL_DIR \
  --task_name=$TASK_NAME \
  --do_train \
  --do_lower_case \
  --gpu=0,1 \
  --monitor=eval_acc \
  --data_dir=$DATA_DIR/${TASK_NAME}/ \
  --train_max_seq_length=128 \
  --eval_max_seq_length=128 \
  --per_gpu_train_batch_size=32 \
  --per_gpu_eval_batch_size=32 \
  --learning_rate=2e-5 \
  --num_train_epochs=4.0 \
  --logging_steps=220 \
  --save_steps=220 \
  --output_dir=$OUTPUR_DIR/${TASK_NAME}_output/ \
  --overwrite_output_dir \
  --seed=42
// Define a generic interface for the entity types
interface Entity<T> {
    T getType();
}

// Define the Single entity class
// A single-body entity identified only by its type tag.
class Single<T> implements Entity<T> {
    private T type;

    public Single(T type) {
        this.type = type;
    }

    public T getType() {
        return type;
    }
}

// Define the Multi entity class
// A multi-body entity: a type tag plus an extra payload of properties
// (e.g. aggregate mass data).
class Multi<T, U> implements Entity<T> {
    private T type;
    private U properties;

    public Multi(T type, U properties) {
        this.type = type;
        this.properties = properties;
    }

    public T getType() {
        return type;
    }

    public U getProperties() {
        return properties;
    }
}

// Define the physics simulation class
class PhysicsSimulation {
    // Method to calculate the total mass of the system
    // Dispatches on the concrete entity class; both branches currently
    // delegate to placeholder helpers that return 0.0, so the total is
    // always 0.0 until they are implemented.
    public static <T, U> double calculateTotalMass(Entity<T>[] entities) {
        double totalMass = 0.0;
        for (Entity<T> entity : entities) {
            if (entity instanceof Single) {
                // For single-body entity, add its mass directly
                totalMass += getSingleBodyMass((Single<T>) entity);
            } else if (entity instanceof Multi) {
                // For multi-body entity, calculate and add its total mass
                totalMass += getMultiBodyMass((Multi<T, U>) entity);
            }
        }
        return totalMass;
    }

    // Method to calculate the mass of a single-body entity
    // NOTE(review): placeholder — always returns 0.0. The commented
    // example compares strings with ==; use .equals() when implementing.
    private static <T> double getSingleBodyMass(Single<T> entity) {
        // Implement logic to calculate mass based on the single-body entity type
        // Example:
        // if (entity.getType() == "SIN") {
        //     return singleBodyMass;
        // }
        return 0.0; // Placeholder, replace with actual calculation
    }

    // Method to calculate the mass of a multi-body entity
    // NOTE(review): placeholder — always returns 0.0.
    private static <T, U> double getMultiBodyMass(Multi<T, U> entity) {
        // Implement logic to calculate mass based on the multi-body entity type and properties
        // Example:
        // if (entity.getType() == "MUL") {
        //     return multiBodyMass;
        // }
        return 0.0; // Placeholder, replace with actual calculation
    }
}

// Example usage
public class Main {
    public static void main(String[] args) {
        // Create entities
        // (raw Entity[] creation triggers an unchecked warning; fine for a demo)
        Entity<String>[] entities = new Entity[]{
            new Single<>("SIN"),
            new Multi<>("MUL", 10.0) // Example with properties (e.g., mass)
        };

        // Calculate total mass of the system
        double totalMass = PhysicsSimulation.calculateTotalMass(entities);
        System.out.println("Total mass of the system: " + totalMass);
    }
}
// Define the Storage trait
/// Minimal string key/value store abstraction.
pub trait Storage {
    fn get(&self, key: &str) -> Option<String>;
    fn set(&mut self, key: &str, value: &str) -> Result<(), String>;
}

// Implement the Storage trait for in-memory hash map storage
/// Key/value store backed by an in-process hash map.
pub struct HashMapStorage {
    data: std::collections::HashMap<String, String>,
}

impl HashMapStorage {
    /// Creates an empty in-memory store.
    pub fn new() -> Self {
        Self {
            data: std::collections::HashMap::new(),
        }
    }
}

impl Storage for HashMapStorage {
    /// Returns an owned copy of the stored value, if present.
    fn get(&self, key: &str) -> Option<String> {
        match self.data.get(key) {
            Some(value) => Some(value.clone()),
            None => None,
        }
    }

    /// Inserts or overwrites the value for `key`; never fails.
    fn set(&mut self, key: &str, value: &str) -> Result<(), String> {
        self.data.insert(key.to_owned(), value.to_owned());
        Ok(())
    }
}

// Define a custom error type for Redis storage operations
#[derive(Debug)]
pub struct RedisStorageError {
    message: String,
}

impl RedisStorageError {
    /// Wraps a human-readable message in the error type.
    pub fn new(message: &str) -> Self {
        Self {
            message: message.to_owned(),
        }
    }
}

// Implement the Storage trait for Redis storage using a Redis database
/// Redis-backed store; the client wiring is intentionally omitted here.
pub struct RedisStorage {
    // Assume the presence of a Redis client and relevant operations
    // For brevity, the Redis client and operations are not implemented here
}

impl Storage for RedisStorage {
    fn get(&self, key: &str) -> Option<String> {
        // Implement Redis-specific get operation
        unimplemented!()
    }

    fn set(&mut self, key: &str, value: &str) -> Result<(), String> {
        // Implement Redis-specific set operation
        unimplemented!()
    }
}
// Load global typings for their side effects, then re-export the gulp
// browserify wrapper module under the shorter `browserify` name.
import "typings-global";
export import browserify = require("./gulpbrowser.browserify");
package com.yin.springboot.user.center.server;

import java.util.List;

import com.yin.springboot.user.center.domain.TbUserRole;

/**
 * Service contract for persisting user-role associations.
 */
public interface TbUserRoleService {

    /** Updates every record in the list; returns the affected row count. */
    int updateBatch(List<TbUserRole> list);

    /** Inserts every record in the list; returns the affected row count. */
    int batchInsert(List<TbUserRole> list);

    /** Inserts the record, or updates it if it already exists. */
    int insertOrUpdate(TbUserRole record);

    /** Like {@link #insertOrUpdate}, but only writes non-null fields. */
    int insertOrUpdateSelective(TbUserRole record);
}
#!/bin/bash set -e set -o pipefail umask 0002 #### SET THE STAGE SCRATCH_DIR=/scratch/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41_91_temp$$ GSTORE_DIR=/srv/gstore/projects INPUT_DATASET=/srv/gstore/projects/p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41/input_dataset.tsv LAST_JOB=FALSE echo "Job runs on `hostname`" echo "at $SCRATCH_DIR" mkdir $SCRATCH_DIR || exit 1 cd $SCRATCH_DIR || exit 1 source /usr/local/ngseq/etc/lmod_profile module add Tools/samtools/1.10 Aligner/BWA/0.7.17 QC/Flexbar/3.0.3 QC/Trimmomatic/0.36 Dev/R/3.6.1 Tools/sambamba/0.6.7 #### NOW THE ACTUAL JOBS STARTS R --vanilla --slave<< EOT EZ_GLOBAL_VARIABLES <<- '/usr/local/ngseq/opt/EZ_GLOBAL_VARIABLES.txt' library(ezRun) param = list() param[['cores']] = '8' param[['ram']] = '16' param[['scratch']] = '100' param[['node']] = 'fgcz-c-048,fgcz-h-004,fgcz-h-006,fgcz-h-007,fgcz-h-008,fgcz-h-009,fgcz-h-010,fgcz-h-011,fgcz-h-012,fgcz-h-013,fgcz-h-014,fgcz-h-015,fgcz-h-016,fgcz-h-017,fgcz-h-018,fgcz-h-019' param[['process_mode']] = 'SAMPLE' param[['samples']] = '51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98,99,100' param[['refBuild']] = 'Finger_millet/KEN/DENOVO_v2.0_A_subgenome/Annotation/Release_01-2019-02-04' param[['paired']] = 'true' param[['algorithm']] = 'mem' param[['cmdOptions']] = '-M' param[['trimAdapter']] = 'true' param[['trim_front']] = '0' param[['trim_tail']] = '0' param[['cut_front']] = '0' param[['cut_tail']] = '0' param[['cut_right']] = '0' param[['average_qual']] = '0' param[['max_len1']] = '0' param[['poly_x_min_len']] = '10' param[['length_required']] = '18' param[['cmdOptionsFastp']] = '' param[['mail']] = '' param[['dataRoot']] = '/srv/gstore/projects' param[['resultDir']] = 'p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41' param[['isLastJob']] = FALSE output = list() output[['Name']] = '91' output[['BAM [File]']] = 
'p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41/91.bam' output[['BAI [File]']] = 'p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41/91.bam.bai' output[['IGV Starter [Link]']] = 'p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41/91-igv.jnlp' output[['Species']] = 'n/a' output[['refBuild']] = 'Finger_millet/KEN/DENOVO_v2.0_A_subgenome/Annotation/Release_01-2019-02-04' output[['paired']] = 'true' output[['refFeatureFile']] = '' output[['strandMode']] = '' output[['Read Count']] = '13818271' output[['IGV Starter [File]']] = 'p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41/91-igv.jnlp' output[['IGV Session [File]']] = 'p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41/91-igv.xml' output[['PreprocessingLog [File]']] = 'p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41/91_preprocessing.log' output[['Condition [Factor]']] = '' output[['Extract Id [B-Fabric]']] = 'bfe_59756' output[['FragmentSize [Characteristic]']] = '0' output[['SampleConc [Characteristic]']] = '25' output[['Tube [Characteristic]']] = 'p1634_3646/91' output[['Index [Characteristic]']] = 'AGCGATAG-CCTATCCT' output[['PlatePosition [Characteristic]']] = 'SA_030817_1_C12' output[['LibConc_100_800bp [Characteristic]']] = '83.3' output[['LibConc_qPCR [Characteristic]']] = '0' output[['InputAmount [Characteristic]']] = '100' input = list() input[['Name']] = '91' input[['Condition']] = '' input[['Read1']] = 'p1634/HiSeq4000_20170811_RUN373_o3646/20170811.B-91_R1.fastq.gz' input[['Read2']] = 'p1634/HiSeq4000_20170811_RUN373_o3646/20170811.B-91_R2.fastq.gz' input[['Species']] = 'n/a' input[['FragmentSize']] = '0' input[['SampleConc']] = '25' input[['Tube']] = 'p1634_3646/91' input[['Extract Id']] = 'bfe_59756' input[['Index']] = 'AGCGATAG-CCTATCCT' input[['PlatePosition']] = 'SA_030817_1_C12' input[['LibConc_100_800bp']] = '83.3' input[['LibConc_qPCR']] = '0' input[['Adapter1']] = 'GATCGGAAGAGCACACGTCTGAACTCCAGTCAC' input[['Adapter2']] = 'AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT' 
input[['strandMode']] = 'both' input[['LibraryPrepKit']] = 'TruSeq DNA Nano' input[['EnrichmentMethod']] = 'None' input[['InputAmount']] = '100' input[['Read Count']] = '13818271' EzAppBWA\$new()\$run(input=input, output=output, param=param) EOT #### JOB IS DONE WE PUT THINGS IN PLACE AND CLEAN AUP g-req -w copy 91.bam /srv/gstore/projects/p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41 g-req -w copy 91.bam.bai /srv/gstore/projects/p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41 g-req -w copy 91-igv.jnlp /srv/gstore/projects/p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41 g-req -w copy 91-igv.xml /srv/gstore/projects/p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41 g-req -w copy 91_preprocessing.log /srv/gstore/projects/p1634/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41 cd /scratch rm -rf /scratch/BWA_Reseq_low_051_100_on_A_2020-03-16--07-41-41_91_temp$$ || exit 1
#!/bin/bash
# system-stats/system-stats-1.sh 3.75.189 2018-08-12_21:26:10_CDT https://github.com/BradleyA/pi-display uadmin three-rpi3b.cptx86.com 3.74
# sync to standard script design changes
# system-stats/system-stats-1.sh 2.8.47 2018-02-28_12:46:40_CST https://github.com/BradleyA/pi-display uadmin three-rpi3b.cptx86.com 2.7-7-gca0e0da
# moved cpu-temperature and syste-stats project into this repository
# system-stats-1.sh 1.12.51 2018-02-23_09:09:04_CST uadmin six-rpi3b.cptx86.com 1.11-3-g63003c3
# added a comment about project
# ../system-stats/system-stats-1.sh 1.4.34 2018-02-21_21:22:51_CST uadmin six-rpi3b.cptx86.com 1.3
# ruff draft addition display_help cpu-temperature.sh & system-stats-1.sh
# system-stats-1.sh 1.0 2017-12-20_22:12:37_CST uadmin rpi3b-two.cptx86.com
# initial version
###
# evaluate different states for raspberry pi
###
DEBUG=0 # 0 = debug off, 1 = debug on
# set -x
# set -v
# Terminal attributes used to highlight the WARNING tag in messages.
BOLD=$(tput bold)
NORMAL=$(tput sgr0)
###
# Print usage/help text; warns (to stderr) when the locale is not en_US.UTF-8.
display_help() {
echo -e "\n${NORMAL}${0} - >>> NEED TO COMPLETE THIS SOON, ONCE I KNOW HOW IT IS GOING TO WORK :-) <<<"
echo -e "\nUSAGE\n ${0}"
echo " ${0} [--help | -help | help | -h | h | -? | ?]"
echo " ${0} [--version | -version | -v]"
echo -e "\nDESCRIPTION\nXXXXXX "
echo -e "\nOPTIONS "
echo -e "\nDOCUMENTATION\n https://github.com/BradleyA/pi-scripts/tree/master/system-stats-1.sh"
echo -e "\nEXAMPLES\n ${0}\n\n XXXXXX\n"
if ! [ "${LANG}" == "en_US.UTF-8" ] ; then
echo -e "${NORMAL}${0} ${LINENO} [${BOLD}WARNING${NORMAL}]: Your language, ${LANG}, is not supported.\n\tWould you like to help?\n" 1>&2
fi
}
# Handle the many accepted spellings of --help and --version; for --version the
# version string is scraped from the second line of this very file.
if [ "$1" == "--help" ] || [ "$1" == "-help" ] || [ "$1" == "help" ] || [ "$1" == "-h" ] || [ "$1" == "h" ] || [ "$1" == "-?" ] || [ "$1" == "?" ] ; then
display_help
exit 0
fi
if [ "$1" == "--version" ] || [ "$1" == "-version" ] || [ "$1" == "version" ] || [ "$1" == "-v" ] ; then
head -2 ${0} | awk {'print$2"\t"$3'}
exit 0
fi
###
# if [ "${DEBUG}" == "1" ] ; then echo -e "> DEBUG ${LINENO} >${0}< >${1}<" 1>&2 ; fi
###
echo -e "Need to determine what I want to use in these notes to move forward with this project. On hold until a later time.\n"
echo -e " --> Hostname = " `hostname`
# Read the SoC temperature via the VideoCore firmware tool and strip the
# "temp=" prefix and trailing unit so only the number remains.
CPUTEMP=$(/usr/bin/vcgencmd measure_temp | \
sed -e 's/?C$//' | \
sed -e 's/temp=//')
CPUTEMP=${CPUTEMP//\'C/}
echo "Celsius = " $CPUTEMP
echo $CPUTEMP | awk '{print "Fahrenheit = " 1.8 * $1 +32}'
cat /proc/cpuinfo
echo ""
# Shows clock frequency, clock can be one of arm, core, h264, isp, v3d, uart, pwm, emmc, pixel, vec, hdmi, dpi.
for src in arm core h264 isp v3d uart pwm emmc pixel vec hdmi dpi ; do \
echo -e "$src:\t$(vcgencmd measure_clock $src)" ; \
done
echo ""
echo "/opt/vc/bin/vcgencmd measure_temp"
/opt/vc/bin/vcgencmd measure_temp
echo ""
echo "/opt/vc/bin/vcgencmd measure_volts"
/opt/vc/bin/vcgencmd measure_volts
# Shows voltage. id can be one of core, sdram_c, sdram_i, sdram_p, and defaults to core if not specified.
echo ""
for id in core sdram_c sdram_i sdram_p ; do \
echo -e "$id:\t$(vcgencmd measure_volts $id)" ; \
done
echo ""
echo "/usr/bin/vcgencmd measure_temp"
/usr/bin/vcgencmd measure_temp
# cgencmd get_config [config|int|str] Will print the configurations you have set.
# Argument can ether be a specific option or int, showing all configs with
# number-datatype, or str showing all configurations with datatype sting (aka text).
echo ""
vcgencmd get_config int
# Shows how much memory is split between the CPU (arm) and GPU.
echo ""
vcgencmd get_mem arm && vcgencmd get_mem gpu
# Shows the firmware version
echo ""
vcgencmd version
# NOTE(review): `echo""` parses as the command name `echo` concatenated with an
# empty string, so it still prints a blank line; probably meant `echo ""`.
echo""
# Here is a script I found on the internet written by Mickaël Le Baillif
# https://superuser.com/users/177298/micka%c3%abl-le-baillif
# NOTE(review): with the `cat /proc/cpuinfo |` line commented out, the awk
# command below reads stdin and will block when run interactively — confirm
# whether the comment marker is intentional.
# cat /proc/cpuinfo | \
awk -v FS=':' ' \
/^physical id/ { if(nb_cpu<$2) { nb_cpu=$2 } } \
/^cpu cores/ { if(nb_cores<$2){ nb_cores=$2 } } \
/^processor/ { if(nb_units<$2){ nb_units=$2 } } \
/^model name/ { model=$2 } \
\
END{ \
nb_cpu=(nb_cpu+1); \
nb_units=(nb_units+1); \
\
print "CPU model:",model; \
print nb_cpu,"CPU,",nb_cores,"physical cores per CPU, total",nb_units,"logical CPU units" \
}'
# This is a very cool cluster ssh tool that needs more of my time written by Duncan Ferguson
# Cluster SSH - Cluster Admin Via SSH https://github.com/duncs/clusterssh/wiki
#
# The command opens an administration console and an xterm to all
# specified hosts. Any text typed into the administration console is
# replicated to all windows. All windows may also be typed into directly.
# More work needed to see how itf I want to use this
# xinetd - the extended Internet services daemon
# sudo apt install xinetd
#
# xinetd performs the same function as inetd: it starts programs that provide Internet services.
# Instead of having such servers started at system initialization time, and be dormant until
# a connection request arrives, xinetd is the only dae‐mon process started and it listens
# on all service ports for the services listed in its configuration file. When a request
<reponame>nepoche/webb.js<filename>packages/api-providers/src/utils/relayer-utils.ts // Copyright 2022 @nepoche/ // SPDX-License-Identifier: Apache-2.0 import { InternalChainId } from '../chains/index.js'; export function relayerSubstrateNameToChainId (name: string): InternalChainId { switch (name) { case 'localnode': return InternalChainId.WebbDevelopment; } throw new Error('unhandled relayed chain name ' + name); } export function relayerNameToChainId (name: string): InternalChainId { switch (name) { case 'beresheet': return InternalChainId.EdgewareTestNet; case 'harmonytestnet1': return InternalChainId.HarmonyTestnet1; case 'harmonytestnet0': return InternalChainId.HarmonyTestnet0; case 'harmonymainnet0': return InternalChainId.HarmonyMainnet0; case 'ganache': return InternalChainId.Ganache; case 'webb': case 'edgeware': case 'hedgeware': break; case 'ropsten': return InternalChainId.Ropsten; case 'rinkeby': return InternalChainId.Rinkeby; case 'goerli': return InternalChainId.Goerli; case 'kovan': return InternalChainId.Kovan; case 'shiden': return InternalChainId.Shiden; case 'optimismtestnet': return InternalChainId.OptimismTestnet; case 'arbitrumtestnet': return InternalChainId.ArbitrumTestnet; case 'polygontestnet': return InternalChainId.PolygonTestnet; } throw new Error('unhandled relayed chain name ' + name); } enum RelayerChainName { Edgeware = 'edgeware', Webb = 'webb', Ganache = 'ganache', Beresheet = 'beresheet', HarmonyTestnet0 = 'harmonytestnet0', HarmonyTestnet1 = 'harmonytestnet1', HarmonyMainnet0 = 'harmonymainnet0', Ropsten = 'ropsten', Rinkeby = 'rinkeby', Goerli = 'goerli', Kovan = 'kovan', Shiden = 'shiden', OptimismTestnet = 'optimismtestnet', ArbitrumTestnet = 'arbitrumtestnet', PolygonTestnet = 'polygontestnet' } export function chainIdToRelayerName (id: InternalChainId): string { switch (id) { case InternalChainId.Edgeware: return RelayerChainName.Edgeware; case InternalChainId.EdgewareTestNet: return RelayerChainName.Beresheet; case 
InternalChainId.EdgewareLocalNet: break; case InternalChainId.EthereumMainNet: break; case InternalChainId.Rinkeby: return RelayerChainName.Rinkeby; case InternalChainId.Ropsten: return RelayerChainName.Ropsten; case InternalChainId.Kovan: return RelayerChainName.Kovan; case InternalChainId.Goerli: return RelayerChainName.Goerli; case InternalChainId.HarmonyTestnet0: return RelayerChainName.HarmonyTestnet0; case InternalChainId.HarmonyTestnet1: return RelayerChainName.HarmonyTestnet1; case InternalChainId.HarmonyMainnet0: return RelayerChainName.HarmonyMainnet0; case InternalChainId.Shiden: return RelayerChainName.Shiden; case InternalChainId.OptimismTestnet: return RelayerChainName.OptimismTestnet; case InternalChainId.ArbitrumTestnet: return RelayerChainName.ArbitrumTestnet; case InternalChainId.PolygonTestnet: return RelayerChainName.PolygonTestnet; } throw new Error(`unhandled Chain id ${id}`); }
# The script must be sourced by install_MLiy.sh
# Copyright 2017 MLiy Contributors

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

# http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Download/stage all third-party artifacts (R stack, RStudio/Shiny, ML tools,
# optional GPU toolchain, ODBC/JDBC drivers) into the analyst home directory.
# Relies on PROXY_SCRIPT, LDAP_HOST_NAME, GPU_TYPE and SCRIPT_DIR being
# exported by the sourcing script.

# Setup proxy if needed
if [[ ! -z "$PROXY_SCRIPT" && -f $PROXY_SCRIPT ]]; then
source $PROXY_SCRIPT
fi

# Change to Analyst home directory to install/configure
cd ~analyst

if [[ ! -z "$LDAP_HOST_NAME" ]]; then
# LDAP packages
wget -q https://arthurdejong.org/nss-pam-ldapd/nss-pam-ldapd-0.9.7.tar.gz
tar xzf nss-pam-ldapd-0.9.7.tar.gz && rm -f nss-pam-ldapd-0.9.7.tar.gz
wget -q https://fedorapeople.org/cgit/adelton/public_git/mod_authnz_pam.git/snapshot/mod_authnz_pam-1.0.2.tar.gz
tar xzf mod_authnz_pam-1.0.2.tar.gz && rm -f mod_authnz_pam-1.0.2.tar.gz
fi

# Install NLopt
wget -q https://dl.fedoraproject.org/pub/epel/6/x86_64/Packages/n/NLopt-2.4.2-2.el6.x86_64.rpm
wget -q https://dl.fedoraproject.org/pub/epel/6/x86_64/Packages/n/NLopt-devel-2.4.2-2.el6.x86_64.rpm
yum install -y -q NLopt-2.4.2-2.el6.x86_64.rpm NLopt-devel-2.4.2-2.el6.x86_64.rpm && rm -f NLopt-2.4.2-2.el6.x86_64.rpm NLopt-devel-2.4.2-2.el6.x86_64.rpm

# Install HDF5
wget -q https://support.hdfgroup.org/ftp/HDF5/prev-releases/hdf5-1.8/hdf5-1.8.9/bin/RPMS/x86_64/hdf5-1.8.9-1.el6.x86_64.rpm
yum install -y -q hdf5-1.8.9-1.el6.x86_64.rpm && rm -f hdf5-1.8.9-1.el6.x86_64.rpm

# Download R
wget -q https://cran.r-project.org/src/base/R-3/R-3.5.1.tar.gz
tar xzf R-3.5.1.tar.gz && rm -f R-3.5.1.tar.gz

# Download AWS packages
wget -q https://github.com/cloudyr/cloudyr.github.io/raw/master/drat/src/contrib/aws.signature_0.3.5.tar.gz
wget -q https://github.com/cloudyr/cloudyr.github.io/raw/master/drat/src/contrib/aws.s3_0.3.8.tar.gz
wget -q https://github.com/cloudyr/cloudyr.github.io/raw/master/drat/src/contrib/aws.ec2metadata_0.1.2.tar.gz

# Download bsts packages
wget -q https://cran.r-project.org/src/contrib/bsts_0.8.0.tar.gz
wget -q https://cran.r-project.org/src/contrib/Boom_0.8.tar.gz
wget -q https://cran.r-project.org/src/contrib/Archive/RcppEigen/RcppEigen_0.3.3.4.0.tar.gz
wget -q https://cran.r-project.org/src/contrib/BoomSpikeSlab_1.0.0.tar.gz

# Download openBLAS
wget -q https://github.com/xianyi/OpenBLAS/archive/v0.3.5.tar.gz
tar xzf v0.3.5.tar.gz && rm -f v0.3.5.tar.gz

# Install RStudio
wget -q https://download2.rstudio.org/rstudio-server-rhel-1.1.463-x86_64.rpm
yum install -y -q --nogpgcheck rstudio-server-rhel-1.1.463-x86_64.rpm && rm -f rstudio-server-rhel-1.1.463-x86_64.rpm

# Install RShiny
wget -q https://download3.rstudio.org/centos6.3/x86_64/shiny-server-1.5.7.907-rh6-x86_64.rpm
yum install -y -q --nogpgcheck shiny-server-1.5.7.907-rh6-x86_64.rpm && rm -f shiny-server-1.5.7.907-rh6-x86_64.rpm

# Extract WEKA
wget -q https://prdownloads.sourceforge.net/weka/weka-3-8-2.zip
unzip -q weka-3-8-2.zip && rm -f weka-3-8-2.zip

# Install H2O
wget -q https://h2o-release.s3.amazonaws.com/h2o/rel-wright/3/h2o-3.20.0.3.zip
unzip -q h2o-3.20.0.3.zip && rm h2o-3.20.0.3.zip

# Install Scala and SBT
wget -q https://downloads.lightbend.com/scala/2.11.8/scala-2.11.8.tgz
tar xzf scala-2.11.8.tgz && rm -f scala-2.11.8.tgz
wget -q https://downloads.lightbend.com/scala/2.10.6/scala-2.10.6.tgz
tar xzf scala-2.10.6.tgz && rm -f scala-2.10.6.tgz
wget -q https://piccolo.link/sbt-0.13.17.tgz
tar xzf sbt-0.13.17.tgz && rm -f sbt-0.13.17.tgz

# Install Spark
wget -q https://archive.apache.org/dist/spark/spark-2.4.0/spark-2.4.0-bin-hadoop2.7.tgz
tar xzf spark-2.4.0-bin-hadoop2.7.tgz -C /usr/local/ && ln -s /usr/local/spark-2.4.0-bin-hadoop2.7 /usr/local/spark && rm -f spark-2.4.0-bin-hadoop2.7.tgz

# GPU-only artifacts: CUDA toolkit + patches, cuDNN, NCCL, Theano/Torch stack.
if [[ ! -z $GPU_TYPE ]] ; then
# Cuda Toolkit
wget -q https://developer.nvidia.com/compute/cuda/9.0/Prod/local_installers/cuda_9.0.176_384.81_linux-run
wget -q https://developer.nvidia.com/compute/cuda/9.0/Prod/patches/1/cuda_9.0.176.1_linux-run
wget -q https://developer.nvidia.com/compute/cuda/9.0/Prod/patches/2/cuda_9.0.176.2_linux-run
# Cannot download CuDnn from NVidia
wget -q http://developer.download.nvidia.com/compute/redist/cudnn/v7.0.5/cudnn-9.0-linux-x64-v7.tgz
# NCCL
wget -q -O nccl_2.2.13-1+cuda9.0_x86_64.txz https://sourceforge.net/projects/tensorrt-rel/files/4/CUDA-9.0/nccl_2.2.13-1%2Bcuda9.0_x86_64.txz/download
# Theano Pre-requisites
wget -q https://download.opensuse.org/repositories/home:/Milliams/CentOS_CentOS-6/x86_64/cmake-3.0.0-142.1.x86_64.rpm
git clone https://github.com/Theano/libgpuarray.git >/dev/null
# Install Torch and dependencies
git clone https://github.com/torch/distro.git torch --recursive >/dev/null
wget -q https://dl.fedoraproject.org/pub/epel/6/x86_64/Packages/o/openpgm-5.1.118-3.el6.x86_64.rpm
wget -q https://dl.fedoraproject.org/pub/epel/6/x86_64/Packages/z/zeromq3-3.2.5-1.el6.x86_64.rpm
wget -q https://dl.fedoraproject.org/pub/epel/6/x86_64/Packages/z/zeromq3-devel-3.2.5-1.el6.x86_64.rpm
yum install -y -q openpgm-5.1.118-3.el6.x86_64.rpm zeromq3-3.2.5-1.el6.x86_64.rpm zeromq3-devel-3.2.5-1.el6.x86_64.rpm && rm -f openpgm-5.1.118-3.el6.x86_64.rpm zeromq3-3.2.5-1.el6.x86_64.rpm zeromq3-devel-3.2.5-1.el6.x86_64.rpm
# download iTorch
git clone https://github.com/facebook/iTorch.git >/dev/null
# Download pytorch
git clone https://github.com/hughperkins/pytorch.git >/dev/null
fi

# ODBC Driver Manager
# NOTE(review): fetched over plain http with no checksum verification — consider
# pinning a checksum for supply-chain safety.
wget -q http://www.unixodbc.org/unixODBC-2.3.6.tar.gz
tar xzf unixODBC-2.3.6.tar.gz && rm -f unixODBC-2.3.6.tar.gz

# MS SQL Server ODBC Driver
wget -q https://github.com/tax/mssqldriver/raw/master/msodbcsql-11.0.2270.0.tar.gz
tar xzf msodbcsql-11.0.2270.0.tar.gz && rm -f msodbcsql-11.0.2270.0.tar.gz

# JDBC Drivers
mkdir jdbc
cd jdbc
wget -q https://s3.amazonaws.com/redshift-downloads/drivers/RedshiftJDBC41-1.1.10.1010.jar
wget -q https://jdbc.postgresql.org/download/postgresql-9.4.1207.jar
wget -q http://awssupportdatasvcs.com/bootstrap-actions/Simba/AmazonHiveJDBC-1.0.9.1060.zip
cd $SCRIPT_DIR
import { Firewall } from "./firewall"; import { Snapshot } from "./snapshot"; export class Qemu { firewall: Firewall; snapshot: Snapshot; getStatus(node, qemu) get(node, qemu) del(node, qemu) getStatusCurrent(node, qemu) start(node, qemu) stop(node, qemu) reset(node, qemu) shutdown(node, qemu) suspend(node, qemu) resume(node, qemu) rrd(node, qemu, data) rrdData(node, qemu, data) getConfig(node, qemu) updateConfig(node, qemu, data) setConfig(node, qemu, data) pending(node, qemu) unlink(node, qemu, data) vncproxy(node, qemu) vncwebsocket(node, qemu, data) spiceproxy(node, qemu, data) sendkey(node, qemu, data) feature(node, qemu, data) clone(node, qemu, data) moveDisk(node, qemu, data) migrate(node, qemu, data) monitor(node, qemu, data) resize(node, qemu, data) template(node, qemu) }
import {url} from "../util/utils"
import {parseContentType} from "./contentType"
import {Enhancer, ZealotPayload, ZReponse} from "../types"
import {createIterator} from "./iterator"
import {createStream} from "./stream"
import {createError} from "../util/error"
import {createPushableIterator} from "./pushable_iterator"
import {parseLines} from "../ndjson/lines"

// Request description shared by all three transports below.
export type FetchArgs = {
  path: string
  method: string
  body?: string | FormData
  headers?: Headers
  enhancers?: Enhancer[]
  signal?: AbortSignal
}

/**
 * Builds an HTTP client bound to `host` with three transports:
 *  - promise: one-shot fetch, resolves with the parsed body
 *  - stream:  fetch whose body is consumed incrementally via an iterator
 *  - upload:  XMLHttpRequest-based upload that reports progress events
 */
export function createFetcher(host: string) {
  return {
    // Resolves with the parsed content on 2xx; otherwise rejects with a
    // wrapped error built from the (still parsed) error body.
    async promise(args: FetchArgs) {
      const {path, method, body, signal, headers} = args
      const resp = await fetch(url(host, path), {method, body, signal, headers})
      const content = await parseContentType(resp)
      return resp.ok ? content : Promise.reject(createError(content))
    },
    // Like promise(), but on success hands the response to an iterator so the
    // caller can consume the payload as a stream.
    async stream(args: FetchArgs): Promise<ZReponse> {
      const {path, method, body, signal, headers} = args
      const resp = await fetch(url(host, path), {method, body, signal, headers})
      if (!resp.ok) {
        const content = await parseContentType(resp)
        return Promise.reject(createError(content))
      }
      const iterator = createIterator(resp, args)
      return createStream(iterator, resp)
    },
    // Upload via XHR (fetch has no upload-progress events). Progress, response
    // lines, errors and completion are all pushed into one pushable iterator;
    // the returned stream wraps that iterator.
    async upload(args: FetchArgs): Promise<ZReponse> {
      return new Promise((resolve) => {
        const iterator = createPushableIterator<ZealotPayload>()
        const xhr = new XMLHttpRequest()
        // Forward upload progress as UploadProgress payloads (0..1).
        xhr.upload.addEventListener("progress", (e) => {
          if (!e.lengthComputable) return
          iterator.push({
            value: {type: "UploadProgress", progress: e.loaded / e.total},
            done: false
          })
        })
        // On completion, push each NDJSON line of the response body.
        xhr.addEventListener("load", async () => {
          for (const value of parseLines(xhr.responseText))
            iterator.push({value, done: false})
        })
        xhr.addEventListener("error", () => {
          iterator.throw(new Error(xhr.responseText))
        })
        // loadend fires after load OR error, terminating the iterator.
        xhr.addEventListener("loadend", () => {
          iterator.push({done: true, value: undefined})
        })
        xhr.open(args.method, url(host, args.path), true)
        if (args.headers) {
          for (const [header, val] of args.headers.entries())
            xhr.setRequestHeader(header, val)
        }
        xhr.send(args.body)
        // Resolve immediately with the stream; events arrive asynchronously.
        resolve(createStream(iterator, xhr))
      })
    }
  }
}
<reponame>dragondjf/QMarkdowner #!/usr/bin/env python # -*- coding: utf-8 -*- import socket import pkg import logging logger = logging.getLogger(__name__) def deal_pkg(dc_ip, raw, ver=2): header, body = pkg.unpack(raw, ver) back = {} if header.cmd == pkg.GET_SAMPLING_CTRL_RSP: back = body.apply_to_dc(back) if header.cmd == pkg.GET_CHANNEL_CTRL_RSP: # back = body.apply_to_pa(back) back = body elif header.cmd == pkg.GET_BASE_INFO_RSP: back = body.apply_to_dc(back) elif header.cmd == pkg.GET_COMM_INFO_RSP: back = body.apply_to_dc(back) elif header.cmd == pkg.GET_DEVICE_FUNCTION_RSP: back = body.apply_to_dc(back) return back def send_cmd(sk, address, header=None, body=None): if body: buf = pkg.combine(header, body).pack() else: buf = header.pack() try: sk.sendto(buf, address) except Exception, e: logger.error(e) def getinforsp(ip, port, cmd, channel=1, ver=2): back = "timeout check ip" try: address = (ip, port) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.settimeout(5) header = pkg.Header(cmd=cmd, channel=channel) send_cmd(s, address, header) raw, address = s.recvfrom(1024) ip = address[0] back = deal_pkg(ip, raw, ver) s.close() except Exception, e: raise logger.error(e) return back def setinforsp(ip, port, cmd, ver=2, body_dict=None): try: address = (ip, port) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.settimeout(2) header, body = pkg.get_pair(cmd, ver) if body_dict: body.load_from_dict(body_dict) send_cmd(s, address, header, body) raw, address = s.recvfrom(1024) header, body = pkg.unpack(raw, ver) s.close() if header.ret == 0: return True else: return False except Exception, e: logger.error(e) return False def save_reboot(ip, port, ver=2): try: address = (ip, port) s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.settimeout(5) header, save = pkg.get_pair(0x0d, ver) send_cmd(s, address, header, save) raw, address = s.recvfrom(1024) header, body = pkg.unpack(raw, 2) import time time.sleep(1) header, reboot = pkg.get_pair(0x0c, ver) 
send_cmd(s, address, header, reboot) raw, address = s.recvfrom(1024) header, body = pkg.unpack(raw, 2) s.close() except Exception, e: logger.error(e) return False else: return True
sudo apt-get update sudo apt-get install -y nginx sudo apt-get install -y default-jdk sudo apt-get install -y maven sudo ufw allow 'Nginx Full' sudo ufw --force enable sudo systemctl reload nginx curl https://www.shiftleft.io/download/sl-latest-linux-x64.tar.gz > /tmp/sl.tar.gz && sudo tar -C /usr/local/bin -xzf /tmp/sl.tar.gz sl auth --org "0a4c8175-57fa-4b4c-91fb-b79b98d4cd3a" --token "eyJhbGciOiJSUzUxMiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE1ODU5NDYxODksImlzcyI6IlNoaWZ0TGVmdCIsIm9yZ0lEIjoiMGE0YzgxNzUtNTdmYS00YjRjLTkxZmItYjc5Yjk4ZDRjZDNhIiwic2NvcGVzIjpbImFwaTp2MiIsInVwbG9hZHM6d3JpdGUiLCJsb2c6d3JpdGUiLCJwaXBlbGluZXN0YXR1czpyZWFkIiwibG9nOndyaXRlIiwicG9saWNpZXM6cmVhZCJdfQ.TflrHbl4Ypdkj9fsNdy8YocDdFoGhfyWOSZiflYoSec1d2Rymwnx82fKUpU40UUzNcjxPU9113q3npZIYwZdxTJ3A_UXd136ohHBi8hC-ui9C9cGc5w7Cg-0GrWcjT1ixLRP5G1QL_GzU4ozDbbpiRwKeKhguNqGs24DhcIaWl8m1ZcsvloxCkUqFUYxxkb3YfFn7wpa7paRLOQC3ZV2fLzEVwDuoFCuiKrWNZ7GDBtgsR6vB_uP0KImIxqZJhRlYcvcumYvd5UEoZ4cHfwkQJjy35vIkhzEKMSBDiYaX8MHmt4wjbnhGIN8h94afr4tfAGpk-hDzqechw1f_LBAaQ" git clone https://github.com/arunmecheri/HelloShiftLeft.git cd HelloShiftLeft mvn clean package sudo mv /etc/nginx/sites-available/default /etc/nginx/sites-available/default.old sudo cat <<EOF > /etc/nginx/sites-available/default server { listen 80; server_name _; location / { proxy_pass http://127.0.0.1:8081; proxy_http_version 1.1; proxy_set_header Upgrade \$http_upgrade; proxy_set_header Connection 'upgrade'; proxy_set_header Host \$host; proxy_cache_bypass \$http_upgrade; } } EOF sudo systemctl reload nginx java -jar target/hello-shiftleft-0.0.1.jar & # sl run --analyze target/hello-shiftleft-0.0.1.jar --app hello-shiftleft-CD -- java -jar target/hello-shiftleft-0$
#!/bin/bash SERVICE_ACCOUNT=$1 PROJECT_ID=$2 KEY_NAME=$SERVICE_ACCOUNT-key.json gcloud iam service-accounts create $SERVICE_ACCOUNT gcloud projects add-iam-policy-binding $PROJECT_ID \ --member "serviceAccount:$SERVICE_ACCOUNT@$PROJECT_ID.iam.gserviceaccount.com" --role "roles/owner" gcloud iam service-accounts keys create ~/$KEY_NAME \ --iam-account $SERVICE_ACCOUNT@$PROJECT-ID.iam.gserviceaccount.com
def mean_absolute_difference(list1, list2): differences = [] for i, e in enumerate(list1): differences.append(abs(list1[i] - list2[i])) return sum(differences) / len(differences)
import React, { useEffect, useState } from 'react';
import PropTypes from 'prop-types';
import { makeStyles } from '@material-ui/core/styles';
import { connect } from 'react-redux';
import { addComment } from 'actions/post';
import { getCurrentProfile } from 'actions/profile';
import GridContainer from 'components/Grid/GridContainer.js';
import GridItem from 'components/Grid/GridItem.js';
import Button from 'components/CustomButtons/Button.js';
import Media from 'components/Media/Media.js';
import CustomInput from 'components/CustomInput/CustomInput.js';
import Spinner from 'layout/Spinner'
import style from 'assets/jss/material-kit-pro-react/views/componentsSections/contentAreas.js';
import Reply from '@material-ui/icons/Reply';

const useStyles = makeStyles(style);

// Comment form for a post: loads the current user's profile (for the avatar),
// shows a spinner until it arrives, then renders a textarea + Reply button
// that dispatches addComment(postId, { text }).
const CreateComment = ({ getCurrentProfile, postId, addComment, profile: { profile } }) => {
  // Controlled input state for the comment text.
  const [text, setText] = useState('');
  const classes = useStyles();

  // Dispatch the new comment, then clear the input.
  const onSubmit = (e) => {
    e.preventDefault();
    addComment(postId, { text });
    setText('');
  };

  const onChange = (e) => setText(e.target.value);

  // Fetch the profile once on mount (and if the action identity changes).
  useEffect(() => {
    getCurrentProfile();
  }, [getCurrentProfile]);

  return (
    <div className="cd-section" id="contentAreas">
      <div id="comments">
        <GridContainer>
          {profile === null ? (
            // Profile not loaded yet — show the spinner.
            <Spinner />
          ) : (
            <GridItem xs={12} sm={12} md={12}>
              <h3 className={classes.textCenter}>Leave a Comment</h3>
              <form className={classes.form} onSubmit={onSubmit}>
                <Media
                  avatar={profile.user.avatar}
                  body={
                    <CustomInput
                      id="logged"
                      formControlProps={{
                        fullWidth: true
                      }}
                      inputProps={{
                        name: 'text',
                        placeholder: 'Comment the post',
                        value: text,
                        onChange: onChange,
                        multiline: true,
                        rows: 5
                      }}
                    />
                  }
                  footer={
                    <Button
                      color="primary"
                      className={classes.floatRight}
                      type="submit"
                      value="Submit"
                    >
                      <Reply /> Reply
                    </Button>
                  }
                />
              </form>
            </GridItem>
          )}
        </GridContainer>
      </div>
    </div>
  );
};

CreateComment.propTypes = {
  addComment: PropTypes.func.isRequired,
  getCurrentProfile: PropTypes.func.isRequired,
  profile: PropTypes.object.isRequired,
  postId: PropTypes.number
};

const mapStateToProps = (state) => ({
  profile: state.profile
});

export default connect(mapStateToProps, { getCurrentProfile, addComment })(CreateComment);
SELECT COUNT(*) FROM Articles WHERE Tags LIKE '%music%' OR Tags LIKE '%gaming%'
package com.profiling.ui;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import com.profiling.model.StackTraceNode;

/**
 * Writes a standalone HTML report ("tree-stack.html") rendering the captured
 * application stack as a collapsible tree. The expand/collapse behavior is
 * implemented purely in CSS via hidden checkboxes (no JavaScript).
 *
 * All state is static; {@link #generateReport(String)} is the entry point.
 */
public class TreeReportBuilder {

	private static final String TITLE = "Application Stack";
	private static final String HTML_FILENAME = "tree-stack.html";
	// Private singleton used only to invoke the instance helpers below.
	private static final TreeReportBuilder htmlFacto = new TreeReportBuilder();
	private static String pathDir;
	private static FileOutputStream writer;

	private TreeReportBuilder() {
	}

	/**
	 * Generates the HTML report under the given output directory.
	 * NOTE(review): pathDir is concatenated directly with the file name, so
	 * outputDir is assumed to end with a path separator — confirm at call site.
	 *
	 * @param outputDir directory prefix for the generated file
	 * @throws IOException if the report file cannot be written
	 */
	public static void generateReport(String outputDir) throws IOException {
		pathDir = outputDir;
		generateHtml();
	}

	/** Writes the full HTML document (header + body) to disk. */
	public static void generateHtml() throws IOException {
		// Init writer with path directory and file name.
		writer = new FileOutputStream(new File(pathDir + HTML_FILENAME));
		try {
			writer.write("<!DOCTYPE html>".getBytes());
			writer.write("<html>".getBytes());
			htmlFacto.generateHtmlHeader();
			htmlFacto.generateHtmlBody();
			writer.write("</html>".getBytes());
		} finally {
			// BUG FIX: previously the stream leaked if any write threw.
			writer.close();
		}
	}

	/** Emits the &lt;head&gt; section: title plus the CSS tree behavior/styling. */
	private void generateHtmlHeader() throws IOException {
		// PERF: the original built this with repeated String '+=' (O(n^2));
		// StringBuilder keeps the emitted bytes identical.
		StringBuilder result = new StringBuilder();
		// Init Header tags.
		result.append("<head>");
		result.append("<title>").append(TITLE).append("</title>");
		result.append("<style type=\"text/css\">");
		// CSS functional: checkbox-driven expand/collapse of nested <ul> lists.
		result.append("input {display: none;}");
		result.append("input ~ ul {display: none;}");
		result.append("input:checked ~ ul {display: block;}");
		result.append("input ~ .minus {display: none;}");
		result.append("input:checked ~ .plus {display: none;}");
		result.append("input:checked ~ .minus {display: inline;}");
		// CSS decorator.
		result.append("li {");
		result.append("display: block;");
		result.append("font-family: 'Arial';");
		result.append("font-size: 15px;");
		result.append("padding: 0.2em;");
		result.append("border: 1px solid transparent;");
		result.append("}");
		result.append("li:hover {");
		result.append("border: 1px solid grey;");
		result.append("border-radius: 3px;");
		result.append("background-color: lightgrey;");
		result.append("}");
		result.append("icon {");
		result.append("font-weight: bold;");
		result.append("}");
		result.append("line {");
		result.append("color: red;");
		result.append("font-style: italic;");
		result.append("}");
		result.append("count {");
		result.append("color: green;");
		result.append("font-style: italic;");
		result.append("}");
		result.append("time {");
		result.append("color: blue;");
		result.append("font-style: italic;");
		result.append("}");
		result.append("package {");
		result.append("display: none;");
		result.append("}");
		result.append(".totaltime {");
		result.append("color: blue;");
		result.append("font-weight: bold;");
		result.append("}");
		// Close opened tags.
		result.append("</style>");
		result.append("</head>");
		// Write in file.
		writer.write(result.toString().getBytes());
	}

	/**
	 * Emits the &lt;body&gt;: the stack tree rendered by StackTraceNode.ROOT.
	 * "<init>"/"<clinit>" are rewritten to "{init}"/"{clinit}" so they are not
	 * swallowed as HTML tags.
	 */
	private void generateHtmlBody() throws IOException {
		StringBuilder result = new StringBuilder("<body>");
		result.append(StackTraceNode.ROOT.toHtml().replaceAll("<init>", "{init}").replaceAll("<clinit>", "{clinit}"));
		result.append("</body>");
		// Write in file.
		writer.write(result.toString().getBytes());
	}
}
import {createStore, combineReducers, applyMiddleware} from 'redux' import thunkMiddleware from 'redux-thunk' import {composeWithDevTools} from 'redux-devtools-extension' import {default as userState} from './user' import {default as transState} from './transactions' const reducer = combineReducers({userState, transState}) const middleware = composeWithDevTools( applyMiddleware(thunkMiddleware) ) const store = createStore(reducer, middleware) export default store export * from './user'
// Define the Reddit post class class RedditPost { var media: Media var mediaEmbed: MediaEmbed var userReports: [Any] var secureMedia: Any? var reportReasons: [Any] var modReports: [Any] var secureMediaEmbed: Any? // Initialize the properties from a JSON dictionary init(json: [String: Any]) { media = Media(json: json["media"] as? [String: Any] ?? [:]) mediaEmbed = MediaEmbed(json: json["media_embed"] as? [String: Any] ?? [:]) userReports = [] secureMedia = nil reportReasons = [] modReports = [] secureMediaEmbed = nil } }
#!/bin/bash export ANDROID_NDK=/home/usr/android-ndk-r16b cmake -DCMAKE_TOOLCHAIN_FILE=$ANDROID_NDK/build/cmake/android.toolchain.cmake \ -DANDROID_ABI="arm64-v8a" \ -DANDROID_PLATFORM=android-21 \ -DANDROID_STL=c++_shared \ -DTENGINE_DIR=/home/usr/tengine \ -DOpenCV_DIR=/home/usr/opencv/sdk/native/jni \ -DPROTOBUF_DIR=/home/usr/protobuf_lib \ ..
from django import forms
from django.core.exceptions import ValidationError
from cyder.models import Ctnr
from cyder.cydns.address_record.models import AddressRecord
from cyder.cydns.view.models import View
from cyder.cydns.forms import DNSForm
from cyder.cydns.nameserver.models import Nameserver
from cyder.cydhcp.interface.static_intr.models import StaticInterface
from cyder.base.mixins import UsabilityFormMixin


class NameserverForm(DNSForm, UsabilityFormMixin):
    # Create/edit form for NS records. For delegated zones it also creates the
    # glue A/AAAA record (from glue_ip_str + glue_ctnr) in clean(), and rolls
    # that glue back in save() if the NS save fails.
    glue_ip_str = forms.CharField(label="Glue's IP Address", required=False)
    glue_ctnr = forms.ModelChoiceField(
        queryset=Ctnr.objects.all(),
        required=False, label="Glue's Container")

    class Meta:
        model = Nameserver
        fields = ('domain', 'server', 'views', 'ttl', 'glue_ip_str',
                  'glue_ctnr', 'description')
        exclude = ('addr_glue', 'intr_glue')
        widgets = {'views': forms.CheckboxSelectMultiple}

    def __init__(self, *args, **kwargs):
        # For an existing NS that already has glue, replace the free-form glue
        # fields with a ChoiceField listing every candidate glue record
        # (address records and static interfaces sharing the glue's label and
        # domain), pre-selected to the current one.
        super(NameserverForm, self).__init__(*args, **kwargs)
        if not self.instance:
            return
        if not self.instance.glue:
            # If it doesn't have glue, it doesn't need it.
            return
        addr_glue = AddressRecord.objects.filter(
            label=self.instance.glue.label,
            domain=self.instance.glue.domain)
        intr_glue = StaticInterface.objects.filter(
            label=self.instance.glue.label,
            domain=self.instance.glue.domain)
        glue_choices = []
        # Choice values are prefixed "addr_"/"intr_" so the record type can be
        # recovered from the selected value later.
        for glue in addr_glue:
            glue_choices.append(("addr_{0}".format(glue.pk), str(glue)))
        for glue in intr_glue:
            glue_choices.append(("intr_{0}".format(glue.pk), str(glue)))
        if isinstance(self.instance.glue, AddressRecord):
            initial = "addr_{0}".format(self.instance.glue.pk)
        elif isinstance(self.instance.glue, StaticInterface):
            initial = "intr_{0}".format(self.instance.glue.pk)
        self.fields['glue'] = forms.ChoiceField(choices=glue_choices,
                                                initial=initial)

    def clean(self, *args, **kwargs):
        # Glue validation only applies when creating a new NS (pk is None):
        # - server must be fully qualified;
        # - a delegated zone whose server lives inside the zone requires glue
        #   (created here as an AddressRecord visible in all views);
        # - otherwise the glue fields must be left blank.
        self.glue = None
        if self.instance.pk is None:
            domain = self.cleaned_data['domain']
            glue_ip_str, glue_ctnr = (self.cleaned_data['glue_ip_str'],
                                      self.cleaned_data['glue_ctnr'])
            server = self.cleaned_data['server'].strip('.')
            if '.' in server:
                if server.lower() != domain.name.lower():
                    # server is host.sub.domain — split into label + domain.
                    glue_label, glue_domain = tuple(server.split('.', 1))
                    glue_domain = glue_domain.strip('.').lower()
                else:
                    glue_label, glue_domain = "", server.lower()
            else:
                raise ValidationError(
                    "Please provide a fully qualified server name.")
            if domain.delegated:
                if glue_domain and glue_domain != domain.name.lower():
                    # Server is outside the delegated zone: no glue allowed.
                    if glue_ip_str or glue_ctnr:
                        raise ValidationError(
                            "This record does not need glue, so "
                            "please leave the glue fields blank.")
                else:
                    # Server is inside the delegated zone: glue is mandatory.
                    if not (glue_ip_str and glue_ctnr):
                        raise ValidationError(
                            "This zone is delegated, so "
                            "please provide information for glue.")
                    gluekwargs = {'domain': domain, 'label': glue_label,
                                  'ip_str': glue_ip_str, 'ctnr': glue_ctnr}
                    try:
                        self.glue = AddressRecord.objects.get(**gluekwargs)
                    except AddressRecord.DoesNotExist:
                        # Create the glue record and expose it in every view.
                        self.glue = AddressRecord(**gluekwargs)
                        self.glue.set_is_glue()
                        self.glue.save()
                        for v in View.objects.all():
                            self.glue.views.add(v)
            elif not domain.delegated and (glue_ip_str or glue_ctnr):
                raise ValidationError("This zone is not delegated, so please "
                                      "leave the glue fields blank.")
        cleaned_data = super(NameserverForm, self).clean(*args, **kwargs)
        return cleaned_data

    def save(self, *args, **kwargs):
        try:
            super(NameserverForm, self).save(*args, **kwargs)
        except Exception, e:
            # Roll back a glue record created in clean() if the NS itself
            # failed to save, then surface the failure as a form error.
            if self.glue and self.glue.pk is not None:
                self.glue.delete(validate_glue=False)
            raise ValidationError(e)


class NSDelegated(forms.Form):
    # Minimal form for delegating a zone: target server and its IP address.
    server = forms.CharField()
    server_ip_address = forms.CharField()
<filename>examples/timeout/main.go package main import ( "fmt" "gitlab.com/jonas.jasas/condchan" "sync" "time" ) func main() { fmt.Println("Timeout example") cc := condchan.New(&sync.Mutex{}) timeoutChan := time.After(time.Second) cc.L.Lock() // Passing func that gets channel c that signals when // Signal or Broadcast is called on CondChan cc.Select(func(c <-chan struct{}) { // Waiting with select select { case <-c: // Never ending wait case <-timeoutChan: fmt.Println("Hooray! Just escaped from eternal wait.") } }) cc.L.Unlock() }
#include "PluginProcessor.h"

// One full turn in radians, used to wrap oscillator phases.
static constexpr float twopi = 6.2831853f;

// Subscribe to state/parameter changes so the audio thread knows when to
// recalculate its derived values (see update() and processBlock()).
MDATestToneAudioProcessor::MDATestToneAudioProcessor()
    : AudioProcessor(BusesProperties()
        .withInput ("Input", juce::AudioChannelSet::stereo(), true)
        .withOutput("Output", juce::AudioChannelSet::stereo(), true))
{
    apvts.state.addListener(this);
    apvts.addParameterListener("Mode", this);
    apvts.addParameterListener("0dB =", this);
}

MDATestToneAudioProcessor::~MDATestToneAudioProcessor()
{
    apvts.removeParameterListener("Mode", this);
    apvts.removeParameterListener("0dB =", this);
    apvts.state.removeListener(this);
}

// Display name, supplied by the JUCE project configuration.
const juce::String MDATestToneAudioProcessor::getName() const
{
    return JucePlugin_Name;
}

// This plug-in has no preset programs; expose a single dummy program, which
// is the JUCE convention for "no programs".
int MDATestToneAudioProcessor::getNumPrograms()
{
    return 1;
}

int MDATestToneAudioProcessor::getCurrentProgram()
{
    return 0;
}

void MDATestToneAudioProcessor::setCurrentProgram(int index)
{
}

const juce::String MDATestToneAudioProcessor::getProgramName(int index)
{
    return {};
}

void MDATestToneAudioProcessor::changeProgramName(int index, const juce::String &newName)
{
}

// Called before playback starts: clear the DSP state and force a parameter
// recalculation on the first processBlock().
void MDATestToneAudioProcessor::prepareToPlay(double sampleRate, int samplesPerBlock)
{
    resetState();
    _parametersChanged.store(true);
}

void MDATestToneAudioProcessor::releaseResources()
{
}

void MDATestToneAudioProcessor::reset()
{
    resetState();
}

// The tone generator always renders to a stereo output pair.
bool MDATestToneAudioProcessor::isBusesLayoutSupported(const BusesLayout &layouts) const
{
    return layouts.getMainOutputChannelSet() == juce::AudioChannelSet::stereo();
}

void MDATestToneAudioProcessor::resetState()
{
    // Reset the filter delays and the oscillator phase.
    _z0 = _z1 = _z2 = _z3 = _z4 = _z5 = _phase = 0.0f;
}

// Recalculate every derived value (channel gains, phase increments, sweep
// settings) from the current parameter values. Runs on the audio thread,
// but only when a parameter actually changed (see processBlock()).
void MDATestToneAudioProcessor::update()
{
    _mode = int(apvts.getRawParameterValue("Mode")->load());

    // Level is -60 db to 0 dB.
    float fParam1 = apvts.getRawParameterValue("Level")->load();
    _left = juce::Decibels::decibelsToGain(fParam1);

    // For white noise and pink noise, the random generator outputs numbers
    // between ±16384, so scale these back to the range 0 - 1 (more or less).
    if (_mode == 2) _left *= 0.0000610f;  // white noise
    if (_mode == 3) _left *= 0.0000243f;  // pink noise

    // Using left channel, right channel, or both?
    float fParam2 = apvts.getRawParameterValue("Channel")->load();
    if (fParam2 < 0.3f) _right = 0.0f; else _right = _left;
    if (fParam2 > 0.6f) _left = 0.0f;

    // The sweep time parameter is in milliseconds. Convert to number of samples.
    float fParam6 = apvts.getRawParameterValue("Sweep")->load();
    _durationInSeconds = fParam6 / 1000.0f;
    _durationInSamples = int(_durationInSeconds * getSampleRate());
    _sweepRemaining = _durationInSamples;

    // Calibration: This code looks complicated but it just creates a slider that
    // goes from -20 dB to 0 dB; there's also a section between -1 dB and 0 dB
    // that goes in smaller steps.
    float fParam7 = apvts.getRawParameterValue("0dB =")->load();
    if (fParam7 > 0.8f) {
        // -1 dB to 0 dB
        float cal;
        if (fParam7 > 0.96f) cal = 0.0f;
        else if (fParam7 > 0.92f) cal = -0.01000001f;
        else if (fParam7 > 0.88f) cal = -0.02000001f;
        else if (fParam7 > 0.84f) cal = -0.1f;
        else cal = -0.2f;

        // Convert decibels to a linear gain. Use this to trim the output level.
        cal = std::pow(10.0f, 0.05f * cal);
        _left *= cal;
        _right *= cal;
    } else {
        // Here, the slider is between -21 dB to -1 dB. We only use this to change
        // how the output level is displayed. There's nothing to calculate here.
    }

    // The meaning of F1 and F2 depends on the mode the user has chosen.
    float fParam3 = apvts.getRawParameterValue("F1")->load();
    float fParam4 = apvts.getRawParameterValue("F2")->load();

    // F2 is sometimes used to "fine tune" the value of F1. In that case, the F2
    // slider goes from -0.5 to +0.5. However, this is not entirely linear: there
    // is an area around the center (between 40% and 60%) that's always 0. Maybe
    // that was done to make it easier to set this parameter to zero.
    float df = 0.0f;
    if (fParam4 > 0.6) df = 1.25f*fParam4 - 0.75f;  // 0 to 0.5
    if (fParam4 < 0.4) df = 1.25f*fParam4 - 0.50f;  // -0.5 to 0

    // To get radians, multiply the frequency in Hz by 2 pi / sampleRate.
    _fscale = twopi / getSampleRate();

    switch (_mode) {
        // MIDI note:
        //   F1 chooses the MIDI note number
        //   F2 is used to tune the note up or down by ±50 cents
        case 0: {
            float f = std::floor(128.0f * fParam3);

            // The usual formula for converting a MIDI note number into a frequency
            // is 440 * 2^((note - 69) / 12). Then to find the phase increment, we
            // multiply by 2 pi and divide by the sample rate. That works out to be
            // exactly the same as the formula used here:
            _phaseInc = 51.37006f * std::pow(1.0594631f, f + df) / getSampleRate();
            break;
        }

        // Sine wave:
        //   F1 chooses the frequency on a logaritmic scale
        //   F2 fine-tunes that frequency by 2 semitones up or down
        case 5: {
            // Convert F1 from 0 - 1 into the range 13 - 43 (integers only), and then
            // turn it in to a frequency between 20 and 20000 Hz. Why these strange
            // numbers? This is from the ISO 266 standard for preferred frequencies
            // for acoustical measurements. The formula is 1000 * 10^(n / 10) where n
            // is an integer. For n = -17, the freq is 20 Hz; for n = 13, the freq is
            // 20 kHz. Here, that formula was rewritten so that param = 0 gives 20 Hz
            // and param = 1 gives 20 kHz. The new formula is 1000 * 10^((f - 30)/10)
            // where f is now between 13 and 43. The factor 1000 cancels out against
            // the -30/10 in the exponent, which leaves 10^(f / 10).
            float f = 13.0f + std::floor(30.0f * fParam3);
            f = std::pow(10.0f, 0.1f * (f + df));
            _phaseInc = _fscale * f;
            break;
        }

        // Log sweep & step:
        //   F1 is the first frequency
        //   F2 is the second frequency
        case 6:
        case 7:
            // Convert F1 and F2 from 0 - 1 into the range 13 - 43 (integers only),
            // but don't turn these numbers into real frequencies yet. In the audio
            // callback, we'll turn this into a frequency using pow(10, 0.1 * value).
            // That makes these variables log-frequencies, i.e. they are the base-10
            // logarithms of the actual frequencies, where 13 corresponds to 20 Hz
            // and 43 to 20 kHz. We do this to keep these values in log-space so that
            // interpolating from start to finish is done in logarithmic steps.
            _sweepStart = 13.0f + std::floor(30.0f * fParam3);
            _sweepEnd = 13.0f + std::floor(30.0f * fParam4);

            // Only sweep up. Swap the frequencies if start is higher than end.
            // (It shouldn't be very hard to change the plug-in so it allows sweeps
            // from high to low frequencies as well.)
            if (_sweepStart > _sweepEnd) {
                float tmp = _sweepEnd;
                _sweepEnd = _sweepStart;
                _sweepStart = tmp;
            }

            // In step mode, make sure to include the end frequency as the final step.
            if (_mode == 7) _sweepEnd += 1.0f;

            // How much do we need to increment the log-frequency on every sample?
            _sweepInc = (_sweepEnd - _sweepStart) / (_durationInSeconds * getSampleRate());

            _sweepFreq = _sweepStart;

            // Put two seconds of silence between sweeps.
            _sweepRemaining = _durationInSamples = 2 * int(getSampleRate());
            break;

        // Linear sweep:
        //   F1 is the first frequency
        //   F2 is the second frequency
        case 8:
            // Turn F1 and F2 directly into a frequency between 0 Hz and 20000 Hz.
            _sweepStart = 200.0f * std::floor(100.0f * fParam3);
            _sweepEnd = 200.0f * std::floor(100.0f * fParam4);

            // Only sweep up. Swap the frequencies if start is higher than end.
            if (_sweepStart > _sweepEnd) {
                float tmp = _sweepEnd;
                _sweepEnd = _sweepStart;
                _sweepStart = tmp;
            }

            // Convert the start / end frequencies to an angle in radians.
            _sweepStart = _fscale * _sweepStart;
            _sweepEnd = _fscale * _sweepEnd;

            _sweepFreq = _sweepStart;

            // We increment the angle by this step size on every sample. This will
            // linearly move the frequency of the tone from the start angle to the
            // end angle over the total duration of the sweep.
            _sweepInc = (_sweepEnd - _sweepStart) / (_durationInSeconds * getSampleRate());

            // Put two seconds of silence between sweeps.
            _sweepRemaining = _durationInSamples = 2 * int(getSampleRate());
            break;
    }

    // Audio thru determines the loudness of input audio in the mix. This is a
    // decibel value between -40 dB and 0 dB, so convert it to linear gain.
    float fParam5 = apvts.getRawParameterValue("Thru")->load();
    if (fParam5 == 0.0f) {
        _thru = 0.0f;  // no audio thru
    } else {
        _thru = juce::Decibels::decibelsToGain(fParam5);
    }
}

// Render one block of audio: generate the selected test signal sample by
// sample and mix it with the (optionally attenuated) input signal.
void MDATestToneAudioProcessor::processBlock(juce::AudioBuffer<float> &buffer, juce::MidiBuffer &midiMessages)
{
    juce::ScopedNoDenormals noDenormals;

    auto totalNumInputChannels = getTotalNumInputChannels();
    auto totalNumOutputChannels = getTotalNumOutputChannels();

    // Clear any output channels that don't contain input data.
    for (auto i = totalNumInputChannels; i < totalNumOutputChannels; ++i) {
        buffer.clear(i, 0, buffer.getNumSamples());
    }

    // Only recalculate when a parameter has changed.
    // The original plug-in used two counters for this, updateRx and updateTx.
    // Whenever a parameter changed, updateTx was incremented. At the start of
    // the audio processing code, it would call update() if updateRx != updateTx.
    // Inside update(), updateRx would be set equal to updateTx again. (The way
    // we do it is a bit more thread-safe.)
    bool expected = true;
    if (_parametersChanged.compare_exchange_strong(expected, false)) {
        update();
    }

    const float *in1 = buffer.getReadPointer(0);
    const float *in2 = buffer.getReadPointer(1);
    float *out1 = buffer.getWritePointer(0);
    float *out2 = buffer.getWritePointer(1);

    // Copy member state into locals for the duration of the loop; the mutable
    // ones are written back to the members afterwards.
    const int mode = _mode;
    const float thru = _thru;
    const float left = _left;
    const float right = _right;
    const float fscale = _fscale;
    const float sweepStart = _sweepStart;
    const float sweepEnd = _sweepEnd;
    const float step = _sweepInc;
    const int sweepDuration = _durationInSamples;

    int samplesRemaining = _sweepRemaining;
    float freq = _sweepFreq;
    float phase = _phase;
    float phaseInc = _phaseInc;
    float z0 = _z0, z1 = _z1, z2 = _z2, z3 = _z3, z4 = _z4, z5 = _z5;

    for (int i = 0; i < buffer.getNumSamples(); ++i) {
        float a = in1[i];
        float b = in2[i];
        float x = 0.0f;

        switch (mode) {
            // Impulse
            case 1:
                // This simply counts down. When the timer is zero, output a short
                // pulse and reset the timer. Otherwise, always output silence.
                if (samplesRemaining > 0) {
                    samplesRemaining--;
                    x = 0.0f;
                } else {
                    samplesRemaining = sweepDuration;
                    x = 1.0f;
                }
                break;

            // White noise and pink noise
            case 2:
            case 3:
                // The original plug-in assumed RAND_MAX is 32767, but that's usually
                // wrong now. It would be better to use juce::Random for this.
                x = float(std::rand()) / RAND_MAX * 32767 - 16384;

                // Filter the white noise to get pink noise. (Parallel bank of six
                // one-pole lowpass filters whose outputs are summed.)
                if (mode == 3) {
                    z0 = 0.997f * z0 + 0.029591f * x;
                    z1 = 0.985f * z1 + 0.032534f * x;
                    z2 = 0.950f * z2 + 0.048056f * x;
                    z3 = 0.850f * z3 + 0.090579f * x;
                    z4 = 0.620f * z4 + 0.108990f * x;
                    z5 = 0.250f * z5 + 0.255784f * x;
                    x = z0 + z1 + z2 + z3 + z4 + z5;
                }
                break;

            // Mute
            case 4:
                x = 0.0f;
                break;

            // Tones, for MIDI note / sine wave mode
            case 0:
            case 5:
                // Just a simple sine wave oscillator. Increment the phase using the
                // phase increment and wrap around when it reaches 2 pi.
                phase = std::fmod(phase + phaseInc, twopi);
                x = std::sin(phase);
                break;

            // Log sweep & step
            case 6:
            case 7:
                // Before the sweep starts, count down and output zeros. This puts two
                // seconds of silence between sweeps.
                if (samplesRemaining > 0) {
                    samplesRemaining--;
                    phase = 0.0f;
                    x = 0.0f;
                } else {
                    // Output sine value for the current frequency.
                    x = std::sin(phase);

                    // Increment the frequency in log-space.
                    freq += step;

                    // Turn the log-frequency into an actual frequency in radians.
                    if (mode == 7) {
                        // In step mode, round off the frequency value. This steps upwards
                        // by roughly 4 semitones (a major third interval).
                        phaseInc = fscale * std::pow(10.0f, 0.1f * float(int(freq)));
                    } else {
                        phaseInc = fscale * std::pow(10.0f, 0.1f * freq);
                    }

                    // Use the frequency in radians to increment the oscillator phase.
                    phase = std::fmod(phase + phaseInc, twopi);

                    // If we've reached the end frequency, restart the sweep.
                    if (freq > sweepEnd) {
                        samplesRemaining = sweepDuration;
                        freq = sweepStart;
                    }
                }
                break;

            // Linear sweep
            case 8:
                // Before the sweep starts, count down and output zeros. This puts two
                // seconds of silence between sweeps.
                if (samplesRemaining > 0) {
                    samplesRemaining--;
                    phase = 0.0f;
                    x = 0.0f;
                } else {
                    // Output sine value for the current frequency.
                    x = std::sin(phase);

                    // Increment the phase linearly and wrap around at 2 pi.
                    freq += step;
                    phase = std::fmod(phase + freq, twopi);

                    // If we've reached the end frequency, restart the sweep.
                    if (freq > sweepEnd) {
                        samplesRemaining = sweepDuration;
                        freq = sweepStart;
                    }
                }
                break;
        }

        // Mix the incoming audio signal with the sound we produced.
        out1[i] = thru*a + left*x;
        out2[i] = thru*b + right*x;
    }

    // Persist the mutable state for the next block.
    _phase = phase;
    _sweepFreq = freq;
    _sweepRemaining = samplesRemaining;
    _z0 = z0; _z1 = z1; _z2 = z2; _z3 = z3; _z4 = z4; _z5 = z5;
}

// No custom UI; let JUCE build a generic editor from the parameter layout.
juce::AudioProcessorEditor *MDATestToneAudioProcessor::createEditor()
{
    return new juce::GenericAudioProcessorEditor(*this);
}

// Serialize the parameter tree as XML into the host's preset/session data.
void MDATestToneAudioProcessor::getStateInformation(juce::MemoryBlock &destData)
{
    copyXmlToBinary(*apvts.copyState().createXml(), destData);
}

// Restore the parameter tree from host data and schedule a recalculation.
void MDATestToneAudioProcessor::setStateInformation(const void *data, int sizeInBytes)
{
    std::unique_ptr<juce::XmlElement> xml(getXmlFromBinary(data, sizeInBytes));
    if (xml.get() != nullptr && xml->hasTagName(apvts.state.getType())) {
        apvts.replaceState(juce::ValueTree::fromXml(*xml));
        _parametersChanged.store(true);
    }
}

// Format a MIDI note number as "<number> <note name><octave>", e.g. "60 C3".
juce::String MDATestToneAudioProcessor::midi2string(float n)
{
    // It's a lot easier to build up the string with juce::String but this
    // is how the original plug-in did it.
    // (t[8] is just big enough: a minus sign only occurs for 1- or 2-digit
    // note numbers, so the worst cases are "109 C#7\0" and "13 C#-1\0".)
    char t[8];
    int p = 0;

    // Convert the MIDI note number to text. This has up to 3 digits.
    int nn = int(n);
    if (nn > 99) t[p++] = 48 + (int(0.01 * n) % 10);
    if (nn > 9) t[p++] = 48 + (int(0.10 * n) % 10);
    t[p++] = 48 + (int(n) % 10);
    t[p++] = ' ';

    // Find the octave and the note inside the octave.
    int o = int(nn / 12.0f);
    int s = nn - (12 * o);

    // Octaves go from -2 to +8.
    o -= 2;

    // Convert to a note name.
    switch (s) {
        case 0: t[p++] = 'C'; break;
        case 1: t[p++] = 'C'; t[p++] = '#'; break;
        case 2: t[p++] = 'D'; break;
        case 3: t[p++] = 'D'; t[p++] = '#'; break;
        case 4: t[p++] = 'E'; break;
        case 5: t[p++] = 'F'; break;
        case 6: t[p++] = 'F'; t[p++] = '#'; break;
        case 7: t[p++] = 'G'; break;
        case 8: t[p++] = 'G'; t[p++] = '#'; break;
        case 9: t[p++] = 'A'; break;
        case 10: t[p++] = 'A'; t[p++] = '#'; break;
        default: t[p++] = 'B';
    }

    // Convert the octave number to text.
    if (o < 0) {
        t[p++] = '-';
        o = -o;
    }
    t[p++] = 48 + (o % 10);

    // Null-terminate the string.
    t[p] = 0;
    return t;
}

// Map an ISO 266 band index (13 - 43) to its display frequency.
juce::String MDATestToneAudioProcessor::iso2string(float b)
{
    switch (int(b)) {
        case 13: return "20 Hz";
        case 14: return "25 Hz";
        case 15: return "31 Hz";
        case 16: return "40 Hz";
        case 17: return "50 Hz";
        case 18: return "63 Hz";
        case 19: return "80 Hz";
        case 20: return "100 Hz";
        case 21: return "125 Hz";
        case 22: return "160 Hz";
        case 23: return "200 Hz";
        case 24: return "250 Hz";
        case 25: return "310 Hz";
        case 26: return "400 Hz";
        case 27: return "500 Hz";
        case 28: return "630 Hz";
        case 29: return "800 Hz";
        case 30: return "1 kHz";
        case 31: return "1.25 kHz";
        case 32: return "1.6 kHz";
        case 33: return "2.0 kHz";
        case 34: return "2.5 kHz";
        case 35: return "3.1 kHz";
        case 36: return "4 kHz";
        case 37: return "5 kHz";
        case 38: return "6.3 kHz";
        case 39: return "8 kHz";
        case 40: return "10 kHz";
        case 41: return "12.5 kHz";
        case 42: return "16 kHz";
        case 43: return "20 kHz";
        default: return "--";
    }
}

// Display text for the F1 slider; its meaning depends on the current mode.
juce::String MDATestToneAudioProcessor::stringFromValueF1(float value)
{
    const int mode = int(apvts.getRawParameterValue("Mode")->load());
    switch (mode) {
        // MIDI note
        case 0:
            return midi2string(std::floor(128.0f * value));  // semitones

        // sine wave, iso band freq
        case 5:
            return iso2string(13.0f + std::floor(30.0f * value));

        // log sweep & step start freq
        case 6:
        case 7:
            return iso2string(13.0f + std::floor(30.0f * value));

        // linear sweep start freq
        case 8:
            return juce::String(int(200.0f * std::floor(100.0f * value)));

        // no frequency display
        default:
            return "--";
    }
}

// Display text for the F2 slider; its meaning depends on the current mode.
juce::String MDATestToneAudioProcessor::stringFromValueF2(float value)
{
    const int mode = int(apvts.getRawParameterValue("Mode")->load());

    // Same detune mapping as in update(): the middle 40% - 60% of the slider
    // is a dead zone that always gives zero.
    float df = 0.0f;
    if (value > 0.6) df = 1.25f*value - 0.75f;  // 0 to 0.5
    if (value < 0.4) df = 1.25f*value - 0.50f;  // -0.5 to 0

    switch (mode) {
        // MIDI note
        case 0:
            return juce::String(int(100.0f * df));  // cents

        // sine wave, Hz
        case 5: {
            float f1Value = apvts.getRawParameterValue("F1")->load();
            float f = 13.0f + std::floor(30.0f * f1Value);
            f = std::pow(10.0f, 0.1f * (f + df));
            return juce::String(f, 2);
        }

        // log sweep & step end freq
        case 6:
        case 7:
            return iso2string(13.0f + std::floor(30.0f * value));

        // linear sweep end freq
        case 8:
            return juce::String(int(200.0f * std::floor(100.0f * value)));

        // no frequency display
        default:
            return "--";
    }
}

// Display text for the output level slider, shifted by the calibration value.
juce::String MDATestToneAudioProcessor::stringFromValueOutputLevel(float value)
{
    // If value of the "0dB =" slider is -1 dB or less, it isn't actually used
    // for any calculations -- it just changes the value displayed on the output
    // level slider.
    float fParam7 = apvts.getRawParameterValue("0dB =")->load();
    float calx;
    if (fParam7 > 0.8f) {
        // Don't calibrate the output level.
        calx = 0.0f;
    } else {
        // Convert the parameter to decibels: -21 dB to -1 dB.
        calx = int(25.0f * fParam7 - 21.1f);
    }
    return juce::String(value - calx, 2);
}

// Display text for the "0dB =" calibration slider.
juce::String MDATestToneAudioProcessor::stringFromValueCalibration(float value)
{
    // Note that there's some duplicate code in these stringFromValue functions
    // that also exists in update(). Could refactor this and put the calculations
    // in a single place, but for a small plug-in like this, I'm not bothering.
    float cal = 0.0f;

    // From -1 dB to 0 dB the slider uses smaller steps.
    if (value > 0.8f) {
        if (value > 0.96f) cal = 0.0f;
        else if (value > 0.92f) cal = -0.01000001f;
        else if (value > 0.88f) cal = -0.02000001f;
        else if (value > 0.84f) cal = -0.1f;
        else cal = -0.2f;
    } else {
        // The rest of the slider goes from -21 dB to -1 dB in steps of 1 dB.
        cal = int(25.0f * value - 21.1f);
    }
    return juce::String(cal, 2);
}

// Declare all plug-in parameters and their value-to-text formatters.
juce::AudioProcessorValueTreeState::ParameterLayout MDATestToneAudioProcessor::createParameterLayout()
{
    juce::AudioProcessorValueTreeState::ParameterLayout layout;

    // The UI for this plug-in takes a little getting used to. Not all parameters
    // are relevant to all modes, and some parameters change meaning based on mode.

    layout.add(std::make_unique<juce::AudioParameterChoice>(
        "Mode", "Mode",
        juce::StringArray({ "MIDI #", "IMPULSE", "WHITE", "PINK", "---", "SINE", "LOG SWP.", "LOG STEP", "LIN SWP." }),
        4));

    layout.add(std::make_unique<juce::AudioParameterFloat>(
        "Level", "Level",
        juce::NormalisableRange<float>(-60.0f, 0.0f, 0.01f), -16.0f,
        "dB",
        juce::AudioProcessorParameter::genericParameter,
        [this](float value, int) { return stringFromValueOutputLevel(value); }));

    layout.add(std::make_unique<juce::AudioParameterFloat>(
        "Channel", "Channel",
        juce::NormalisableRange<float>(), 0.5f,
        "L <> R",
        juce::AudioProcessorParameter::genericParameter,
        [](float value, int) { return value > 0.3f ? (value > 0.7f ? "RIGHT" : "CENTRE" ) : "LEFT"; }));

    layout.add(std::make_unique<juce::AudioParameterFloat>(
        "F1", "F1",
        juce::NormalisableRange<float>(), 0.57f,
        "",
        juce::AudioProcessorParameter::genericParameter,
        [this](float value, int) { return stringFromValueF1(value); }));

    layout.add(std::make_unique<juce::AudioParameterFloat>(
        "F2", "F2",
        juce::NormalisableRange<float>(), 0.50f,
        "",
        juce::AudioProcessorParameter::genericParameter,
        [this](float value, int) { return stringFromValueF2(value); }));

    layout.add(std::make_unique<juce::AudioParameterFloat>(
        "Sweep", "Sweep",
        juce::NormalisableRange<float>(1000.0f, 32000.0f, 500.0f), 10000.0f,
        "ms"));

    layout.add(std::make_unique<juce::AudioParameterFloat>(
        "Thru", "Thru",
        juce::NormalisableRange<float>(-40.0f, 0.0f, 0.01f), 0.0f,
        "dB",
        juce::AudioProcessorParameter::genericParameter,
        [](float value, int) { return value == 0.0f ? "OFF" : juce::String(value, 2); }));

    layout.add(std::make_unique<juce::AudioParameterFloat>(
        "0dB =", "0dB =",
        juce::NormalisableRange<float>(), 1.0f,
        "dBFS",
        juce::AudioProcessorParameter::genericParameter,
        [this](float value, int) { return stringFromValueCalibration(value); }));

    return layout;
}

// Entry point used by the JUCE plug-in wrappers to instantiate the processor.
juce::AudioProcessor *JUCE_CALLTYPE createPluginFilter()
{
    return new MDATestToneAudioProcessor();
}
module SVGAbstract
  # Accumulates SVG transformation commands and renders them as a single
  # space-separated value suitable for a `transform` attribute.
  class Transformation
    def initialize
      @transforms = []
    end

    # Render every recorded transformation, in insertion order.
    def to_s
      @transforms.join(' ')
    end

    # Generate one builder method per SVG transformation. Each method accepts
    # any number of arguments and appends a command such as "rotate ( 45 )".
    %i[rotate translate scale matrix skewX skewY].each do |name|
      define_method(name) do |*args|
        @transforms << "#{name} ( #{args.join(' ')} )"
      end
    end
  end
end
package com.tjhello.lib.billing.base.anno;

import androidx.annotation.StringDef;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * Compile-time "string enum" of billing product types. Annotate a String
 * parameter or field with {@code @ProductType} so lint restricts its value to
 * one of the three constants declared below.
 */
@Documented
@Retention(RetentionPolicy.CLASS)
@Target({ElementType.PARAMETER,ElementType.FIELD})
@StringDef(value = {ProductType.TYPE_INAPP_CONSUMABLE,ProductType.TYPE_INAPP_NON_CONSUMABLE,ProductType.TYPE_SUBS})
public @interface ProductType {
    // One-time in-app purchase that can be bought repeatedly.
    String TYPE_INAPP_CONSUMABLE = "inapp-consumable";
    // One-time in-app purchase that is owned permanently once bought.
    String TYPE_INAPP_NON_CONSUMABLE = "inapp-non-consumable";
    // Auto-renewing subscription.
    String TYPE_SUBS = "subs";
}
# Enroll the Org1 CA admin, register Org1's peers/users/admin with the CA, and
# enroll their MSP and TLS material under crypto-config-ca/peerOrganizations.
# Requires fabric-ca servers listening on localhost:7054/8054/9054 and their
# TLS root certs under ./fabric-ca/.
createcertificatesForOrg1() {
  echo
  echo "Enroll the CA admin"
  echo
  mkdir -p crypto-config-ca/peerOrganizations/org1.example.com/

  # fabric-ca-client reads/writes its identity material relative to this home.
  export FABRIC_CA_CLIENT_HOME=${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/

  fabric-ca-client enroll -u https://admin:adminpw@localhost:7054 --caname ca.org1.example.com --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  # NodeOU config: classify identities (client/peer/admin/orderer) by the OU
  # in their certificate. Copied next to every MSP generated below.
  echo 'NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/localhost-7054-ca-org1-example-com.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/localhost-7054-ca-org1-example-com.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/localhost-7054-ca-org1-example-com.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/localhost-7054-ca-org1-example-com.pem
    OrganizationalUnitIdentifier: orderer' >${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/msp/config.yaml

  echo
  echo "Register peer0"
  echo
  fabric-ca-client register --caname ca.org1.example.com --id.name peer0 --id.secret peer0pw --id.type peer --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  echo
  echo "Register peer1"
  echo
  fabric-ca-client register --caname ca.org1.example.com --id.name peer1 --id.secret peer1pw --id.type peer --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  echo
  echo "Register user"
  echo
  fabric-ca-client register --caname ca.org1.example.com --id.name user1 --id.secret user1pw --id.type client --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  echo
  echo "Register the org admin"
  echo
  fabric-ca-client register --caname ca.org1.example.com --id.name org1admin --id.secret org1adminpw --id.type admin --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  mkdir -p crypto-config-ca/peerOrganizations/org1.example.com/peers

  # -----------------------------------------------------------------------------------
  # Peer 0
  mkdir -p crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com

  echo
  echo "## Generate the peer0 msp"
  echo
  fabric-ca-client enroll -u https://peer0:peer0pw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/msp --csr.hosts peer0.org1.example.com --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/msp/config.yaml ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/msp/config.yaml

  echo
  echo "## Generate the peer0-tls certificates"
  echo
  fabric-ca-client enroll -u https://peer0:peer0pw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls --enrollment.profile tls --csr.hosts peer0.org1.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  # Rename the TLS artifacts to the fixed names Fabric expects.
  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/ca.crt
  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/signcerts/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/server.crt
  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/keystore/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/server.key

  # Org-level copies of the TLS CA chain used by the channel MSP definition.
  mkdir ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/msp/tlscacerts
  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/msp/tlscacerts/ca.crt

  mkdir ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/tlsca
  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/tlsca/tlsca.org1.example.com-cert.pem

  mkdir ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/ca
  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/msp/cacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/ca/ca.org1.example.com-cert.pem

  # ------------------------------------------------------------------------------------------------
  # Peer1
  mkdir -p crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com

  echo
  echo "## Generate the peer1 msp"
  echo
  fabric-ca-client enroll -u https://peer1:peer1pw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/msp --csr.hosts peer1.org1.example.com --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/msp/config.yaml ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/msp/config.yaml

  echo
  echo "## Generate the peer1-tls certificates"
  echo
  fabric-ca-client enroll -u https://peer1:peer1pw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/tls --enrollment.profile tls --csr.hosts peer1.org1.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/tls/ca.crt
  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/tls/signcerts/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/tls/server.crt
  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/tls/keystore/* ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/peers/peer1.org1.example.com/tls/server.key

  # --------------------------------------------------------------------------------------------------
  # Users
  mkdir -p crypto-config-ca/peerOrganizations/org1.example.com/users
  mkdir -p crypto-config-ca/peerOrganizations/org1.example.com/users/User1@org1.example.com

  echo
  echo "## Generate the user msp"
  echo
  fabric-ca-client enroll -u https://user1:user1pw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/users/User1@org1.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  mkdir -p crypto-config-ca/peerOrganizations/org1.example.com/users/Admin@org1.example.com

  echo
  echo "## Generate the org admin msp"
  echo
  fabric-ca-client enroll -u https://org1admin:org1adminpw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/msp/config.yaml ${PWD}/crypto-config-ca/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp/config.yaml
}

# createcertificatesForOrg1

# Same flow as Org1, against the Org2 CA on localhost:8054.
createCertificateForOrg2() {
  echo
  echo "Enroll the CA admin"
  echo
  # FIX: was "mkdir -p /crypto-config-ca/..." (absolute path at filesystem
  # root) — every other path in this script is relative to ${PWD}.
  mkdir -p crypto-config-ca/peerOrganizations/org2.example.com/

  export FABRIC_CA_CLIENT_HOME=${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/

  fabric-ca-client enroll -u https://admin:adminpw@localhost:8054 --caname ca.org2.example.com --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  echo 'NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/localhost-8054-ca-org2-example-com.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/localhost-8054-ca-org2-example-com.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/localhost-8054-ca-org2-example-com.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/localhost-8054-ca-org2-example-com.pem
    OrganizationalUnitIdentifier: orderer' >${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/msp/config.yaml

  echo
  echo "Register peer0"
  echo
  fabric-ca-client register --caname ca.org2.example.com --id.name peer0 --id.secret peer0pw --id.type peer --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  echo
  echo "Register peer1"
  echo
  fabric-ca-client register --caname ca.org2.example.com --id.name peer1 --id.secret peer1pw --id.type peer --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  echo
  echo "Register user"
  echo
  fabric-ca-client register --caname ca.org2.example.com --id.name user1 --id.secret user1pw --id.type client --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  echo
  echo "Register the org admin"
  echo
  fabric-ca-client register --caname ca.org2.example.com --id.name org2admin --id.secret org2adminpw --id.type admin --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  mkdir -p crypto-config-ca/peerOrganizations/org2.example.com/peers
  mkdir -p crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com

  # --------------------------------------------------------------
  # Peer 0
  echo
  echo "## Generate the peer0 msp"
  echo
  fabric-ca-client enroll -u https://peer0:peer0pw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/msp --csr.hosts peer0.org2.example.com --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/msp/config.yaml ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/msp/config.yaml

  echo
  echo "## Generate the peer0-tls certificates"
  echo
  fabric-ca-client enroll -u https://peer0:peer0pw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls --enrollment.profile tls --csr.hosts peer0.org2.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/ca.crt
  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/signcerts/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/server.crt
  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/keystore/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/server.key

  mkdir ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/msp/tlscacerts
  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/msp/tlscacerts/ca.crt

  mkdir ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/tlsca
  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/tlsca/tlsca.org2.example.com-cert.pem

  mkdir ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/ca
  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/msp/cacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/ca/ca.org2.example.com-cert.pem

  # --------------------------------------------------------------------------------
  # Peer 1
  echo
  echo "## Generate the peer1 msp"
  echo
  fabric-ca-client enroll -u https://peer1:peer1pw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/msp --csr.hosts peer1.org2.example.com --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/msp/config.yaml ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/msp/config.yaml

  echo
  echo "## Generate the peer1-tls certificates"
  echo
  fabric-ca-client enroll -u https://peer1:peer1pw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/tls --enrollment.profile tls --csr.hosts peer1.org2.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/tls/ca.crt
  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/tls/signcerts/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/tls/server.crt
  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/tls/keystore/* ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/peers/peer1.org2.example.com/tls/server.key

  # -----------------------------------------------------------------------------------
  # Users
  mkdir -p crypto-config-ca/peerOrganizations/org2.example.com/users
  mkdir -p crypto-config-ca/peerOrganizations/org2.example.com/users/User1@org2.example.com

  echo
  echo "## Generate the user msp"
  echo
  fabric-ca-client enroll -u https://user1:user1pw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/users/User1@org2.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  mkdir -p crypto-config-ca/peerOrganizations/org2.example.com/users/Admin@org2.example.com

  echo
  echo "## Generate the org admin msp"
  echo
  fabric-ca-client enroll -u https://org2admin:org2adminpw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/users/Admin@org2.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem

  cp ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/msp/config.yaml ${PWD}/crypto-config-ca/peerOrganizations/org2.example.com/users/Admin@org2.example.com/msp/config.yaml
}

# createCertificateForOrg2

# Orderer-org enrollment against the orderer CA on localhost:9054.
# NOTE(review): "Cretificate" typo kept — callers elsewhere may use this name.
# NOTE(review): this function is truncated at the end of the visible chunk;
# the remainder (continuing after the final tlscacerts mkdir) is unchanged.
createCretificateForOrderer() {
  echo
  echo "Enroll the CA admin"
  echo
  mkdir -p crypto-config-ca/ordererOrganizations/example.com

  export FABRIC_CA_CLIENT_HOME=${PWD}/crypto-config-ca/ordererOrganizations/example.com

  fabric-ca-client enroll -u https://admin:adminpw@localhost:9054 --caname ca-orderer --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem

  echo 'NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/localhost-9054-ca-orderer.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/localhost-9054-ca-orderer.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/localhost-9054-ca-orderer.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/localhost-9054-ca-orderer.pem
    OrganizationalUnitIdentifier: orderer' >${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/config.yaml

  echo
  echo "Register orderer"
  echo
  fabric-ca-client register --caname ca-orderer --id.name orderer --id.secret ordererpw --id.type orderer --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem

  echo
  echo "Register orderer2"
  echo
  fabric-ca-client register --caname ca-orderer --id.name orderer2 --id.secret ordererpw --id.type orderer --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem

  echo
  echo "Register orderer3"
  echo
  fabric-ca-client register --caname ca-orderer --id.name orderer3 --id.secret ordererpw --id.type orderer --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem

  echo
  echo "Register the orderer admin"
  echo
  fabric-ca-client register --caname ca-orderer --id.name ordererAdmin --id.secret ordererAdminpw --id.type admin --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem

  mkdir -p crypto-config-ca/ordererOrganizations/example.com/orderers
  # mkdir -p crypto-config-ca/ordererOrganizations/example.com/orderers/example.com

  # ---------------------------------------------------------------------------
  # Orderer
  mkdir -p crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com

  echo
  echo "## Generate the orderer msp"
  echo
  fabric-ca-client enroll -u https://orderer:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/msp --csr.hosts orderer.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem

  cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/config.yaml ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/msp/config.yaml

  echo
  echo "## Generate the orderer-tls certificates"
  echo
  fabric-ca-client enroll -u https://orderer:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls --enrollment.profile tls --csr.hosts orderer.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem

  cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls/ca.crt
  cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls/signcerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls/server.crt
  cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls/keystore/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls/server.key

  mkdir ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/msp/tlscacerts
  cp
${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/msp/tlscacerts/tlsca.example.com-cert.pem mkdir ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/tlscacerts cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/tlscacerts/tlsca.example.com-cert.pem # ----------------------------------------------------------------------- # Orderer 2 mkdir -p crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com echo echo "## Generate the orderer msp" echo fabric-ca-client enroll -u https://orderer2:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/msp --csr.hosts orderer2.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/config.yaml ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/msp/config.yaml echo echo "## Generate the orderer-tls certificates" echo fabric-ca-client enroll -u https://orderer2:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls --enrollment.profile tls --csr.hosts orderer2.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/ca.crt cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/signcerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/server.crt 
cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/keystore/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/server.key mkdir ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/msp/tlscacerts cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/msp/tlscacerts/tlsca.example.com-cert.pem # mkdir ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/tlscacerts # cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/tlscacerts/tlsca.example.com-cert.pem # --------------------------------------------------------------------------- # Orderer 3 mkdir -p crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com echo echo "## Generate the orderer msp" echo fabric-ca-client enroll -u https://orderer3:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/msp --csr.hosts orderer3.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/config.yaml ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/msp/config.yaml echo echo "## Generate the orderer-tls certificates" echo fabric-ca-client enroll -u https://orderer3:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls --enrollment.profile tls --csr.hosts orderer3.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem cp 
${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/ca.crt cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/signcerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/server.crt cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/keystore/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/server.key mkdir ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/msp/tlscacerts cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/msp/tlscacerts/tlsca.example.com-cert.pem # mkdir ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/tlscacerts # cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/tlscacerts/* ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/tlscacerts/tlsca.example.com-cert.pem # --------------------------------------------------------------------------- mkdir -p crypto-config-ca/ordererOrganizations/example.com/users mkdir -p crypto-config-ca/ordererOrganizations/example.com/users/Admin@example.com echo echo "## Generate the admin msp" echo fabric-ca-client enroll -u https://ordererAdmin:ordererAdminpw@localhost:9054 --caname ca-orderer -M ${PWD}/crypto-config-ca/ordererOrganizations/example.com/users/Admin@example.com/msp --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem cp ${PWD}/crypto-config-ca/ordererOrganizations/example.com/msp/config.yaml ${PWD}/crypto-config-ca/ordererOrganizations/example.com/users/Admin@example.com/msp/config.yaml } # createCretificateForOrderer sudo rm -rf crypto-config-ca/* # 
sudo rm -rf fabric-ca/* createcertificatesForOrg1 createCertificateForOrg2 createCretificateForOrderer
#!/bin/bash
# Provision EC2 server instances for the cluster described in build_config.sh.
# If $server_instance_file already exists, the instance ids recorded in it are
# reused instead of launching new instances.
. ./build_config.sh

# Look up the Amazon Linux HVM/EBS/gp2 AMI for us-west-1.
ami_id=$(ec2-describe-images -o amazon --region us-west-1 -F "architecture=x86_64" -F "block-device-mapping.volume-type=gp2" -F "image-type=machine" -F "root-device-type=ebs" -F "virtualization-type=hvm" -F "name=amzn-ami-hvm-2015.03.0*" | grep "ami-" | cut -f 2)

# cleanup partial instance requests later <- bug
if [ ! -e $server_instance_file ]
then
    echo "Requesting instances for AMI: $ami_id"

    # Load subnet and security group ids from the infra files.
    subnet_id=$(cat $subnet_file)
    security_group_id=$(cat $sg_file)

    # Launch the server nodes and capture the new instance ids.
    instance_id_array=( $(ec2-run-instances $ami_id -region $region -k $EC2_KEY_NAME -n $server_count -z $availability -t $server_instance_type -a ":0:$subnet_id:::$security_group_id" --placement-group $placement_group --associate-public-ip-address true | grep "^INSTANCE" | cut -f 2) )

    # Record each freshly created instance id so later runs can reuse them.
    for instance_id in "${instance_id_array[@]}"
    do
        echo "Created ${instance_id}"
        echo ${instance_id} >> $server_instance_file
    done
else
    # A previous run already recorded instance ids; reuse them as-is.
    instance_id_array=( $(cat $server_instance_file) )
    for instance_id in "${instance_id_array[@]}"
    do
        echo "Re-using ${instance_id}"
    done
fi
#!/bin/bash ### # bootstrap.sh # # configuration variables ### USE_APACHE=true # PHP stuff USE_PHP=true USE_COMPOSER=true COMPOSER_AUTO=false USE_PHPUNIT=true # Database stuff USE_MYSQL=true USE_PHPMYADMIN=true DB_HOST=localhost DB_NAME=projectdb DB_USER=root DB_PASSWD=root MYSQL_IMPORT=true #Git stuff USE_GIT=true GIT_USER="Git User" GIT_EMAIL=user.email@domain.com # Node stuff USE_NODE=false USE_GULP=false USE_BOWER=false USE_GRUNT=false USE_ANGULAR=false USE_MAILCATCHER=false ### # end configuration ### echo "Creating swap file" /bin/dd if=/dev/zero of=/var/swap.1 bs=1M count=1024 > /dev/null 2>&1 /sbin/mkswap /var/swap.1 > /dev/null 2>&1 /sbin/swapon /var/swap.1 > /dev/null 2>&1 echo '/var/swap.1 none swap sw 0 0' >> /etc/fstab echo "Updating ..." apt-get -qq update > /dev/null 2>&1 sed -i 's|\\u@\\h:\\w\\$ |\\n\\u@\\h:\\w\\n\\$ |g' /home/ubuntu/.bashrc if $USE_APACHE; then echo "Installing Apache" apt-get -qq -y install apache2 > /dev/null 2>&1 cp -f /vagrant/provision/000-default.conf /etc/apache2/sites-available/000-default.conf a2enmod rewrite > /dev/null 2>&1 cp -f /vagrant/provision/servername.conf /etc/apache2/conf-available/servername.conf a2enconf servername > /dev/null 2>&1 service apache2 restart > /dev/null 2>&1 fi if $USE_PHP; then echo "Installing PHP" apt-get -qq -y install php libapache2-mod-php > /dev/null 2>&1 apt-get -qq -y install php-curl > /dev/null 2>&1 apt-get -qq -y install php-gd > /dev/null 2>&1 apt-get -qq -y install php-sqlite3 > /dev/null 2>&1 apt-get -qq -y install php-mcrypt > /dev/null 2>&1 phpenmod mcrypt cp -f /vagrant/provision/php.ini /etc/php/7.0/apache2/php.ini fi if $USE_MYSQL; then echo "Installing MySQL" debconf-set-selections <<< "mysql-server mysql-server/root_password password $DB_PASSWD" debconf-set-selections <<< "mysql-server mysql-server/root_password_again password $DB_PASSWD" apt-get -qq -y install mysql-server > /dev/null 2>&1 apt-get -qq -y install mysql-client php-mysql > /dev/null 2>&1 echo "Creating 
Database" mysql -uroot -p$DB_PASSWD -e "CREATE DATABASE $DB_NAME" >> /dev/null 2>&1 mysql -uroot -p$DB_PASSWD -e "grant all privileges on $DB_NAME.* to '$DB_USER'@'localhost' identified by '$DB_PASSWD'" > /dev/null 2>&1 if $MYSQL_IMPORT && [ -f /vagrant/provision/sql ]; then echo "SQL file found. Importing Database" mysql -u $DB_USER -p$DB_PASSWD $DB_NAME < /vagrant/provision/sql fi fi if $USE_PHPMYADMIN && $USE_APACHE; then echo "Installing phpMyAdmin" debconf-set-selections <<< "phpmyadmin phpmyadmin/dbconfig-install boolean true" debconf-set-selections <<< "phpmyadmin phpmyadmin/app-password-confirm password $DB_PASSWD" debconf-set-selections <<< "phpmyadmin phpmyadmin/mysql/admin-pass password $DB_PASSWD" debconf-set-selections <<< "phpmyadmin phpmyadmin/mysql/app-pass password $DB_PASSWD" debconf-set-selections <<< "phpmyadmin phpmyadmin/reconfigure-webserver multiselect apache2" apt-get -qq -y install phpmyadmin > /dev/null 2>&1 fi if $USE_COMPOSER; then echo "Installing Composer" curl -sS https://getcomposer.org/installer | php > /dev/null 2>&1 mv composer.phar /usr/local/bin/composer fi if $COMPOSER_AUTO && [ -f /vagrant/composer.json ]; then echo "composer.json found, let's setup your project" composer --working-dir=/vagrant/ install fi if $USE_GIT; then echo "Installing Git" apt-get install -y git > /dev/null 2>&1 echo "[user]" >> /home/ubuntu/.gitconfig echo " name = $GIT_USER" >> /home/ubuntu/.gitconfig echo " email = $GIT_EMAIL" >> /home/ubuntu/.gitconfig git clone https://github.com/magicmonty/bash-git-prompt.git /home/ubuntu/.bash-git-prompt --depth=1 > /dev/null 2>&1 echo "GIT_PROMPT_ONLY_IN_REPO=1" >> /home/ubuntu/.bashrc echo "source ~/.bash-git-prompt/gitprompt.sh" >> /home/ubuntu/.bashrc apt-get install -y git-ftp > /dev/null 2>&1 fi if $USE_PHPUNIT; then echo "Installing PHPUnit" wget -q https://phar.phpunit.de/phpunit.phar chmod +x phpunit.phar mv phpunit.phar /usr/local/bin/phpunit fi if $USE_NODE; then echo "Installing Node.js and npm" 
apt-get install -y python-software-properties > /dev/null 2>&1 curl -sL https://deb.nodesource.com/setup_7.x | sudo -E bash - > /dev/null 2>&1 apt-get install -y nodejs > /dev/null 2>&1 fi if $USE_GULP && $USE_NODE; then echo "Installing Gulp" npm install --global gulp-cli --silent > /dev/null 2>&1 apt-get install -y libnotify-bin > /dev/null 2>&1 fi if $USE_BOWER && $USE_NODE; then echo "Installing Bower" npm install -g bower --silent > /dev/null 2>&1 fi if $USE_GRUNT && $USE_NODE; then echo "Installing Grunt" npm install -g grunt-cli --silent > /dev/null 2>&1 fi if $USE_ANGULAR && $USE_NODE; then echo "Installing @angular/cli" npm install -g @angular/cli --silent > /dev/null 2>&1 fi if $USE_MAILCATCHER; then apt-get -qq -y install build-essential libsqlite3-dev ruby2.3-dev > /dev/null 2>&1 gem install mailcatcher --no-ri --no-rdoc > /dev/null 2>&1 echo 'description "Mailcatcher"' > /etc/init/mailcatcher.conf echo '' >> /etc/init/mailcatcher.conf echo 'start on runlevel [2345]' >> /etc/init/mailcatcher.conf echo 'stop on runlevel [2345]' >> /etc/init/mailcatcher.conf echo '' >> /etc/init/mailcatcher.conf echo 'respawn' >> /etc/init/mailcatcher.conf echo 'exec /usr/bin/env $(which mailcatcher) --foreground --http-ip=0.0.0.0' >> /etc/init/mailcatcher.conf if $USE_PHP; then sed -i 's|^;sendmail_path =|sendmail_path = /usr/bin/env /usr/local/bin/catchmail|g' /etc/php/7.0/apache2/php.ini fi /usr/bin/env $(which mailcatcher) --ip=0.0.0.0 > /dev/null 2>&1 fi if $USE_APACHE; then service apache2 restart > /dev/null 2>&1 fi echo "Done Installing stuff. Have a nice day!"
#### #Copyright (c) Facebook, Inc. and its affiliates. # #This source code is licensed under the MIT license found in the #LICENSE file in the root directory of this source tree. # #!/usr/bin/env bash # Goal : # - Select the datset for minimality experiments. Test when bob not using joint. experiment="QminimalityDataset" source `dirname $0`/utils.sh #precomputed="$prfx""precomputed" precomputed=$experiment kwargs="trnsf_experiment=$precomputed experiment=$precomputed datasize=all dataset.kwargs.is_augment=False train.kwargs.lr=5e-5 model.architecture.z_dim=1024 hydra.launcher.time=500 clfs.gamma_force_generalization=-0.1 model.Q_zy.hidden_size=128 model.Q_zy.n_hidden_layers=1 encoder=mlpl datasize.max_epochs=100 model.loss.altern_minimax=5 model.loss.n_per_head=3 $dev " kwargs_multi=" run=0,1,2 model=cdib,vib model.loss.beta=0,0.01,0.1,1,10,100 dataset=cifar10mnist,cifar100,cifar10,svhn,mnist " if [ "$is_plot_only" = false ] ; then for kwargs1 in "" do # precompute the transformer if not already done python main.py is_precompute_trnsf=True $kwargs $kwargs_multi $kwargs1 -m & wait python main.py $kwargs $kwargs_multi $kwargs1 -m & # make sure different hydra directory sleep 2m done fi wait params="col_val_subset.data=[cifar10mnist,cifar100,cifar10,svhn,mnist] col_val_subset.lr=[5e-5] col_val_subset.model=[cdib,vib] col_val_subset.encoder=[mlpl] col_val_subset.beta=[0,0.1,1,10,100,1000] " params=$params" col_val_subset.enc_zy_nhid=[128] col_val_subset.enc_zy_kpru=[0] col_val_subset.enc_zy_nlay=[1] " # ENCODER python aggregate.py \ experiment=$precomputed \ save_experiment="$experiment"/trnsf \ $params \ plot_generalization.x=beta \ plot_generalization.is_trnsf=True \ plot_generalization.logbase_x=10 \ plot_generalization.row=data \ plot_generalization.col=model \ plot_aux_trnsf.x=beta \ plot_aux_trnsf.logbase_x=10 \ plot_aux_trnsf.row=data \ plot_aux_trnsf.style=model \ plot_histories.row=data \ plot_histories.style=beta \ plot_histories.col=model \ 
recolt_data.pattern_results=null \ mode=[save_tables,plot_aux_trnsf,plot_generalization,plot_histories] params_clf=$params" col_val_subset.clf_nhid=[128] col_val_subset.clf_kpru=[0] col_val_subset.clf_nlay=[1] " # CLASSIFIER python aggregate.py \ experiment=$experiment \ $params_clf \ plot_generalization.x=beta \ plot_generalization.is_trnsf=False \ plot_generalization.logbase_x=10 \ plot_generalization.row=data \ plot_generalization.col=model \ plot_metrics.x=beta \ plot_metrics.logbase_x=10 \ plot_metrics.row=data \ plot_metrics.style=model \ plot_histories.row=data \ plot_histories.style=beta \ plot_histories.col=model \ recolt_data.pattern_histories="tmp_results/$experiment/**/clf_nhid_*/**/last_epoch_history.json" \ recolt_data.pattern_aux_trnsf=null \ mode=[save_tables,plot_metrics,plot_generalization,plot_histories]
'use strict';

// Dependencies
require('dotenv').config();
const express = require('express');
const cors = require('cors');
const pg = require('pg');

// Route modules
const Location = require('./modules/locations');
const Weather = require('./modules/weather');
const Yelp = require('./modules/yelp');
const Event = require('./modules/events');
const Movie = require('./modules/movies');
const Trail = require('./modules/trails');

// Setup
const PORT = process.env.PORT || 3000;
const app = express();
app.use(cors());

const client = new pg.Client(process.env.DATABASE_URL);
// FIX: pg.Client emits 'error', not 'err' — the old listener never fired,
// so an idle-connection error would crash the process unhandled.
client.on('error', err => console.error(err));

// Routes
app.get('/location', Location.getLocation);
app.get('/weather', Weather.getWeather);
app.get('/yelp', Yelp.getYelp);
// NOTE(review): unlike the other modules, events appears to export the
// handler function directly — confirm against ./modules/events.
app.get('/events', Event);
app.get('/movies', Movie.getMovies);
app.get('/trails', Trail.getTrails);

// Only start listening once the database connection is up.
client.connect()
  .then(() => {
    app.listen(PORT, () => {
      console.log('server and db are up, listening on port', PORT);
    });
  })
  // FIX: the connect() promise previously had no rejection handler; a bad
  // DATABASE_URL produced an unhandled rejection instead of a clear failure.
  .catch(err => {
    console.error('failed to connect to database', err);
    process.exit(1);
  });
// Redux action type constants.

// Alert list actions
export const SET_ALERTS = 'SET_ALERTS';
export const PUSH_ALERT = 'PUSH_ALERT';
export const REMOVE_ALERT = 'REMOVE_ALERT';

// User profile actions
export const SET_NICKNAME = 'SET_NICKNAME';
/**
 * Package entry point for the Firebase loopback connector.
 *
 * Re-exports the implementation from lib/index so consumers can simply
 * require the package root.
 */
module.exports = require('./lib/index');
# Build the bundled LibreSSL dependency found two levels up from this script.
cur_dir=$(cd "$(dirname "$0")"; pwd)
# FIX: derive the parent directory from the script's own location rather than
# the caller's working directory ($(pwd)); cur_dir was previously computed but
# never used, so the script only worked when invoked from its own directory.
parent_dir=$(dirname "$cur_dir")
# FIX: abort if the directory is missing instead of running build.sh from
# whatever directory we happen to be in.
cd "${parent_dir}/deps/libressl/" || exit 1
./build.sh
#!/usr/bin/env bash
# Vagrant provisioning: dev toolchain (Node 6, lint/test globals), Chrome,
# MongoDB 3.4, Docker engine + compose + machine, git defaults.
set -ex

HOME=/home/vagrant

# Create working dir & set permissions
mkdir -p /code
chown -R vagrant:vagrant /code

# Install Node.js (v6) and common global libs
curl -sL https://deb.nodesource.com/setup_6.x | sudo -E bash -
apt-get install -y nodejs
npm install -g mocha \
  istanbul \
  babel-cli \
  babel-preset-es2016 \
  eslint \
  eslint-config-airbnb \
  eslint-plugin-react \
  eslint-plugin-import
npm install -g eslint-plugin-jsx-a11y@2.2.3 # pin specific version due to https://github.com/airbnb/javascript/issues/1163

# Set up Google Chrome key
wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | sudo apt-key add -
echo "deb http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google-chrome.list

# Set up mongodb community edition key and repo
apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 0C49F3730359A14518585931BC711F9BA15703C6
echo "deb [ arch=amd64,arm64 ] http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.4 multiverse" | sudo tee /etc/apt/sources.list.d/mongodb-org-3.4.list

apt-get update
apt-get install -y \
  apt-transport-https \
  ca-certificates \
  git \
  samba \
  google-chrome-stable \
  mongodb-org

timedatectl set-timezone Europe/Amsterdam
locale-gen
localectl set-locale LANG="en_US.UTF-8"

# Start MongoDB
service mongod start

# Setup Docker config
# mkdir -p /etc/systemd/system/docker.service.d
# tee /etc/systemd/system/docker.service.d/docker.conf <<-'EOF'
# [Service]
# ExecStart=
# ExecStart=/usr/bin/docker daemon -D -H tcp://0.0.0.0:2375
# EOF

# Install & run docker engine
apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D
echo 'deb https://apt.dockerproject.org/repo ubuntu-xenial main' > /etc/apt/sources.list.d/docker.list
apt-get update
apt-get install -y docker-engine
systemctl daemon-reload
systemctl restart docker
systemctl enable docker
usermod -aG docker vagrant

# Install docker compose (1.9.0) plus matching bash completion
sudo curl -L https://github.com/docker/compose/releases/download/1.9.0/docker-compose-`uname -s`-`uname -m` > /usr/local/bin/docker-compose
sudo chmod +x /usr/local/bin/docker-compose
sudo curl -L https://raw.githubusercontent.com/docker/compose/$(sudo docker-compose version --short)/contrib/completion/bash/docker-compose > /etc/bash_completion.d/docker-compose

# Install docker machine
curl -L https://github.com/docker/machine/releases/download/v0.8.2/docker-machine-`uname -s`-`uname -m` >/usr/local/bin/docker-machine && \
  chmod +x /usr/local/bin/docker-machine

# sudo cp /lib/systemd/system/docker.service /etc/systemd/system/docker.service
# sudo sed -i "s|-H fd://|-H tcp://0.0.0.0:2375|" /etc/systemd/system/docker.service
# sudo systemctl daemon-reload
# sudo service docker restart

# FIX: a second "Setup Docker Compose" section used to re-download compose
# 1.8.0 here, silently overwriting the 1.9.0 binary installed above and
# leaving it mismatched with the 1.9.0 bash completion. Removed.

# Setup Git
git config --global core.autocrlf false
git config --global color.ui true

# Setup Samba
# tee /etc/samba/smb.conf <<-'EOF'
# [global]
# server string = Vagrant VM
# log file = /var/log/samba/log.%m
# max log size = 50
# security = user
# passdb backend = tdbsam
# [code]
# path = /code
# public = yes
# browseable = yes
# writable = yes
# map archive = no
# create mask = 0644
# directory mask = 0755
# EOF
# systemctl restart smb
# systemctl enable smb
# echo -ne "vagrant\nvagrant\n" | smbpasswd -a -s vagrant
<filename>packages/react-form/tests/testHelpers.tsx import { ReactElement } from 'react'; import renderer from 'react-test-renderer'; import { Field, TextType, TextAreaType, SelectType, CheckType, NumberType, } from '../src/utils/fieldTypes'; export const matchSnapshot = (component: ReactElement): void => expect(renderer.create(component).toJSON()).toMatchSnapshot(); export const minimalTextField: TextType = { type: Field.TEXT, label: 'Text Field', }; export const minimalTextAreaField: TextAreaType = { type: Field.TEXT_AREA, label: 'Text Area Field', }; export const minimalSelectField: SelectType = { type: Field.SELECT, label: 'Select Field', options: [ { label: 'Option 1', value: 'option1' }, { label: 'Option 2', value: 'option2' }, ], }; export const minimalCheckField: CheckType = { type: Field.CHECK, label: 'Check Field', }; export const minimalNumberField: NumberType = { type: Field.NUMBER, label: 'Number Field', };
<filename>sources/include/nx/nx/headers.hpp #ifndef __NX_HEADERS_H__ #define __NX_HEADERS_H__ #include <string> #include <nx/config.h> #include <nx/attributes.hpp> namespace nx { const std::string Content_Type = "Content-Type"; const std::string content_type = "content-type"; const std::string Content_Length = "Content-Length"; const std::string content_length = "content-length"; const std::string Content_Disposition = "Content-Disposition"; const std::string content_disposition = "content-disposition"; const std::string Location = "Location"; const std::string location = "location"; const std::string Upgrade = "Upgrade"; const std::string upgrade = "upgrade"; const std::string Connection = "Connection"; const std::string connection = "connection"; const std::string Sec_WebSocket_Key = "Sec-WebSocket-Key"; const std::string sec_websocket_key = "sec-websocket-key"; const std::string Sec_WebSocket_Protocol = "Sec-WebSocket-Protocol"; const std::string sec_websocket_protocol = "sec-websocket-protocol"; const std::string Sec_WebSocket_Version = "Sec-WebSocket-Version"; const std::string sec_websocket_version = "sec-websocket-version"; const std::string Sec_WebSocket_Accept = "Sec-WebSocket-Accept"; const std::string sec_websocket_accept = "sec-websocket-accept"; class NX_API headers : public attribute_map { public: using attribute_map::attribute_map; virtual std::ostream& operator()(std::ostream& oss) const; }; struct header : public attribute_base { using attribute_base::attribute_base; }; const header text_plain = { Content_Type, "text/plain" }; const header application_json = { Content_Type, "application/json" }; const header upgrade_websocket = { Upgrade, "websocket" }; const header connection_upgrade = { Connection, Upgrade }; const header connection_close = { Connection, "close" }; } // namespace nx #endif // __NX_HEADERS_H__
<reponame>kampka/gsctl package login import ( "fmt" "github.com/fatih/color" "github.com/giantswarm/gscliauth/config" "github.com/giantswarm/microerror" "github.com/giantswarm/gsctl/client" ) // loginGiantSwarm executes the authentication logic. // If the user was logged in before, a logout is performed first. func loginGiantSwarm(args Arguments) (loginResult, error) { result := loginResult{ apiEndpoint: args.apiEndpoint, email: args.email, provider: "", loggedOutBefore: false, endpointSwitched: false, numEndpointsBefore: config.Config.NumEndpoints(), } endpointBefore := config.Config.SelectedEndpoint if result.apiEndpoint != endpointBefore { result.endpointSwitched = true } clientWrapper, err := client.NewWithConfig(args.apiEndpoint, "") if err != nil { return result, microerror.Mask(err) } ap := clientWrapper.DefaultAuxiliaryParams() ap.ActivityName = loginActivityName // log out if logged in if config.Config.Token != "" { if args.verbose { fmt.Println(color.WhiteString("Logging out using a a previously stored token")) } result.loggedOutBefore = true // we deliberately ignore the logout result here clientWrapper.DeleteAuthToken(config.Config.Token, ap) } if args.verbose { fmt.Println(color.WhiteString("Submitting API call to create an authentication token with email '%s'", args.email)) } response, err := clientWrapper.CreateAuthToken(args.email, args.password, ap) if err != nil { return result, err } // handle success result.token = response.Payload.AuthToken result.email = args.email // fetch installation name as alias if args.verbose { fmt.Println(color.WhiteString("Fetching installation details")) } installationInfo, err := getInstallationInfo(args.apiEndpoint, "giantswarm", result.token) if err != nil { return result, microerror.Mask(err) } result.alias = installationInfo.InstallationName result.provider = installationInfo.Provider if err := config.Config.StoreEndpointAuth(args.apiEndpoint, result.alias, result.provider, args.email, "giantswarm", 
result.token, ""); err != nil { return result, microerror.Mask(err) } if err := config.Config.SelectEndpoint(args.apiEndpoint); err != nil { return result, microerror.Mask(err) } // after storing endpoint, get new endpoint count result.numEndpointsAfter = config.Config.NumEndpoints() return result, nil }
#!/bin/bash
# Install or uninstall the nvoptix.dll symlink into a wine prefix's system32.
# Usage: setup.sh [install|uninstall] [-y|-n]

nvoptix_dir="$(dirname "$(readlink -fm "$0")")/lib64/wine"
wine='wine64'

if [ ! -f "$nvoptix_dir/x86_64-unix/nvoptix.dll.so" ]; then
    echo "nvoptix.dll.so not found in $nvoptix_dir/x86_64-unix" >&2
    exit 1
fi

winever=$($wine --version | grep wine)
if [ -z "$winever" ]; then
    echo "$wine: Not a wine executable. Check your $wine." >&2
    exit 1
fi

assume=

# Prompt the user; if -y/-n was given, answer automatically and echo it.
function ask {
    echo "$1"
    if [ -z "$assume" ]; then
        read -r continue
    else
        continue=$assume
        echo "$continue"
    fi
}

POSITIONAL=()
while [[ $# -gt 0 ]]; do
    case $1 in
        -y)
            assume='y'
            shift
            ;;
        -n)
            assume='n'
            shift
            ;;
        *)
            POSITIONAL+=("$1")
            shift
            ;;
    esac
done
set -- "${POSITIONAL[@]}"

if [ -z "$WINEPREFIX" ]; then
    ask "WINEPREFIX is not set, continue? (y/N)"
    if [ "$continue" != "y" ] && [ "$continue" != "Y" ]; then
        exit 1
    fi
else
    if ! [ -f "$WINEPREFIX/system.reg" ]; then
        ask "WINEPREFIX does not point to an existing wine installation. Proceeding will create a new one, continue? (y/N)"
        if [ "$continue" != "y" ] && [ "$continue" != "Y" ]; then
            exit 1
        fi
    fi
fi

unix_sys_path=$($wine winepath -u 'C:\windows\system32' 2> /dev/null)
if [ -z "$unix_sys_path" ]; then
    echo 'Failed to resolve C:\windows\system32.' >&2
    exit 1
fi

# FIX: the original assigned "exit=2" on failure — a shell variable named
# 'exit' that was never read — so the script always terminated with status 0
# even when removal failed. Track the status and propagate it at the end.
exit_code=0

function remove {
    echo " Removing nvoptix... "
    local dll="$unix_sys_path/$1.dll"
    if [ -h "$dll" ]; then
        out=$(rm "$dll" 2>&1)
        if [ $? -ne 0 ]; then
            echo -e "$out"
            exit_code=2
        fi
    else
        echo -e "'$dll' is not a link or doesn't exist."
        exit_code=2
    fi
}

function create {
    echo " Installing nvoptix... "
    ln -sf "$nvoptix_dir/x86_64-unix/$1.dll.so" "$unix_sys_path/$1.dll"
    if [ $? -ne 0 ]; then
        echo -e "Failed to create symlink"
        exit 1
    fi
}

case "$1" in
    uninstall)
        fun=remove
        ;;
    install)
        fun=create
        ;;
    *)
        echo "Unrecognized option: $1"
        echo "Usage: $0 [install|uninstall]"
        exit 1
        ;;
esac

$fun nvoptix
exit "$exit_code"
# Django admin registration for YourModel.
from django.contrib import admin

from .models import YourModel


class YourModelAdmin(admin.ModelAdmin):
    """Admin for YourModel with a combined "retries / max_retries" column."""

    list_display = (
        "uuid",
        "url",
        "display_current_retries",
        "last_try",
        "comment",
        "status",
    )

    def display_current_retries(self, instance):
        """Render the retry progress as "<retries> / <max_retries>"."""
        return "{} / {}".format(instance.retries, instance.max_retries)


# Register the model with the custom admin class.
admin.site.register(YourModel, YourModelAdmin)
import { Component, Inject, OnInit } from '@angular/core'; import { MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog'; import { MiscellanousService } from '../../services/miscellanous.service'; import { AngularFirestore } from '@angular/fire/firestore'; import { AngularFireStorage } from '@angular/fire/storage'; import { MatSnackBar } from '@angular/material/snack-bar'; import * as _utils from './../../Utils/utils'; import { ToastrService } from 'ngx-toastr'; import { Student } from '../../Utils/student.model'; import { DatePipe } from '@angular/common'; import * as firebase from 'firebase/app'; @Component({ selector: 'app-add-student', templateUrl: './add-student.component.html', styleUrls: ['./add-student.component.css'] }) export class AddStudentComponent implements OnInit { updation: boolean = false; loading: boolean = false; stdObj: Student; docId: string; name: string; email: string; mobile: string; profileImage: any; fatherName: string; fatherEmail: string; fatherMobile: string; remarks: string; address: string; birthDate: Date; todaysDate: Date; appointmentDate: Date; followUpDate: Date; salutation: number = 0; gender: number = 0; status: number = 0; tempFile: File; datepipe: DatePipe = new DatePipe('en-us'); constructor( @Inject(MAT_DIALOG_DATA) private data: any, private dialogRef: MatDialogRef<AddStudentComponent>, private miscs: MiscellanousService, private dbRef: AngularFirestore, private stgRef: AngularFireStorage, private snackbar: MatSnackBar, private toast: ToastrService ) { if (data['obj'] != null) { this.updation = true; this.stdObj = data['obj']; this.name = this.stdObj.name; this.email = this.stdObj.email; this.mobile = this.stdObj.mobile; this.gender = this.stdObj.gender; this.address = this.stdObj.address; this.birthDate = this.stdObj.birthDate; this.docId = this.stdObj.docId; this.remarks = this.stdObj.remarks; this.fatherName = this.stdObj.fatherName; this.fatherMobile = this.stdObj.fatherMobile; this.fatherEmail = 
this.stdObj.fatherEmail; this.salutation = this.stdObj.salutation; this.appointmentDate = new Date(); if(this.stdObj.followUpDate != null){ if(this.datepipe.transform(this.appointmentDate, 'yyyy-MM-dd') >= this.datepipe.transform(this.stdObj.followUpDate['seconds'] * 1000, 'yyyy-MM-dd')) { this.status = 1; } } else { this.status = this.stdObj.status; } } else { this.updation = false; this.birthDate = new Date(); } } ngOnInit(): void { this.appointmentDate = new Date(); } firestoreID = () => this.dbRef.createId(); async addStudent() { this.loading = true; if (this.name == undefined || this.name == "") { this.snackbar.open("First Name Required", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; return; } if (this.fatherName == undefined || this.fatherName == "") { this.snackbar.open("First Name Required", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; return; } if (this.address == undefined && this.address?.length <= 1) { this.snackbar.open("Address required", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; return; } if (!this.email.includes("@") || !this.email.includes(".")) { this.snackbar.open("Email Invalid ", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; return; } if (!this.fatherEmail.includes("@") || !this.fatherEmail.includes(".")) { this.snackbar.open("Father Email Invalid ", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; return; } if (this.gender == undefined) { this.snackbar.open("Gender is required ", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; return; } if (this.birthDate == undefined) { this.snackbar.open("Date of Birth is required ", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; return; } var docKey = this.firestoreID(); if(this.profileImage != undefined) { const file = this.profileImage; const path = _utils.ROUTE_STUDENTS + "/" + docKey + "/profileImage"; const storageRef = 
this.stgRef.ref(path); const upload = this.stgRef.upload(path, file); await upload.snapshotChanges().pipe().toPromise().then(() => { return storageRef.getDownloadURL().toPromise().then(url => { this.profileImage = url; }) }) } var stdObj: Student = { docId: docKey, name: this.name.toLowerCase(), email: this.email.toLowerCase(), mobile: this.mobile, fatherName: this.fatherName.toLowerCase(), fatherEmail: this.fatherEmail.toLowerCase(), fatherMobile: this.fatherMobile, birthDate: this.birthDate || null, profileImage: this.profileImage || "", gender: this.gender, salutation: this.salutation, status: this.status, appointmentDate: this.appointmentDate, appointmentsHistory: [], followUpDate: this.followUpDate || null, address: this.address || "", remarks: this.remarks || "", createdOn: new Date(), updatedOn: new Date(), }; this.dbRef.collection(_utils.MAIN).doc(_utils.MAIN).collection(_utils.COLLECTION_STUDENTS).doc(docKey) .set(stdObj, { merge: true }) .then(() => { this.snackbar.open('Added Successfully', '', { duration: 2500, panelClass: ['success'] }) this.loading = false; this.dialogRef.close(); }) .catch((err) => { this.snackbar.open("Next Date is required ", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; }) } updateStudent() { this.loading = true; if(this.status == 1) { if(this.followUpDate == null) { this.snackbar.open("Next Date is required ", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; return; } } let stdObj = { docId: this.docId, name: this.name.toLowerCase(), email: this.email.toLowerCase(), mobile: this.mobile, fatherName: this.fatherName.toLowerCase(), fatherEmail: this.fatherEmail.toLowerCase(), fatherMobile: this.fatherMobile, birthDate: this.birthDate, profileImage: this.profileImage || this.stdObj.profileImage, gender: this.gender, salutation: this.salutation, status: this.status, appointmentDate: this.appointmentDate, appointmentsHistory: firebase.default.firestore.FieldValue.arrayUnion({ 
appointmentDate: this.stdObj.appointmentDate, followUpDate: this.stdObj.followUpDate, remarks: this.stdObj.remarks }), followUpDate: this.followUpDate || null, address: this.address, remarks: this.remarks, active: this.status == 3 ? false : this.stdObj.active, updatedOn: new Date(), }; this.dbRef.collection(_utils.MAIN).doc(_utils.MAIN).collection(_utils.COLLECTION_STUDENTS).doc(this.docId) .update(stdObj) .then(() => { this.snackbar.open('Updated Successfully', '', { duration: 2500, panelClass: ['success'] }) this.loading = false; this.dialogRef.close(); }) .catch((err) => { this.snackbar.open("Next Date is required ", "", { duration: 2500, panelClass: ['warning'] }); this.loading = false; }) } selectProfileImage(event) { var temp = event.target.files[0]; if ( temp.type == "image/png" || temp.type == "image/jpeg" || temp.type == "image/jpg" ) { this.profileImage = temp; } else { temp = null; this.profileImage = ""; (<HTMLInputElement>document.getElementById("profileImage")).value = null; this.snackbar.open("Invalid image format. Only .png/.jpg/.jpeg file supported.", '', { duration: 2500, panelClass: ['warning'] }); return; } } }
package io.smallrye.mutiny.operators; import static io.smallrye.mutiny.helpers.ParameterValidation.MAPPER_RETURNED_NULL; import static io.smallrye.mutiny.helpers.ParameterValidation.nonNull; import java.util.function.Function; import java.util.function.Predicate; import org.reactivestreams.Publisher; import org.reactivestreams.Subscriber; import io.smallrye.mutiny.Multi; import io.smallrye.mutiny.infrastructure.Infrastructure; import io.smallrye.mutiny.operators.multi.MultiOnFailureResumeOp; public class MultiFlatMapOnFailure<T> extends MultiOperator<T, T> { private final Predicate<? super Throwable> predicate; private final Function<? super Throwable, ? extends Multi<? extends T>> mapper; public MultiFlatMapOnFailure(Multi<T> upstream, Predicate<? super Throwable> predicate, Function<? super Throwable, ? extends Multi<? extends T>> mapper) { super(nonNull(upstream, "upstream")); this.predicate = predicate == null ? x -> true : predicate; this.mapper = nonNull(mapper, "mapper"); } @Override public void subscribe(Subscriber<? super T> subscriber) { if (subscriber == null) { throw new NullPointerException("The subscriber must not be `null`"); } Function<? super Throwable, ? extends Publisher<? extends T>> next = failure -> { if (predicate.test(failure)) { Publisher<? extends T> res = mapper.apply(failure); if (res == null) { return Multi.createFrom().failure(new NullPointerException(MAPPER_RETURNED_NULL)); } else { return res; } } return Multi.createFrom().failure(failure); }; Multi<T> op = Infrastructure.onMultiCreation(new MultiOnFailureResumeOp<>(upstream(), next)); op.subscribe(subscriber); } }
#ifndef STRF_DETAIL_PRINTERS_TUPLE_HPP
#define STRF_DETAIL_PRINTERS_TUPLE_HPP

// Copyright (C) (See commit logs on github.com/robhz786/strf)
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)

#include <strf/printer.hpp>

namespace strf {
namespace detail {

// Holds one T, tagged with its position I.  Using the index as a template
// parameter lets simple_tuple_impl inherit several bases even when element
// types repeat.
template <std::size_t I, typename T>
struct indexed_obj
{
    constexpr STRF_HD indexed_obj(const T& cp)
        : obj(cp)
    {
    }

    T obj;
};

// Tag type that disambiguates the element-wise constructor below.
struct simple_tuple_from_args {};

template <typename ISeq, typename... T>
class simple_tuple_impl;

// Minimal tuple: one indexed_obj<I, T> base per element.  Element access
// works by overload resolution: converting `this` to the indexed_obj<J, U>*
// base selects the element at index J.
template <std::size_t... I, typename... T>
class simple_tuple_impl<strf::detail::index_sequence<I...>, T...>
    : public indexed_obj<I, T> ...
{
    // Picks the base class whose index is J (U is deduced).
    template <std::size_t J, typename U>
    static constexpr STRF_HD const indexed_obj<J, U>& get_(const indexed_obj<J, U>* r) noexcept
    {
        return *r;
    }

    // Identity helper used only in decltype to name "const U&".
    template <typename U>
    static constexpr STRF_HD const U& as_cref(const U& r) noexcept
    {
        return r;
    }

public:

    static constexpr std::size_t size = sizeof...(T);

    template <typename ... Args>
    constexpr STRF_HD explicit simple_tuple_impl(simple_tuple_from_args, Args&& ... args)
        : indexed_obj<I, T>(args)...
    {
    }

    constexpr explicit simple_tuple_impl(const simple_tuple_impl&) = default;
    constexpr explicit simple_tuple_impl(simple_tuple_impl&&) = default;

    // Const access to the J-th element.
    template <std::size_t J>
    constexpr STRF_HD auto get() const noexcept -> decltype(as_cref(get_<J>(this).obj))
    {
        return get_<J>(this).obj;
    }
};

// Public-facing simple tuple; generates the index sequence and re-exports
// the base constructors.
template <typename ... T>
class simple_tuple
    : public strf::detail::simple_tuple_impl
        < strf::detail::make_index_sequence<sizeof...(T)>, T...>
{
    using strf::detail::simple_tuple_impl
        < strf::detail::make_index_sequence<sizeof...(T)>, T...>
        ::simple_tuple_impl;
};

// Factory: deduces the element types and forwards through the tag constructor.
template <typename ... Args>
constexpr STRF_HD strf::detail::simple_tuple<Args...> make_simple_tuple(const Args& ... args)
{
    return strf::detail::simple_tuple<Args...>
        { strf::detail::simple_tuple_from_args{}, args... };
}

// Free-function accessor mirroring std::get.
template <std::size_t J, typename ... T>
constexpr STRF_HD auto get(const simple_tuple<T...>& tp)
    -> decltype(tp.template get<J>())
{
    return tp.template get<J>();
}

// Same indexed-base trick as indexed_obj, but the wrapped printer is
// constructed in place from an arbitrary argument.
template <std::size_t I, typename Printer>
struct indexed_printer
{
    template <typename Arg>
    STRF_HD indexed_printer(const Arg& arg)
        : printer(arg)
    {
    }

    Printer printer;
};

template < typename CharT
         , typename ISeq
         , typename ... Printers >
class printers_tuple_impl;

// Tuple of printers, each constructed from the corresponding element of a
// simple_tuple of arguments (plus the shared preview and facets pack).
template < typename CharT
         , std::size_t ... I
         , typename ... Printers >
class printers_tuple_impl<CharT, strf::detail::index_sequence<I...>, Printers...>
    : public detail::indexed_printer<I, Printers> ...
{
    // Base selection by index, as in simple_tuple_impl.
    template <std::size_t J, typename T>
    static constexpr STRF_HD const indexed_printer<J, T>& get_(const indexed_printer<J, T>* r) noexcept
    {
        return *r;
    }

    template <typename U>
    static constexpr STRF_HD const U& as_cref(const U& r) noexcept
    {
        return r;
    }

public:

    static constexpr std::size_t size = sizeof...(Printers);

    // Builds printer I from argument I via make_printer_input, sharing
    // `preview` and `fp` across all elements.
    template < typename Preview, typename FPack, typename ... Args >
    STRF_HD printers_tuple_impl
        ( const strf::detail::simple_tuple<Args...>& args
        , Preview& preview
        , const FPack& fp )
        : indexed_printer<I, Printers>
            ( strf::make_printer_input<CharT>
                ( preview, fp, args.template get<I>() ) )
        ...
    {
    }

    // Const access to the J-th printer.
    template <std::size_t J>
    constexpr STRF_HD auto get() const noexcept -> decltype(as_cref(get_<J>(this).printer))
    {
        return get_<J>(this).printer;
    }
};

// Writes all printers of the tuple, in index order, to `dest`.
template<typename CharT, std::size_t ... I, typename ... Printers>
STRF_HD void write
    ( strf::destination<CharT>& dest
    , const strf::detail::printers_tuple_impl
        < CharT, strf::detail::index_sequence<I...>, Printers... >& printers )
{
    strf::detail::write_args<CharT>
        (dest, static_cast<const strf::printer<CharT>&>(printers.template get<I>())...);
}

// Convenience alias that generates the index sequence.
template <typename CharT, typename ... Printers>
using printers_tuple = printers_tuple_impl
    < CharT
    , strf::detail::make_index_sequence<sizeof...(Printers)>
    , Printers... >;

// Maps a pack of argument types to the printers_tuple_impl of their
// printer types (indirection through a class keeps older compilers happy).
template < typename CharT, typename Preview, typename FPack
         , typename ISeq, typename... Args >
class printers_tuple_alias
{
public:
    using type = printers_tuple_impl
        <CharT, ISeq, strf::printer_type<CharT, Preview, FPack, Args> ...>;
};

template < typename CharT, typename Preview, typename FPack, typename ... Args >
using printers_tuple_from_args = typename printers_tuple_alias
    < CharT, Preview, FPack, strf::detail::make_index_sequence<sizeof...(Args)>, Args ...>
    :: type;

} // namespace detail
} // namespace strf

#endif // STRF_DETAIL_PRINTERS_TUPLE_HPP
#!/bin/bash
set -e

# Spins up the plaintext environment and configures a Confluent Jira Source
# connector, then verifies that data arrives in the expected topic.

DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
source ${DIR}/../../scripts/utils.sh

# Credentials: environment variables win, positional arguments are fallback.
JIRA_URL=${JIRA_URL:-$1}
JIRA_USERNAME=${JIRA_USERNAME:-$2}
JIRA_API_TOKEN=${JIRA_API_TOKEN:-$3}

# All three settings are mandatory; abort on the first missing one.
for required in JIRA_URL JIRA_USERNAME JIRA_API_TOKEN; do
    if [ -z "${!required}" ]; then
        logerror "$required is not set. Export it as environment variable or pass it as argument"
        exit 1
    fi
done

${DIR}/../../environment/plaintext/start.sh "${PWD}/docker-compose.plaintext.yml"

# take since last 6 months
#SINCE=$(date -v-4320H "+%Y-%m-%d %H:%M")
SINCE="2020-03-05 00:00"

log "Creating Jira Source connector"
curl -X PUT \
     -H "Content-Type: application/json" \
     --data '{
               "connector.class": "io.confluent.connect.jira.JiraSourceConnector",
               "topic.name.pattern":"jira-topic-${entityName}",
               "tasks.max": "1",
               "jira.url": "'"$JIRA_URL"'",
               "jira.since": "'"$SINCE"'",
               "jira.username": "'"$JIRA_USERNAME"'",
               "jira.api.token": "'"$JIRA_API_TOKEN"'",
               "jira.tables": "project_categories",
               "key.converter": "io.confluent.connect.avro.AvroConverter",
               "key.converter.schema.registry.url":"http://schema-registry:8081",
               "value.converter": "io.confluent.connect.avro.AvroConverter",
               "value.converter.schema.registry.url":"http://schema-registry:8081",
               "confluent.license": "",
               "confluent.topic.bootstrap.servers": "broker:9092",
               "confluent.topic.replication.factor": "1"
          }' \
     http://localhost:8083/connectors/jira-source/config | jq .

# Give the connector time to poll Jira before checking the topic.
sleep 10

log "Verify we have received the data in jira-topic-project_categories topic"
timeout 60 docker exec connect kafka-avro-console-consumer -bootstrap-server broker:9092 --property schema.registry.url=http://schema-registry:8081 --topic jira-topic-project_categories --from-beginning --property print.key=true --max-messages 1
define([
    "skylark-langx-ns",
    '../libs/process',
    '../libs/buffers',
    './node_fs',
    '../libs/path',
    '../generic/emscripten_fs',
    './backends',
    './util',
    './api_error',
    '../generic/setImmediate'
], function (skylark, process, buffers, fs, path, EmscriptenFS, Backends, BFSUtils, Errors, setImmediate) {
    'use strict';

    const { Buffer } = buffers;

    /**
     * BrowserFS's main module. This is exposed in the browser via the BrowserFS global.
     * Due to limitations in typedoc, we document these functions in ./typedoc.ts.
     */
    if (process['initializeTTYs']) {
        process['initializeTTYs']();
    }

    /**
     * Installs BFSRequire as global `require`, a Node Buffer polyfill as the global `Buffer` variable,
     * and a Node process polyfill as the global `process` variable.
     */
    function install(obj) {
        obj.Buffer = Buffer;
        obj.process = process;
        const oldRequire = obj.require ? obj.require : null;
        // Monkey-patch require for Node-style code; fall back to the original
        // require for modules BFSRequire does not know.
        obj.require = function (arg) {
            const rv = BFSRequire(arg);
            if (!rv) {
                return oldRequire.apply(null, Array.prototype.slice.call(arguments, 0));
            } else {
                return rv;
            }
        };
    }

    /**
     * @hidden
     */
    function registerFileSystem(name, fs) {
        Backends[name] = fs;
    }

    /**
     * Node-style module resolver for the polyfilled modules; unknown names
     * fall through to the registered backends.
     */
    function BFSRequire(module) {
        switch (module) {
            case 'fs':
                return fs;
            case 'path':
                return path;
            case 'buffer':
                // BUG FIX: previously returned the undefined identifier `buffer`,
                // which threw a ReferenceError. The 'buffer' module is the
                // imported `buffers` polyfill (it has 'Buffer' as a property).
                return buffers;
            case 'process':
                return process;
            case 'bfs_utils':
                return BFSUtils;
            default:
                return Backends[module];
        }
    }

    /**
     * Initializes BrowserFS with the given root file system.
     */
    function initialize(rootfs) {
        return fs.initialize(rootfs);
    }

    /**
     * Creates a file system with the given configuration, and initializes BrowserFS with it.
     * See the FileSystemConfiguration type for more info on the configuration object.
     */
    function configure(config, cb) {
        getFileSystem(config, (e, fs) => {
            if (fs) {
                initialize(fs);
                cb();
            } else {
                cb(e);
            }
        });
    }

    /**
     * Retrieve a file system with the given configuration.
     * @param config A FileSystemConfiguration object. See FileSystemConfiguration for details.
     * @param cb Called when the file system is constructed, or when an error occurs.
     */
    function getFileSystem(config, cb) {
        const fsName = config['fs'];
        if (!fsName) {
            return cb(new Errors.ApiError(Errors.ErrorCode.EPERM, 'Missing "fs" property on configuration object.'));
        }
        const options = config['options'];
        let waitCount = 0;
        let called = false;
        // Invoked once every nested file system has been constructed.
        function finish() {
            if (!called) {
                called = true;
                const fsc = Backends[fsName];
                if (!fsc) {
                    cb(new Errors.ApiError(Errors.ErrorCode.EPERM, `File system ${fsName} is not available in BrowserFS.`));
                } else {
                    fsc.Create(options, cb);
                }
            }
        }
        if (options !== null && typeof (options) === "object") {
            let finishedIterating = false;
            const props = Object.keys(options).filter((k) => k !== 'fs');
            // Check recursively if other fields have 'fs' properties.
            props.forEach((p) => {
                const d = options[p];
                if (d !== null && typeof (d) === "object" && d['fs']) {
                    waitCount++;
                    getFileSystem(d, function (e, fs) {
                        waitCount--;
                        if (e) {
                            if (called) {
                                return;
                            }
                            called = true;
                            cb(e);
                        } else {
                            // Replace the nested config with the constructed FS.
                            options[p] = fs;
                            if (waitCount === 0 && finishedIterating) {
                                finish();
                            }
                        }
                    });
                }
            });
            finishedIterating = true;
        }
        if (waitCount === 0) {
            finish();
        }
    }

    return skylark.attach("intg.BrowserFS", {
        install: install,
        registerFileSystem: registerFileSystem,
        BFSRequire: BFSRequire,
        initialize: initialize,
        configure: configure,
        getFileSystem: getFileSystem,
        EmscriptenFS,
        "FileSystem": Backends,
        Errors,
        setImmediate
    });
});
<filename>src/distinct.ts import Sequence, {createSequence} from "./Sequence"; class DistinctIterator<T> implements Iterator<T> { private set: Set<T> = new Set(); constructor(private readonly iterator: Iterator<T>) { } next(value?: any): IteratorResult<T> { for (let item = this.iterator.next(); !item.done; item = this.iterator.next()) { const sizeBeforeAdd = this.set.size; this.set.add(item.value); if (this.set.size > sizeBeforeAdd) { return {done: false, value: item.value}; } } this.set.clear(); return {done: true, value: undefined as any}; } } export class Distinct { /** * Returns a new sequence which discards all duplicate elements. * * @returns {Sequence<T>} */ distinct<T>(this: Sequence<T>): Sequence<T> { return createSequence(new DistinctIterator(this.iterator)); } }
#!/usr/bin/env bash

# CannabisKash Multi-installer
# a one line clone-and-compile for cannabiskashgold:
#
# ` $ curl -sL "https://raw.githubusercontent.com/chronickash/cannabiskash/master/scripts/multi_installer.sh" | bash
#
# Supports Ubuntu 16.04 LTS, OSX 10.10+
# Supports building project from current directory (automatic detection)

set -o errexit
set -o pipefail

# Print $2 wrapped in the ANSI color named by $1 (red/green/purple),
# or print $1 verbatim when it is not a known color name.
_colorize() {
    local code="\033["
    case "$1" in
        red | r) color="${code}1;31m";;
        green | g) color="${code}1;32m";;
        purple | p) color="${code}1;35m";;
        *) local text="$1"
    esac
    [ -z "$text" ] && local text="$color$2${code}0m"
    printf "$text"
}

_note() {
    local msg=`echo $1`
    _colorize purple "$msg" && echo
}

# Log the failure, point the user at build.log, and abort the script.
_fail() {
    local msg=`echo \'$1\'`
    _colorize red "Failure: $msg" | tee -a build.log && echo
    _colorize red "Please check build.log and if you need help check out the team discord @ 'https://discordapp.com/invite/NZ7QYJA'" && echo
    _colorize purple "Exiting script" && echo
    exit 1
}

# Decide where to build: the current directory if it already looks like a
# checkout, otherwise a fresh clone (optionally overwriting an old one).
_set_wd() {
    if [ -d "$PWD/.git" ] && [ -f "$PWD/Makefile" ]; then
        _note "Building project from current working directory ($PWD)"
    else
        _note "Cloning project with git..."
        if [ -d "$PWD"/chronickash ]; then
            read -r -p "${1:-cannabiskash directory already exists. Overwrite? [y/N]} " response
            case "$response" in
                # BUG FIX: the original pattern '[yY][eE][sS|[yY])' was one
                # malformed bracket expression, so answering plain "y" aborted.
                [yY][eE][sS]|[yY])
                    _colorize red "Overwriting old cannabiskash directory" && echo
                    rm -rf "$PWD"/chronickash
                    ;;
                *)
                    _fail "cannabiskashgold directory already exists. Aborting..."
                    ;;
            esac
        fi
        # BUG FIX: dropped the stray 'mkdir cannabiskashgold' (git clone creates
        # its own target directory) and corrected 'cd ChronicKash' to the actual
        # clone target 'chronickash', which matters on case-sensitive filesystems.
        git clone -b master -q https://github.com/chronickash/chronickash chronickash >>build.log 2>&1 || _fail "Unable to clone git repository. Please see build.log for more information"
        cd chronickash
    fi
}

# Configure and compile in ./build using all available CPU cores.
_build_chronickash() {
    _note "Building cannabiskashgold from source (this might take a while)..."
    if [ -d build ]; then
        _colorize red "Overwriting old build directory" && echo
        rm -rf build
    fi
    local _threads=`python -c 'import multiprocessing as mp; print(mp.cpu_count())'`
    echo "Using ${_threads} threads"
    mkdir build && cd $_
    # NOTE(review): '--silent' is not a documented CMake flag — verify the
    # required CMake version accepts it, or this step will always _fail.
    cmake --silent -DDO_TESTS=OFF .. >>build.log 2>&1 || _fail "Unable to run cmake. Please see build.log for more information"
    make --silent -j"$_threads" >>build.log 2>&1 || _fail "Unable to run make. Please see build.log for more information"
    _note "Compilation completed!"
}

# Install build dependencies on Ubuntu 16.04+.
_configure_ubuntu() {
    [ "`lsb_release -r -s | cut -d'.' -f1 `" -ge 16 ] || _fail "Your Ubuntu version `lsb_release -r -s` is below the requirements to run this installer. Please consider upgrading to a later release"
    _note "The installer will now update your package manager and install required packages (this might take a while)..."
    _sudo=""
    if (( $EUID != 0 )); then
        _sudo="sudo"
        _note "Sudo privileges required for package installation"
    fi
    $_sudo apt-get update -qq
    $_sudo apt-get install -qq -y git build-essential python-dev gcc g++ git cmake libboost-all-dev >>build.log 2>&1 || _fail "Unable to install build dependencies. Please see build.log for more information"
    export CXXFLAGS="-std=gnu++11"
}

# Install build dependencies on Debian 9+.
_configure_debian() {
    [ "`lsb_release -r -s | cut -d'.' -f1 `" -ge 9 ] || _fail "Your Debian GNU/Linux version `lsb_release -r -s` is below the requirements to run this installer. Please consider upgrading to a later release"
    _note "The installer will now update your package manager and install required packages (this might take a while)..."
    _sudo=""
    if (( $EUID != 0 )); then
        _sudo="sudo"
        _note "Sudo privileges required for package installation"
    fi
    $_sudo apt-get update -qq
    $_sudo apt-get install -qq -y git build-essential python-dev gcc g++ git cmake libboost-all-dev librocksdb-dev >>build.log 2>&1 || _fail "Unable to install build dependencies. Please see build.log for more information"
    export CXXFLAGS="-std=gnu++11"
}

# Dispatch to the right Linux distro configurator based on /etc/os-release.
_configure_linux() {
    if [ "$(awk -F= '/^NAME/{print $2}' /etc/os-release)" = "\"Ubuntu\"" ]; then
        _configure_ubuntu
    elif [ "$(awk -F= '/^NAME/{print $2}' /etc/os-release)" = "\"Debian GNU/Linux\"" ]; then
        _configure_debian
    else
        _fail "Your OS version isn't supported by this installer. Please consider adding support for your OS to the project ('https://github.com/turtlecoin')"
    fi
}

# Install Xcode/homebrew (if missing) and build dependencies on OS X 10.10+.
_configure_osx() {
    [ ! "`echo ${OSTYPE:6} | cut -d'.' -f1`" -ge 14 ] && _fail "Your OS X version ${OSTYPE:6} is below the requirements to run this installer. Please consider upgrading to the latest release";
    if [[ $(command -v brew) == "" ]]; then
        _note "Homebrew package manager was not found using \`command -v brew\`"
        _note "Installing Xcode if missing, setup will resume after completion..."
        xcode-select --install
        _note "Running the installer for homebrew, setup will resume after completion..."
        /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
    fi
    _note "Updating homebrew and installing software dependencies..."
    brew update --quiet
    brew install --quiet git cmake boost rocksdb
}

# Top-level OS dispatch.
_configure_os() {
    _note "Configuring your operating system and installing required software..."
    _unameOut="$(uname -s)"
    case "${_unameOut}" in
        Linux*)
            _configure_linux
            ;;
        Darwin*)
            _configure_osx
            ;;
        *)
            _fail "This installer only runs on OSX 10.10+ and Ubuntu 16.04+. Please consider adding support for your OS to the project ('https://github.com/turtlecoin')"
            ;;
    esac
    _note "Operating system configuration completed. You're halfway there!"
}

_note "CannabiskashGold Multi_Installer v1.0 (pepperoni)"
_colorize green " _______ _ _ _____ _ \n|__ __| | | | | / ____| (_) \n | |_ _ _ __| |_| | ___| | ___ _ _ __ \n | | | | | '__| __| |/ _ \ | / _ \| | '_ \ \n | | |_| | | | |_| | __/ |___| (_) | | | | |\n |_|\__,_|_| \__|_|\___|\_____\___/|_|_| |_|\n" && echo
_configure_os
_set_wd
_build_chronickash
_note "Installation complete!"
_note "Look in 'cannabiskash/build/src/' for the executible binaries. See 'https://github.com/ChronicKash/CannabisKashGold' for more project support. Cowabunga!"
<reponame>MaartenBaert/ssr-packages /* Copyright (c) 2012-2017 <NAME> <<EMAIL>> This file is part of SimpleScreenRecorder. SimpleScreenRecorder is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. SimpleScreenRecorder is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with SimpleScreenRecorder. If not, see <http://www.gnu.org/licenses/>. */ #include "FastScaler_Convert.h" /* Color space standards are a mess. ==== BT.601 ==== Y = 16 + round(( 66 * R + 129 * G + 25 * B) / 256) U = 128 + round(( -38 * R + -74 * G + 112 * B) / 256) V = 128 + round(( 112 * R + -94 * G + -18 * B) / 256) ==== BT.709 ==== Y = 16 + round(( 47 * R + 157 * G + 16 * B) / 256) U = 128 + round(( -26 * R + -86 * G + 112 * B) / 256) V = 128 + round(( 112 * R + -102 * G + -10 * B) / 256) The converters below are currently hard-coded for BT.709. */ /* ==== Fallback BGRA-to-YUV444/YUV422/YUV420/NV12 Converter ==== Nothing special, just plain C code. 
- YUV444: one-to-one mapping - YUV422: takes blocks of 2x1 pixels, produces 2x1 Y and 1x1 U/V values - YUV420: takes blocks of 2x2 pixels, produces 2x2 Y and 1x1 U/V values - NV12: like YUV420, but U/V are in the same plane */ void Convert_BGRA_YUV444_Fallback(unsigned int w, unsigned int h, const uint8_t* in_data, int in_stride, uint8_t* const out_data[3], const int out_stride[3]) { const int offset_y = 128 + (16 << 8), offset_uv = 128 + (128 << 8); for(unsigned int j = 0; j < h; ++j) { const uint32_t *rgb = (const uint32_t*) (in_data + in_stride * (int) j); uint8_t *yuv_y = out_data[0] + out_stride[0] * (int) j; uint8_t *yuv_u = out_data[1] + out_stride[1] * (int) j; uint8_t *yuv_v = out_data[2] + out_stride[2] * (int) j; for(unsigned int i = 0; i < w; ++i) { uint32_t c = *(rgb++); int r = (int) ((c >> 16) & 0xff); int g = (int) ((c >> 8) & 0xff); int b = (int) ((c ) & 0xff); *(yuv_y++) = ( 47 * r + 157 * g + 16 * b + offset_y) >> 8; *(yuv_u++) = (-26 * r + -86 * g + 112 * b + offset_uv) >> 8; *(yuv_v++) = (112 * r + -102 * g + -10 * b + offset_uv) >> 8; } } } void Convert_BGRA_YUV422_Fallback(unsigned int w, unsigned int h, const uint8_t* in_data, int in_stride, uint8_t* const out_data[3], const int out_stride[3]) { assert(w % 2 == 0); const int offset_y = 128 + (16 << 8), offset_uv = (128 + (128 << 8)) << 1; for(unsigned int j = 0; j < h; ++j) { const uint32_t *rgb = (const uint32_t*) (in_data + in_stride * (int) j); uint8_t *yuv_y = out_data[0] + out_stride[0] * (int) j; uint8_t *yuv_u = out_data[1] + out_stride[1] * (int) j; uint8_t *yuv_v = out_data[2] + out_stride[2] * (int) j; for(unsigned int i = 0; i < w / 2; ++i) { uint32_t c1 = rgb[0], c2 = rgb[1]; rgb += 2; int r1 = (int) ((c1 >> 16) & 0xff), r2 = (int) ((c2 >> 16) & 0xff); int g1 = (int) ((c1 >> 8) & 0xff), g2 = (int) ((c2 >> 8) & 0xff); int b1 = (int) ((c1 ) & 0xff), b2 = (int) ((c2 ) & 0xff); yuv_y[0] = (47 * r1 + 157 * g1 + 16 * b1 + offset_y) >> 8; yuv_y[1] = (47 * r2 + 157 * g2 + 16 * b2 + 
offset_y) >> 8; yuv_y += 2; int sr = r1 + r2; int sg = g1 + g2; int sb = b1 + b2; *(yuv_u++) = (-26 * sr + -86 * sg + 112 * sb + offset_uv) >> 9; *(yuv_v++) = (112 * sr + -102 * sg + -10 * sb + offset_uv) >> 9; } } } void Convert_BGRA_YUV420_Fallback(unsigned int w, unsigned int h, const uint8_t* in_data, int in_stride, uint8_t* const out_data[3], const int out_stride[3]) { assert(w % 2 == 0 && h % 2 == 0); const int offset_y = 128 + (16 << 8), offset_uv = (128 + (128 << 8)) << 2; for(unsigned int j = 0; j < h / 2; ++j) { const uint32_t *rgb1 = (const uint32_t*) (in_data + in_stride * (int) j * 2); const uint32_t *rgb2 = (const uint32_t*) (in_data + in_stride * ((int) j * 2 + 1)); uint8_t *yuv_y1 = out_data[0] + out_stride[0] * (int) j * 2; uint8_t *yuv_y2 = out_data[0] + out_stride[0] * ((int) j * 2 + 1); uint8_t *yuv_u = out_data[1] + out_stride[1] * (int) j; uint8_t *yuv_v = out_data[2] + out_stride[2] * (int) j; for(unsigned int i = 0; i < w / 2; ++i) { uint32_t c1 = rgb1[0], c2 = rgb1[1], c3 = rgb2[0], c4 = rgb2[1]; rgb1 += 2; rgb2 += 2; int r1 = (int) ((c1 >> 16) & 0xff), r2 = (int) ((c2 >> 16) & 0xff), r3 = (int) ((c3 >> 16) & 0xff), r4 = (int) ((c4 >> 16) & 0xff); int g1 = (int) ((c1 >> 8) & 0xff), g2 = (int) ((c2 >> 8) & 0xff), g3 = (int) ((c3 >> 8) & 0xff), g4 = (int) ((c4 >> 8) & 0xff); int b1 = (int) ((c1 ) & 0xff), b2 = (int) ((c2 ) & 0xff), b3 = (int) ((c3 ) & 0xff), b4 = (int) ((c4 ) & 0xff); yuv_y1[0] = (47 * r1 + 157 * g1 + 16 * b1 + offset_y) >> 8; yuv_y1[1] = (47 * r2 + 157 * g2 + 16 * b2 + offset_y) >> 8; yuv_y2[0] = (47 * r3 + 157 * g3 + 16 * b3 + offset_y) >> 8; yuv_y2[1] = (47 * r4 + 157 * g4 + 16 * b4 + offset_y) >> 8; yuv_y1 += 2; yuv_y2 += 2; int sr = r1 + r2 + r3 + r4; int sg = g1 + g2 + g3 + g4; int sb = b1 + b2 + b3 + b4; *(yuv_u++) = (-26 * sr + -86 * sg + 112 * sb + offset_uv) >> 10; *(yuv_v++) = (112 * sr + -102 * sg + -10 * sb + offset_uv) >> 10; } } } void Convert_BGRA_NV12_Fallback(unsigned int w, unsigned int h, const uint8_t* 
in_data, int in_stride, uint8_t* const out_data[2], const int out_stride[2]) { assert(w % 2 == 0 && h % 2 == 0); const int offset_y = 128 + (16 << 8), offset_uv = (128 + (128 << 8)) << 2; for(unsigned int j = 0; j < h / 2; ++j) { const uint32_t *rgb1 = (const uint32_t*) (in_data + in_stride * (int) j * 2); const uint32_t *rgb2 = (const uint32_t*) (in_data + in_stride * ((int) j * 2 + 1)); uint8_t *yuv_y1 = out_data[0] + out_stride[0] * (int) j * 2; uint8_t *yuv_y2 = out_data[0] + out_stride[0] * ((int) j * 2 + 1); uint8_t *yuv_uv = out_data[1] + out_stride[1] * (int) j; for(unsigned int i = 0; i < w / 2; ++i) { uint32_t c1 = rgb1[0], c2 = rgb1[1], c3 = rgb2[0], c4 = rgb2[1]; rgb1 += 2; rgb2 += 2; int r1 = (int) ((c1 >> 16) & 0xff), r2 = (int) ((c2 >> 16) & 0xff), r3 = (int) ((c3 >> 16) & 0xff), r4 = (int) ((c4 >> 16) & 0xff); int g1 = (int) ((c1 >> 8) & 0xff), g2 = (int) ((c2 >> 8) & 0xff), g3 = (int) ((c3 >> 8) & 0xff), g4 = (int) ((c4 >> 8) & 0xff); int b1 = (int) ((c1 ) & 0xff), b2 = (int) ((c2 ) & 0xff), b3 = (int) ((c3 ) & 0xff), b4 = (int) ((c4 ) & 0xff); yuv_y1[0] = (47 * r1 + 157 * g1 + 16 * b1 + offset_y) >> 8; yuv_y1[1] = (47 * r2 + 157 * g2 + 16 * b2 + offset_y) >> 8; yuv_y2[0] = (47 * r3 + 157 * g3 + 16 * b3 + offset_y) >> 8; yuv_y2[1] = (47 * r4 + 157 * g4 + 16 * b4 + offset_y) >> 8; yuv_y1 += 2; yuv_y2 += 2; int sr = r1 + r2 + r3 + r4; int sg = g1 + g2 + g3 + g4; int sb = b1 + b2 + b3 + b4; yuv_uv[0] = (-26 * sr + -86 * sg + 112 * sb + offset_uv) >> 10; yuv_uv[1] = (112 * sr + -102 * sg + -10 * sb + offset_uv) >> 10; yuv_uv += 2; } } } /* ==== Fallback BGRA-to-BGR Converter ==== Nothing special, just plain C code. 
- BGR: converts blocks of 8x1 pixels */ void Convert_BGRA_BGR_Fallback(unsigned int w, unsigned int h, const uint8_t* in_data, int in_stride, uint8_t* out_data, int out_stride) { for(unsigned int j = 0; j < h; ++j) { const uint8_t *in = in_data + in_stride * (int) j; uint8_t *out = out_data + out_stride * (int) j; for(unsigned int i = 0; i < w / 8; ++i) { uint64_t c0 = ((uint64_t*) in)[0]; uint64_t c1 = ((uint64_t*) in)[1]; uint64_t c2 = ((uint64_t*) in)[2]; uint64_t c3 = ((uint64_t*) in)[3]; in += 32; ((uint64_t*) out)[0] = ((c0 & UINT64_C(0x0000000000ffffff)) ) | ((c0 & UINT64_C(0x00ffffff00000000)) >> 8) | ((c1 & UINT64_C(0x000000000000ffff)) << 48); ((uint64_t*) out)[1] = ((c1 & UINT64_C(0x0000000000ff0000)) >> 16) | ((c1 & UINT64_C(0x00ffffff00000000)) >> 24) | ((c2 & UINT64_C(0x0000000000ffffff)) << 32) | ((c2 & UINT64_C(0x000000ff00000000)) << 24); ((uint64_t*) out)[2] = ((c2 & UINT64_C(0x00ffff0000000000)) >> 40) | ((c3 & UINT64_C(0x0000000000ffffff)) << 16) | ((c3 & UINT64_C(0x00ffffff00000000)) << 8); out += 24; } for(unsigned int i = 0; i < (w & 7); ++i) { uint32_t c = *((uint32_t*) in); in += 4; out[0] = c; out[1] = c >> 8; out[2] = c >> 16; out += 3; } } }
import { ICellRendererComp } from '../rendering/cellRenderers/iCellRenderer';
import { AgPromise } from './promise';
import { loadTemplate } from './dom';
import { camelCaseToHyphen } from './string';
import { iterateObject } from './object';

/**
 * Extracts a class (constructor function) name by parsing its source text.
 * Returns an empty string when the name cannot be determined.
 * @deprecated
 */
export function getNameOfClass(theClass: any) {
    const funcNameRegex = /function (.{1,})\(/;
    const funcAsString = theClass.toString();
    const results = funcNameRegex.exec(funcAsString);

    return results && results.length > 1 ? results[1] : "";
}

/**
 * Fits a straight line y = m*x + b to `values` (x = index) via least squares
 * and returns the fitted points, rounded to the largest number of decimal
 * places seen in the input. Inputs of length 0 or 1 are returned unchanged.
 * NOTE: the result intentionally has `values.length + 1` points - it includes
 * one extrapolated endpoint past the input range. Confirm with callers before
 * changing that.
 */
export function findLineByLeastSquares(values: number[]) {
    const len = values.length;
    let maxDecimals = 0;

    if (len <= 1) {
        return values;
    }

    for (let i = 0; i < values.length; i++) {
        const value = values[i];

        if (Math.floor(value) === value) {
            // no decimal places to count for whole numbers
            continue;
        }

        // FIX: numbers rendered in exponent notation (e.g. 1e-7, 1.2e+21)
        // contain no '.', so split('.')[1] is undefined; reading `.length`
        // on it used to throw a TypeError. Skip those values instead.
        const fractionPart = value.toString().split('.')[1];

        if (fractionPart !== undefined) {
            maxDecimals = Math.max(maxDecimals, fractionPart.length);
        }
    }

    let sum_x = 0;
    let sum_y = 0;
    let sum_xy = 0;
    let sum_xx = 0;
    let y = 0;

    for (let x = 0; x < len; x++) {
        y = values[x];
        sum_x += x;
        sum_y += y;
        sum_xx += x * x;
        sum_xy += x * y;
    }

    // standard least-squares slope/intercept over x = 0..len-1
    const m = (len * sum_xy - sum_x * sum_y) / (len * sum_xx - sum_x * sum_x);
    const b = (sum_y / len) - (m * sum_x) / len;

    const result = [];

    // `x <= len` keeps the historical extra extrapolated point (see doc above)
    for (let x = 0; x <= len; x++) {
        result.push(parseFloat((x * m + b).toFixed(maxDecimals)));
    }

    return result;
}

/**
 * Converts a CSS object into string
 * @param {Object} stylesToUse an object eg: {color: 'black', top: '25px'}
 * @return {string} A string like "color: black; top: 25px;" for html
 */
export function cssStyleObjectToMarkup(stylesToUse: any): string {
    if (!stylesToUse) { return ''; }

    const resParts: string[] = [];

    iterateObject(stylesToUse, (styleKey: string, styleValue: string) => {
        const styleKeyDashed = camelCaseToHyphen(styleKey);
        resParts.push(`${styleKeyDashed}: ${styleValue};`);
    });

    return resParts.join(' ');
}

/**
 * Displays a message to the browser. this is useful in iPad, where you can't easily see the console.
 * so the javascript code can use this to give feedback. this is NOT intended to be called in production.
 * it is intended the ag-Grid developer calls this to troubleshoot, but then takes out the calls before
 * checking in.
 * NOTE: `msg` is assigned via innerHTML, so callers may pass markup; never
 * feed untrusted input through this (dev-only tool).
 * @param {string} msg
 */
export function message(msg: string): void {
    const eMessage = document.createElement('div');
    let eBox = document.querySelector('#__ag__message');

    eMessage.innerHTML = msg;

    if (!eBox) {
        const template = `<div id="__ag__message" style="display: inline-block; position: absolute; top: 0px; left: 0px; color: white; background-color: black; z-index: 20; padding: 2px; border: 1px solid darkred; height: 200px; overflow-y: auto;"></div>`;
        eBox = loadTemplate(template);

        if (document.body) {
            document.body.appendChild(eBox);
        }
    }

    // newest message goes on top
    eBox.insertBefore(eMessage, eBox.children[0]);
}

/**
 * cell renderers are used in a few places. they bind to dom slightly differently to other cell renderers as they
 * can return back strings (instead of html element) in the getGui() method. common code placed here to handle that.
 * @param {AgPromise<ICellRendererComp>} cellRendererPromise
 * @param {HTMLElement} eTarget
 */
export function bindCellRendererToHtmlElement(cellRendererPromise: AgPromise<ICellRendererComp>, eTarget: HTMLElement) {
    cellRendererPromise.then(cellRenderer => {
        const gui: HTMLElement | string = cellRenderer.getGui();

        if (gui != null) {
            if (typeof gui === 'object') {
                eTarget.appendChild(gui);
            } else {
                // string renderers are injected as markup
                eTarget.innerHTML = gui;
            }
        }
    });
}
dotnet new mauilib -o ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat -n Xamarin.CommunityToolkit.MauiCompat dotnet new mauilib -o ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat -n Xamarin.CommunityToolkit.Markup.MauiCompat dotnet new sln -o ./src/CommunityToolkit/ -n Xamarin.CommunityToolkit.MauiCompat dotnet sln ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat.sln add ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Xamarin.CommunityToolkit.MauiCompat.csproj dotnet new sln -o ./src/Markup/ -n Xamarin.CommunityToolkit.Markup.MauiCompat dotnet sln ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat.sln add ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/Xamarin.CommunityToolkit.Markup.MauiCompat.csproj sed -i '' 's/<ImplicitUsings>enable/<ImplicitUsings>false/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/Xamarin.CommunityToolkit.MauiCompat.csproj sed -i '' 's/<ImplicitUsings>enable/<ImplicitUsings>false/g' ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/**/Xamarin.CommunityToolkit.Markup.MauiCompat.csproj printf > ./src/CommunityToolkit/Directory.build.props "<Project> <PropertyGroup> <Nullable>enable</Nullable> <PackageId>Xamarin.CommunityToolkit.MauiCompat</PackageId> <Summary>A .NET MAUI Comapatible version of Xamarin.CommunityToolkit, a community-created toolkit with common Xamarin converters, effects, behaviors etc.</Summary> <PackageTag>maui,net,xamarin,ios,android,uwp,xamarin.forms,effects,controls,converters,animations,toolkit,kit,communitytoolkit,xamarincommunitytoolkit,watchos,tvos,tizen,Microsoft.Toolkit.Xamarin.Forms</PackageTag> <Title>Xamarin.CommunityToolkit.MauiCompat</Title> <Description>Xamarin.CommunityToolkit.MauiCompat is a collection of Animations, Behaviors, Converters, and Effects for mobile development with .NET MAUI. 
It is the .NET MAUI Compatible version of Xamarin.CommunityToolkit.</Description> <PackageIcon>icon.png</PackageIcon> <PackageVersion>\$(Version)\$(VersionSuffix)</PackageVersion> <Authors>Microsoft</Authors> <Owners>microsoft</Owners> <NeutralLanguage>en</NeutralLanguage> <Copyright>© Microsoft Corporation. All rights reserved.</Copyright> <RepositoryUrl>https://github.com/xamarin/XamarinCommunityToolkit</RepositoryUrl> <PackageReleaseNotes>See: http://aka.ms/xct-release-notes</PackageReleaseNotes> <DefineConstants>\$(DefineConstants);</DefineConstants> <UseFullSemVerForNuGet>false</UseFullSemVerForNuGet> <PackageLicenseExpression>MIT</PackageLicenseExpression> <PackageRequireLicenseAcceptance>true</PackageRequireLicenseAcceptance> <PackageProjectUrl>https://github.com/xamarin/XamarinCommunityToolkit</PackageProjectUrl> <EnableDefaultCompileItems>false</EnableDefaultCompileItems> <Version>1.3.0-pre4</Version> </PropertyGroup> <ItemGroup> <Compile Include=\"**/*.shared.cs\" /> <Compile Include=\"**/*.shared.*.cs\" /> <None Include=\"../../../LICENSE\" PackagePath=\"\" Pack=\"true\" /> <None Include=\"../../../assets/XamarinCommunityToolkit_128x128.png\" PackagePath=\"icon.png\" Pack=\"true\" /> </ItemGroup> <ItemGroup Condition=\" \$(TargetFramework.Contains(-android)) \"> <Compile Include=\"**\*.android.cs\" /> <Compile Include=\"**\*.android.*.cs\" /> <AndroidResource Include=\"Resources\**\*.axml\" /> <AndroidResource Include=\"Resources\**\*.xml\" /> <AndroidResource Include=\"Resources\**\*.png\" /> </ItemGroup> <ItemGroup Condition=\" \$(TargetFramework.Contains(-ios)) \"> <Compile Include=\"**\*.ios.cs\" /> <Compile Include=\"**\*.ios.*.cs\" /> </ItemGroup> <ItemGroup Condition=\" \$(TargetFramework.Contains('-windows')) \"> <Compile Include=\"**\*.uwp.cs\" /> <Compile Include=\"**\*.uwp.*.cs\" /> </ItemGroup> <ItemGroup Condition=\" \$(TargetFramework.Contains('-maccatalyst')) \"> <Compile Include=\"**\*.ios.cs\" /> <Compile Include=\"**\*.ios.*.cs\" /> 
</ItemGroup> <ItemGroup Condition=\" !\$(TargetFramework.Contains('-')) \"> <Compile Include=\"**\*.netstandard.cs\" /> <Compile Include=\"**\*.netstandard.*.cs\" /> </ItemGroup> <PropertyGroup Condition=\" !\$(TargetFramework.Contains('-')) \"> <DefineConstants>\$(DefineConstants);NETSTANDARD</DefineConstants> </PropertyGroup> </Project>" printf > ./src/Markup/Directory.build.props "<Project> <PropertyGroup> <Nullable>enable</Nullable> <PackageId>Xamarin.CommunityToolkit.Markup.MauiCompat</PackageId> <Summary>A .NET MAUI-compatible community-created toolkit with C# Markup classes and fluent helper methods</Summary> <Authors>Microsoft</Authors> <Owners>Microsoft</Owners> <NeutralLanguage>en</NeutralLanguage> <Copyright>© Microsoft Corporation. All rights reserved.</Copyright> <PackageLicenseExpression>MIT</PackageLicenseExpression> <PackageProjectUrl>https://github.com/xamarin/XamarinCommunityToolkit</PackageProjectUrl> <RepositoryUrl>https://github.com/xamarin/XamarinCommunityToolkit</RepositoryUrl> <PackageReleaseNotes>See: http://aka.ms/xct-release-notes</PackageReleaseNotes> <DefineConstants>\$(DefineConstants);</DefineConstants> <UseFullSemVerForNuGet>false</UseFullSemVerForNuGet> <Title>Xamarin.CommunityToolkit.Markup.MauiCompat</Title> <Description>Xamarin Community Toolkit Markup MauiCompat is a set of fluent helper methods and classes to simplify building declarative .NET MAUI user interfaces in C#</Description> <PackageIcon>icon.png</PackageIcon> <Product>\$(AssemblyName) (\$(TargetFramework))</Product> <PackageVersion>\$(Version)\$(VersionSuffix)</PackageVersion> <PackageRequireLicenseAcceptance>true</PackageRequireLicenseAcceptance> <Version>1.3.0-pre4</Version> <PackageTags>maui,net,xamarin,xamarin.forms,toolkit,kit,communitytoolkit,xamarincommunitytoolkit,markup,csharpformarkup,csharp,csharpmarkup</PackageTags> </PropertyGroup> <ItemGroup> <None Include=\"../../../LICENSE\" PackagePath=\"\" Pack=\"true\" /> <None 
Include=\"../../../assets/XamarinCommunityToolkit_128x128.png\" PackagePath=\"icon.png\" Pack=\"true\" /> </ItemGroup> </Project>" find ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/ -name "*" ! -name "*.csproj" -delete find ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/ -name "*" ! -name "*.csproj" -delete rsync -avr --exclude='*.csproj' --exclude='bin' --exclude='obj' ./src/CommunityToolkit/Xamarin.CommunityToolkit/ ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/ rsync -avr --exclude='*.csproj' --exclude='bin' --exclude='obj' ./src/Markup/Xamarin.CommunityToolkit.Markup/ ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/ # Preserve sed -i '' 's/\[Preserve(/\[Microsoft.Maui.Controls.Internals.Preserve(/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Internals sed -i '' 's/using Xamarin.Forms.Internals/using Microsoft.Maui.Controls.Internals/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' '/Forms.Internals.Log/d' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # WeakEventManager sed -i '' 's/ Forms.WeakEventManager/ Microsoft.Maui.Controls.WeakEventManager/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Forms.Image sed -i '' 's/Xamarin.Forms.Image/Microsoft.Maui.Controls.Image/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Forms.Image/Microsoft.Maui.Controls.Image/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Colors sed -i '' 's/ Forms\.Color\.Default/ default(Microsoft.Maui.Graphics.Color)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/Snackbar/**/**.cs sed -i '' 's/ Color\.Default\./ new Microsoft.Maui.Graphics.Color()./g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/Snackbar/**/SnackBarAppearance*.cs sed -i '' 's/ Color\.Default/ default(Microsoft.Maui.Graphics.Color)/g' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/Snackbar/**/**.cs sed -i '' 's/ == Forms.Color.Default/ .Equals(new Microsoft.Maui.Graphics.Color())/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ == XColor.Default/ .Equals(new Microsoft.Maui.Graphics.Color())/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Color = Xamarin.Forms.Color;/using Color = Microsoft.Maui.Graphics.Color;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ Color\./ Colors./g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\tColor\./\tColors./g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Xamarin.Forms.Color/Microsoft.Maui.Graphics.Color/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ Forms\.Color/ Microsoft.Maui.Graphics.Color/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\tForms\.Color/\tMicrosoft.Maui.Graphics.Color/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Colors\.From/Color\.From/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Colors.Default/new Microsoft.Maui.Graphics.Color()/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ Color.FromRgba/ new Microsoft.Maui.Graphics.Color/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\tColor.FromRgba/\tnew Microsoft.Maui.Graphics.Color/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.R,/.Red,/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.G,/.Green,/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.B,/.Blue,/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.A,/.Alpha,/g' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.R /.Red /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.G /.Green /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.B /.Blue /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.A /.Alpha /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.R)/.Red)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.G)/.Green)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.B)/.Blue)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.A)/.Alpha)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.A:/.Alpha:/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/r.IsDefault)/r.IsDefault())/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/.MultiplyAlpha(/.MultiplyAlpha((float)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.Hue/.GetHue()/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.Saturation/.GetSaturation()/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.Luminosity/.GetLuminosity()/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Nullability sed -i '' 's/event EventHandler<VisualElementChangedEventArgs>? ElementChanged/event EventHandler<VisualElementChangedEventArgs> ElementChanged/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\.PropertyName\./.PropertyName?./g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/(object sender, PropertyChangedEventArgs e)/(object? 
sender, PropertyChangedEventArgs e)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # PlatformEffect sed -i '' 's/: Xamarin.Forms.Platform.iOS.PlatformEffect/: Microsoft.Maui.Controls.Platform.PlatformEffect/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/: PlatformEffect/: Microsoft.Maui.Controls.Platform.PlatformEffect/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Platforms sed -i '' 's/if MONOANDROID10_0/if ANDROID/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/if MONOANDROID/if ANDROID/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/if !MONOANDROID/if ANDROID/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/if __ANDROID_29__/if ANDROID/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Xamarin.Forms.Platform.Android.FastRenderers;/using Microsoft.Maui.Controls.Compatibility.Platform.Android.FastRenderers;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Xamarin.Forms.Platform.Android;/using Microsoft.Maui.Controls.Compatibility.Platform.Android; using Microsoft.Maui.Controls.Platform;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Xamarin.Forms.Platform.iOS/using Microsoft.Maui.Controls.Compatibility.Platform.iOS/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Xamarin.Forms.Platform.GTK/using Microsoft.Maui.Controls.Compatibility.Platform.GTK/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Xamarin.Forms.Platform.Tizen/using Microsoft.Maui.Controls.Compatibility.Platform.Tizen/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Xamarin.Forms.Platform.UWP/using Microsoft.Maui.Controls.Compatibility.Platform.UWP/g' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Xamarin.Forms.Platform.MacOS/using Microsoft.Maui.Controls.Compatibility.Platform.MacOS/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Xamarin.Forms.PlatformConfiguration/Microsoft.Maui.Controls.PlatformConfiguration/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Xamarin.Forms.Platform/Microsoft.Maui.Controls.Compatibility.Platform/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # IVisualElementRenderer sed -i '' '/IVisualElementRenderer.ViewGroup/d' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/event EventHandler<VisualElementChangedEventArgs>/event EventHandler<Microsoft.Maui.Controls.Platform.VisualElementChangedEventArgs>/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/new VisualElementChangedEventArgs/new Microsoft.Maui.Controls.Platform.VisualElementChangedEventArgs/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/new ElementChangedEventArgs/new Microsoft.Maui.Controls.Platform.ElementChangedEventArgs/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/(ElementChangedEventArgs/(Microsoft.Maui.Controls.Platform.ElementChangedEventArgs/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # TextAlignment sed -i '' 's/Xamarin.Forms.TextAlignment/Microsoft.Maui.TextAlignment/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # ElementChangedEventArgs sed -i '' 's/override void OnElementChanged(ElementChangedEventArgs/override void OnElementChanged(Microsoft.Maui.Controls.Platform.ElementChangedEventArgs/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # XAML sed -i '' 's/using Xamarin.Forms.Xaml;/using Microsoft.Maui.Controls.Xaml;/g' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Forms.Xaml/Microsoft.Maui.Controls.Xaml/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Effects sed -i '' 's/Xamarin.Forms.ExportEffect(/Microsoft.Maui.Controls.ExportEffect(/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs ## Font sed -i '' 's/Element.Font/Element.ToFont()/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs ## Internals sed -i '' 's/Element.Font/Element.ToFont()/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs #Forms.Internals.Log # TypeConverter sed -i '' 's/Xamarin.Forms.TypeConverter/System.ComponentModel.TypeConverter/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\[TypeConverter/\[System.ComponentModel.TypeConverter/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\[TypeConversion/\[System.ComponentModel.TypeConverter/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\[Microsoft.Maui.Controls.Xaml.TypeConversion/\[System.ComponentModel.TypeConverter/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\[Forms.TypeConverter/\[System.ComponentModel.TypeConverter/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ TypeConverter/ System.ComponentModel.TypeConverter/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Xamarin.Forms.UriTypeConverter/Microsoft.Maui.Controls.UriTypeConverter/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ConvertFromInvariantString(string value)/ConvertFrom(System.ComponentModel.ITypeDescriptorContext? context, System.Globalization.CultureInfo? culture, object valueObject)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ConvertFromInvariantString(string\? 
value)/ConvertFrom(System.ComponentModel.ITypeDescriptorContext? context, System.Globalization.CultureInfo? culture, object valueObject)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/if (value != null)/if (valueObject is not string value){throw new InvalidOperationException("Only typeof(string) allowed");}if (value != null)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Helpers/SafeAreaTypeConverter.shared.cs sed -i '' 's/if (value == null)/if (valueObject is not string value){throw new InvalidOperationException("Only typeof(string) allowed");}if (value == null)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Core/MediaSourceConverter.shared.cs sed -i '' 's/return/if (valueObject is not string value){throw new InvalidOperationException("Only typeof(string) allowed");}return/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Core/FileMediaSourceConverter.shared.cs sed -i '' 's/return/if (valueObject is not string value){throw new InvalidOperationException("Only typeof(string) allowed");}return/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/MediaElement/UriTypeConverter.shared.cs # Font sed -i '' '/else if (e.PropertyName == Label.FontProperty.PropertyName)/,+1d' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Font.FontSize/Font.Size/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' '1s/^/using Font = Microsoft.Maui.Font;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBarActionOptions.shared.cs sed -i '' '1s/^/using Font = Microsoft.Maui.Font;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.android.cs sed -i '' '1s/^/using Font = Microsoft.Maui.Font;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.ios.macos.cs sed -i '' '1s/^/using Font = Microsoft.Maui.Font;/' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/MessageOptions.shared.cs ## ToUIFont sed -i '' '1s/^/using Microsoft.Maui.Controls.Platform;using Microsoft.Extensions.DependencyInjection;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBarAppearance.ios.cs sed -i '' 's/Forms.Font.Default.ToUIFont();/Microsoft.Maui.Font.Default.ToUIFont(Microsoft.Maui.Controls.Application.Current?.Handler.MauiContext?.Services.GetRequiredService<IFontManager>());/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBarAppearance.ios.cs sed -i '' '1s/^/using Microsoft.Maui.Controls.Platform;using Microsoft.Extensions.DependencyInjection;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.ios.macos.cs sed -i '' 's/.Font.ToUIFont();/.Font.ToUIFont(sender.Handler?.MauiContext?.Services.GetRequiredService<IFontManager>());/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.ios.macos.cs # Typeface ## SnackBar.android sed -i '' '1s/^/using Microsoft.Maui.Controls.Platform;using Microsoft.Extensions.DependencyInjection;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.android.cs sed -i '' 's/if (arguments.MessageOptions.Font != Font.Default)/var fontManager = sender.Handler?.MauiContext?.Services.GetRequiredService<IFontManager>();\ \ if (fontManager is null)\ {\ throw new ArgumentException("Unable to get IFontManager implementation");\ }\ if (arguments.MessageOptions.Font != Font.Default)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.android.cs sed -i '' 's/ToTypeface()/ToTypeface(fontManager)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.android.cs ## TextSwitcherRenderer.android sed -i '' 's/var newTypeface = f.ToTypeface();/var fontManager = Element.Handler?.MauiContext?.Services.GetRequiredService<IFontManager>();\ \ if (fontManager is null)\ {\ throw new ArgumentException("Unable to get IFontManager 
implementation");\ }\ var newTypeface = f.ToTypeface(fontManager);/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/TextSwitcherRenderer.android.cs # Controls sed -i '' 's/Xamarin.Forms.Page/Microsoft.Maui.Controls.Page/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Xamarin.Forms.View/Microsoft.Maui.Controls.View/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Forms.View/Microsoft.Maui.Controls.View/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Layouts sed -i '' 's/ Layout / Microsoft.Maui.Controls.Layout /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ Layout)/ Microsoft.Maui.Controls.Layout)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using static Xamarin.Forms.AbsoluteLayout/using static Microsoft.Maui.Controls.Compatibility.AbsoluteLayout;using Microsoft.Maui.Layouts;using AbsoluteLayout = Microsoft.Maui.Controls.Compatibility.AbsoluteLayout/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/StackLayout/\tMicrosoft.Maui.Controls.StackLayout/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ GridLength/ Microsoft.Maui.GridLength/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\tGridLength/\tMicrosoft.Maui.GridLength/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/(GridLength/(Microsoft.Maui.GridLength/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/<GridLength/<Microsoft.Maui.GridLength/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/ Grid/ Microsoft.Maui.Controls.Compatibility.Grid/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/\tGrid/\tMicrosoft.Maui.Controls.Compatibility.Grid/g' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/(Grid/(Microsoft.Maui.Controls.Compatibility.Grid/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/<Grid/<Microsoft.Maui.Controls.Compatibility.Grid/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Graphics sed -i '' 's/Xamarin.Forms.Point/Microsoft.Maui.Graphics.Point/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Xamarin.Forms.Size/Microsoft.Maui.Graphics.Size/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Xamarin.Forms.View/Microsoft.Maui.Controls.View/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # ViewExtensions sed -i '' 's/\tViewExtensions./\tMicrosoft.Maui.Controls.ViewExtensions./g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # *.android.cs sed -i '' 's/ContainerView/Microsoft.Maui.Controls.Platform.Compatibility.ContainerView/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs sed -i '' 's/ContainerView(Context, basePopup.Content/ContainerView(Context, basePopup.Content, Microsoft.Maui.Controls.Application.Current?.Handler.MauiContext/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/PopupRenderer.android.cs sed -i '' 's/View.Context.ToPixels(/Microsoft.Maui.Platform.ContextExtensions.ToPixels(View.Context ?? 
throw new NullReferenceException(), /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs sed -i '' 's/Context.ToPixels(/Microsoft.Maui.Platform.ContextExtensions.ToPixels(Context, /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs sed -i '' 's/context.ToPixels(/Microsoft.Maui.Platform.ContextExtensions.ToPixels(context, /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs sed -i '' 's/Resource.Id/Xamarin.CommunityToolkit.MauiCompat.Resource.Id/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs sed -i '' 's/Resource.Layout/Xamarin.CommunityToolkit.MauiCompat.Resource.Layout/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs sed -i '' '1s/^/using Path = Android.Graphics.Path;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs sed -i '' '1s/^/using Paint = Android.Graphics.Paint;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs sed -i '' 's/ShapeDrawable/global::Android.Graphics.Drawables.ShapeDrawable/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.android.cs # BarStyle.android.cs sed -i '' '1s/^/using Window = Android.Views.Window;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/BarStyle.android.cs # TextSwitcherRenderer.android.cs sed -i '' 's/(visualElementRenderer?.OnTouchEvent(e) ?? 
false) || //g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/TextSwitcherRenderer.android.cs sed -i '' 's/f.ToScaledPixel()/(float)f.Size/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/TextSwitcherRenderer.android.cs sed -i '' 's/children.ForEach(/Array.ForEach(children,/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/TextSwitcherRenderer.android.cs sed -i '' '1s/^/using Microsoft.Extensions.DependencyInjection;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/TextSwitcherRenderer.android.cs sed -i '' 's/ToAttributed(Element.ToFont(), Element.TextColor, nextView);/ToSpannableString(Microsoft.Maui.Controls.Application.Current?.Handler.MauiContext?.Services.GetRequiredService<IFontManager>(), defaultColor: Element.TextColor);/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/TextSwitcherRenderer.android.cs ## PlatformTouchEffect.ios.cs sed -i '' 's/(isStarted ? color : control.BackgroundColor).ToCGColor()/Microsoft.Maui.Platform.ColorExtensions.ToCGColor(isStarted ? color : control.BackgroundColor)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/PlatformTouchEffect.ios.cs ## DrawingViewRenderer.ios.cs sed -i '' 's/void OnLinesCollectionChanged(object sender/void OnLinesCollectionChanged(object? 
sender/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/DrawingViewRenderer.ios.cs sed -i '' 's/currentPoint.ToPoint()/Microsoft.Maui.Platform.CoreGraphicsExtensions.ToPoint(currentPoint)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/DrawingViewRenderer.ios.cs # DrawingViewService.ios.cs sed -i '' 's/backgroundColor.ToCGColor()/Microsoft.Maui.Platform.ColorExtensions.ToCGColor(backgroundColor)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/DrawingViewService.ios.cs sed -i '' 's/strokeColor.ToCGColor()/Microsoft.Maui.Platform.ColorExtensions.ToCGColor(strokeColor)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/DrawingViewService.ios.cs sed -i '' 's/line.LineColor.ToCGColor()/Microsoft.Maui.Platform.ColorExtensions.ToCGColor(line.LineColor)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/DrawingViewService.ios.cs # SnackbarAppearance.ios.cs sed -i '' '1s/^/using Microsoft.Maui;using Microsoft.Maui.Controls.Compatibility.Platform.iOS;using Microsoft.Maui.Graphics;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackbarAppearance.ios.cs sed -i '' 's/color.A /color.Alpha /g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackbarAppearance.ios.cs # Snackbar.android.cs sed -i '' 's/await GetRendererWithRetries(sender)/(await GetRendererWithRetries(sender))?.View ?? 
sender.ToPlatform(sender.Handler.MauiContext)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.android.cs sed -i '' 's/renderer.View/renderer/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.android.cs sed -i '' 's/namespace/using Microsoft.Maui.Platform;namespace/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SnackBar.android.cs # IconTintColorEffectRouter.android.cs sed -i '' 's/args.PropertyName?.Equals(IconTintColorEffect.TintColorProperty.PropertyName)/args.PropertyName?.Equals(IconTintColorEffect.TintColorProperty.PropertyName) is true/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.android.cs sed -i '' 's/args.PropertyName?.Equals(Microsoft.Maui.Controls.Image.SourceProperty.PropertyName)/args.PropertyName?.Equals(Microsoft.Maui.Controls.Image.SourceProperty.PropertyName) is true/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.android.cs sed -i '' 's/args.PropertyName?.Equals(Microsoft.Maui.Controls.ImageButton.SourceProperty.PropertyName)/args.PropertyName?.Equals(Microsoft.Maui.Controls.ImageButton.SourceProperty.PropertyName) is true/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.android.cs sed -i '' 's/SetImageViewTintColor(ImageView image, Color color)/SetImageViewTintColor(ImageView image, Microsoft.Maui.Graphics.Color color)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.android.cs sed -i '' 's/SetButtonTintColor(Button button, Color color)/SetButtonTintColor(Button button, Microsoft.Maui.Graphics.Color color)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.android.cs sed -i '' '1s/^/using Button = Android.Widget.Button;/' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.android.cs # IconTintColorEffectRouter.ios.cs sed -i '' 
's/args.PropertyName?.Equals(IconTintColorEffect.TintColorProperty.PropertyName)/args.PropertyName?.Equals(IconTintColorEffect.TintColorProperty.PropertyName) is true/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.ios.cs sed -i '' 's/args.PropertyName?.Equals(Image.SourceProperty.PropertyName)/args.PropertyName?.Equals(Microsoft.Maui.Controls.Image.SourceProperty.PropertyName) is true/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.ios.cs sed -i '' 's/args.PropertyName?.Equals(ImageButton.SourceProperty.PropertyName)/args.PropertyName?.Equals(Microsoft.Maui.Controls.ImageButton.SourceProperty.PropertyName) is true/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/IconTintColorEffectRouter.ios.cs # SemanticEffectRouterBase.ios.cs sed -i '' 's/(T)Element.Effects.FirstOrDefault(e => e is T)/(T)Element.Effects.First(e => e is T);/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/SemanticEffectRouterBase.ios.cs # CameraViewRenderer.android.cs sed -i '' 's/visualElementRenderer?.OnTouchEvent(e) is true || //g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/CameraViewRenderer.android.cs sed -i '' 's/static void MeasureExactly(AView control, VisualElement? element, Context? context)/static void MeasureExactly(AView control, VisualElement? element, Context context)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/CameraViewRenderer.android.cs sed -i '' 's/Context.GetFragmentManager();/Microsoft.Maui.Platform.ContextExtensions.GetFragmentManager(Context ?? throw new NullReferenceException()) ?? 
throw new InvalidOperationException();/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/CameraViewRenderer.android.cs # CameraViewRenderer.ios.cs sed -i '' 's/RequestAvAsset/RequestAVAsset/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/CameraViewRenderer.ios.cs # VisualElementExtension.shared.cs sed -i '' 's/v,/(float)v,/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/VisualElementExtension.shared.cs sed -i '' 's/, v/, (float)v/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/VisualElementExtension.shared.cs # NativeSnackBar.ios.macos.cs sed -i '' 's/public SnackBarLayout Microsoft.Maui.Controls.Layout/public SnackBarLayout Layout/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/NativeSnackBar.ios.macos.cs # NativeSnackBarButton.ios.macos.cs sed -i '' 's/LineBreakMode =/TitleLabel.LineBreakMode =/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/NativeSnackButton.ios.macos.cs # VisualFeedbackEffect.shared.cs sed -i '' 's/nativeColor.Alpha/nativeColor.A/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/VisualFeedbackEffect.shared.cs # VisualFeedbackEffectRouter.shared.cs sed -i '' 's/nativeColor.Alpha/nativeColor.A/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/VisualFeedbackEffectRouter.android.cs # PlatformShadowEffect.ios.macos.cs sed -i '' 's/ShadowEffect.GetColor(Element).ToCGColor()/Microsoft.Maui.Platform.ColorExtensions.ToCGColor(ShadowEffect.GetColor(Element))/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/PlatformShadowEffect.ios.macos.cs sed -i '' 's/using Xamarin.CommunityToolkit.Android.Effects;/using System;using Xamarin.CommunityToolkit.Android.Effects;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/PlatformShadowEffect.android.cs # PlatformTouchEffect.android.cs sed -i '' 's/ViewGroup? Group => Container ?? Control as ViewGroup;/ViewGroup? Group => (Container ?? 
Control) as ViewGroup;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/PlatformTouchEffect.android.cs sed -i '' 's/XColor.Transparent/Microsoft.Maui.Graphics.Colors.Transparent/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/PlatformTouchEffect.android.cs sed -i '' 's/new Rectangle/new Rect/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/PlatformTouchEffect.android.cs # ColorExtension.shared.cs sed -i '' 's/(double)/(float)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/WithRed(this Color baseColor, double newR)/WithRed(this Color baseColor, float newR)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/WithGreen(this Color baseColor, double newG)/WithGreen(this Color baseColor, float newG)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/WithBlue(this Color baseColor, double newB)/WithBlue(this Color baseColor, float newB)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/WithAlpha(this Color baseColor, double newA)/WithAlpha(this Color baseColor, float newA)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/WithCyan(this Color baseColor, double newC)/WithCyan(this Color baseColor, float newC)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/WithMagenta(this Color baseColor, double newM)/WithMagenta(this Color baseColor, float newM)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/WithYellow(this Color baseColor, double newY)/WithYellow(this Color baseColor, float newY)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/WithBlackKey(this Color baseColor, double newK)/WithBlackKey(this Color 
baseColor, float newK)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/double GetPercentBlackKey/float GetPercentBlackKey/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/double GetPercentCyan/float GetPercentCyan/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/double GetPercentMagenta/float GetPercentMagenta/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs sed -i '' 's/double GetPercentYellow/float GetPercentYellow/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/ColorExtension.shared.cs # TouchEffect.shared.cs sed -i '' 's/OnLayoutChildAdded(layout, new ElementEventArgs(view));/OnLayoutChildAdded(layout, new ElementEventArgs((Element)view));/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/TouchEffect.shared.cs # GravatarImageExtension.shared.cs sed -i '' 's/using System;/using System;using Microsoft.Extensions.DependencyInjection;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/GravatarImageExtension.shared.cs # AvatarView.shared.cs sed -i '' 's/using System;/using System;using static Microsoft.Maui.Controls.Compatibility.AbsoluteLayout;using Microsoft.Maui.Layouts;using AbsoluteLayout = Microsoft.Maui.Controls.Compatibility.AbsoluteLayout;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/AvatarView.shared.cs sed -i '' 's/uriSource\.GetStreamAsync/((IStreamImageSource)uriSource).GetStreamAsync/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/AvatarView.shared.cs # MotionEventHelper.android.cs sed -i '' '/if (layout.CascadeInputTransparent)/,+1d' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/MotionEventHelper.android.cs # StateLayoutController.shared.cs sed -i '' 's/Microsoft.Maui.Controls.Grid/Microsoft.Maui.Controls.Compatibility.Grid/g' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/StateLayoutController.shared.cs # TabBadgeTemplate.shared.cs sed -i '' 's/Frame/Microsoft.Maui.Controls.Frame/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/TabBadgeTemplate.shared.cs # Device.macOS sed -i '' 's/Device.macOS/Device.MacCatalyst/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.shared.cs # CameraFragment.android.cs sed -i '' 's/MauiCompat.Resource.Layout.CameraFragment/MauiCompat.Resource.Layout.camerafragment/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/CameraFragment.android.cs sed -i '' 's/namespace/using RectF = Android.Graphics.RectF;namespace/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/CameraFragment.android.cs # RangeSlider.shared.cs sed -i '' 's/return value.Clamp(MinimumValue, MaximumValue);/return Math.Clamp(value, MinimumValue, MaximumValue);/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/RangeSlider.shared.cs # Replace Xamarin.Forms Namespace sed -i '' 's/using Xamarin.Forms;/using Microsoft.Maui; using Microsoft.Maui.Controls; using Microsoft.Maui.Graphics; using Microsoft.Maui.Controls.Compatibility;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/using Xamarin.Forms;/using Microsoft.Maui; using Microsoft.Maui.Controls; using Microsoft.Maui.Graphics; using Microsoft.Maui.Controls.Compatibility;/g' ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/**/**.cs sed -i '' 's/using Microsoft.Maui.Controls.Compatibility;/using Microsoft.Maui.Controls.Compatibility;using Microsoft.Maui.Layouts;using FlexLayout = Microsoft.Maui.Controls.FlexLayout;/g' ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/ViewInFlexLayoutExtensions.cs sed -i '' 's/Xamarin.Forms/Microsoft.Maui.Controls/g' ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/ElementExtensions.cs sed -i '' 's/Xamarin.Forms.Rectangle/Microsoft.Maui.Graphics.Rect/g' 
./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/RelativeLayout.cs sed -i '' 's/Xamarin.Forms.RelativeLayout/Microsoft.Maui.Controls.Compatibility.RelativeLayout/g' ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/RelativeLayout.cs sed -i '' 's/Xamarin.Forms.View/Microsoft.Maui.Controls.View/g' ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/RelativeLayout.cs sed -i '' 's/Xamarin.Forms.Constraint/Microsoft.Maui.Controls.Compatibility.Constraint/g' ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/RelativeLayout.cs sed -i '' 's/using Microsoft.Maui.Controls.Compatibility;/using Microsoft.Maui.Controls.Compatibility;using Grid = Microsoft.Maui.Controls.Grid;/g' ./src/Markup/Xamarin.CommunityToolkit.Markup.MauiCompat/ViewInGridExtensions.cs sed -i '' 's/using Xamarin.Forms.Shapes;/using Microsoft.Maui.Controls.Shapes;using Rect = Microsoft.Maui.Graphics.Rect;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/CornerRadius/CornerRadiusEffect.shared.cs sed -i '' 's/new Rectangle/new Rect/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/*.shared.cs # MauiColorExtensions printf > ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Extensions/MauiColorExtensions.android.cs " using AColor = Android.Graphics.Color; namespace Xamarin.CommunityToolkit.MauiCompat { public static partial class MauiColorExtensions { public static AColor ToAndroid(this Microsoft.Maui.Graphics.Color self) { var colorToConvert = self; if (colorToConvert == null) { colorToConvert = Microsoft.Maui.Graphics.Colors.Transparent; } return Microsoft.Maui.Controls.Compatibility.Platform.Android.ColorExtensions.ToAndroid(colorToConvert); } } }" printf > ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Extensions/MauiColorExtensions.ios.cs " using iColor = UIKit.UIColor; namespace Xamarin.CommunityToolkit.MauiCompat { public static partial class MauiColorExtensions { public static iColor ToUIColor(this 
Microsoft.Maui.Graphics.Color self) { var colorToConvert = self; if (colorToConvert == null) { colorToConvert = Microsoft.Maui.Graphics.Colors.Transparent; } return Microsoft.Maui.Controls.Compatibility.Platform.iOS.ColorExtensions.ToUIColor(colorToConvert); } } }" sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/IconTintColor/IconTintColorEffectRouter.ios.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/StatusBar/PlatformStatusBarEffect.ios.cs sed -i '' 's/\[Foundation.Preserve(AllMembers = true)/using Xamarin.CommunityToolkit.MauiCompat; \[Foundation.Preserve(AllMembers = true)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/VisualFeedback/VisualFeedbackEffectRouter.ios.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/DrawingView/Renderer/DrawingViewRenderer.ios.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/MediaElement/iOS/MediaElementRenderer.ios.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/Popup/iOS/PopupRenderer.ios.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/Snackbar/SnackBar.ios.macos.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/Snackbar/Helpers/iOS/SnackBarAppearance.ios.cs sed -i '' 's/UIColor.SystemGrayColor/UIColor.SystemGray/g' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/Snackbar/Helpers/iOS/SnackBarAppearance.ios.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/IconTintColor/IconTintColorEffectRouter.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/Shadow/PlatformShadowEffect.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/StatusBar/PlatformStatusBarEffect.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/Touch/PlatformTouchEffect.android.cs sed -i '' 's/\[Microsoft.Maui.Controls.Internals.Preserve(AllMembers = true)/using Xamarin.CommunityToolkit.MauiCompat; \[Microsoft.Maui.Controls.Internals.Preserve(AllMembers = true)/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Effects/VisualFeedback/VisualFeedbackEffectRouter.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/PlatformConfiguration/AndroidSpecific/NavigationBar/PlatformNavigationBarEffect.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/CameraView/Android/CameraFragment.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/DrawingView/Renderer/DrawingViewRenderer.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' 
./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/DrawingView/Service/DrawingViewService.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/MediaElement/Android/MediaElementRenderer.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/Snackbar/SnackBar.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/ViewSwitcher/BackgroundManager.android.cs sed -i '' 's/public class/using Xamarin.CommunityToolkit.MauiCompat; public class/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Views/ViewSwitcher/TextSwitcherRenderer.android.cs # StreamMediaSource sed -i '' 's/readonly object synchandle = new object();/public bool IsEmpty => Stream == null; readonly object synchandle = new object();/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/Core/StreamMediaSource.shared.cs # nfloat sed -i '' 's/nfloat/System.Runtime.InteropServices.NFloat/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # AVMediaType sed -i '' 's/photoOutput.ConnectionFromMediaType(AVMediaType.Video)/photoOutput.ConnectionFromMediaType(new NSString("video"))/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/AVCaptureDevice.DevicesWithMediaType(AVMediaType.Video)/AVCaptureDevice.DevicesWithMediaType("video")/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/AVMediaType.Audio/AVMediaTypes.Audio/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/AVMediaType.Video/AVAuthorizationMediaType.Video/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs # Log sed -i '' 's/using System;/using System;using 
Microsoft.Extensions.Logging;/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs sed -i '' 's/Log\./(Microsoft.Maui.Controls.Application.Current?.Handler.MauiContext?.Services.GetService(typeof(ILogger)) as ILogger)?.Log/g' ./src/CommunityToolkit/Xamarin.CommunityToolkit.MauiCompat/**/**.cs
#!/bin/bash dataset=histopathology data_dir=$HOME/data/sip sampling_steps=40 sampler=mscorrect-3 save_dir=$HOME/results/$sampler/$dataset-s-$sampling_steps if [ ! -e $save_dir ]; then mkdir -p $save_dir fi export CUDA_VISIBLE_DEVICES=3 python -m SIP.debm.experiment.main_categorical_ebm \ --data_dir $data_dir \ --save_dir $save_dir \ --dataset_name $dataset \ --sampling_steps $sampling_steps \ --model resnet-64 \ --buffer_size 1000 \ --warmup_iters 10000 \ --learning_rate 1e-4 \ --n_iters 50000 \ --buffer_init mean \ --base_dist \ --sampler $sampler \ --eval_every 73 \ --plot_every 14 \ --eval_sampling_steps 10000 \ --gpu 0 \ $@
import sys


def create_dynamic_class(name, attributes, python_version):
    """Build a class named *name* at runtime whose class-level attributes
    are taken from the *attributes* mapping.

    On Python 2 the class name must be a byte string, so *name* is passed
    through ``str`` when ``python_version`` is 2; on Python 3 (where ``str``
    is already the native text type) it is used as-is.
    """
    cls_name = str(name) if python_version == 2 else name
    return type(cls_name, (), attributes)


# Smoke-test the factory using the running interpreter's major version.
attributes = {'name': 'John', 'age': 30}
new_class = create_dynamic_class('Person', attributes, sys.version_info[0])
print(new_class.__name__)  # Output: 'Person'
print(new_class.name)  # Output: 'John'
print(new_class.age)  # Output: 30
package prjTabDez; public class TabDez { public static void main(String[] args) { int t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, i; i = 1; while (i <= 10) { t1 = 1 * i; System.out.println(t1); t2 = 2 * i; System.out.println(t2); t3 = 3 * i; System.out.println(t3); t4 = 4 * i; System.out.println(t4); t5 = 5 * i; System.out.println(t5); t6 = 6 * i; System.out.println(t6); t7 = 7 * i; System.out.println(t7); t8 = 8 * i; System.out.println(t8); t9 = 9 * i; System.out.println(t9); t10 = 10 * i; System.out.println(t10); i++; } } }
module Logic
  module Package
    # Expands a package-location template by substituting its placeholder
    # tokens ({server_dir}, {package_name}, {package_version}) with the
    # concrete values supplied by the caller.
    class PackageLocationBuilder
      # TODO: better error handling
      #
      # package_hash must contain the 'Package' and 'Version' keys
      # (Hash#fetch raises KeyError otherwise, same as before).
      def self.call(package_location_template, server_dir, package_hash)
        substitutions = {
          '{server_dir}'      => server_dir,
          '{package_name}'    => package_hash.fetch('Package'),
          '{package_version}' => package_hash.fetch('Version').to_s
        }
        # Apply the substitutions in insertion order, mirroring the original
        # chained gsub calls exactly.
        substitutions.reduce(package_location_template) do |location, (token, value)|
          location.gsub(token, value)
        end
      end
    end
  end
end
# Registers a process definition named +name+ on the given +proxy+.
# The block configures the process's pid file as "<name>.pid2".
#
# NOTE(review): +pid_file+ is not defined in this scope — it appears to be
# a DSL method resolved when the proxy evaluates the block (presumably via
# instance_eval); confirm against the proxy's process implementation.
def proc2(proxy, name)
  proxy.process(name){ pid_file "#{name}.pid2" }
end
/**
 * Iterative binary search over an ascending-sorted int array.
 *
 * @param arr sorted (ascending) array to search; must not be null
 * @param key value to locate
 * @return the index of {@code key} if present, otherwise {@code -1}
 */
public static int binarySearch(int[] arr, int key) {
    int low = 0;
    int high = arr.length - 1;
    while (low <= high) {
        // (low + high) / 2 can overflow int for very large arrays;
        // this form computes the same midpoint without overflow.
        int mid = low + (high - low) / 2;
        if (arr[mid] == key) {
            return mid;
        } else if (arr[mid] < key) {
            low = mid + 1;   // key, if present, lies in the upper half
        } else {
            high = mid - 1;  // key, if present, lies in the lower half
        }
    }
    return -1;               // not found
}
class Library:
    """A small in-memory book collection tracking borrow status.

    ``self.books`` maps each title to either the string ``"available"``
    or ``"borrowed"``.
    """

    def __init__(self):
        # title -> "available" | "borrowed"
        self.books = {}

    def add_book(self, book_title):
        """Register a new title as available; complain on duplicates."""
        if book_title in self.books:
            print(f"{book_title} already exists in the library.")
            return
        self.books[book_title] = "available"

    def remove_book(self, book_title):
        """Delete a title from the collection regardless of its status."""
        if book_title not in self.books:
            print(f"{book_title} is not available in the library.")
            return
        del self.books[book_title]

    def display_books(self):
        """Print the titles that are currently available to borrow."""
        available = [title for title, status in self.books.items()
                     if status == "available"]
        print("Available books:", ", ".join(available))

    def borrow_book(self, book_title):
        """Mark a title as borrowed if it exists and is available."""
        status = self.books.get(book_title)
        if status is None:
            print(f"{book_title} is not available in the library.")
        elif status == "borrowed":
            print(f"{book_title} is already borrowed.")
        else:
            self.books[book_title] = "borrowed"

    def return_book(self, book_title):
        """Mark a borrowed title as available again."""
        status = self.books.get(book_title)
        if status is None:
            print(f"{book_title} is not available in the library.")
        elif status == "available":
            print(f"{book_title} was not borrowed.")
        else:
            self.books[book_title] = "available"
import time
import torch
from collections import OrderedDict


class Timer(object):
    """Accumulates named wall-clock durations between checkpoints.

    Call :meth:`updateTime` after each phase of work; the time elapsed since
    the previous checkpoint is accumulated under the given key.  When
    ``cuda_sync`` is True, ``torch.cuda.synchronize()`` runs before each
    reading so pending GPU work is included in the measurement.
    """

    def __init__(self, cuda_sync=False):
        self.timer = OrderedDict()  # key -> AverageMeter of elapsed seconds
        self.cuda_sync = cuda_sync
        self.startTimer()

    def startTimer(self):
        """Set both the per-phase and display reference timestamps to now."""
        self.iter_start = time.time()
        self.disp_start = time.time()

    def resetTimer(self):
        """Restart the reference timestamps and zero every accumulated meter."""
        self.iter_start = time.time()
        self.disp_start = time.time()
        for key in self.timer.keys():
            self.timer[key].reset()

    def updateTime(self, key):
        """Record the time elapsed since the last checkpoint under ``key``."""
        if key not in self.timer.keys():
            self.timer[key] = AverageMeter()
        if self.cuda_sync:
            # Ensure queued GPU kernels finish before reading the clock.
            torch.cuda.synchronize()
        self.timer[key].update(time.time() - self.iter_start)
        self.iter_start = time.time()

    def timeToString(self, reset=True):
        """Format all accumulated timings; optionally reset afterwards.

        Fix: ``reset`` used to be ignored — the meters were cleared
        unconditionally.  It is now honoured; the default (True) preserves
        the previous behaviour for existing callers.
        """
        strs = '\t [Time %.3fs] ' % (time.time() - self.disp_start)
        for key in self.timer.keys():
            if self.timer[key].sum < 1e-4:
                # Skip entries too small to be worth displaying.
                continue
            strs += '%s: %.3fs| ' % (key, self.timer[key].sum)
        if reset:
            self.resetTimer()
        return strs


class AverageMeter(object):
    """Tracks the running sum, count and average of a scalar value."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Zero all statistics."""
        self.sum = 0
        self.count = 0
        self.avg = 0

    def update(self, val, n=1):
        """Fold in ``val`` (interpreted as a mean over ``n`` samples)."""
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count

    def __repr__(self):
        return '%.3f' % (self.avg)
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+512+512-old/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+512+512-old/7-512+512+512-ST-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_within_trigrams_first_third_sixth --eval_function penultimate_sixth_eval
package apk

import (
	"context"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/aquasecurity/trivy/pkg/fanal/analyzer"
	aos "github.com/aquasecurity/trivy/pkg/fanal/analyzer/os"
	"github.com/aquasecurity/trivy/pkg/fanal/types"
)

// Test_apkRepoAnalyzer_Analyze is a table-driven test of apkRepoAnalyzer,
// which parses the contents of /etc/apk/repositories and derives the OS
// family/release from the Alpine repository URLs it finds.  The cases below
// show that http/https/ftp schemes are all accepted, unrecognized (non-Alpine)
// repositories yield a nil result, and when several versions appear the
// highest one wins, with "edge" outranking any numbered release.
func Test_apkRepoAnalyzer_Analyze(t *testing.T) {
	tests := []struct {
		name    string
		input   analyzer.AnalysisInput
		want    *analyzer.AnalysisResult
		wantErr string
	}{
		{
			// Baseline: a single Alpine v3.7 repository URL.
			name: "alpine",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content:  strings.NewReader("http://nl.alpinelinux.org/alpine/v3.7/main"),
			},
			want: &analyzer.AnalysisResult{
				Repository: &types.Repository{Family: aos.Alpine, Release: "3.7"},
			},
		},
		{
			// Non-Alpine (Adelie Linux) repositories are not recognized.
			name: "adelie",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content:  strings.NewReader("https://distfiles.adelielinux.org/adelie/1.0-beta4/system/"),
			},
			want: nil,
		},
		{
			// The URL scheme does not matter: http works...
			name: "repository has 'http' schema",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content:  strings.NewReader("http://nl.alpinelinux.org/alpine/v3.7/main"),
			},
			want: &analyzer.AnalysisResult{
				Repository: &types.Repository{Family: aos.Alpine, Release: "3.7"},
			},
		},
		{
			// ...as does https...
			name: "repository has 'https' schema",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content:  strings.NewReader("https://dl-cdn.alpinelinux.org/alpine/v3.15/main"),
			},
			want: &analyzer.AnalysisResult{
				Repository: &types.Repository{Family: aos.Alpine, Release: "3.15"},
			},
		},
		{
			// ...and ftp.
			name: "repository has 'ftp' schema",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content:  strings.NewReader("ftp://dl-3.alpinelinux.org/alpine/v2.6/main"),
			},
			want: &analyzer.AnalysisResult{
				Repository: &types.Repository{Family: aos.Alpine, Release: "2.6"},
			},
		},
		{
			// The rolling "edge" channel is reported verbatim as the release.
			name: "edge version",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content:  strings.NewReader("https://dl-cdn.alpinelinux.org/alpine/edge/main"),
			},
			want: &analyzer.AnalysisResult{
				Repository: &types.Repository{Family: aos.Alpine, Release: "edge"},
			},
		},
		{
			// With multiple v* lines the numerically newest release wins
			// (3.10 > 3.1 — a semantic, not lexicographic, comparison).
			name: "happy path. 'etc/apk/repositories' contains some line with v* versions",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content: strings.NewReader(`https://dl-cdn.alpinelinux.org/alpine/v3.1/main
https://dl-cdn.alpinelinux.org/alpine/v3.10/main
`),
			},
			want: &analyzer.AnalysisResult{
				Repository: &types.Repository{Family: aos.Alpine, Release: "3.10"},
			},
		},
		{
			// Same as above with the lines reversed; order does not matter.
			name: "multiple v* versions",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content: strings.NewReader(`https://dl-cdn.alpinelinux.org/alpine/v3.10/main
https://dl-cdn.alpinelinux.org/alpine/v3.1/main
`),
			},
			want: &analyzer.AnalysisResult{
				Repository: &types.Repository{Family: aos.Alpine, Release: "3.10"},
			},
		},
		{
			// "edge" outranks any numbered release when both are present.
			name: "multiple v* and edge versions",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content: strings.NewReader(`https://dl-cdn.alpinelinux.org/alpine/edge/main
https://dl-cdn.alpinelinux.org/alpine/v3.10/main
`),
			},
			want: &analyzer.AnalysisResult{
				Repository: &types.Repository{Family: aos.Alpine, Release: "edge"},
			},
		},
		{
			// Malformed URL (empty path segment): no result and no error.
			name: "sad path",
			input: analyzer.AnalysisInput{
				FilePath: "/etc/apk/repositories",
				Content:  strings.NewReader("https://dl-cdn.alpinelinux.org/alpine//edge/main"),
			},
		},
	}
	for _, test := range tests {
		t.Run(test.name, func(t *testing.T) {
			a := apkRepoAnalyzer{}
			got, err := a.Analyze(context.Background(), test.input)
			// When an error is expected, only its message is verified.
			if test.wantErr != "" {
				assert.Error(t, err)
				assert.Equal(t, test.wantErr, err.Error())
				return
			}
			assert.NoError(t, err)
			assert.Equal(t, test.want, got)
		})
	}
}
# Start the app with an enlarged HTTP header size limit (~1 MB, in bytes).
# NOTE(review): presumably the app receives requests with unusually large
# headers (e.g. big cookies or tokens) that exceed Node's default cap —
# confirm why this was raised before lowering it.
node --max-http-header-size=1000000 app.js
/*
 * Copyright (C) 2012-2014 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package info.archinnov.achilles.query.cql;

import static org.fest.assertions.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import info.archinnov.achilles.internal.persistence.operations.NativeQueryMapper;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Answers;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.powermock.reflect.Whitebox;
import com.datastax.driver.core.RegularStatement;
import com.datastax.driver.core.Row;
import info.archinnov.achilles.internal.context.DaoContext;
import info.archinnov.achilles.internal.statement.wrapper.NativeStatementWrapper;
import info.archinnov.achilles.type.Options;
import info.archinnov.achilles.type.OptionsBuilder;
import info.archinnov.achilles.type.TypedMap;

/**
 * Unit tests for {@link NativeQuery}: result fetching ({@code get},
 * {@code first}), statement execution ({@code execute}) and iteration
 * ({@code iterator}), with the DAO layer and row mapper fully mocked.
 */
@RunWith(MockitoJUnitRunner.class)
public class NativeQueryTest {

	// Object under test, rebuilt fresh in setUp().
	private NativeQuery query;

	// Deep stubs let tests stub chained calls such as
	// daoContext.execute(...).all() in a single when(...) clause.
	@Mock(answer = Answers.RETURNS_DEEP_STUBS)
	private DaoContext daoContext;

	@Mock
	private NativeQueryMapper mapper;

	@Mock
	private Row row;

	@Mock
	private RegularStatement regularStatement;

	// Bound values forwarded to the statement wrapper on execution.
	private Object[] boundValues = new Object[] { 1 };

	@Captor
	private ArgumentCaptor<NativeStatementWrapper> simpleStatementCaptor;

	@Before
	public void setUp() {
		query = new NativeQuery(daoContext, regularStatement, OptionsBuilder.noOptions(), boundValues);
		// Replace the query's internal mapper with the mock so mapRows(...)
		// results can be controlled; Whitebox bypasses the private field.
		Whitebox.setInternalState(query, NativeQueryMapper.class, mapper);
	}

	@Test
	public void should_get() throws Exception {
		// get() should return exactly the list produced by the mapper.
		List<Row> rows = Arrays.asList(row);
		when(daoContext.execute(any(NativeStatementWrapper.class)).all()).thenReturn(rows);
		List<TypedMap> result = new ArrayList<>();
		when(mapper.mapRows(rows)).thenReturn(result);
		List<TypedMap> actual = query.get();
		assertThat(actual).isSameAs(result);
	}

	@Test
	public void should_get_one() throws Exception {
		// first() should return the first mapped row.
		List<Row> rows = Arrays.asList(row);
		when(daoContext.execute(any(NativeStatementWrapper.class)).all()).thenReturn(rows);
		List<TypedMap> result = new ArrayList<>();
		TypedMap line = new TypedMap();
		result.add(line);
		when(mapper.mapRows(rows)).thenReturn(result);
		TypedMap actual = query.first();
		assertThat(actual).isSameAs(line);
	}

	@Test
	public void should_return_null_when_no_row() throws Exception {
		// first() should yield null (not throw) when mapping produces nothing.
		List<Row> rows = Arrays.asList(row);
		when(daoContext.execute(any(NativeStatementWrapper.class)).all()).thenReturn(rows);
		List<TypedMap> result = new ArrayList<>();
		when(mapper.mapRows(rows)).thenReturn(result);
		Map<String, Object> actual = query.first();
		assertThat(actual).isNull();
	}

	@Test
	public void should_execute_upserts() throws Exception {
		// Given: an IF NOT EXISTS option and bound values set directly on
		// the query's (package-visible) fields.
		final Options options = OptionsBuilder.ifNotExists();
		query.boundValues = boundValues;
		query.options = options;
		when(regularStatement.getQueryString()).thenReturn("queryString");
		// When
		query.execute();
		// Then: the wrapper passed to the DAO carries the query string and
		// the original bound values.
		verify(daoContext).execute(simpleStatementCaptor.capture());
		final NativeStatementWrapper actual = simpleStatementCaptor.getValue();
		assertThat(actual.getStatement().toString()).isEqualTo("queryString");
		assertThat(actual.getValues()).isEqualTo(boundValues);
	}

	@Test
	public void should_get_iterator() throws Exception {
		// Given: an underlying row iterator reporting two elements then
		// exhaustion (hasNext: true, true, false).
		Iterator<Row> iterator = mock(Iterator.class);
		when(daoContext.execute(any(NativeStatementWrapper.class)).iterator()).thenReturn(iterator);
		when(iterator.hasNext()).thenReturn(true, true, false);
		// When
		final Iterator<TypedMap> actual = query.iterator();
		// Then: the wrapping iterator mirrors the underlying hasNext sequence.
		assertThat(actual.hasNext()).isTrue();
		assertThat(actual.hasNext()).isTrue();
		assertThat(actual.hasNext()).isFalse();
	}
}
/* * Tencent is pleased to support the open source community by making IoT Hub available. * Copyright (C) 2016 THL A29 Limited, a Tencent company. All rights reserved. * Licensed under the MIT License (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * http://opensource.org/licenses/MIT * Unless required by applicable law or agreed to in writing, software distributed under the License is * distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions and * limitations under the License. * */ #ifdef __cplusplus extern "C" { #endif #include "qcloud.h" __QCLOUD_STATIC__ qcloud_err_t coap_client_network_host_construct(qcloud_network_t *network, qcloud_device_t *device) { int server_len; char coap_server[QCLOUD_SERVER_DOMAIN_MAX]; memset(network->host, 0, sizeof(network->host)); server_len = osal_snprintf(coap_server, sizeof(coap_server), "%s.%s", device->product_id, qcloud_coap_server); if (server_len < 0 || server_len > QCLOUD_SERVER_DOMAIN_MAX - 1) { QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE); } memcpy(network->host, coap_server, sizeof(network->host)); network->port = qcloud_coap_port; return QCLOUD_ERR_SUCCESS; } __QCLOUD_STATIC__ qcloud_err_t coap_client_network_init(qcloud_network_t *network, qcloud_device_t *device) { #if (QCLOUD_CFG_TLS_EN > 0u) QCLOUD_FUNC_EXIT_RC_IF_NOT(qcloud_tls_init(&network->tls_opt, device), QCLOUD_ERR_SUCCESS, QCLOUD_ERR_FAILURE); QCLOUD_FUNC_EXIT_RC_IF_NOT(qcloud_network_dtls_init(network), QCLOUD_ERR_SUCCESS, QCLOUD_ERR_FAILURE); #else QCLOUD_FUNC_EXIT_RC_IF_NOT(qcloud_network_udp_init(network), QCLOUD_ERR_SUCCESS, QCLOUD_ERR_FAILURE); #endif QCLOUD_FUNC_EXIT_RC_IF_NOT(coap_client_network_host_construct(network, device), QCLOUD_ERR_SUCCESS, QCLOUD_ERR_FAILURE); return QCLOUD_ERR_SUCCESS; } __QCLOUD_STATIC__ uint16_t 
coap_client_random_packet_id_generate(void) { #define PACKET_ID_MAX (65535) srand((unsigned)osal_timer_current_sec()); return rand() % (PACKET_ID_MAX + 1) + 1; } __QCLOUD_STATIC__ qcloud_err_t coap_client_construct(qcloud_coap_client_t *client, qcloud_device_t *device, coap_event_handler_fn_t handler) { int len = 0; client->auth_state = QCLOUD_COAP_AUTH_STATE_NONE; client->command_timeout = QCLOUD_COAP_COMMAND_TIMEOUT; client->message_token = 0; client->event_handler.handler = handler; // packet id 取随机数 1- 65536 client->packet_id = coap_client_random_packet_id_generate(); client->auth_token = NULL; client->auth_token_len = 0; client->retransmit_max = 1; len = osal_snprintf(client->auth_uri, sizeof(client->auth_uri), "%s/%s/%s", device->product_id, device->device_name, QCLOUD_COAP_AUTH_URI); if (len < 0 || len >= QCLOUD_COAP_AUTH_URI_MAX) { QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_INVAL); } qcloud_list_init(&client->message_list); if ((client->message_list_lock = osal_mutex_create()) == NULL) { goto errout; } if ((client->tx_lock = osal_mutex_create()) == NULL) { goto errout; } QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS); errout: if (client->message_list_lock) { osal_mutex_destroy(client->message_list_lock); } if (client->tx_lock) { osal_mutex_destroy(client->tx_lock); } QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE) } __QCLOUD_API__ qcloud_err_t qcloud_coap_client_create(qcloud_coap_client_t *client, qcloud_device_t *device, coap_event_handler_fn_t handler) { QCLOUD_FUNC_ENTRY; QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL); QCLOUD_POINTER_SANITY_CHECK(device, QCLOUD_ERR_INVAL); memset(client, 0, sizeof(qcloud_coap_client_t)); QCLOUD_FUNC_EXIT_RC_IF_NOT(coap_client_network_init(&client->network, device), QCLOUD_ERR_SUCCESS, QCLOUD_ERR_FAILURE); QCLOUD_FUNC_EXIT_RC_IF_NOT(coap_client_construct(client, device, handler), QCLOUD_ERR_SUCCESS, QCLOUD_ERR_FAILURE); QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS); } __QCLOUD_API__ qcloud_err_t qcloud_coap_client_connect(qcloud_coap_client_t 
*client) { QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL); int len; qcloud_err_t rc; char connection_id[QCLOUD_COAP_CONNECT_ID_MAX + 1]; QCLOUD_FUNC_EXIT_RC_IF_NOT(rc = client->network.connect(&client->network), QCLOUD_ERR_SUCCESS, rc); coap_glue_connect_id_generate(connection_id); len = osal_snprintf(client->auth_id, sizeof(client->auth_id), "%s;%s", QCLOUD_APPID, connection_id); if (len < 0 || len >= QCLOUD_COAP_AUTH_ID_MAX) { QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_INVAL); } coap_auth(client, connection_id); while (client->auth_state == QCLOUD_COAP_AUTH_STATE_NONE) { qcloud_coap_client_yield(client, 200); } if (client->auth_state != QCLOUD_COAP_AUTH_STATE_SUCCESS) { QCLOUD_LOG_I("auth failed"); QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE); } QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS); } __QCLOUD_API__ qcloud_err_t qcloud_coap_client_yield(qcloud_coap_client_t *client, uint32_t timeout_ms) { QCLOUD_FUNC_ENTRY; QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL); return coap_glue_spin(client, timeout_ms); } __QCLOUD_API__ qcloud_err_t qcloud_coap_client_destroy(qcloud_coap_client_t *client) { QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL); if (client->network.is_connected && client->network.is_connected(&client->network)) { client->network.disconnect(&client->network); } coap_glue_message_list_destroy(client); osal_mutex_destroy(client->tx_lock); osal_mutex_destroy(client->message_list_lock); if (client->auth_token) { osal_free(client->auth_token); client->auth_token = NULL; } client->auth_token_len = 0; QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS); } __QCLOUD_API__ qcloud_err_t qcloud_coap_client_msg_send(qcloud_coap_client_t *client, char *topic, coap_send_opt_t *send_opt) { QCLOUD_FUNC_ENTRY QCLOUD_POINTER_SANITY_CHECK(client, QCLOUD_ERR_INVAL); QCLOUD_POINTER_SANITY_CHECK(topic, QCLOUD_ERR_INVAL); QCLOUD_POINTER_SANITY_CHECK(send_opt, QCLOUD_ERR_INVAL); int len; qcloud_err_t rc; char message_token[8] = {0}; coap_message_t send_msg = COAP_MESSAGE_INITIALIZER; if 
(strlen(topic) > QCLOUD_COAP_URI_MAX) { QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_MAX_TOPIC_LENGTH); } coap_message_init(&send_msg); coap_message_type_set(&send_msg, COAP_MSG_TYPE_CON); coap_message_code_set(&send_msg, COAP_CODE_CLASS_REQ, COAP_REQUEST_METHOD_POST); coap_message_id_set(&send_msg, coap_glue_packet_id_generate(client)); len = coap_message_token_get(client, message_token); coap_message_token_set(&send_msg, message_token, len); send_msg.payload = (char *)osal_malloc(send_opt->payload_len); if (!send_msg.payload) { QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_INVAL); } coap_message_payload_set(&send_msg, send_opt->payload, send_opt->payload_len); coap_message_option_add(&send_msg, COAP_MSG_OPTION_CODE_URI_PATH, strlen(topic), topic); coap_message_option_add(&send_msg, COAP_MSG_OPTION_CODE_AUTH_TOKEN, client->auth_token_len, client->auth_token); if (send_opt->resp_cb) { coap_message_option_add(&send_msg, COAP_MSG_OPTION_CODE_NEED_RESP, 1, "1"); coap_message_callback_set(&send_msg, send_opt->resp_cb); } else { coap_message_option_add(&send_msg, COAP_MSG_OPTION_CODE_NEED_RESP, 1, "0"); } coap_message_context_set(&send_msg, send_opt->context); rc = coap_glue_msg_send(client, &send_msg); osal_free(send_msg.payload); QCLOUD_FUNC_EXIT_RC(rc) } __QCLOUD_API__ uint16_t qcloud_coap_msg_id_get(coap_message_t *message) { QCLOUD_FUNC_ENTRY if (!message) { return COAP_MSG_ID_MAX; } return message->id; } __QCLOUD_API__ qcloud_err_t qcloud_coap_msg_payload_get(coap_message_t *message, char **payload, int *payload_len) { QCLOUD_FUNC_ENTRY QCLOUD_POINTER_SANITY_CHECK(message, QCLOUD_ERR_INVAL); QCLOUD_POINTER_SANITY_CHECK(payload, QCLOUD_ERR_INVAL); QCLOUD_POINTER_SANITY_CHECK(payload_len, QCLOUD_ERR_INVAL); if (message->code_class != COAP_CODE_CLASS_SUCCESS || message->code_detail != COAP_CODE_DETAIL_205_CONTENT) { QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_FAILURE) } *payload = message->payload; *payload_len = message->payload_len; QCLOUD_FUNC_EXIT_RC(QCLOUD_ERR_SUCCESS) } __QCLOUD_API__ coap_event_type_t 
qcloud_coap_event_type_get(coap_message_t *message) { QCLOUD_FUNC_ENTRY QCLOUD_POINTER_SANITY_CHECK(message, COAP_EVENT_TYPE_UNAUTHORIZED); switch (message->code_class) { case COAP_CODE_CLASS_SUCCESS: return COAP_EVENT_TYPE_RECEIVE_RESPCONTENT; case COAP_CODE_CLASS_SERVER_ERR: return COAP_EVENT_TYPE_INTERNAL_SERVER_ERROR; case COAP_CODE_CLASS_INTERNAL_ERR: return COAP_EVENT_TYPE_SEPRESP_TIMEOUT; case COAP_CODE_CLASS_CLIENT_ERR: if (message->code_detail == COAP_CODE_DETAIL_401_UNAUTHORIZED) { return COAP_EVENT_TYPE_UNAUTHORIZED; } else { return COAP_EVENT_TYPE_FORBIDDEN; } default: QCLOUD_LOG_E("not supported code class: %d", message->code_class); return COAP_EVENT_TYPE_ACK_TIMEOUT; } } #ifdef __cplusplus } #endif
#include <bits/stdc++.h> #include <stdio.h> int main() { int t; scanf("%d", &t); for(int i = 0; i < t; i++) { int n; scanf("%d", &n); int arr[n], brr[100000] = {0}, crr[100000] = {0}, max = -1; vector<int> arr[n]; for(int j = 0; j < n; j++) brr[j] = j; for(int j = 0; j < n; j++) { int count = 0; scanf("%d", &arr[j]); for(int k = 0; k < j;) { if(arr[k] % arr[j] == 0) { // printf("%d %d\n", arr[j], arr[k]); brr[k] = j; count++; } } if(max < count) max = count; } printf("%d\n", max); } }
import React from "react";
import ReactDOM from "react-dom";
import Webhooks from "./webhooks";
import axios from 'axios';

// Settings panel rendered into the app's main view; currently just hosts
// the <Webhooks/> subsection.
// NOTE(review): the constructor only forwards props and could be removed;
// `axios` is imported but unused here — presumably kept for child modules
// or planned requests; verify before deleting.
class Settings extends React.Component {
  constructor (props) {
    super(props)
  }

  render () {
    return <div className="menu-item-settings">
      <Webhooks/>
    </div>
  }
}

// Mounts the Settings component into the DOM element with the given id,
// first unmounting whatever React tree previously occupied it so repeated
// navigation does not leak component instances.
function createReactSettings(container) {
  ReactDOM.unmountComponentAtNode( document.getElementById(container))
  ReactDOM.render(React.createElement(Settings), document.getElementById(container))
}

// Wire up navigation: when the app asks for the settings page, render it
// into 'main-view' and then announce readiness via 'settings-init'.
// NOTE(review): assumes a global `window.eventBus` emitter is installed
// before this module loads — TODO confirm bootstrap order.
window.eventBus.on('draw-settings', (e) => {
  createReactSettings('main-view')
  window.eventBus.emit('settings-init')
})

module.exports = createReactSettings
/**
 * @author <NAME>
 */

import { SortOrder } from '../data/sort.data';

/**
 * Options for comparing two object values.
 * `by` names the property to compare on; `order` selects ascending or
 * descending (defaulting behavior is decided by the consumer).
 */
export interface CompareObjectValueOptions {
  by: string;
  order?: SortOrder;
}

/**
 * Options for sorting a plain array.
 * `by` is only meaningful when `data` holds objects; omit it for primitives.
 */
export interface SortArrayOptions {
  data: any[];
  by?: string;
  order?: SortOrder;
}

/**
 * Options for sorting the entries of a Map.
 * NOTE(review): whether `by` refers to the entry key or a property of the
 * entry value is not visible here — confirm against the sort implementation.
 */
export interface SortMapOptions {
  by?: string;
  data: Map<any, any>;
  order?: SortOrder;
}
package com.alipay.api.domain; import com.alipay.api.AlipayObject; import com.alipay.api.internal.mapping.ApiField; /** * 出租车司机车辆信息 * * @author auto create * @since 1.0, 2021-09-09 09:44:44 */ public class DriverCarInfo extends AlipayObject { private static final long serialVersionUID = 4186937624168985138L; /** * 是否开通高德聚合码 "0"-未开通 "1"-开通 */ @ApiField("aggregate_code_flag") private String aggregateCodeFlag; /** * 认证时间 */ @ApiField("bind_time") private String bindTime; /** * 车辆品牌 */ @ApiField("car_brand") private String carBrand; /** * 车辆颜色 */ @ApiField("car_color") private String carColor; /** * 车牌号 */ @ApiField("car_no") private String carNo; /** * 车牌颜色 */ @ApiField("car_no_color") private String carNoColor; /** * 运营卡号 */ @ApiField("car_operation_no") private String carOperationNo; /** * 车辆型号 */ @ApiField("car_type") private String carType; /** * 身份证号 */ @ApiField("cert_no") private String certNo; /** * 渠道 */ @ApiField("channel") private String channel; /** * 城市码 */ @ApiField("city_code") private String cityCode; /** * 城市名 */ @ApiField("city_name") private String cityName; /** * 所属公司 */ @ApiField("company") private String company; /** * 司机id */ @ApiField("driver_id") private String driverId; /** * 司机姓名 */ @ApiField("driver_name") private String driverName; /** * 司机性别 M-男 F-女 */ @ApiField("driver_sex") private String driverSex; /** * 支付宝uid */ @ApiField("driver_user_id") private String driverUserId; /** * 电话号码 */ @ApiField("tele_no") private String teleNo; public String getAggregateCodeFlag() { return this.aggregateCodeFlag; } public void setAggregateCodeFlag(String aggregateCodeFlag) { this.aggregateCodeFlag = aggregateCodeFlag; } public String getBindTime() { return this.bindTime; } public void setBindTime(String bindTime) { this.bindTime = bindTime; } public String getCarBrand() { return this.carBrand; } public void setCarBrand(String carBrand) { this.carBrand = carBrand; } public String getCarColor() { return this.carColor; } public void setCarColor(String 
carColor) { this.carColor = carColor; } public String getCarNo() { return this.carNo; } public void setCarNo(String carNo) { this.carNo = carNo; } public String getCarNoColor() { return this.carNoColor; } public void setCarNoColor(String carNoColor) { this.carNoColor = carNoColor; } public String getCarOperationNo() { return this.carOperationNo; } public void setCarOperationNo(String carOperationNo) { this.carOperationNo = carOperationNo; } public String getCarType() { return this.carType; } public void setCarType(String carType) { this.carType = carType; } public String getCertNo() { return this.certNo; } public void setCertNo(String certNo) { this.certNo = certNo; } public String getChannel() { return this.channel; } public void setChannel(String channel) { this.channel = channel; } public String getCityCode() { return this.cityCode; } public void setCityCode(String cityCode) { this.cityCode = cityCode; } public String getCityName() { return this.cityName; } public void setCityName(String cityName) { this.cityName = cityName; } public String getCompany() { return this.company; } public void setCompany(String company) { this.company = company; } public String getDriverId() { return this.driverId; } public void setDriverId(String driverId) { this.driverId = driverId; } public String getDriverName() { return this.driverName; } public void setDriverName(String driverName) { this.driverName = driverName; } public String getDriverSex() { return this.driverSex; } public void setDriverSex(String driverSex) { this.driverSex = driverSex; } public String getDriverUserId() { return this.driverUserId; } public void setDriverUserId(String driverUserId) { this.driverUserId = driverUserId; } public String getTeleNo() { return this.teleNo; } public void setTeleNo(String teleNo) { this.teleNo = teleNo; } }
import { JsonInterface as __type___parent_tests_JsonInterface } from "../../../__type__/parent/tests/JsonInterface"

// NOTE(review): unimplemented stub — it currently returns `undefined` for
// every input (falsy, but not the documented `false`). TypeScript interfaces
// are erased at runtime, so the imported `JsonInterface` cannot actually be
// inspected here; a runtime schema or generated validator would be required
// to implement this check. TODO: implement or remove.
function validateJSON(jsonData) {
  // Assuming the interface definition is imported as __type___parent_tests_JsonInterface
  // Perform validation logic using the imported interface
  // Return true if jsonData conforms to the interface, otherwise return false
}
<reponame>EIDSS/EIDSS-Legacy package com.bv.eidss.model; import java.text.Format; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import java.util.Locale; import java.util.UUID; import com.bv.eidss.DateHelpers; import com.bv.eidss.data.EidssDatabase; import android.content.ContentValues; import android.database.Cursor; import android.os.Parcel; import android.os.Parcelable; public class HumanCase implements Parcelable { private long _id; public long getId() { return _id; } public void setId(long value) { _id = value; } private String _strLastSynError; public String getLastSynError() { return _strLastSynError; } public void setLastSynError(String value) { _strLastSynError = value; } private int _intStatus; // 1 - new; 2 - synchronized; 3 - changed; public int getStatus() { return _intStatus; } public void setStatusChanged() { _intStatus = HumanCaseStatus.CHANGED; } public void setStatusSyn() { _intStatus = HumanCaseStatus.SYNCHRONIZED; } private Date _datCreateDate; public Date getCreateDate() { return _datCreateDate; } private String _uidOfflineCaseID; public String getOfflineCaseID() { return _uidOfflineCaseID; } private String _strCaseID; public String getCaseID() { return _strCaseID; } public void setCaseID(String value) { _strCaseID = value; } private long _idfCase; public long getCase() { return _idfCase; } public void setCase(long value) { _idfCase = value; } private String _strLocalIdentifier; public String getLocalIdentifier() { return _strLocalIdentifier; } public void setLocalIdentifier(String value) { bChanged = bChanged || _strLocalIdentifier != value; _strLocalIdentifier = value; } private long _idfsTentativeDiagnosis; public long getTentativeDiagnosis() { return _idfsTentativeDiagnosis; } public void setTentativeDiagnosis(long value) { bChanged = bChanged || _idfsTentativeDiagnosis != value; _idfsTentativeDiagnosis = value; } private Date _datTentativeDiagnosisDate; public Date 
getTentativeDiagnosisDate() { return _datTentativeDiagnosisDate; } public void setTentativeDiagnosisDate(Date value) { bChanged = bChanged || _datTentativeDiagnosisDate != value; _datTentativeDiagnosisDate = value; } private String _strFamilyName; public String getFamilyName() { return _strFamilyName; } public void setFamilyName(String value) { bChanged = bChanged || _strFamilyName != value; _strFamilyName = value; } private String _strFirstName; public String getFirstName() { return _strFirstName; } public void setFirstName(String value) { bChanged = bChanged || _strFirstName != value; _strFirstName = value; } private Date _datDateofBirth; public Date getDateofBirth() { return _datDateofBirth; } public void setDateofBirth(Date value) { bChanged = bChanged || _datDateofBirth != value; _datDateofBirth = value; } private int _intPatientAge; public int getPatientAge() { return _intPatientAge; } public void setPatientAge(int value) { bChanged = bChanged || _intPatientAge != value; _intPatientAge = value; } private long _idfsHumanAgeType; public long getHumanAgeType() { return _idfsHumanAgeType; } public void setHumanAgeType(long value) { bChanged = bChanged || _idfsHumanAgeType != value; _idfsHumanAgeType = value; } private long _idfsHumanGender; public long getHumanGender() { return _idfsHumanGender; } public void setHumanGender(long value) { bChanged = bChanged || _idfsHumanGender != value; _idfsHumanGender = value; } private long _idfsRegionCurrentResidence; public long getRegionCurrentResidence() { return _idfsRegionCurrentResidence; } public void setRegionCurrentResidence(long value) { bChanged = bChanged || _idfsRegionCurrentResidence != value; _idfsRegionCurrentResidence = value; } private long _idfsRayonCurrentResidence; public long getRayonCurrentResidence() { return _idfsRayonCurrentResidence; } public void setRayonCurrentResidence(long value) { bChanged = bChanged || _idfsRayonCurrentResidence != value; _idfsRayonCurrentResidence = value; } private long 
_idfsSettlementCurrentResidence; public long getSettlementCurrentResidence() { return _idfsSettlementCurrentResidence; } public void setSettlementCurrentResidence(long value) { bChanged = bChanged || _idfsSettlementCurrentResidence != value; _idfsSettlementCurrentResidence = value; } private String _strBuilding; public String getBuilding() { return _strBuilding; } public void setBuilding(String value) { bChanged = bChanged || _strBuilding != value; _strBuilding = value; } private String _strHouse; public String getHouse() { return _strHouse; } public void setHouse(String value) { bChanged = bChanged || _strHouse != value; _strHouse = value; } private String _strApartment; public String getApartment() { return _strApartment; } public void setApartment(String value) { bChanged = bChanged || _strApartment != value; _strApartment = value; } private String _strStreetName; public String getStreetName() { return _strStreetName; } public void setStreetName(String value) { bChanged = bChanged || _strStreetName != value; _strStreetName = value; } private String _strPostCode; public String getPostCode() { return _strPostCode; } public void setPostCode(String value) { bChanged = bChanged || _strPostCode != value; _strPostCode = value; } private String _strHomePhone; public String getHomePhone() { return _strHomePhone; } public void setHomePhone(String value) { bChanged = bChanged || _strHomePhone != value; _strHomePhone = value; } private Date _datOnSetDate; public Date getOnSetDate() { return _datOnSetDate; } public void setOnSetDate(Date value) { bChanged = bChanged || _datOnSetDate != value; _datOnSetDate = value; } private long _idfsFinalState; public long getFinalState() { return _idfsFinalState; } public void setFinalState(long value) { bChanged = bChanged || _idfsFinalState != value; _idfsFinalState = value; } private long _idfsHospitalizationStatus; public long getHospitalizationStatus() { return _idfsHospitalizationStatus; } public void setHospitalizationStatus(long 
value) { bChanged = bChanged || _idfsHospitalizationStatus != value; _idfsHospitalizationStatus = value; } private Date _datNotificationDate; public Date getNotificationDate() { return _datNotificationDate; } public void setNotificationDate(Date value) { _datNotificationDate = value; } private String _strSentByOffice; public String getSentByOffice() { return _strSentByOffice; } public void setSentByOffice(String value) { _strSentByOffice = value; } private String _strSentByPerson; public String getSentByPerson() { return _strSentByPerson; } public void setSentByPerson(String value) { _strSentByPerson = value; } private Boolean bChanged; public Boolean getChanged() { return bChanged; } public String TentativeDiagnosis(EidssDatabase db) { List<BaseReference> list = db.Reference(BaseReferenceType.rftDiagnosis, db.getCurrentLanguage(), 2); for(int i = 0; i < list.size(); i++){ if (list.get(i).idfsBaseReference == getTentativeDiagnosis()){ return list.get(i).name; } } return ""; } private HumanCase() { } public static HumanCase CreateNew() { HumanCase ret = new HumanCase(); ret._intStatus = HumanCaseStatus.NEW; ret._uidOfflineCaseID = UUID.randomUUID().toString(); ret._strCaseID = "(new)"; ret._datCreateDate = new Date(); ret.bChanged = true; return ret; } @SuppressWarnings("deprecation") private Date getD() { Date ret; if (_datOnSetDate != null) ret = (Date)_datOnSetDate.clone(); else if (_datNotificationDate != null) ret = (Date)_datNotificationDate.clone(); else ret = (Date)_datCreateDate.clone(); ret.setHours(0); ret.setMinutes(0); ret.setSeconds(0); return ret; } public int CalcPatientAge() { GetDOBandAgeRet ret = GetDOBandAge(); if (ret.result) return ret.intPatientAge; return this._intPatientAge; } public long CalcPatientAgeType() { GetDOBandAgeRet ret = GetDOBandAge(); if (ret.result) return ret.idfsHumanAgeType; return this._idfsHumanAgeType; } class GetDOBandAgeRet { public boolean result; public int intPatientAge; public long idfsHumanAgeType; } private 
GetDOBandAgeRet GetDOBandAge() { GetDOBandAgeRet ret = new GetDOBandAgeRet(); ret.result = true; double ddAge = -1; Date datUp = null; if (this.getDateofBirth() != null) { datUp = this.getD(); long diff = this.getDateofBirth().getTime() - this.getD().getTime(); ddAge = -diff / (1000L*60L*60L*24L); if (ddAge > -1) { long yyAge = DateHelpers.DateDifference(0, this._datDateofBirth, datUp); if (yyAge > 0) { //'Years ret.intPatientAge = (int)yyAge; ret.idfsHumanAgeType = HumanAgeType.Years; return ret; } else { long mmAge = DateHelpers.DateDifference(1, this._datDateofBirth, datUp); if (mmAge > 0) { //'Months ret.intPatientAge = (int)mmAge; ret.idfsHumanAgeType = HumanAgeType.Month; return ret; } else { //'Days ret.intPatientAge = (int)ddAge; ret.idfsHumanAgeType = HumanAgeType.Days; return ret; } } } } ret.result = false; return ret; } public static HumanCase FromCursor(Cursor cursor) { HumanCase ret = new HumanCase(); Format formatterDateTime = new SimpleDateFormat("yyyy-MM-dd HH.mm.ss", Locale.US); Format formatterDate = new SimpleDateFormat("yyyy-MM-dd", Locale.US); try { ret._id = cursor.getLong(cursor.getColumnIndex("id")); ret._strLastSynError = cursor.getString(cursor.getColumnIndex("strLastSynError")); ret._intStatus = cursor.getInt(cursor.getColumnIndex("intStatus")); String strDate = cursor.getString(cursor.getColumnIndex("datCreateDate")); ret._datCreateDate = (Date)formatterDateTime.parseObject(strDate); ret._uidOfflineCaseID = cursor.getString(cursor.getColumnIndex("uidOfflineCaseID")); ret._strCaseID = cursor.getString(cursor.getColumnIndex("strCaseID")); ret._idfCase = cursor.getLong(cursor.getColumnIndex("idfCase")); ret._strLocalIdentifier = cursor.getString(cursor.getColumnIndex("strLocalIdentifier")); ret._idfsTentativeDiagnosis = cursor.getLong(cursor.getColumnIndex("idfsTentativeDiagnosis")); strDate = cursor.getString(cursor.getColumnIndex("datTentativeDiagnosisDate")); if (strDate != null && strDate != "") ret._datTentativeDiagnosisDate = 
(Date)formatterDate.parseObject(strDate); ret._strFamilyName = cursor.getString(cursor.getColumnIndex("strFamilyName")); ret._strFirstName = cursor.getString(cursor.getColumnIndex("strFirstName")); strDate = cursor.getString(cursor.getColumnIndex("datDateofBirth")); if (strDate != null && strDate != "") ret._datDateofBirth = (Date)formatterDate.parseObject(strDate); ret._intPatientAge = cursor.getInt(cursor.getColumnIndex("intPatientAge")); ret._idfsHumanAgeType = cursor.getLong(cursor.getColumnIndex("idfsHumanAgeType")); ret._idfsHumanGender = cursor.getLong(cursor.getColumnIndex("idfsHumanGender")); ret._idfsRegionCurrentResidence = cursor.getLong(cursor.getColumnIndex("idfsRegionCurrentResidence")); ret._idfsRayonCurrentResidence = cursor.getLong(cursor.getColumnIndex("idfsRayonCurrentResidence")); ret._idfsSettlementCurrentResidence = cursor.getLong(cursor.getColumnIndex("idfsSettlementCurrentResidence")); ret._strBuilding = cursor.getString(cursor.getColumnIndex("strBuilding")); ret._strHouse = cursor.getString(cursor.getColumnIndex("strHouse")); ret._strApartment = cursor.getString(cursor.getColumnIndex("strApartment")); ret._strStreetName = cursor.getString(cursor.getColumnIndex("strStreetName")); ret._strPostCode = cursor.getString(cursor.getColumnIndex("strPostCode")); ret._strHomePhone = cursor.getString(cursor.getColumnIndex("strHomePhone")); strDate = cursor.getString(cursor.getColumnIndex("datOnSetDate")); if (strDate != null && strDate != "") ret._datOnSetDate = (Date)formatterDate.parseObject(strDate); ret._idfsFinalState = cursor.getLong(cursor.getColumnIndex("idfsFinalState")); ret._idfsHospitalizationStatus = cursor.getLong(cursor.getColumnIndex("idfsHospitalizationStatus")); strDate = cursor.getString(cursor.getColumnIndex("datNotificationDate")); if (strDate != null && strDate != "") ret._datNotificationDate = (Date)formatterDate.parseObject(strDate); ret._strSentByOffice = cursor.getString(cursor.getColumnIndex("strSentByOffice")); 
ret._strSentByPerson = cursor.getString(cursor.getColumnIndex("strSentByPerson")); ret.bChanged = false; } catch (ParseException e) { e.printStackTrace(); return null; } return ret; } public ContentValues ContentValues() { ContentValues ret = new ContentValues(); Format formatterDateTime = new SimpleDateFormat("yyyy-MM-dd HH.mm.ss", Locale.US); Format formatterDate = new SimpleDateFormat("yyyy-MM-dd", Locale.US); if (_id != 0) ret.put("id", _id); ret.put("strLastSynError", _strLastSynError); ret.put("intStatus", _intStatus); String strDate = formatterDateTime.format(_datCreateDate); ret.put("datCreateDate", strDate); ret.put("uidOfflineCaseID", _uidOfflineCaseID); ret.put("strCaseID", _strCaseID); ret.put("idfCase", _idfCase); ret.put("strLocalIdentifier", _strLocalIdentifier); ret.put("idfsTentativeDiagnosis", _idfsTentativeDiagnosis); strDate = null; if (_datTentativeDiagnosisDate != null) strDate = formatterDate.format(_datTentativeDiagnosisDate); ret.put("datTentativeDiagnosisDate", strDate); ret.put("strFamilyName", _strFamilyName); ret.put("strFirstName", _strFirstName); strDate = null; if (_datDateofBirth != null) strDate = formatterDate.format(_datDateofBirth); ret.put("datDateofBirth", strDate); ret.put("intPatientAge", _intPatientAge); ret.put("idfsHumanAgeType", _idfsHumanAgeType); ret.put("idfsHumanGender", _idfsHumanGender); ret.put("idfsRegionCurrentResidence", _idfsRegionCurrentResidence); ret.put("idfsRayonCurrentResidence", _idfsRayonCurrentResidence); ret.put("idfsSettlementCurrentResidence", _idfsSettlementCurrentResidence); ret.put("strBuilding", _strBuilding); ret.put("strHouse", _strHouse); ret.put("strApartment", _strApartment); ret.put("strStreetName", _strStreetName); ret.put("strPostCode", _strPostCode); ret.put("strHomePhone", _strHomePhone); strDate = null; if (_datOnSetDate != null) strDate = formatterDate.format(_datOnSetDate); ret.put("datOnSetDate", strDate); ret.put("idfsFinalState", _idfsFinalState); 
ret.put("idfsHospitalizationStatus", _idfsHospitalizationStatus); strDate = null; if (_datNotificationDate != null) strDate = formatterDate.format(_datNotificationDate); ret.put("datNotificationDate", strDate); ret.put("strSentByOffice", _strSentByOffice); ret.put("strSentByPerson", _strSentByPerson); return ret; } public static final Parcelable.Creator<HumanCase> CREATOR = new Parcelable.Creator<HumanCase>() { public HumanCase createFromParcel(Parcel in) { return new HumanCase(in); } public HumanCase[] newArray(int size) { return new HumanCase[size]; } }; public HumanCase(Parcel source) { _id = source.readLong(); _strLastSynError = source.readString(); _intStatus = source.readInt(); _datCreateDate = (Date)source.readSerializable(); _uidOfflineCaseID = source.readString(); _strCaseID = source.readString(); _idfCase = source.readLong(); _strLocalIdentifier = source.readString(); _idfsTentativeDiagnosis = source.readLong(); _datTentativeDiagnosisDate = (Date)source.readSerializable(); _strFamilyName = source.readString(); _strFirstName = source.readString(); _datDateofBirth = (Date)source.readSerializable(); _intPatientAge = source.readInt(); _idfsHumanAgeType = source.readLong(); _idfsHumanGender = source.readLong(); _idfsRegionCurrentResidence = source.readLong(); _idfsRayonCurrentResidence = source.readLong(); _idfsSettlementCurrentResidence = source.readLong(); _strBuilding = source.readString(); _strHouse = source.readString(); _strApartment = source.readString(); _strStreetName = source.readString(); _strPostCode = source.readString(); _strHomePhone = source.readString(); _datOnSetDate = (Date)source.readSerializable(); _idfsFinalState = source.readLong(); _idfsHospitalizationStatus = source.readLong(); _datNotificationDate = (Date)source.readSerializable(); _strSentByOffice = source.readString(); _strSentByPerson = source.readString(); bChanged = false; } @Override public int describeContents() { return 4; } @Override public void writeToParcel(Parcel dest, int 
arg1) { dest.writeLong(_id); dest.writeString(_strLastSynError); dest.writeInt(_intStatus); dest.writeSerializable(_datCreateDate); dest.writeString(_uidOfflineCaseID); dest.writeString(_strCaseID); dest.writeLong(_idfCase); dest.writeString(_strLocalIdentifier); dest.writeLong(_idfsTentativeDiagnosis); dest.writeSerializable(_datTentativeDiagnosisDate); dest.writeString(_strFamilyName); dest.writeString(_strFirstName); dest.writeSerializable(_datDateofBirth); dest.writeInt(_intPatientAge); dest.writeLong(_idfsHumanAgeType); dest.writeLong(_idfsHumanGender); dest.writeLong(_idfsRegionCurrentResidence); dest.writeLong(_idfsRayonCurrentResidence); dest.writeLong(_idfsSettlementCurrentResidence); dest.writeString(_strBuilding); dest.writeString(_strHouse); dest.writeString(_strApartment); dest.writeString(_strStreetName); dest.writeString(_strPostCode); dest.writeString(_strHomePhone); dest.writeSerializable(_datOnSetDate); dest.writeLong(_idfsFinalState); dest.writeLong(_idfsHospitalizationStatus); dest.writeSerializable(_datNotificationDate); dest.writeString(_strSentByOffice); dest.writeString(_strSentByPerson); } }
#!/usr/bin/env bash
# Detect whether the Bazaar (bzr) VCS client is available on this system.
# Exit-code constants come from the shared metadata helpers.

script_dir="$(dirname "${BASH_SOURCE[0]}")"
# shellcheck source=metadata/templates/common.sh
source "$script_dir/templates/common.sh"

# Guard clause: bail out with the "not available" status when bzr is missing.
command -v bzr > /dev/null || exit "$DETECTION_NOT_AVAILABLE"

exit "$DETECTION_SUCCESS"

# vim: syntax=sh cc=80 tw=79 ts=4 sw=4 sts=4 et sr
#!/usr/bin/env bash # # Format the source code. # Usage string function usage() { scriptname=$(basename "$0") echo "$scriptname - source formatting utility" echo "usage: $scriptname [options]" echo " options:" echo " -v, --verbose Produce verbose output" echo " -h, --help Display this help message" } # Command-line arguments verbose=false while [ "$1" != "" ]; do param=$(echo "$1" | awk -F= '{print $1}') # value=$(echo "$1" | awk -F= '{print $2}') case $param in -v | --verbose) verbose=true ;; -h | --help) usage exit ;; *) echo "ERROR: unknown parameter '$param'" usage exit 1 ;; esac shift done # Install tools if necessary declare -ra tools=('cabal-fmt' 'ormolu') for tool_name in "${tools[@]}" do if ! stack exec which "$tool_name" > /dev/null then echo "Required tool $tool_name was not found; installing it using stack" stack build --copy-compiler-tool "$tool_name" fi done # Find cabal files declare -a cabal_files while IFS= read -r line; do cabal_files+=("$line") done < <(find . -name '*.cabal') readonly cabal_files # Find Haskell files declare -a haskell_files while IFS= read -r line; do haskell_files+=("$line") done < <(find . -name '*.hs' | grep --invert-match '\.stack-work') readonly haskell_files # Format cabal files if $verbose; then echo "Formatting cabal files:" for name in "${cabal_files[@]}"; do echo " $name" done fi stack exec cabal-fmt -- --tabular -i "${cabal_files[@]}" # Format Haskell files if $verbose; then echo "Formatting Haskell files:" for name in "${haskell_files[@]}"; do echo " $name" done fi stack exec ormolu -- --mode=inplace "${haskell_files[@]}"
// NOTE(review): mobx-state-tree regression tests for identifiers and references. The original line breaks were lost during extraction (several breaks below fall inside string literals and test bodies), so the code is preserved verbatim and only this header comment was added.
<gh_stars>1000+ import { reaction, autorun, isObservable, configure } from "mobx" import { types, getSnapshot, applySnapshot, onPatch, applyPatch, unprotect, detach, resolveIdentifier, getRoot, cast, SnapshotOut, IAnyModelType, Instance, SnapshotOrInstance, isAlive, destroy, castToReferenceSnapshot, tryReference, isValidReference, isStateTreeNode, addDisposer } from "../../src" test("it should support prefixed paths in maps", () => { const User = types.model({ id: types.identifier, name: types.string }) const UserStore = types.model({ user: types.reference(User), users: types.map(User) }) const store = UserStore.create({ user: "17", users: { "17": { id: "17", name: "Michel" }, "18": { id: "18", name: "Veria" } } }) unprotect(store) expect(store.users.get("17")!.name).toBe("Michel") expect(store.users.get("18")!.name).toBe("Veria") expect(store.user.name).toBe("Michel") store.user = store.users.get("18")! expect(store.user.name).toBe("Veria") store.users.get("18")!.name = "Noa" expect(store.user.name).toBe("Noa") expect(getSnapshot(store)).toEqual({ user: "18", users: { "17": { id: "17", name: "Michel" }, "18": { id: "18", name: "Noa" } } } as SnapshotOut<typeof store>) }) test("it should support prefixed paths in arrays", () => { const User = types.model({ id: types.identifier, name: types.string }) const UserStore = types.model({ user: types.reference(User), users: types.array(User) }) const store = UserStore.create({ user: "17", users: [ { id: "17", name: "Michel" }, { id: "18", name: "Veria" } ] }) unprotect(store) expect(store.users[0].name).toBe("Michel") expect(store.users[1].name).toBe("Veria") expect(store.user.name).toBe("Michel") store.user = store.users[1] expect(store.user.name).toBe("Veria") store.users[1].name = "Noa" expect(store.user.name).toBe("Noa") expect(getSnapshot(store)).toEqual({ user: "18", users: [ { id: "17", name: "Michel" }, { id: "18", name: "Noa" } ] } as SnapshotOut<typeof store>) }) if (process.env.NODE_ENV !== "production") { 
test("identifiers are required", () => { const Todo = types.model({ id: types.identifier }) expect(Todo.is({})).toBe(false) expect(Todo.is({ id: "x" })).toBe(true) expect(() => (Todo.create as any)()).toThrowError( " `undefined` is not assignable to type: `identifier` (Value is not a valid identifier, expected a string)" ) }) test("identifiers cannot be modified", () => { const Todo = types.model({ id: types.identifier }) const todo = Todo.create({ id: "x" }) unprotect(todo) expect(() => (todo.id = "stuff")).toThrowError( "[mobx-state-tree] Tried to change identifier from 'x' to 'stuff'. Changing identifiers is not allowed." ) expect(() => applySnapshot(todo, { id: "stuff" })).toThrowError( "[mobx-state-tree] Tried to change identifier from 'x' to 'stuff'. Changing identifiers is not allowed." ) }) } test("it should resolve refs during creation, when using path", () => { const values: number[] = [] const Book = types.model({ id: types.identifier, price: types.number }) const BookEntry = types .model({ book: types.reference(Book) }) .views((self) => ({ get price() { return self.book.price * 2 } })) const Store = types.model({ books: types.array(Book), entries: types.optional(types.array(BookEntry), []) }) const s = Store.create({ books: [{ id: "3", price: 2 }] }) unprotect(s) reaction( () => s.entries.reduce((a, e) => a + e.price, 0), (v) => values.push(v) ) s.entries.push({ book: castToReferenceSnapshot(s.books[0]) }) expect(s.entries[0].price).toBe(4) expect(s.entries.reduce((a, e) => a + e.price, 0)).toBe(4) const entry = BookEntry.create({ book: castToReferenceSnapshot(s.books[0]) }) // N.B. ref is initially not resolvable! 
s.entries.push(entry) expect(s.entries[1].price).toBe(4) expect(s.entries.reduce((a, e) => a + e.price, 0)).toBe(8) expect(values).toEqual([4, 8]) }) test("it should resolve refs over late types", () => { const Book = types.model({ id: types.identifier, price: types.number }) const BookEntry = types .model({ book: types.reference(types.late(() => Book)) }) .views((self) => ({ get price() { return self.book.price * 2 } })) const Store = types.model({ books: types.array(Book), entries: types.array(BookEntry) }) const s = Store.create({ books: [{ id: "3", price: 2 }] }) unprotect(s) s.entries.push({ book: castToReferenceSnapshot(s.books[0]) }) expect(s.entries[0].price).toBe(4) expect(s.entries.reduce((a, e) => a + e.price, 0)).toBe(4) }) test("it should resolve refs during creation, when using generic reference", () => { const values: number[] = [] const Book = types.model({ id: types.identifier, price: types.number }) const BookEntry = types .model({ book: types.reference(Book) }) .views((self) => ({ get price() { return self.book.price * 2 } })) const Store = types.model({ books: types.array(Book), entries: types.optional(types.array(BookEntry), []) }) const s = Store.create({ books: [{ id: "3", price: 2 }] }) unprotect(s) reaction( () => s.entries.reduce((a, e) => a + e.price, 0), (v) => values.push(v) ) s.entries.push({ book: castToReferenceSnapshot(s.books[0]) }) expect(s.entries[0].price).toBe(4) expect(s.entries.reduce((a, e) => a + e.price, 0)).toBe(4) const entry = BookEntry.create({ book: castToReferenceSnapshot(s.books[0]) }) // can refer to book, even when not part of tree yet expect(getSnapshot(entry)).toEqual({ book: "3" }) s.entries.push(entry) expect(values).toEqual([4, 8]) }) test("identifiers should support subtypes of types.string and types.number", () => { const M = types.model({ id: types.refinement(types.identifierNumber, (n) => n > 5) }) expect(M.is({})).toBe(false) expect(M.is({ id: "test" })).toBe(false) expect(M.is({ id: "6" })).toBe(false) 
expect(M.is({ id: "4" })).toBe(false) expect(M.is({ id: 6 })).toBe(true) expect(M.is({ id: 4 })).toBe(false) const S = types.model({ mies: types.map(M), ref: types.reference(M) }) const s = S.create({ mies: { "7": { id: 7 } }, ref: "7" }) expect(s.mies.get("7")).toBeTruthy() expect(s.ref).toBe(s.mies.get("7")) }) test("string identifiers should not accept numbers", () => { const F = types.model({ id: types.identifier }) expect(F.is({ id: "4" })).toBe(true) expect(F.is({ id: 4 })).toBe(false) const F2 = types.model({ id: types.identifier }) expect(F2.is({ id: "4" })).toBe(true) expect(F2.is({ id: 4 })).toBe(false) }) test("122 - identifiers should support numbers as well", () => { const F = types.model({ id: types.identifierNumber }) expect( F.create({ id: 3 }).id ).toBe(3) expect(F.is({ id: 4 })).toBe(true) expect(F.is({ id: "4" })).toBe(false) expect(F.is({ id: "bla" })).toBe(false) }) test("self reference with a late type", () => { const Book = types.model("Book", { id: types.identifier, genre: types.string, reference: types.reference(types.late((): IAnyModelType => Book)) }) const Store = types .model("Store", { books: types.array(Book) }) .actions((self) => { function addBook(book: SnapshotOrInstance<typeof Book>) { self.books.push(book) } return { addBook } }) const s = Store.create({ books: [{ id: "1", genre: "thriller", reference: "" }] }) const book2 = Book.create({ id: "2", genre: "romance", reference: castToReferenceSnapshot(s.books[0]) }) s.addBook(book2) expect((s.books[1].reference as Instance<typeof Book>).genre).toBe("thriller") }) test("when applying a snapshot, reference should resolve correctly if value added after", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.reference(Box), boxes: types.array(Box) }) expect(() => Factory.create({ selected: 1, boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) ).not.toThrow() }) test("it should fail when reference 
snapshot is ambiguous", () => { const Box = types.model("Box", { id: types.identifierNumber, name: types.string }) const Arrow = types.model("Arrow", { id: types.identifierNumber, name: types.string }) const BoxOrArrow = types.union(Box, Arrow) const Factory = types.model({ selected: types.reference(BoxOrArrow), boxes: types.array(Box), arrows: types.array(Arrow) }) const store = Factory.create({ selected: 2, boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ], arrows: [{ id: 2, name: "arrow" }] }) expect(() => { // tslint:disable-next-line:no-unused-expression store.selected // store.boxes[1] // throws because it can't know if you mean a box or an arrow! }).toThrowError( "[mobx-state-tree] Cannot resolve a reference to type '(Box | Arrow)' with id: '2' unambigously, there are multiple candidates: /boxes/1, /arrows/0" ) unprotect(store) // first update the reference, than create a new matching item! Ref becomes ambigous now... store.selected = 1 as any // valid assignment expect(store.selected).toBe(store.boxes[0]) // unambigous identifier let err!: Error autorun(() => store.selected, { onError(e) { err = e } }) expect(store.selected).toBe(store.boxes[0]) // unambigous identifier store.arrows.push({ id: 1, name: "oops" }) expect(err.message).toBe( "[mobx-state-tree] Cannot resolve a reference to type '(Box | Arrow)' with id: '1' unambigously, there are multiple candidates: /boxes/0, /arrows/1" ) }) test("it should support array of references", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.array(types.reference(Box)), boxes: types.array(Box) }) const store = Factory.create({ selected: [], boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) unprotect(store) expect(() => { store.selected.push(store.boxes[0]) }).not.toThrow() expect(getSnapshot(store.selected)).toEqual([1]) expect(() => { store.selected.push(store.boxes[1]) }).not.toThrow() 
expect(getSnapshot(store.selected)).toEqual([1, 2]) }) test("it should restore array of references from snapshot", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.array(types.reference(Box)), boxes: types.array(Box) }) const store = Factory.create({ selected: [1, 2], boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) unprotect(store) expect(store.selected[0] === store.boxes[0]).toEqual(true) expect(store.selected[1] === store.boxes[1]).toEqual(true) }) test("it should support map of references", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.map(types.reference(Box)), boxes: types.array(Box) }) const store = Factory.create({ selected: {}, boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) unprotect(store) expect(() => { store.selected.set("from", store.boxes[0]) }).not.toThrow() expect(getSnapshot(store.selected)).toEqual({ from: 1 }) expect(() => { store.selected.set("to", store.boxes[1]) }).not.toThrow() expect(getSnapshot(store.selected)).toEqual({ from: 1, to: 2 }) }) test("it should restore map of references from snapshot", () => { const Box = types.model({ id: types.identifierNumber, name: types.string }) const Factory = types.model({ selected: types.map(types.reference(Box)), boxes: types.array(Box) }) const store = Factory.create({ selected: { from: 1, to: 2 }, boxes: [ { id: 1, name: "hello" }, { id: 2, name: "world" } ] }) unprotect(store) expect(store.selected.get("from") === store.boxes[0]).toEqual(true) expect(store.selected.get("to") === store.boxes[1]).toEqual(true) }) test("it should support relative lookups", () => { const Node = types.model({ id: types.identifierNumber, children: types.optional(types.array(types.late((): IAnyModelType => Node)), []) }) const root = Node.create({ id: 1, children: [ { id: 2, children: [ { id: 4 } ] }, { id: 3 } ] }) 
unprotect(root) expect(getSnapshot(root)).toEqual({ id: 1, children: [ { id: 2, children: [{ id: 4, children: [] }] }, { id: 3, children: [] } ] }) expect(resolveIdentifier(Node, root, 1)).toBe(root) expect(resolveIdentifier(Node, root, 4)).toBe(root.children[0].children[0]) expect(resolveIdentifier(Node, root.children[0].children[0], 3)).toBe(root.children[1]) const n2 = detach(root.children[0]) unprotect(n2) expect(resolveIdentifier(Node, n2, 2)).toBe(n2) expect(resolveIdentifier(Node, root, 2)).toBe(undefined) expect(resolveIdentifier(Node, root, 4)).toBe(undefined) expect(resolveIdentifier(Node, n2, 3)).toBe(undefined) expect(resolveIdentifier(Node, n2, 4)).toBe(n2.children[0]) expect(resolveIdentifier(Node, n2.children[0], 2)).toBe(n2) const n5 = Node.create({ id: 5 }) expect(resolveIdentifier(Node, n5, 4)).toBe(undefined) n2.children.push(n5) expect(resolveIdentifier(Node, n5, 4)).toBe(n2.children[0]) expect(resolveIdentifier(Node, n2.children[0], 5)).toBe(n5) }) test("References are non-nullable by default", () => { const Todo = types.model({ id: types.identifierNumber }) const Store = types.model({ todo: types.maybe(Todo), ref: types.reference(Todo), maybeRef: types.maybe(types.reference(Todo)) }) expect(Store.is({})).toBe(false) expect(Store.is({ ref: 3 })).toBe(true) expect(Store.is({ ref: null })).toBe(false) expect(Store.is({ ref: undefined })).toBe(false) expect(Store.is({ ref: 3, maybeRef: 3 })).toBe(true) expect(Store.is({ ref: 3, maybeRef: undefined })).toBe(true) let store = Store.create({ todo: { id: 3 }, ref: 3 }) expect(store.ref).toBe(store.todo) expect(store.maybeRef).toBe(undefined) store = Store.create({ todo: { id: 3 }, ref: 4 }) unprotect(store) if (process.env.NODE_ENV !== "production") { expect(store.maybeRef).toBe(undefined) expect(() => store.ref).toThrow( "[mobx-state-tree] Failed to resolve reference '4' to type 'AnonymousModel' (from node: /ref)" ) store.maybeRef = 3 as any // valid assignment expect(store.maybeRef).toBe(store.todo) 
store.maybeRef = 4 as any // valid assignment expect(() => store.maybeRef).toThrow( "[mobx-state-tree] Failed to resolve reference '4' to type 'AnonymousModel' (from node: /maybeRef)" ) store.maybeRef = undefined expect(store.maybeRef).toBe(undefined) expect(() => ((store as any).ref = undefined)).toThrow(/Error while converting/) } }) test("References are described properly", () => { const Todo = types.model({ id: types.identifierNumber }) const Store = types.model({ todo: types.maybe(Todo), ref: types.reference(Todo), maybeRef: types.maybe(types.reference(Todo)) }) expect(Store.describe()).toBe( "{ todo: ({ id: identifierNumber } | undefined?); ref: reference(AnonymousModel); maybeRef: (reference(AnonymousModel) | undefined?) }" ) }) test("References in recursive structures", () => { const Folder = types.model("Folder", { id: types.identifierNumber, name: types.string, files: types.array(types.string) }) const Tree = types .model("Tree", { // sadly, this becomes any, and further untypeable... 
children: types.array(types.late((): IAnyModelType => Tree)), data: types.maybeNull(types.reference(Folder)) }) .actions((self) => { function addFolder(data: SnapshotOrInstance<typeof Folder>) { const folder3 = Folder.create(data) getRoot<typeof Storage>(self).putFolderHelper(folder3) self.children.push( Tree.create({ data: castToReferenceSnapshot(folder3), children: [] }) ) } return { addFolder } }) const Storage = types .model("Storage", { objects: types.map(Folder), tree: Tree }) .actions((self) => ({ putFolderHelper(aFolder: SnapshotOrInstance<typeof Folder>) { self.objects.put(aFolder) } })) const store = Storage.create({ objects: {}, tree: { children: [], data: null } }) const folder = { id: 1, name: "Folder 1", files: ["a.jpg", "b.jpg"] } store.tree.addFolder(folder) expect(getSnapshot(store)).toEqual({ objects: { "1": { files: ["a.jpg", "b.jpg"], id: 1, name: "Folder 1" } }, tree: { children: [ { children: [], data: 1 } ], data: null } }) expect(store.objects.get("1")).toBe(store.tree.children[0].data) const folder2 = { id: 2, name: "Folder 2", files: ["c.jpg", "d.jpg"] } store.tree.children[0].addFolder(folder2) expect(getSnapshot(store)).toEqual({ objects: { "1": { files: ["a.jpg", "b.jpg"], id: 1, name: "Folder 1" }, "2": { files: ["c.jpg", "d.jpg"], id: 2, name: "Folder 2" } }, tree: { children: [ { children: [ { children: [], data: 2 } ], data: 1 } ], data: null } }) expect(store.objects.get("1")).toBe(store.tree.children[0].data) expect(store.objects.get("2")).toBe(store.tree.children[0].children[0].data) }) test("it should applyPatch references in array", () => { const Item = types.model("Item", { id: types.identifier, name: types.string }) const Folder = types .model("Folder", { id: types.identifier, objects: types.map(Item), hovers: types.array(types.reference(Item)) }) .actions((self) => { function addObject(anItem: typeof Item.Type) { self.objects.put(anItem) } function addHover(anItem: typeof Item.Type) { self.hovers.push(anItem) } function 
removeHover(anItem: typeof Item.Type) { self.hovers.remove(anItem) } return { addObject, addHover, removeHover } }) const folder = Folder.create({ id: "folder 1", objects: {}, hovers: [] }) folder.addObject({ id: "item 1", name: "item name 1" }) const item = folder.objects.get("item 1")! const snapshot = getSnapshot(folder) const newStore = Folder.create(snapshot) onPatch(folder, (data) => { applyPatch(newStore, data) }) folder.addHover(item) expect(getSnapshot(newStore)).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: ["item 1"] }) folder.removeHover(item) expect(getSnapshot(newStore)).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: [] }) }) test("it should applySnapshot references in array", () => { const Item = types.model("Item", { id: types.identifier, name: types.string }) const Folder = types.model("Folder", { id: types.identifier, objects: types.map(Item), hovers: types.array(types.reference(Item)) }) const folder = Folder.create({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: ["item 1"] }) const snapshot = JSON.parse(JSON.stringify(getSnapshot(folder))) expect(snapshot).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: ["item 1"] }) snapshot.hovers = [] applySnapshot(folder, snapshot) expect(getSnapshot(folder)).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: [] }) snapshot.hovers = ["item 1"] applySnapshot(folder, snapshot) expect(getSnapshot(folder)).toEqual({ id: "folder 1", objects: { "item 1": { id: "item 1", name: "item name 1" } }, hovers: ["item 1"] }) }) test("array of references should work fine", () => { const B = types.model("Block", { id: types.identifier }) const S = types .model("Store", { blocks: types.array(B), blockRefs: types.array(types.reference(B)) }) .actions((self) => { return { order() { const res = 
self.blockRefs.slice() self.blockRefs.replace([res[1], res[0]]) } } }) const a = S.create({ blocks: [{ id: "1" }, { id: "2" }], blockRefs: ["1", "2"] }) a.order() expect(a.blocks[0].id).toBe("1") expect(a.blockRefs[0].id).toBe("2") }) test("should serialize references correctly", () => { const M = types.model({ id: types.identifierNumber }) const S = types.model({ mies: types.map(M), ref: types.maybe(types.reference(M)) }) const s = S.create({ mies: { 7: { id: 7 } } }) unprotect(s) expect(Array.from(s.mies.keys())).toEqual(["7"]) expect(s.mies.get("7")!.id).toBe(7) expect(s.mies.get(7 as any)).toBe(s.mies.get("7")) // maps automatically normalizes the key s.mies.put({ id: 8 }) expect(Array.from(s.mies.keys())).toEqual(["7", "8"]) s.ref = 8 as any expect(s.ref!.id).toBe(8) // resolved from number expect(getSnapshot(s).ref).toBe(8) // ref serialized as number s.ref = "7" as any // resolved from string expect(s.ref!.id).toBe(7) // resolved from string expect(getSnapshot(s).ref).toBe("7") // ref serialized as string (number would be ok as well) s.ref = s.mies.get("8")! 
expect(s.ref.id).toBe(8) // resolved from instance expect(getSnapshot(s).ref).toBe(8) // ref serialized as number s.ref = "9" as any // unresolvable expect(getSnapshot(s).ref).toBe("9") // snapshot preserved as it was unresolvable s.mies.set(9 as any, { id: 9 }) expect(Array.from(s.mies.keys())).toEqual(["7", "8", "9"]) expect(s.mies.get("9")!.id).toBe(9) expect(getSnapshot(s).ref).toBe("9") // ref serialized as string (number would be ok as well) }) test("#1052 - Reference returns destroyed model after subtree replacing", () => { const Todo = types.model("Todo", { id: types.identifierNumber, title: types.string }) const Todos = types.model("Todos", { items: types.array(Todo) }) const Store = types .model("Store", { todos: Todos, last: types.maybe(types.reference(Todo)), lastWithId: types.maybe(types.reference(Todo)), counter: -1 }) .actions((self) => ({ load() { self.counter++ self.todos = Todos.create({ items: [ { id: 1, title: "Get Coffee " + self.counter }, { id: 2, title: "Write simpler code " + self.counter } ] }) }, select(todo: Instance<typeof Todo>) { self.last = todo self.lastWithId = todo.id as any } })) const store = Store.create({ todos: {} }) store.load() expect(store.last).toBe(undefined) expect(store.lastWithId).toBe(undefined) const reactionFn = jest.fn() const reactionDisposer = reaction(() => store.last, reactionFn) const reactionFn2 = jest.fn() const reactionDisposer2 = reaction(() => store.lastWithId, reactionFn2) try { store.select(store.todos.items[0]) expect(isAlive(store.last!)).toBe(true) expect(isObservable(store.last)).toBe(true) expect(reactionFn).toHaveBeenCalledTimes(1) expect(store.last!.title).toBe("Get Coffee 0") expect(isAlive(store.lastWithId!)).toBe(true) expect(isObservable(store.lastWithId)).toBe(true) expect(reactionFn2).toHaveBeenCalledTimes(1) expect(store.lastWithId!.title).toBe("Get Coffee 0") store.load() expect(isAlive(store.last!)).toBe(true) expect(isObservable(store.last)).toBe(true) 
expect(reactionFn).toHaveBeenCalledTimes(2) expect(store.last!.title).toBe("Get Coffee 1") expect(isAlive(store.lastWithId!)).toBe(true) expect(isObservable(store.lastWithId)).toBe(true) expect(reactionFn2).toHaveBeenCalledTimes(2) expect(store.lastWithId!.title).toBe("Get Coffee 1") } finally { reactionDisposer() reactionDisposer2() } }) test("#1080 - does not crash trying to resolve a reference to a destroyed+recreated model", () => { configure({ useProxies: "never" }) const Branch = types.model("Branch", { id: types.identifierNumber, name: types.string }) const User = types.model("User", { id: types.identifierNumber, email: types.maybeNull(types.string), branches: types.maybeNull(types.array(Branch)) }) const BranchStore = types .model("BranchStore", { activeBranch: types.maybeNull(types.reference(Branch)) }) .actions((self) => ({ setActiveBranch(branchId: any) { self.activeBranch = branchId } })) const RootStore = types .model("RootStore", { user: types.maybeNull(User), branchStore: types.maybeNull(BranchStore) }) .actions((self) => ({ setUser(snapshot: typeof userSnapshot) { self.user = cast(snapshot) }, setBranchStore(snapshot: typeof branchStoreSnapshot) { self.branchStore = cast(snapshot) }, destroyUser() { destroy(self.user!) }, destroyBranchStore() { destroy(self.branchStore!) 
} })) const userSnapshot = { id: 1, email: "<EMAIL>", branches: [ { id: 1, name: "Branch 1" }, { id: 2, name: "Branch 2" } ] } const branchStoreSnapshot = {} const rootStore = RootStore.create({ user: userSnapshot, branchStore: branchStoreSnapshot }) rootStore.branchStore!.setActiveBranch(1) expect(rootStore.branchStore!.activeBranch).toEqual({ id: 1, name: "Branch 1" }) rootStore.destroyUser() rootStore.destroyBranchStore() rootStore.setUser(userSnapshot) rootStore.setBranchStore(branchStoreSnapshot) rootStore.branchStore!.setActiveBranch(2) expect(rootStore.branchStore!.activeBranch).toEqual({ id: 2, name: "Branch 2" }) }) test("tryReference / isValidReference", () => { const Todo = types.model({ id: types.identifier }) const TodoStore = types .model({ todos: types.array(Todo), ref1: types.maybe(types.reference(Todo)), ref2: types.maybeNull(types.reference(Todo)), ref3: types.maybe(types.reference(Todo)) }) .actions((self) => ({ clearRef3() { self.ref3 = undefined }, afterCreate() { addDisposer( self, reaction( () => isValidReference(() => self.ref3), (valid) => { if (!valid) { this.clearRef3() } }, { fireImmediately: true } ) ) } })) const store = TodoStore.create({ todos: [{ id: "1" }, { id: "2" }, { id: "3" }] }) expect(tryReference(() => store.ref1)).toBeUndefined() expect(tryReference(() => store.ref2)).toBeUndefined() expect(isValidReference(() => store.ref1)).toBe(false) expect(isValidReference(() => store.ref2)).toBe(false) unprotect(store) store.ref1 = store.todos[0] store.ref2 = store.todos[1] store.ref3 = store.todos[2] expect(isStateTreeNode(store.ref1)).toBe(true) expect(isStateTreeNode(store.ref2)).toBe(true) expect(tryReference(() => store.ref1)).toBeDefined() expect(tryReference(() => store.ref2)).toBeDefined() expect(isValidReference(() => store.ref1)).toBe(true) expect(isValidReference(() => store.ref2)).toBe(true) store.todos = cast([]) expect(tryReference(() => store.ref1)).toBeUndefined() expect(tryReference(() => store.ref2)).toBeUndefined() 
expect(isValidReference(() => store.ref1)).toBe(false) expect(isValidReference(() => store.ref2)).toBe(false) // the reaction should have triggered and set this to undefined expect(store.ref3).toBe(undefined) expect(() => tryReference(() => 5 as any)).toThrowError( "The reference to be checked is not one of node, null or undefined" ) expect(() => isValidReference(() => 5 as any)).toThrowError( "The reference to be checked is not one of node, null or undefined" ) }) test("#1162 - reference to union", () => { const M1 = types.model({ id: types.identifier, type: types.string, sum: types.string }) const M2 = types.model({ id: types.identifier, type: types.string, data: types.string }) const AnyModel = types.union( { dispatcher(snapshot) { switch (snapshot.type) { case "type1": return M1 case "type2": return M2 default: throw new Error() } } }, M1, M2 ) const Store = types.model({ arr: types.array(AnyModel), selected: types.reference(AnyModel) }) const s = Store.create({ selected: "num1", arr: [ { id: "num1", type: "type1", sum: "1" }, { id: "num2", type: "type1", sum: "2" }, { id: "num3", type: "type2", data: "3" } ] }) unprotect(s) expect(s.selected.id).toBe("num1") expect(s.selected.type).toBe("type1") expect((s.selected as Instance<typeof M1>).sum).toBe("1") s.selected = "num2" as any expect(s.selected.id).toBe("num2") expect(s.selected.type).toBe("type1") expect((s.selected as Instance<typeof M1>).sum).toBe("2") s.selected = "num3" as any expect(s.selected.id).toBe("num3") expect(s.selected.type).toBe("type2") expect((s.selected as Instance<typeof M2>).data).toBe("3") })
# NOTE(review): legacy Salt test module from the Python 2 era (uses sys.maxint and assertRaisesRegexp, both removed in Python 3). Original line breaks were lost during extraction (some fall inside string/regex literals), so the code below is preserved verbatim and only this header comment was added.
<filename>tests/unit/utils/warnings_test.py # -*- coding: utf-8 -*- ''' :codeauthor: :email:`<NAME> (<EMAIL>)` :copyright: © 2013 by the SaltStack Team, see AUTHORS for more details :license: Apache 2.0, see LICENSE for more details. tests.unit.utils.warnings_test ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Test ``salt.utils.warn_until`` and ``salt.utils.kwargs_warn_until`` ''' # Import python libs import sys import warnings # Import Salt Testing libs from salttesting import TestCase from salttesting.helpers import ensure_in_syspath ensure_in_syspath('../../') # Import salt libs from salt.utils import warn_until, kwargs_warn_until from salt.version import SaltStackVersion class WarnUntilTestCase(TestCase): def test_warn_until_warning_raised(self): # We *always* want *all* warnings thrown on this module warnings.filterwarnings('always', '', DeprecationWarning, __name__) def raise_warning(_version_info_=(0, 16, 0)): warn_until( (0, 17), 'Deprecation Message!', _version_info_=_version_info_ ) def raise_named_version_warning(_version_info_=(0, 16, 0)): warn_until( 'Hydrogen', 'Deprecation Message!', _version_info_=_version_info_ ) # raise_warning should show warning until version info is >= (0, 17) with warnings.catch_warnings(record=True) as recorded_warnings: raise_warning() self.assertEqual( 'Deprecation Message!', str(recorded_warnings[0].message) ) # raise_warning should show warning until version info is >= (0, 17) with warnings.catch_warnings(record=True) as recorded_warnings: raise_named_version_warning() self.assertEqual( 'Deprecation Message!', str(recorded_warnings[0].message) ) # the deprecation warning is not issued because we passed # _dont_call_warning with warnings.catch_warnings(record=True) as recorded_warnings: warn_until( (0, 17), 'Foo', _dont_call_warnings=True, _version_info_=(0, 16) ) self.assertEqual(0, len(recorded_warnings)) # Let's set version info to (0, 17), a RuntimeError should be raised with self.assertRaisesRegexp( RuntimeError, r'The warning 
triggered on filename \'(.*)warnings_test.py\', ' r'line number ([\d]+), is supposed to be shown until version ' r'\'0.17.0\' is released. Current version is now \'0.17.0\'. ' r'Please remove the warning.'): raise_warning(_version_info_=(0, 17, 0)) # Let's set version info to (0, 17), a RuntimeError should be raised with self.assertRaisesRegexp( RuntimeError, r'The warning triggered on filename \'(.*)warnings_test.py\', ' r'line number ([\d]+), is supposed to be shown until version ' r'\'Hydrogen((.*))\' is released. Current version is now ' r'\'([\d.]+)\'. Please remove the warning.'): raise_named_version_warning(_version_info_=(sys.maxint, 16, 0)) # Even though we're calling warn_until, we pass _dont_call_warnings # because we're only after the RuntimeError with self.assertRaisesRegexp( RuntimeError, r'The warning triggered on filename \'(.*)warnings_test.py\', ' r'line number ([\d]+), is supposed to be shown until version ' r'\'0.17.0\' is released. Current version is now \'0.17.0\'. ' r'Please remove the warning.'): warn_until( (0, 17), 'Foo', _dont_call_warnings=True ) with self.assertRaisesRegexp( RuntimeError, r'The warning triggered on filename \'(.*)warnings_test.py\', ' r'line number ([\d]+), is supposed to be shown until version ' r'\'Hydrogen((.*))\' is released. Current version is now ' r'\'([\d.]+)\'. 
Please remove the warning.'): warn_until( 'Hydrogen', 'Foo', _dont_call_warnings=True, _version_info_=(sys.maxint, 16, 0) ) # version on the deprecation message gets properly formatted with warnings.catch_warnings(record=True) as recorded_warnings: vrs = SaltStackVersion.from_name('Helium') warn_until( 'Helium', 'Deprecation Message until {version}!', _version_info_=(vrs.major - 1, 0) ) self.assertEqual( 'Deprecation Message until {0}!'.format(vrs.formatted_version), str(recorded_warnings[0].message) ) def test_kwargs_warn_until_warning_raised(self): # We *always* want *all* warnings thrown on this module warnings.filterwarnings('always', '', DeprecationWarning, __name__) def raise_warning(**kwargs): _version_info_ = kwargs.pop('_version_info_', (0, 16, 0)) kwargs_warn_until( kwargs, (0, 17), _version_info_=_version_info_ ) # raise_warning({...}) should show warning until version info is >= (0, 17) with warnings.catch_warnings(record=True) as recorded_warnings: raise_warning(foo=42) # with a kwarg self.assertEqual( 'The following parameter(s) have been deprecated and ' 'will be removed in \'0.17.0\': \'foo\'.', str(recorded_warnings[0].message) ) # With no **kwargs, should not show warning until version info is >= (0, 17) with warnings.catch_warnings(record=True) as recorded_warnings: kwargs_warn_until( {}, # no kwargs (0, 17), _version_info_=(0, 16, 0) ) self.assertEqual(0, len(recorded_warnings)) # Let's set version info to (0, 17), a RuntimeError should be raised # regardless of whether or not we pass any **kwargs. with self.assertRaisesRegexp( RuntimeError, r'The warning triggered on filename \'(.*)warnings_test.py\', ' r'line number ([\d]+), is supposed to be shown until version ' r'\'0.17.0\' is released. Current version is now \'0.17.0\'. 
' r'Please remove the warning.'): raise_warning(_version_info_=(0, 17)) # no kwargs with self.assertRaisesRegexp( RuntimeError, r'The warning triggered on filename \'(.*)warnings_test.py\', ' r'line number ([\d]+), is supposed to be shown until version ' r'\'0.17.0\' is released. Current version is now \'0.17.0\'. ' r'Please remove the warning.'): raise_warning(bar='baz', qux='quux', _version_info_=(0, 17)) # some kwargs if __name__ == '__main__': from integration import run_tests run_tests(WarnUntilTestCase, needs_daemon=False)
// Line colors for the Seoul metro, keyed by line name ("1호선" = Line 1, etc.).
const MetroColors = Object.freeze({
  "1호선": "#0D3692",
  "2호선": "#33A23D",
  "3호선": "#FE5B10",
  "4호선": "#32A1C8",
  "5호선": "#8B50A4",
  "6호선": "#C55C1D",
  "7호선": "#54640D",
  "8호선": "#F51361",
  "9호선": "#AA9872"
});

// Thin wrappers around d3's uniform random generators.
class Random {
  // Uniform float in [0, 1).
  static generate() {
    return d3.randomUniform()();
  }

  // Uniform integer in [min, max).
  static rangeInt(min, max) {
    return Math.floor(d3.randomUniform(min, max)());
  }

  // Uniform float in [min, max).
  static range(min, max) {
    return d3.randomUniform(min, max)();
  }
}

// Conversions between 1-D array indices and 2-D row-major grid coordinates.
class Index {
  // 1-D index -> [x, y] for a grid of the given width.
  static convert1DTo2D(index, width) {
    // Math.trunc matches the old parseInt() truncation without the
    // number -> string round trip.
    return [Math.trunc(index % width), Math.trunc(index / width)];
  }

  // [x, y] -> 1-D row-major index.
  static convert2Dto1D(x, y, width) {
    return y * width + x;
  }

  // True when (x, y) lies inside a height x width grid.
  static isInBound(x, y, height, width) {
    return x >= 0 && y >= 0 && y < height && x < width;
  }
}

// A metro station: display name, grid coordinate, line membership, adjacency
// list, plus the mutable A* bookkeeping fields (gScore/hScore/parent) that
// findPath() writes into the node directly.
class Node {
  constructor(name, id = undefined, metroLine = undefined,
              coord = { x: 0, y: 0 }, pathCoord = { x: 0, y: 0 }) {
    this._name = name;
    this._coord = coord;          // grid position {x, y}
    this._pathCoord = pathCoord;  // control point used when drawing curved edges
    this._id = id;
    this._metroLine = metroLine;  // key into MetroColors
    this._neighbors = [];         // entries look like {node, congestion} (see addNeighbor callers)
    this._gScore = 0;             // A* cost-from-start
    this._hScore = 0;             // A* heuristic-to-goal
    this._parent = undefined;     // A* back-pointer
  }

  get name() { return this._name; }
  set name(newName) { this._name = newName; }

  get id() { return this._id; }
  set id(newId) { this._id = newId; }

  get coord() { return this._coord; }
  // Accepts an [x, y] pair (array, not an object).
  set coord(newCoord) {
    this._coord.x = newCoord[0];
    this._coord.y = newCoord[1];
  }

  get pathCoord() { return this._pathCoord; }

  get metroLine() { return this._metroLine; }
  set metroLine(newMetroLine) { this._metroLine = newMetroLine; }

  // Color of this node's line; undefined for unknown line names.
  get metroColor() { return MetroColors[this._metroLine]; }

  get neighbors() { return this._neighbors; }
  addNeighbor(newNeighborObj) { this._neighbors.push(newNeighborObj); }

  get gScore() { return this._gScore; }
  set gScore(newGScore) { this._gScore = newGScore; }

  get hScore() { return this._hScore; }
  set hScore(newHScore) { this._hScore = newHScore; }

  // A* priority: cost so far plus heuristic.
  get fScore() { return this._gScore + this._hScore; }

  get parent() { return this._parent; }
  set parent(newParent) { this._parent = newParent; }
}

// Singleton that owns the SVG scene graph under #metro and renders the grid,
// lines, stations, labels, congestion overlays and animated path "jams".
// Constructing it a second time returns the first instance.
class Renderer {
  static instance;

  constructor() {
    if (Renderer.instance) return Renderer.instance;
    let self = this;
    this._gridSize = 50;            // pixels per grid cell
    this._nodes = [];
    this._gridData = [];
    this._isEditMode = false;
    this._isPathMode = false;
    this._onClick = null;           // selection callback, see setSelectionCallback()
    this._currentSelection = null;
    this._neighborsOfNodes = [];    // deduplicated edges: {pair: [a, b], congestion}
    this._zoom = d3.zoom().scaleExtent([0.3, 4]).translateExtent([[0, 0], [5000, 3500]]);
    // Converts grid-space points to pixel-space bundled curves.
    this._lineGenerator = d3.line()
      .x(function (d) { return d.x * self._gridSize; })
      .y(function (d) { return d.y * self._gridSize; })
      .curve(d3.curveBundle.beta(1));
    // Congestion [0, 1] -> cool-to-hot color ramp.
    this._congestionColors = d3.scaleLinear()
      .range(["#247BA0", "#70C1B3", "#B2DBBF", "#F3FFBD", "#FF1654"])
      .domain([0.0, 0.1, 0.2, 0.4, 1.0]);
    // Congestion [0, 1] -> number of animated jam strokes (power curve).
    this._lineJamExponent = d3.scalePow().exponent(2.5).domain([0, 1]).range([0, 100]);
    this._svgContainer = d3
      .select("#metro")
      .append("svg")
      .attr("width", "100%")
      .attr("height", "100%")
      .call(this._zoom.transform, d3.zoomIdentity.translate(0, 0).scale(0.4))
      .call(this._zoom.on("zoom", function () {
        self._svgContainer.attr("transform", d3.event.transform);
      }))
      .on("contextmenu", function (d) { d3.event.preventDefault(); })
      .append("g");
    this._svgContainer.call(this._zoom.transform, d3.zoomIdentity.translate(0, 0).scale(0.4));
    // Layer order (bottom to top): background, grid, jams, lines, nodes,
    // inner node circles, label backgrounds, labels.
    this._svgBackgroundGroup = this._svgContainer.append("g");
    this._svgGridGroup = this._svgContainer.append("g");
    this._svgLineJamGroup = this._svgContainer.append("g");
    this._svgLineGroup = this._svgContainer.append("g");
    this._svgNodeGroup = this._svgContainer.append("g");
    this._svgInsideNodeGroup = this._svgContainer.append("g");
    this._svgNodeNameBackgroundGroup = this._svgContainer.append("g");
    this._svgNodeNameGroup = this._svgContainer.append("g");
    // Invisible background rect: clicking empty space clears the selection.
    this._svgBackgroundGroup
      .append("rect")
      .attr("x", 0)
      .attr("y", 0)
      .attr("width", 100 * self._gridSize)
      .attr("height", 70 * self._gridSize)
      .attr("fill-opacity", "0")
      .on("click", function (d) {
        self._currentSelection = null;
        if (self._onClick && typeof (self._onClick) === "function") self._onClick(self._currentSelection);
      });
    // Default selection callback just logs; callers override via setSelectionCallback().
    this.setSelectionCallback(function callback(selection) {
      console.log(selection);
    });
    Renderer.instance = this;
  }

  get nodes() { return this._nodes; }

  // Stores the node list and rebuilds the deduplicated edge list
  // (_neighborsOfNodes), skipping reverse duplicates, self-loops,
  // and neighbors that are not part of the supplied node set.
  set nodes(newNodes) {
    this._nodes = newNodes;
    this._neighborsOfNodes = [];
    for (let i = 0; i < newNodes.length; i++) {
      for (let j = 0; j < newNodes[i].neighbors.length; j++) {
        if (this._neighborsOfNodes.find(neighbor =>
            neighbor.pair[0] === newNodes[i].neighbors[j].node &&
            neighbor.pair[1] === newNodes[i])) continue;   // reverse edge already present
        if (newNodes[i].name === newNodes[i].neighbors[j].node.name) continue;  // self-loop
        if (!this._nodes.find(node => node === newNodes[i].neighbors[j].node)) continue;  // dangling
        this._neighborsOfNodes.push({
          "pair": [newNodes[i], newNodes[i].neighbors[j].node],
          "congestion": newNodes[i].neighbors[j].congestion
        });
      }
    }
  }

  get gridSize() { return this._gridSize; }
  set gridSize(newGridSize) { this._gridSize = newGridSize; }

  // Draws the background grid lines; grid geometry is computed once and cached.
  renderGrid(width, height) {
    let self = this;
    if (!this._gridData.length) {
      this._gridData = [];
      for (let x = 0; x < width; x += 1) {
        this._gridData.push([{ x: x, y: 0 }, { x: x, y: height }]);
      }
      for (let y = 0; y < height; y += 1) {
        this._gridData.push([{ x: 0, y: y }, { x: width, y: y }]);
      }
    }
    let svgGrids = this._svgGridGroup
      .selectAll("path")
      .data(this._gridData);
    svgGrids.exit().remove();
    svgGrids
      .enter()
      .append("path")
      .attr("d", function (grid) { return self._lineGenerator([grid[0], grid[1]]); })
      .classed("grid", true);
  }

  // Draws one curved path per edge. Exiting edges slide down one cell while
  // fading out; entering edges slide in from one cell above.
  renderMetroLines() {
    let self = this;
    let svgLines = this._svgLineGroup
      .selectAll("path")
      .data(this._neighborsOfNodes);
    svgLines
      .exit()
      .style("opacity", 1)
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("d", function (neighbor) {
        // Deep-copy the coords so the shared node objects are not mutated.
        let lineCoords = JSON.parse(JSON.stringify(
          [neighbor.pair[0].coord, neighbor.pair[0].pathCoord, neighbor.pair[1].coord]));
        for (let i = 0; i < lineCoords.length; i++) {
          lineCoords[i].y = lineCoords[i].y + 1;
        }
        return self._lineGenerator(lineCoords);
      })
      .style("opacity", 0)
      .remove();
    svgLines
      .transition()
      .duration(500)
      .attr("d", function (neighbor) {
        return self._lineGenerator(
          [neighbor.pair[0].coord, neighbor.pair[0].pathCoord, neighbor.pair[1].coord]);
      })
      .attr("stroke", function (neighbor) { return neighbor.pair[0].metroColor; });
    svgLines
      .enter()
      .append("path")
      .classed("line", true)
      .attr("stroke", function (neighbor) { return neighbor.pair[0].metroColor; })
      .attr("d", function (neighbor) {
        let lineCoords = JSON.parse(JSON.stringify(
          [neighbor.pair[0].coord, neighbor.pair[0].pathCoord, neighbor.pair[1].coord]));
        for (let i = 0; i < lineCoords.length; i++) {
          lineCoords[i].y = lineCoords[i].y - 1;
        }
        return self._lineGenerator(lineCoords);
      })
      .style("opacity", 0)
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("d", function (neighbor) {
        return self._lineGenerator(
          [neighbor.pair[0].coord, neighbor.pair[0].pathCoord, neighbor.pair[1].coord]);
      })
      .style("opacity", 1);
  }

  // Draws station circles (outer colored ring plus clickable inner circle)
  // with the same slide-in / slide-out choreography as the lines.
  renderMetroNodes() {
    let self = this;
    let svgNodes = this._svgNodeGroup
      .selectAll("circle")
      .data(this._nodes);
    svgNodes
      .exit()
      .style("opacity", 1)
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("cx", function (node) { return node.coord.x * self._gridSize; })
      .attr("cy", function (node) { return (node.coord.y + 1) * self._gridSize; })
      .style("opacity", 0)
      .remove();
    svgNodes
      .transition()
      .duration(500)
      .attr("cx", function (node) { return node.coord.x * self._gridSize; })
      .attr("cy", function (node) { return node.coord.y * self._gridSize; })
      .attr("fill", function (node) { return node.metroColor; });
    svgNodes
      .enter()
      .append("circle")
      .classed("node", true)
      .attr("cx", function (node) { return node.coord.x * self._gridSize; })
      .attr("cy", function (node) { return (node.coord.y - 1) * self._gridSize; })
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("cx", function (node) { return node.coord.x * self._gridSize; })
      .attr("cy", function (node) { return node.coord.y * self._gridSize; })
      .attr("fill", function (node) { return node.metroColor; });
    let svgInsideNodes = this._svgInsideNodeGroup
      .selectAll("circle")
      .data(this._nodes);
    svgInsideNodes
      .exit()
      .style("opacity", 0)
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("cx", function (node) { return node.coord.x * self._gridSize; })
      .attr("cy", function (node) { return (node.coord.y + 1) * self._gridSize; })
      .style("opacity", 1)
      .remove();
    svgInsideNodes
      .classed("node-inside", true)
      .classed("node-select", false)
      .transition()
      .duration(500)
      .attr("cx", function (node) { return node.coord.x * self._gridSize; })
      .attr("cy", function (node) { return node.coord.y * self._gridSize; });
    svgInsideNodes
      .enter()
      .append("circle")
      .classed("node-inside", true)
      // Inner circle is the click target for selecting a station.
      .on("click", function (d) {
        self._currentSelection = d;
        if (self._onClick && typeof (self._onClick) === "function") self._onClick(self._currentSelection);
      })
      .attr("cx", function (node) { return node.coord.x * self._gridSize; })
      .attr("cy", function (node) { return (node.coord.y - 1) * self._gridSize; })
      .style("opacity", 0)
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("cx", function (node) { return node.coord.x * self._gridSize; })
      .attr("cy", function (node) { return node.coord.y * self._gridSize; })
      .style("opacity", 1);
  }

  // Draws station name labels and their background rects. The background
  // rects are data-bound to the rendered <text> DOM nodes so getBBox() can
  // size them to the text.
  renderMetroNames() {
    let self = this;
    let svgNodeNames = this._svgNodeNameGroup
      .selectAll("text")
      .data(this._nodes);
    svgNodeNames
      .exit()
      .style("opacity", 1)
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("x", function (node) { return node.coord.x * self._gridSize; })
      .attr("y", function (node) { return (node.coord.y + 1.4) * self._gridSize; })
      .style("opacity", 0)
      .remove();
    svgNodeNames
      .transition()
      .duration(500)
      .text(function (node) { return node.name; })
      .attr("x", function (node) { return node.coord.x * self._gridSize; })
      .attr("y", function (node) { return (node.coord.y - 0.4) * self._gridSize; })
      .attr("fill", function (node) { return node.metroColor; });
    svgNodeNames
      .enter()
      .append("text")
      .attr("class", "node-name")
      .text(function (node) { return node.name; })
      .attr("fill", function (node) { return node.metroColor; })
      .attr("x", function (node) { return node.coord.x * self._gridSize; })
      .attr("y", function (node) { return (node.coord.y - 1.4) * self._gridSize; })
      .style("opacity", 0)
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("x", function (node) { return node.coord.x * self._gridSize; })
      .attr("y", function (node) { return (node.coord.y - 0.4) * self._gridSize; })
      .style("opacity", 1);
    let svgNodeNameBackgrounds = self._svgNodeNameBackgroundGroup
      .selectAll("rect")
      .data(self._svgNodeNameGroup.selectAll("text").nodes());
    svgNodeNameBackgrounds
      .exit()
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("x", function (node) {
        return node.__data__.coord.x * self._gridSize - node.getBBox().width / 2 - 3;
      })
      .attr("y", function (node) {
        return (node.__data__.coord.y + 1.4) * self._gridSize - node.getBBox().height + 1;
      })
      .remove();
    svgNodeNameBackgrounds
      .transition()
      .duration(500)
      .attr("x", function (node) {
        return node.__data__.coord.x * self._gridSize - node.getBBox().width / 2 - 3;
      })
      .attr("y", function (node) {
        return (node.__data__.coord.y - 0.4) * self._gridSize - node.getBBox().height + 1;
      })
      .attr("width", function (node) { return node.getBBox().width + 6; })
      .attr("height", function (node) { return node.getBBox().height + 2; })
      .attr("stroke", function (node) { return node.__data__.metroColor; });
    svgNodeNameBackgrounds
      .enter()
      .append("rect")
      .classed("node-name-background", true)
      .attr("width", 0)
      .attr("height", 0)
      .attr("x", function (node) {
        return node.__data__.coord.x * self._gridSize - node.getBBox().width / 2;
      })
      .attr("y", function (node) {
        return (node.__data__.coord.y - 1.4) * self._gridSize - node.getBBox().height;
      })
      .transition()
      .duration(500)
      .delay(function (d, i) { return 3 * i; })
      .attr("x", function (node) {
        return node.__data__.coord.x * self._gridSize - node.getBBox().width / 2 - 3;
      })
      .attr("y", function (node) {
        return (node.__data__.coord.y - 0.4) * self._gridSize - node.getBBox().height + 1;
      })
      .attr("width", function (node) { return node.getBBox().width + 6; })
      .attr("height", function (node) { return node.getBBox().height + 2; })
      .attr("stroke", function (node) { return node.__data__.metroColor; });
  }

  // Finds the A* route between two stations, then animates jittered "jam"
  // strokes along each edge of the route — the stroke count grows with the
  // edge's congestion (via _lineJamExponent) — and dims everything off-route.
  renderPath(startNode, endNode) {
    let self = this;
    let rawPaths = findPath(startNode, endNode);
    let paths = [];
    for (let i = 1; i < rawPaths.length; i++) {
      let previousNode = rawPaths[i - 1];
      let currentNode = rawPaths[i];
      let neighbor = currentNode.neighbors.find(n => { return n.node === previousNode; });
      // One datum per jam stroke; congested edges get many overlapping strokes.
      let exponentCongestion = this._lineJamExponent(neighbor.congestion);
      for (let congestion = 0; congestion < exponentCongestion; congestion++) {
        paths.push({ "pair": [previousNode, currentNode], "congestion": neighbor.congestion });
      }
    }
    let svgJams = this._svgLineJamGroup
      .selectAll("path")
      .data(paths);
    svgJams.exit().remove();
    svgJams
      .attr("d", function (neighbor) { return generateRandomLineJam(neighbor); })
      .attr("stroke-width", 0.5)
      .attr("stroke", function (neighbor) { return self._congestionColors(neighbor.congestion); })
      .attr("fill", "none")
      .transition()
      .ease(d3.easePolyIn)
      .duration(1)
      .attrTween("stroke-dashoffset", tweenDashOffset)
      .attrTween("stroke-dasharray", tweenDash)
      .on("end", function repeat() {
        // Loop the dash animation forever with a randomized period.
        d3.active(this)
          .transition()
          .duration(Random.rangeInt(2000, 5000))
          .delay(Random.range(0, 100))
          .attrTween("stroke-dasharray", tweenDash)
          .on("end", repeat);
      });
    svgJams
      .enter()
      .append("path")
      .attr("d", function (neighbor) { return generateRandomLineJam(neighbor); })
      .attr("stroke-width", 0.5)
      .attr("stroke", function (neighbor) { return self._congestionColors(neighbor.congestion); })
      .attr("fill", "none")
      .transition()
      .ease(d3.easePolyIn)
      .duration(1)
      .attrTween("stroke-dashoffset", tweenDashOffset)
      .attrTween("stroke-dasharray", tweenDash)
      .on("end", function repeat() {
        d3.active(this)
          .transition()
          .duration(Random.rangeInt(2000, 5000))
          .delay(Random.range(0, 100))
          .attrTween("stroke-dasharray", tweenDash)
          .on("end", repeat);
      });

    // Classic "draw the line" dash tween: grows the dash from 0 to full length.
    function tweenDash() {
      let l = this.getTotalLength(),
        i = d3.interpolateString("0," + l, l + "," + l);
      return function (t) { return i(t); };
    }

    function tweenDashOffset() {
      let l = this.getTotalLength(),
        i = d3.interpolateString(0, l);
      return function (t) { return i(t); };
    }

    // Samples the already-rendered edge path and perturbs interior points
    // perpendicular to the edge direction to make each jam stroke wobble.
    function generateRandomLineJam(neighbor) {
      let reversePath;
      let originalLine = self._svgLineGroup.selectAll("path").filter(d => {
        if ((d.pair[0] === neighbor.pair[0] && d.pair[1] === neighbor.pair[1])) {
          reversePath = false;
          return true;
        } else if (d.pair[1] === neighbor.pair[0] && d.pair[0] === neighbor.pair[1]) {
          reversePath = true;
          return true;
        } else return false;
      }).nodes()[0];
      if (!originalLine) return;
      let distance = Math.hypot(neighbor.pair[0].coord.x - neighbor.pair[1].coord.x,
        neighbor.pair[0].coord.y - neighbor.pair[1].coord.y);
      // Unit direction vector of the edge; (dy, -dx) is its perpendicular.
      let dx = (neighbor.pair[1].coord.x - neighbor.pair[0].coord.x) / distance;
      let dy = (neighbor.pair[1].coord.y - neighbor.pair[0].coord.y) / distance;
      let noiseAmount = 10;
      let lineLength = originalLine.getTotalLength();
      let interval = lineLength / (noiseAmount - 1);
      let lineData = d3.range(noiseAmount).map(function (d) {
        let point = originalLine.getPointAtLength(
          reversePath ? (noiseAmount - d) * interval : d * interval);
        point.x = Math.round(point.x / self._gridSize);
        point.y = Math.round(point.y / self._gridSize);
        // Endpoints stay anchored; only interior points get jitter.
        if (d === 0 || d === noiseAmount - 1) return point;
        point.x += dy * Random.range(-0.3, 0.3);
        point.y += -dx * Random.range(-0.3, 0.3);
        return point;
      });
      return self._lineGenerator(lineData);
    }

    focusNodes(rawPaths);
  }

  // Removes all jam strokes and restores full opacity everywhere.
  disablePath() {
    this._svgLineJamGroup
      .selectAll("path")
      .remove();
    disableFocus();
  }

  // Recolors each supplied path with a distinct categorical color and
  // focuses the union of all path nodes.
  renderPaths(paths) {
    let self = this;
    let colors = d3.scaleOrdinal().domain(paths.length).range(d3.schemeSet3);
    let concatPath = [];
    paths.forEach(function (path, i) {
      let color = colors(i);
      self._svgLineGroup
        .selectAll("path")
        .filter(function (neighbor) {
          return path.includes(neighbor.pair[0]) && path.includes(neighbor.pair[1]);
        })
        .transition()
        .duration(500)
        .attr("stroke", color);
      concatPath = concatPath.concat(path);
    });
    focusNodes(concatPath);
  }

  // Restores per-line edge colors and clears focus.
  disablePaths() {
    this._svgLineGroup
      .selectAll("path")
      .transition()
      .duration(500)
      .attr("stroke", function (neighbor) { return neighbor.pair[0].metroColor; });
    disableFocus();
  }

  // Switches to "congestion view": edges colored by congestion on a dark theme.
  renderCongestion() {
    let self = this;
    this._svgLineGroup
      .selectAll("path")
      .transition()
      .duration(500)
      .attr("stroke", function (neighbor) { return self._congestionColors(neighbor.congestion); });
    this._svgBackgroundGroup
      .selectAll("rect")
      .transition()
      .duration(500)
      .attr("fill-opacity", 1)
      .attr("fill", "#2f3131");
    this._svgNodeNameBackgroundGroup
      .selectAll("rect")
      .classed("node-name-background", false)
      .transition()
      .duration(500)
      .attr("opacity", 0.3);
    this._svgGridGroup
      .selectAll("path")
      .classed("grid", false)
      .transition()
      .duration(500)
      .attr("stroke", "#63605b");
    this._svgNodeNameGroup
      .selectAll("text")
      .transition()
      .duration(500)
      .attr("fill", "#f8f1e5");
  }

  // Reverts congestion view back to the default light theme and line colors.
  disableCongestion() {
    this._svgLineGroup
      .selectAll("path")
      .transition()
      .duration(500)
      .attr("stroke", function (neighbor) { return neighbor.pair[0].metroColor; });
    this._svgBackgroundGroup
      .selectAll("rect")
      .transition()
      .duration(500)
      .attr("fill-opacity", 1)
      .attr("fill", "white");
    this._svgNodeNameBackgroundGroup
      .selectAll("rect")
      .attr("stroke", function (node) { return node.__data__.metroColor; })
      .classed("node-name-background", true);
    this._svgGridGroup
      .selectAll("path")
      .classed("grid", true);
    this._svgNodeNameGroup
      .selectAll("text")
      .transition()
      .duration(500)
      .attr("fill", function (node) { return node.metroColor; });
  }

  // Fades out every element NOT belonging to the given node list.
  focusNodes(nodes) {
    let svgPathNotInNodes = this._svgLineGroup.selectAll("path").filter(function (neighbor) {
      return !nodes.includes(neighbor.pair[0]) || !nodes.includes(neighbor.pair[1]);
    });
    let svgNodeNotInNodes = this._svgNodeGroup.selectAll("circle").filter(function (node) {
      return !nodes.includes(node);
    });
    let svgInsideNodeInNodes = this._svgInsideNodeGroup.selectAll("circle").filter(function (node) {
      return !nodes.includes(node);
    });
    let svgNameNotInNodes = this._svgNodeNameGroup.selectAll("text").filter(function (node) {
      return !nodes.includes(node);
    });
    svgPathNotInNodes.classed("fade-out", true).classed("fade-in", false);
    svgNodeNotInNodes.classed("fade-out", true).classed("fade-in", false);
    svgNameNotInNodes.classed("fade-out", true).classed("fade-in", false);
    svgInsideNodeInNodes.classed("fade-out", true).classed("fade-in", false);
  }

  // Fades every element back in.
  disableFocus() {
    this._svgLineGroup.selectAll("path").classed("fade-out", false).classed("fade-in", true);
    this._svgNodeGroup.selectAll("circle").classed("fade-out", false).classed("fade-in", true);
    this._svgNodeNameGroup.selectAll("text").classed("fade-out", false).classed("fade-in", true);
    this._svgInsideNodeGroup.selectAll("circle").classed("fade-out", false).classed("fade-in", true);
  }

  // Registers the callback invoked with the selected node (or null on deselect).
  setSelectionCallback(callBack) {
    this._onClick = callBack;
  }
}

// --- Top-level facade: each function delegates to the Renderer singleton. ---

function setSelectionCallback(callBack) {
  let renderer = new Renderer();
  renderer.setSelectionCallback(callBack);
}

function render(nodes) {
  let renderer = new Renderer();
  renderer.nodes = nodes;
  renderer.renderGrid(100, 70);
  renderer.renderMetroLines();
  renderer.renderMetroNodes();
  renderer.renderMetroNames();
}

function focusNodes(nodes) {
  let renderer = new Renderer();
  renderer.focusNodes(nodes);
}

function disableFocus() {
  let renderer = new Renderer();
  renderer.disableFocus();
}

function renderCongestion() {
  let renderer = new Renderer();
  renderer.renderCongestion();
}

function disableCongestion() {
  let renderer = new Renderer();
  renderer.disableCongestion();
}

function renderPath(startNode, endNode) {
  let renderer = new Renderer();
  renderer.renderPath(startNode, endNode);
}

function renderPaths(paths) {
  let renderer = new Renderer();
  renderer.renderPaths(paths);
}

function disablePaths() {
  let renderer = new Renderer();
  renderer.disablePaths();
}

function disablePath() {
  let renderer = new Renderer();
  renderer.disablePath();
}

// A* search over the station graph. Edge cost is (1 + congestion) * 5;
// the heuristic is Manhattan distance on grid coordinates.
// Returns the node list from startNode to endNode inclusive, or undefined
// when no route exists.
// NOTE(review): gScore/hScore/parent live on the shared Node objects and are
// not reset between calls, so stale scores from a previous search can leak
// into the next one — confirm whether callers rely on single-shot use.
function findPath(startNode, endNode) {
  function manhattanDistance(coord0, coord1) {
    var d1 = Math.abs(coord1.x - coord0.x);
    // FIX: was Math.abs(coord1.y - coord0.x) — the y term mistakenly used
    // the x coordinate, corrupting the heuristic.
    var d2 = Math.abs(coord1.y - coord0.y);
    return d1 + d2;
  }

  let openList = [];
  let closeList = [];
  startNode.parent = null;
  openList.push(startNode);
  while (openList.length > 0) {
    // Linear scan for the open node with the lowest f-score.
    let currentNode = openList[0];
    for (let i = 0; i < openList.length; i++) {
      let openNode = openList[i];
      if (openNode.fScore < currentNode.fScore) currentNode = openNode;
    }
    if (currentNode === endNode) {
      // Walk the parent chain back to the start and reverse it.
      let pathCurrent = currentNode;
      let path = [];
      while (pathCurrent.parent) {
        path.push(pathCurrent);
        pathCurrent = pathCurrent.parent;
      }
      path.push(startNode);
      return path.reverse();
    }
    openList = openList.filter(node => node !== currentNode);
    closeList.push(currentNode);
    let neighbors = currentNode.neighbors;
    for (let i = 0; i < neighbors.length; i++) {
      let neighborNode = neighbors[i].node;
      let neighborcost = (1 + neighbors[i].congestion) * 5;
      if (closeList.includes(neighborNode)) continue;
      let gScore = currentNode.gScore + neighborcost;
      let gScoreIsBest = false;
      if (!openList.includes(neighborNode)) {
        gScoreIsBest = true;
        neighborNode.hScore = manhattanDistance(neighborNode.coord, endNode.coord);
        openList.push(neighborNode);
      } else if (gScore < neighborNode.gScore) {
        gScoreIsBest = true;
      }
      if (gScoreIsBest) {
        neighborNode.parent = currentNode;
        neighborNode.gScore = gScore;
      }
    }
  }
}