text
stringlengths
1
1.05M
/* Copyright (C) 2017-2021 by Arm Limited. All rights reserved. */

#ifndef INCLUDE_NON_ROOT_GLOBALSTATSTRACKER_H
#define INCLUDE_NON_ROOT_GLOBALSTATSTRACKER_H

#include "linux/proc/ProcLoadAvgFileRecord.h"
#include "linux/proc/ProcStatFileRecord.h"
#include "non_root/CounterHelpers.h"
#include "non_root/GlobalCounter.h"

#include <map>

namespace non_root {
    class GlobalStateChangeHandler;

    /**
     * Extracts and monitors interesting global stats from various sources such as ProcLoadAvgFileRecord and ProcStatFileRecord
     */
    class GlobalStatsTracker {
    public:
        /**
         * Extracts and monitors interesting per-core stats from ProcStatFileRecord (CPU entries)
         */
        class PerCoreStatsTracker {
        public:
            // Emit the current counter values for core `cpuID` through `handler`, stamped with `timestampNS`
            void sendStats(unsigned long long timestampNS, GlobalStateChangeHandler & handler, unsigned long cpuID);
            // Fold one parsed /proc/stat "cpuN" record into the delta counters below
            void updateFromProcStatFileRecordCpuTime(const lnx::ProcStatFileRecord::CpuTime & record);

        private:
            // One delta counter per /proc/stat cpu-time field; DeltaCounter tracks change since last send
            DeltaCounter<unsigned long long> timeUserTicks {};
            DeltaCounter<unsigned long long> timeNiceTicks {};
            DeltaCounter<unsigned long long> timeSystemTicks {};
            DeltaCounter<unsigned long long> timeIdleTicks {};
            DeltaCounter<unsigned long long> timeIowaitTicks {};
            DeltaCounter<unsigned long long> timeIrqTicks {};
            DeltaCounter<unsigned long long> timeSoftirqTicks {};
            DeltaCounter<unsigned long long> timeStealTicks {};
            DeltaCounter<unsigned long long> timeGuestTicks {};
            DeltaCounter<unsigned long long> timeGuestNiceTicks {};
            // True until the first sendStats; presumably suppresses/initialises first deltas -- confirm in the .cpp
            bool first {true};

            // Helper used by sendStats to write one delta counter of any integral type
            template<typename T>
            void writeCounter(unsigned long long timestampNS, GlobalStateChangeHandler & handler, unsigned long cpuID, DeltaGlobalCounter id, DeltaCounter<T> & counter);
        };

        /* to convert loadavg values from double to unsigned long */
        static constexpr const unsigned long LOADAVG_MULTIPLIER = 100;

        GlobalStatsTracker(GlobalStateChangeHandler & handler);

        // Emit all global counters (loadavg, process counts, irq/fork/ctxt-switch deltas and per-core stats)
        void sendStats(unsigned long long timestampNS);
        // Update loadavg/process-count counters from a parsed /proc/loadavg record
        void updateFromProcLoadAvgFileRecord(const lnx::ProcLoadAvgFileRecord & record);
        // Update global and per-core counters from a parsed /proc/stat record
        void updateFromProcStatFileRecord(const lnx::ProcStatFileRecord & record);

    private:
        // Per-core trackers keyed by cpu id; entries are created lazily via std::map::operator[]
        std::map<unsigned long, PerCoreStatsTracker> perCoreStats {};
        AbsoluteCounter<unsigned long> loadavgOver1Minute {};
        AbsoluteCounter<unsigned long> loadavgOver5Minutes {};
        AbsoluteCounter<unsigned long> loadavgOver15Minutes {};
        AbsoluteCounter<unsigned long> numProcessesRunning {};
        AbsoluteCounter<unsigned long> numProcessesExist {};
        DeltaCounter<unsigned long> numContextSwitchs {};
        DeltaCounter<unsigned long> numIrq {};
        DeltaCounter<unsigned long> numSoftIrq {};
        DeltaCounter<unsigned long> numForks {};
        GlobalStateChangeHandler & handler;
        // True until the first sendStats; see note on PerCoreStatsTracker::first
        bool first {true};

        template<typename T>
        void writeCounter(unsigned long long timestampNS, AbsoluteGlobalCounter id, AbsoluteCounter<T> & counter);
        template<typename T>
        void writeCounter(unsigned long long timestampNS, DeltaGlobalCounter id, DeltaCounter<T> & counter);
    };
}

#endif /* INCLUDE_NON_ROOT_GLOBALSTATSTRACKER_H */
<reponame>kenna-kevin/kenna-js-api-interview-demo
// Fixture list of accepted query-string parameters, each in "name[]=value" array form.
// NOTE(review): values look like test placeholders ("value", "value2", ...) -- presumably used by tests; confirm against callers.
export const validQueryParams = [
  "connector_pipeline[]=value",
  "next_param[]=value2",
  "tags[]=value3",
  "queries[]=value4"
]
import random

# Draw ten uniformly-distributed samples from [0, 1] and print them in
# ascending order (same output shape as sorting in place, just expressed
# as a single sorted() over a generator).
samples = sorted(random.uniform(0, 1) for _ in range(10))
print(samples)
<reponame>fondation451/monorepo-dry-api
// Smoke-test entry point: imports the TMP symbol from the core package and logs it,
// presumably to verify that the monorepo package wiring resolves -- confirm intent with repo docs.
import { TMP } from "@dry-api/core";

console.log(TMP);
import asyncio


class AuthDb:
    """Simulated asynchronous credential store."""

    @staticmethod
    async def fetch_data(token: str) -> bool:
        # Simulated asynchronous database fetching logic.
        # Replace with actual database interaction in a real application.
        authorized_keys = ["valid_api_key_1", "valid_api_key_2"]
        return token in authorized_keys


def authorize_api_key(api_key: str) -> str:
    """
    Simulates the authorization process for an API key.

    Args:
        api_key: A string representing the API key to be authorized.

    Returns:
        "Authorized" if the key is known, otherwise "Unauthorized".
    """
    # BUG FIX: AuthDb.fetch_data is a coroutine function; the original code
    # called it without awaiting, so `trust` was a coroutine object -- which is
    # always truthy -- and EVERY key (valid or not) was reported "Authorized".
    # Run the coroutine to completion to obtain the real boolean result.
    trust = asyncio.run(AuthDb.fetch_data(api_key))
    return "Authorized" if trust else "Unauthorized"


# Example usage
api_key_1 = "valid_api_key_1"
api_key_2 = "invalid_api_key"

print(authorize_api_key(api_key_1))  # Output: "Authorized"
print(authorize_api_key(api_key_2))  # Output: "Unauthorized"
# Run each experiment script in sequence; the names reference attention-mechanism
# variants (Bahdanau, Luong, none, normed, scaled) -- presumably NMT training runs,
# confirm against the individual scripts.
./bahdanau_run.sh
./luong_run.sh
./noattention_run.sh
./normed_bahdanau_run.sh
./scaledluong_run.sh
#!/bin/bash -l # #PBS -l nodes=1 #PBS -l walltime=00:05:00 #PBS -d . regent 6.rg -ll:cpu 1
<filename>src/components/NotebookCreator/NotebookCreatorModal.js import React, { useRef } from "react"; import { Modal } from "antd"; import NotebookCreator from "."; function NotebookCreatorModal(props) { const { visible, onOk, onCancel } = props; const formRef = useRef(null); const closeModal = () => { formRef.current.resetForm(); onCancel(); }; return ( <Modal title="New Notebook" visible={visible} onCancel={closeModal} footer={null} > <NotebookCreator onSubmit={onOk} formRef={formRef} /> </Modal> ); } export default NotebookCreatorModal;
<html>
<head>
    <title>Math Game</title>
</head>
<body>
<!-- Simple addition form: two required numeric inputs, a button that triggers
     sumNumbers(), and a disabled text box that displays the result. -->
<form>
    <h3>Math Game</h3>
    Enter a number: <input type="number" name="number1" id="number1" required>
    Enter another number: <input type="number" name="number2" id="number2" required>
    <input type="button" onclick="sumNumbers()" value="Calculate">
    <input type="text" name="result" id="result" disabled>
</form>
<script>
    // Read both inputs, coerce them to numbers (input .value is a string, so
    // Number() prevents string concatenation), and show the sum in #result.
    function sumNumbers() {
        let number1 = document.getElementById("number1").value;
        let number2 = document.getElementById("number2").value;
        let result = document.getElementById("result");
        result.value = Number(number1) + Number(number2);
    }
</script>
</body>
</html>
#!/bin/bash

# Cluster bastion bootstrap: logs into Azure with a service principal, creates
# the "vhds" containers in the two PV storage accounts, installs the Ansible
# SSH private key for $SUDOUSER, and tunes /etc/ansible/ansible.cfg.
# Positional parameters $1..$25 are supplied by the deployment template.

echo $(date) " - Starting Script"

set -e

SUDOUSER=$1
PASSWORD="$2"
PRIVATEKEY=$3
MASTER=$4
MASTERPUBLICIPHOSTNAME=$5
MASTERPUBLICIPADDRESS=$6
NODE=$7
NODECOUNT=$8
MASTERCOUNT=$9
ROUTING=${10}
REGISTRYSA=${11}
ACCOUNTKEY="${12}"
METRICS=${13}
LOGGING=${14}
TENANTID=${15}
SUBSCRIPTIONID=${16}
AADCLIENTID=${17}
AADCLIENTSECRET="${18}"
RESOURCEGROUP=${19}
LOCATION=${20}
STORAGEACCOUNT1=${21}
STORAGEACCOUNT2=${22}
SAKEY1=${23}
SAKEY2=${24}
COCKPIT=${25}

BASTION=$(hostname)

MASTERLOOP=$((MASTERCOUNT - 1))
NODELOOP=$((NODECOUNT - 1))

# Create Container in PV Storage Accounts
echo $(date) " - Creating container in PV Storage Accounts"

azure telemetry --disable
# BUG FIX: the client secret and storage-account keys were expanded unquoted;
# secrets containing whitespace or glob characters would be word-split by the
# shell and the login/creation would fail. Quote every credential expansion.
azure login --service-principal -u "$AADCLIENTID" -p "$AADCLIENTSECRET" --tenant "$TENANTID"
azure storage container create -a "$STORAGEACCOUNT1" -k "$SAKEY1" --container vhds
azure storage container create -a "$STORAGEACCOUNT2" -k "$SAKEY2" --container vhds

# Generate private keys for use by Ansible
echo $(date) " - Generating Private keys for use by Ansible for Kubernetes Installation"

echo "Generating Private Keys"

runuser -l $SUDOUSER -c "echo \"$PRIVATEKEY\" > ~/.ssh/id_rsa"
runuser -l $SUDOUSER -c "chmod 600 ~/.ssh/id_rsa*"

echo "Configuring SSH ControlPath to use shorter path name"

sed -i -e "s/^# control_path = %(directory)s\/%%h-%%r/control_path = %(directory)s\/%%h-%%r/" /etc/ansible/ansible.cfg
sed -i -e "s/^#host_key_checking = False/host_key_checking = False/" /etc/ansible/ansible.cfg
sed -i -e "s/^#pty=False/pty=False/" /etc/ansible/ansible.cfg

# Create Ansible Playbooks for Post Installation tasks
echo $(date) " - Create Ansible Playbooks for Post Installation tasks"

echo $(date) " - Script complete"
import React from 'react'
import { Cardy } from 'my-react-module'
// import Card from ''
import 'my-react-module/src/index.css'

// Demo app: renders one Cardy component with French placeholder text in its
// header, body and footer slots.
const App = () => {
  return <Cardy headerContent="Je suis le Header" bodyContent="Je suis le contenu du card" footerContent="Je suis le footer" />
}

export default App
<gh_stars>1-10
require "cyborg/command/help"
require "cyborg/command/npm"
require "cyborg/command/scaffold"

module Cyborg
  # CLI command dispatcher for the Cyborg plugin gem: maps the parsed options
  # hash to scaffold/build/watch/server/clean/gem:* actions.
  module Command
    extend self

    # Entry point: routes options[:command] (and its one-letter alias) to the
    # matching action. Most actions run from the plugin root via from_root.
    def run(options)
      @production = options[:production]

      if options[:help]
        version
        puts options[:help]
        return
      end

      case options[:command]
      when 'new', 'n'
        Scaffold.new(options)
      when 'build', 'b'
        from_root { dispatch(:build, options) }
      when 'watch', 'w'
        from_root { dispatch(:watch, options) }
      when 'server', 's'
        from_root { dispatch(:server, options) }
      when 'clean', 'c'
        from_root { clean }
      when 'version'
        version
      when 'gem:build'
        from_root { gem_build }
      when 'gem:install'
        from_root { gem_install }
      when 'gem:release'
        from_root { gem_release }
      when 'gem:tag'
        from_root { gem_tag }
      else
        puts "Command `#{options[:command]}` not recognized"
      end
    end

    def version
      puts "Cyborg version #{Cyborg::VERSION}\n\n"
    end

    def production?
      @production == true
    end

    # Build assets in production mode, then build the gem package.
    def gem_build
      @production = true
      FileUtils.rm_rf('public')
      dispatch(:build)
      system "bundle exec rake build"
    end

    # Build assets in production mode, then install the gem locally.
    def gem_install
      @production = true
      FileUtils.rm_rf('public')
      dispatch(:build)
      system "bundle exec rake install"
    end

    # Build assets in production mode, then release the gem: pushes the built
    # .gem straight to rubygems.org when RUBYGEMS_API_KEY is set, otherwise
    # falls back to `rake release`.
    def gem_release
      @production = true
      FileUtils.rm_rf('public')
      dispatch(:build)

      if key = ENV['RUBYGEMS_API_KEY']
        gem = "#{Cyborg.plugin.gem_name}-#{Cyborg.plugin.version}.gem"
        system "bundle exec rake build"
        system "curl --data-binary @./pkg/#{gem} -H 'Authorization:#{key}' https://rubygems.org/api/v1/gems"
      else
        system 'bundle exec rake release'
      end
    end

    # NOTE(review): this hard-codes ./lib/tungsten/version.rb and Tungsten::VERSION
    # while the rest of the module uses Cyborg::VERSION -- looks like a leftover
    # from a rename; confirm which constant the host repo actually defines.
    def gem_tag
      require './lib/tungsten/version.rb'
      system "git tag v#{Tungsten::VERSION}"
    end

    def require_rails
      require File.join(Dir.pwd, Cyborg.rails_path('config/application'))
    end

    # Remove Rails and Sass caches (both in the dummy Rails app and locally).
    def clean
      FileUtils.rm_rf(Cyborg.rails_path('tmp/cache/'))
      FileUtils.rm_rf('.sass-cache')
      FileUtils.rm_rf(Cyborg.rails_path('.sass-cache'))
    end

    # Handles running threaded commands
    #
    def dispatch(command, *args)
      @threads = []
      send(command, *args)
      @threads.each { |thr| thr.join }
    end

    # Build assets
    def build(options={})
      puts Cyborg.production? ? 'Building for production…' : 'Building…'
      require_rails
      clean if Cyborg.production?
      Cyborg.plugin.build(options)
    end

    # Watch assets for changes and build
    def watch(options={})
      build(options)
      require 'listen'
      trap("SIGINT") {
        puts "\nCyborg watcher stopped. Have a nice day!"
        exit!
      }
      @threads.concat Cyborg.plugin.watch(options)
    end

    # Run rails server and watch assets
    def server(options={})
      options[:port] ||= 3000
      @threads << Thread.new {
        system "#{Cyborg.rails_path('bin/rails')} server -p #{options[:port]}"
      }
      watch(options) if options[:watch]
    end

    # Run `command` (or the given block) from the plugin's root directory;
    # aborts when the current tree is not a Cyborg plugin.
    def from_root(command=nil, &blk)
      unless dir = Cyborg.gem_path
        abort "Command must be run from the root of a Cyborg Plugin (adjacent to the gemspec)."
      end
      Dir.chdir(dir) do
        if command
          system command
        else
          blk.call
        end
      end
    end
  end
end
package com.corsair.sparrow.pirate.oauth.security.impl;

import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.corsair.sparrow.pirate.oauth.constant.SecurityConstant;
import com.corsair.sparrow.pirate.oauth.domain.bean.SysRole;
import com.corsair.sparrow.pirate.oauth.domain.bean.SysUser;
import com.corsair.sparrow.pirate.oauth.exception.AuthorityNotFoundException;
import com.corsair.sparrow.pirate.oauth.exception.PhoneCodeNotMatchException;
import com.corsair.sparrow.pirate.oauth.exception.QrCodeException;
import com.corsair.sparrow.pirate.oauth.security.Oauth2UserDetails;
import com.corsair.sparrow.pirate.oauth.security.Oauth2UserDetailsService;
import com.corsair.sparrow.pirate.oauth.service.ISysRoleService;
import com.corsair.sparrow.pirate.oauth.service.ISysUserService;
import com.google.common.collect.Sets;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.security.core.userdetails.UsernameNotFoundException;
import org.springframework.stereotype.Component;

import java.util.Objects;
import java.util.Set;

/**
 * Loads user details for OAuth2 authentication from the SysUser/SysRole tables.
 *
 * @author jack
 */
@Component
public class Oauth2UserDetailsServiceImpl implements Oauth2UserDetailsService {

    @Autowired
    private ISysUserService sysUserService;

    @Autowired
    private ISysRoleService roleService;

    /**
     * Looks the user up by exact username and wraps it, together with its role
     * set and account-state flags, into an {@link Oauth2UserDetails}.
     *
     * @throws UsernameNotFoundException when no SysUser row matches the username
     */
    @Override
    public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException {
        QueryWrapper<SysUser> queryWrapper = new QueryWrapper<>();
        queryWrapper.eq("username",username);
        SysUser sysUser = sysUserService.getOne(queryWrapper);
        if(Objects.isNull(sysUser)){
            // Message (Chinese, runtime string kept as-is): "'<username>' username does not exist"
            throw new UsernameNotFoundException(String.format("'%s'用户名不存在",username));
        }
        // Fetch the user's role set
        Set<SysRole> sysRoleSet = roleService.getRoleSetByUserId(sysUser.getId());
        // If the system must not allow login without any role, re-enable this check
        // (message: "'<username>' has no roles"):
        // if(CollectionUtils.isEmpty(sysRoleSet)){
        //     throw new AuthorityNotFoundException(String.format("'%s'该用户没有任何角色",username));
        // }
        return new Oauth2UserDetails(
                sysUser.getId(),
                sysUser.getUsername(),
                sysUser.getPassword(),
                sysRoleSet,
                sysUser.getIsAccountNonExpired(),
                sysUser.getIsAccountNonLocked(),
                sysUser.getIsCredentialsNonExpired(),
                sysUser.getIsEnabled()
        );
    }

    @Override
    public Oauth2UserDetails loadUserByPhoneCode(String phone, String code) throws PhoneCodeNotMatchException {
        // TODO: implement phone-number + verification-code login
        return null;
    }

    @Override
    public Oauth2UserDetails loadUserByQrCode(String qrCode) throws QrCodeException {
        // TODO: implement QR-code login
        return null;
    }
}
<gh_stars>1-10
# Per-model column layouts for tabular CLI output.
# Each column entry: 'id' = attribute name to read, 'title' = column header,
# 'width' = fixed column width ( -1 appears only on the last column --
# presumably "take the remaining width"; confirm in the renderer).
# 'serialize' / 'json' flags presumably switch the cell to a serialized
# rendering of structured values -- confirm in the renderer.
column_settings = {
    'Account': [
        {'id': 'id', 'title': 'ID', 'width': 35},
        {'id': 'name', 'title': 'NAME', 'width': -1},
    ],
    'CloudRegion': [
        {'id': 'id', 'title': 'ID', 'width': 25},
        {'id': 'provider', 'title': 'PROVIDER', 'width': 20},
        {'id': 'display_name', 'title': 'DISPLAY_NAME', 'width': 40},
        {'id': 'geo_coordinates', 'title': 'GEO-COORDINATES', 'width': -1, 'serialize': True}
    ],
    'CloudService': [
        {'id': 'id', 'title': 'ID', 'width': 35},
        {'id': 'provider', 'title': 'PROVIDER', 'width': 15},
        {'id': 'service', 'title': 'SERVICE', 'width': 20},
        {'id': 'deactivated', 'title': 'DEACTIVATED', 'width': -1, 'json': True}
    ],
    'Facility': [
        {'id': 'id', 'title': 'ID', 'width': 56},
        {'id': 'name', 'title': 'NAME', 'width': 55},
        {'id': 'state', 'title': 'STATE', 'width': -1},
    ],
    'Location': [
        {'id': 'id', 'title': 'ID', 'width': 15},
        {'id': 'name', 'title': 'NAME', 'width': 20},
        {'id': 'href', 'title': 'HREF', 'width': 25},
        {'id': 'geo_coordinates', 'title': 'GEO-COORDINATES', 'width': -1, 'serialize': True}
    ],
    'Network': [
        {'id': 'id', 'title': 'ID', 'width': 35},
        {'id': 'name', 'title': 'NAME', 'width': 25},
        {'id': 'state', 'title': 'STATE', 'width': 25},
        {'id': 'tags', 'title': 'TAGS', 'width': -1, 'json': True}
    ]
}
<reponame>Adrian-Garcia/Algorithms
/* 406. Queue Reconstruction by Height
Suppose you have a random list of people standing in a queue. Each person is described by a pair of integers (h, k), where h is the height of the person and k is the number of people in front of this person who have a height greater than or equal to h. Write an algorithm to reconstruct the queue.

Note: The number of people is less than 1,100.

Example
Input: [[7,0], [4,4], [7,1], [5,0], [6,1], [5,2]]
Output: [[5,0], [7,0], [5,2], [6,1], [4,4], [7,1]]
*/

class Solution {
public:
    // Comparator: order primarily by ascending k (pair.second); ties broken by
    // DESCENDING height, so among equal-k people the tallest comes first.
    static bool flag(const pair<int,int> &x, const pair<int,int> &y){
        return ((x.second<y.second) || (x.second==y.second&&x.first>y.first));
    }
    // Reconstructs the queue in place. After the initial sort, each person i
    // whose required count k (people[i].second) is smaller than its current
    // index is moved earlier: scan from the front, consuming one unit of k for
    // every person already placed with height >= this person's height, and
    // re-insert at the position where k reaches zero. The insert happens before
    // the erase, so the stale copy sits at i+1 and is removed there.
    vector<pair<int, int>> reconstructQueue(vector<pair<int, int>>& people) {
        sort(people.begin(),people.end(), flag);
        for(int i=0; i<people.size(); i++){
            int curr = people[i].second;
            if(curr<i){
                int index = 0;
                // advance past `curr` people at least as tall as people[i]
                for(; index<i && curr>0; index++)
                    if(people[index].first>=people[i].first)
                        curr--;
                people.insert(people.begin()+index,people[i]);
                people.erase(people.begin()+i+1);
            }
        }
        return people;
    }
};
# This startup script creates a config file based on environment variables and then starts the bot
# CONFIG => holds the stormy.[json|yml|toml|hcl|env] content
# CONFIG_TYPE => json|yml|toml|hcl|env can be used, default: json
# CONFIG_2, CONFIG_3, CONFIG_4 => can be used to split the config into multiple variables
# ENCODED_CONFIG => set to true, to base64 decode the config file
# DEBUG => when set, the go binary will start in debug mode

config_filename=stormy.${CONFIG_TYPE:-json}

# read config, split in up to 4 environment variables, and create the config file.
# BUG FIX: the expansion is quoted so newlines inside the config are preserved
# instead of being word-split into single spaces.
echo "${CONFIG}${CONFIG_2}${CONFIG_3}${CONFIG_4}" > "$config_filename"

# decode the config file, if it's base64 encoded.
# BUG FIX 1: `[ -n ${ENCODED_CONFIG} ]` collapses to the one-argument test
# `[ -n ]` when the variable is unset, which is ALWAYS true -- so the decode
# branch previously ran even for plain-text configs. Quoting fixes the test.
# BUG FIX 2: `base64 -d file | tee file` truncates the file while base64 is
# still reading it; decode into a variable first, then overwrite the file
# (stdout echo kept so the config is still printed, as tee/cat did before).
if [ -n "${ENCODED_CONFIG}" ] ; then
    decoded=$(base64 -d "$config_filename")
    printf '%s\n' "$decoded" > "$config_filename"
    printf '%s\n' "$decoded"
else
    cat "$config_filename"
fi

# start the bot (same unquoted-test fix applies: DEBUG branch previously ran unconditionally)
if [ -n "${DEBUG}" ] ; then ./bin/stormy -debug; else ./bin/stormy ; fi
<filename>app/src/main/java/com/hapramp/views/UserItemView.java
package com.hapramp.views;

import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.os.Handler;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v7.app.AlertDialog;
import android.util.AttributeSet;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;

import com.crashlytics.android.Crashlytics;
import com.hapramp.R;
import com.hapramp.preferences.HaprampPreferenceManager;
import com.hapramp.steemconnect.SteemConnectUtils;
import com.hapramp.steemconnect4j.SteemConnect;
import com.hapramp.steemconnect4j.SteemConnectCallback;
import com.hapramp.steemconnect4j.SteemConnectException;
import com.hapramp.ui.activity.ProfileActivity;
import com.hapramp.utils.Constants;
import com.hapramp.utils.FollowingsSyncUtils;
import com.hapramp.utils.ImageHandler;

import java.util.Set;

import butterknife.BindView;
import butterknife.ButterKnife;

/**
 * List-row view showing a Steem user (avatar + username) with a
 * follow/unfollow button. Follow state is taken from the locally cached
 * followings set; follow/unfollow requests run on a background thread via
 * SteemConnect and results are posted back to the UI thread with a Handler.
 *
 * Created by Ankit on 4/6/2018.
 */
public class UserItemView extends FrameLayout {
  @BindView(R.id.user_pic)
  ImageView userPic;
  @BindView(R.id.content)
  TextView content;
  @BindView(R.id.followUnfollowBtn)
  TextView followUnfollowBtn;
  @BindView(R.id.followUnfollowProgress)
  ProgressBar followUnfollowProgress;
  // current follow state as reflected by the button
  private boolean followed = false;
  private Context mContext;
  private Handler mHandler;
  // username this row displays
  private String mUsername;
  // the logged-in user's own username
  private String me;
  private SteemConnect steemConnect;
  private FollowStateChangeListener followStateChangeListener;

  public UserItemView(@NonNull Context context) {
    super(context);
    init(context);
  }

  // Inflates the row layout, binds views, and prepares the SteemConnect client.
  private void init(Context context) {
    this.mContext = context;
    View view = LayoutInflater.from(context).inflate(R.layout.user_suggestions_item_row, this);
    ButterKnife.bind(this, view);
    mHandler = new Handler();
    me = HaprampPreferenceManager.getInstance().getCurrentSteemUsername();
    steemConnect = SteemConnectUtils.getSteemConnectInstance(HaprampPreferenceManager.getInstance().getSC2AccessToken());
    attachListeners();
  }

  // Button toggles follow state (unfollow asks for confirmation first);
  // tapping avatar or username opens the profile page.
  private void attachListeners() {
    followUnfollowBtn.setOnClickListener(new OnClickListener() {
      @Override
      public void onClick(View v) {
        if (isFollowed()) {
          confirmUnfollowAction();
        } else {
          requestFollowOnSteem();
        }
      }
    });
    userPic.setOnClickListener(new OnClickListener() {
      @Override
      public void onClick(View view) {
        openProfilePage();
      }
    });
    content.setOnClickListener(new OnClickListener() {
      @Override
      public void onClick(View v) {
        openProfilePage();
      }
    });
  }

  private boolean isFollowed() {
    return this.followed;
  }

  // Confirmation dialog before sending an unfollow request.
  private void confirmUnfollowAction() {
    AlertDialog.Builder builder = new AlertDialog.Builder(mContext)
      .setTitle("Unfollow")
      .setMessage("Do you want to Unfollow " + getUsername() + "?")
      .setPositiveButton("UnFollow", new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialog, int which) {
          requestUnFollowOnSteem();
        }
      })
      .setNegativeButton("No", null);
    builder.show();
  }

  // Sends the follow request on a worker thread; success/failure callbacks are
  // marshalled back to the UI thread through mHandler.
  private void requestFollowOnSteem() {
    showProgress(true);
    new Thread() {
      @Override
      public void run() {
        steemConnect.follow(
          me,
          mUsername,
          new SteemConnectCallback() {
            @Override
            public void onResponse(String s) {
              mHandler.post(new Runnable() {
                @Override
                public void run() {
                  userFollowedOnSteem();
                }
              });
            }

            @Override
            public void onError(SteemConnectException e) {
              Log.d("UserFollow-follow", e.toString());
              mHandler.post(new Runnable() {
                @Override
                public void run() {
                  userFollowFailed();
                }
              });
            }
          }
        );
      }
    }.start();
  }

  // Opens ProfileActivity for this row's username.
  private void openProfilePage() {
    Intent i = new Intent(mContext, ProfileActivity.class);
    i.putExtra(Constants.EXTRAA_KEY_STEEM_USER_NAME, getUsername());
    mContext.startActivity(i);
  }

  private String getUsername() {
    return this.mUsername;
  }

  // Mirror of requestFollowOnSteem for the unfollow operation.
  private void requestUnFollowOnSteem() {
    showProgress(true);
    new Thread() {
      @Override
      public void run() {
        steemConnect.unfollow(
          me,
          mUsername,
          new SteemConnectCallback() {
            @Override
            public void onResponse(String s) {
              mHandler.post(new Runnable() {
                @Override
                public void run() {
                  userUnFollowedOnSteem();
                }
              });
            }

            @Override
            public void onError(final SteemConnectException e) {
              Log.d("UserFollow-Unfollow", e.toString());
              mHandler.post(new Runnable() {
                @Override
                public void run() {
                  userUnfollowFailed();
                }
              });
            }
          }
        );
      }
    }.start();
  }

  // Swaps the button and the progress spinner; exceptions are only logged
  // (view may already be detached when a late callback arrives).
  private void showProgress(boolean show) {
    try {
      if (show) {
        //hide button
        followUnfollowBtn.setVisibility(GONE);
        followUnfollowProgress.setVisibility(VISIBLE);
      } else {
        //show button
        followUnfollowBtn.setVisibility(VISIBLE);
        followUnfollowProgress.setVisibility(GONE);
      }
    }
    catch (Exception e) {
      Crashlytics.log(e.toString());
    }
  }

  private void userFollowedOnSteem() {
    showProgress(false);
    alreadyFollowed();
    syncFollowings();
    if (followStateChangeListener != null) {
      followStateChangeListener.onFollowStateChanged();
    }
  }

  private void userFollowFailed() {
    showProgress(false);
    notFollowed();
  }

  private void userUnFollowedOnSteem() {
    showProgress(false);
    notFollowed();
    syncFollowings();
    if (followStateChangeListener != null) {
      followStateChangeListener.onFollowStateChanged();
    }
  }

  private void userUnfollowFailed() {
    showProgress(false);
    alreadyFollowed();
  }

  // Put the button into the "following" visual state.
  private void alreadyFollowed() {
    followUnfollowBtn.setText("Unfollow");
    followUnfollowBtn.setSelected(true);
    followUnfollowBtn.setBackgroundResource(R.drawable.unfollow_btn_bg);
    followed = true;
  }

  private void syncFollowings() {
    FollowingsSyncUtils.syncFollowings(mContext);
  }

  // Put the button into the "not following" visual state.
  private void notFollowed() {
    followUnfollowBtn.setText("Follow");
    followUnfollowBtn.setSelected(false);
    followUnfollowBtn.setBackgroundResource(R.drawable.follow_btn_bg);
    followed = false;
  }

  // Binds a username to this row: sets text, loads the avatar, and refreshes
  // the follow button from the cached followings set.
  public void setUsername(String username) {
    this.mUsername = username;
    content.setText(username);
    ImageHandler.loadCircularImage(mContext, userPic,
      String.format(mContext.getResources().getString(R.string.steem_user_profile_pic_format), username));
    invalidateFollowButton();
  }

  // Hides the button for the user's own row; otherwise shows Follow/Unfollow
  // based on the cached followings set (button hidden while the set is absent).
  private void invalidateFollowButton() {
    if (mUsername.equals(me)) {
      followUnfollowBtn.setVisibility(GONE);
      return;
    }
    Set<String> followings = HaprampPreferenceManager.getInstance().getFollowingsSet();
    if (followings != null) {
      if (followUnfollowBtn != null) {
        followUnfollowBtn.setVisibility(VISIBLE);
      }
      if (followings.contains(mUsername)) {
        alreadyFollowed();
      } else {
        notFollowed();
      }
    } else {
      if (followUnfollowBtn != null) {
        followUnfollowBtn.setVisibility(GONE);
      }
    }
  }

  public UserItemView(@NonNull Context context, @Nullable AttributeSet attrs) {
    super(context, attrs);
    init(context);
  }

  public UserItemView(@NonNull Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
    super(context, attrs, defStyleAttr);
    init(context);
  }

  public void setFollowStateChangeListener(FollowStateChangeListener followStateChangeListener) {
    this.followStateChangeListener = followStateChangeListener;
  }

  // Notified after a successful follow or unfollow completes.
  public interface FollowStateChangeListener {
    void onFollowStateChanged();
  }
}
An iterator is an object that iterates over a sequence of values in a data structure or container. The iterator is responsible for keeping track of the current position in the sequence, and it provides methods to move to the next element and to retrieve information about the current element. Iterators can be used to loop through collections such as arrays, lists, sets, and maps. They simplify the task of looping over elements and can be used to iterate over all elements of a collection, or over a subset of elements that meet certain criteria.
#!/bin/bash echo "export LS_OPTIONS='--color=auto'" > /etc/bash.bashrc echo "alias grep='grep --color=auto'" >> /etc/bash.bashrc echo "alias ls='ls \$LS_OPTIONS'" >> /etc/bash.bashrc echo "alias ll='ls \$LS_OPTIONS -l'" >> /etc/bash.bashrc echo "alias l='ls \$LS_OPTIONS -lA'" >> /etc/bash.bashrc echo -e "\n" >> /root/.bashrc
import {
  METHOD_PAYMENTS_LOAD,
  METHOD_PAYMENT_SELECTED,
} from '../constants/ActionTypes';

// Payment-method slice: the loaded list of methods plus the label of the
// currently selected one.
const initialState = { methods: [], active: '' };

// Reducer: produces a fresh state object for the two handled action types and
// returns the existing state untouched for anything else.
export default (state = initialState, action) => {
  if (action.type === METHOD_PAYMENTS_LOAD) {
    return { ...state, methods: action.methods };
  }
  if (action.type === METHOD_PAYMENT_SELECTED) {
    return { ...state, active: action.label };
  }
  return state;
};
#!/bin/bash -l ## ## Copyright (c) 2019 Opticks Team. All Rights Reserved. ## ## This file is part of Opticks ## (see https://bitbucket.org/simoncblyth/opticks). ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## you may not use this file except in compliance with the License. ## You may obtain a copy of the License at ## ## http://www.apache.org/licenses/LICENSE-2.0 ## ## Unless required by applicable law or agreed to in writing, software ## distributed under the License is distributed on an "AS IS" BASIS, ## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## See the License for the specific language governing permissions and ## limitations under the License. ## opticks- oe- om- sdir=$(pwd) name=$(basename $sdir) bdir=/tmp/$USER/opticks/$name/build echo bdir $bdir name $name rm -rf $bdir && mkdir -p $bdir && cd $bdir && pwd om-cmake $sdir make [ ! $? -eq 0 ] && echo build error && exit 1 make install earth=$HOME/opticks_refs/Earth_Albedo_8192_4096.ppm gradient=/tmp/SPPMTest_MakeTestImage.ppm if [ -f "$earth" ]; then path=$earth else path=$gradient fi path=$gradient echo $name $path $name $path [ ! $? -eq 0 ] && echo runtime error && exit 1 outpath=/tmp/$USER/opticks/$name/out.ppm if [ -n "$SSH_TTY" ]; then echo remote running : outpath $outpath else echo local running : open outpath $outpath open $outpath fi
<gh_stars>1-10 #ifndef __XENO__ #define __XENO__ #endif #include "RTClient.h" //Modify this number to indicate the actual number of motor on the network #define ELMO_TOTAL 16 #define DUAL_ARM_DOF 16 hyuEcat::Master ecatmaster; hyuEcat::EcatElmo ecat_elmo[ELMO_TOTAL]; // When all slaves or drives reach OP mode, // system_ready becomes 1. int system_ready = 0; bool break_flag = false; // Global time (beginning from zero) double double_gt=0.0; //real global time double double_dt=0.0; // For RT thread management unsigned long fault_count=0; unsigned long down_count=0; unsigned long calculation_time=0; unsigned long worst_time=0; double double_dt_tcp=0.0; unsigned long fault_count_tcp=0; unsigned long calculation_time_tcp=0; unsigned long worst_time_tcp=0; // EtherCAT Data (Dual-Arm) UINT16 StatusWord[DUAL_ARM_DOF] = {0,}; INT32 ActualPos[DUAL_ARM_DOF] = {0,}; INT32 ActualVel[DUAL_ARM_DOF] = {0,}; INT16 ActualTor[DUAL_ARM_DOF] = {0,}; INT8 ModeOfOperationDisplay[DUAL_ARM_DOF] = {0,}; std::string DeviceState[DUAL_ARM_DOF]; INT16 TargetTor[DUAL_ARM_DOF] = {0,}; //100.0 persentage /****************************************************************************/ // Xenomai RT tasks RT_TASK RTArm_task; RT_TASK print_task; RT_TASK tcpip_task; RT_TASK event_task; RT_QUEUE msg_tcpip; RT_QUEUE msg_event; void signal_handler(int signum); VectorXd ActualPos_Rad; VectorXd ActualVel_Rad; VectorXd TargetPos_Rad; VectorXd TargetVel_Rad; VectorXd TargetAcc_Rad; VectorXd TargetToq; VectorXd TargetPos_Task; VectorXd TargetVel_Task; VectorXd TargetAcc_Task; VectorXd ActualPos_Task; VectorXd ExternalForce; VectorXd ErrorPos_Task; VectorXd finPos; VectorXd findPos_Task; int isSlaveInit() { #if defined(_ECAT_ON_) int elmo_count = 0; int slave_count = 0; for(int i=0; i<ELMO_TOTAL; ++i) { if(ecat_elmo[i].initialized()) { elmo_count++; } } for(int j=0; j<((int)ecatmaster.GetConnectedSlaves()-1); j++) { if(ecatmaster.GetSlaveState(j) == 0x08) { slave_count++; } } if((elmo_count == ELMO_TOTAL) && 
(slave_count == ((int)ecatmaster.GetConnectedSlaves()-1))) return 1; else return 0; #else return 1; #endif } Vector3d ForwardPos[2]; Vector3d ForwardOri[2]; Vector3d ForwardAxis[2]; int NumChain; static unsigned char ControlIndex1 = CTRLMODE_IDY_JOINT; static unsigned char ControlIndex2 = 3; static unsigned char ControlSubIndex = 1; // RTArm_task void RTRArm_run( void *arg ) { #if defined(_PLOT_ON_) int sampling_time = 20; // Data is sampled every 10 cycles. int sampling_tick = sampling_time; void *msg; LOGGING_PACK logBuff; int len = sizeof(LOGGING_PACK); #endif RTIME now, previous; RTIME start = rt_timer_read(); RTIME p1 = 0; RTIME p3 = 0; short MaxTor = 1200; unsigned char JointState = ControlSubIndex; ActualPos_Rad.setZero(DUAL_ARM_DOF); ActualVel_Rad.setZero(DUAL_ARM_DOF); TargetPos_Rad.setZero(DUAL_ARM_DOF); TargetVel_Rad.setZero(DUAL_ARM_DOF); TargetAcc_Rad.setZero(DUAL_ARM_DOF); finPos.setZero(DUAL_ARM_DOF); TargetToq.setZero(DUAL_ARM_DOF); TargetPos_Task.setZero(12); TargetVel_Task.setZero(12); TargetAcc_Task.setZero(12); ActualPos_Task.setZero(12); ExternalForce.setZero(12); ErrorPos_Task.setZero(12); findPos_Task.setZero(12); std::shared_ptr<SerialManipulator> DualArm = std::make_shared<SerialManipulator>(); std::unique_ptr<HYUControl::Controller> Control = std::make_unique<HYUControl::Controller>(DualArm); std::unique_ptr<HYUControl::Motion> motion = std::make_unique<HYUControl::Motion>(DualArm); VectorXd des_mass = VectorXd::Constant(2, 5.0); VectorXd KpTask = VectorXd::Zero(12); VectorXd KdTask = VectorXd::Zero(12); VectorXd KpNull = VectorXd::Constant(16, 0.001); VectorXd KdNull = VectorXd::Constant(16, 3.0); KpTask.segment(0,3).setConstant(100.0); KpTask.segment(3,3).setConstant(1300.0); KpTask.segment(6,3).setConstant(100.0); KpTask.segment(9,3).setConstant(1300.0); KdTask.segment(0,3).setConstant(5.0); KdTask.segment(3,3).setConstant(55.0); KdTask.segment(6,3).setConstant(5.0); KdTask.segment(9,3).setConstant(55.0); 
Control->SetImpedanceGain(KpTask, KdTask, KpNull, KdNull, des_mass); DualArm->UpdateManipulatorParam(); int len, err; void *msg; TCP_Packet_Task packet_task; err = rt_queue_bind(&msg_tcpip, "tcp_queue", TM_NONBLOCK); if(err) { fprintf(stderr, "Failed to queue bind, code %d\n", err); } /* Arguments: &task (NULL=self), * start time, * period */ rt_task_set_periodic(nullptr, TM_NOW, cycle_ns); while (true) { rt_task_wait_period(nullptr); //wait for next cycle if(break_flag) break; previous = rt_timer_read(); #if defined(_ECAT_ON_) ecatmaster.RxUpdate(); #endif for(int k=0; k < DUAL_ARM_DOF; k++) { DeviceState[k] = ecat_elmo[k].GetDevState(); StatusWord[k] = ecat_elmo[k].status_word_; ModeOfOperationDisplay[k] = ecat_elmo[k].mode_of_operation_display_; ActualPos[k] = ecat_elmo[k].position_; ActualVel[k] = ecat_elmo[k].velocity_; ActualTor[k] = ecat_elmo[k].torque_; } DualArm->ENCtoRAD(ActualPos, ActualPos_Rad); DualArm->VelocityConvert(ActualVel, ActualVel_Rad); if( system_ready ) { DualArm->pKin->PrepareJacobian( ActualPos_Rad ); DualArm->pDyn->PrepareDynamics( ActualPos_Rad, ActualVel_Rad ); DualArm->pKin->GetForwardKinematics( ForwardPos, ForwardOri, NumChain ); if((len = rt_queue_receive(&msg_tcpip, &msg, TM_NONBLOCK)) > 0) { memcpy(&packet_task.data, msg, sizeof(TCP_Packet_Task)); printf("received message> len=%d bytes, ptr=%p, index1=0x%02X, index2=0x%02X, subindex=0x%02X\n", len, msg, packet_task.info.index1, packet_task.info.index2, packet_task.info.subindex); ControlIndex1 = packet_task.info.index1; ControlIndex2 = packet_task.info.index2; ControlSubIndex = packet_task.info.subindex; JointState = ControlSubIndex; rt_queue_free(&msg_tcpip, msg); } if( ControlIndex1 == CTRLMODE_FRICTIONID ) { Control->FrictionIdentification( ActualPos_Rad, ActualVel_Rad, TargetPos_Rad, TargetVel_Rad, TargetAcc_Rad, TargetToq, double_gt ); } else if( ControlIndex1 == CTRLMODE_CLIK ) { if( ControlIndex2 == 7 ) { DualArm->pKin->GetForwardKinematicsWithRelative(ActualPos_Task); } 
else { DualArm->pKin->GetForwardKinematics(ActualPos_Task); } motion->TaskMotion( TargetPos_Task, TargetVel_Task, TargetAcc_Task, findPos_Task, ActualPos_Task, ActualVel_Rad, double_gt, JointState, ControlSubIndex ); Control->CLIKTaskController( ActualPos_Rad, ActualVel_Rad, TargetPos_Task, TargetVel_Task,TargetToq, double_dt, ControlIndex2 ); } else if( ControlIndex1 == CTRLMODE_TASK ) { DualArm->pKin->GetForwardKinematics(ActualPos_Task); motion->TaskMotion( TargetPos_Task, TargetVel_Task, TargetAcc_Task, findPos_Task, ActualPos_Task, ActualVel_Rad, double_gt, JointState, ControlSubIndex ); Control->TaskInvDynController(TargetPos_Task, TargetVel_Task, TargetAcc_Task, ActualPos_Rad, ActualVel_Rad, TargetToq, double_dt, ControlIndex2 ); Control->GetControllerStates(TargetPos_Rad, TargetVel_Rad, ErrorPos_Task ); } else if( ControlIndex1 == CTRLMODE_IMPEDANCE_TASK ) { if( ControlIndex2 == 3 ) { DualArm->pKin->GetForwardKinematicsWithRelative(ActualPos_Task); } else { DualArm->pKin->GetForwardKinematics(ActualPos_Task); } motion->TaskMotion(TargetPos_Task, TargetVel_Task, TargetAcc_Task, findPos_Task, ActualPos_Task, ActualVel_Rad, double_gt, JointState, ControlSubIndex ); Control->TaskImpedanceController(ActualPos_Rad, ActualVel_Rad, TargetPos_Task, TargetVel_Task, TargetAcc_Task, ExternalForce, TargetToq, ControlIndex2 ); Control->GetControllerStates(TargetPos_Rad, TargetVel_Rad, ErrorPos_Task ); } else { motion->JointMotion( TargetPos_Rad, TargetVel_Rad, TargetAcc_Rad, finPos, ActualPos_Rad, ActualVel_Rad, double_gt, JointState, ControlSubIndex ); Control->InvDynController( ActualPos_Rad, ActualVel_Rad, TargetPos_Rad, TargetVel_Rad, TargetAcc_Rad, TargetToq, double_dt ); } DualArm->TorqueConvert(TargetToq, TargetTor, MaxTor); //write the motor data for(int j=0; j < DUAL_ARM_DOF; ++j) { if( double_gt >= 0.1 ) { //ecat_elmo[j].writeTorque(TargetTor[j]); } else { ecat_elmo[j].writeTorque(0); } } } #if defined(_ECAT_ON_) ecatmaster.TxUpdate(0, rt_timer_read()); #endif 
// For EtherCAT performance statistics p1 = p3; p3 = rt_timer_read(); now = rt_timer_read(); if ( isSlaveInit() ) { double_dt = (static_cast<double>(p3 - p1))*1e-3; // us double_gt = (static_cast<double>(p3 - start))*1e-9; // s calculation_time = (long)(now - previous); system_ready = 1; //all drives have been done if ( worst_time < calculation_time ) worst_time = calculation_time; if( calculation_time >= cycle_ns ) { fault_count++; worst_time = 0; } } else { if(ecatmaster.GetConnectedSlaves() < ELMO_TOTAL) { //signal_handler(1); } if(system_ready) down_count++; system_ready = 0; double_gt = 0; worst_time = 0; calculation_time = 0; start = rt_timer_read(); } } rt_queue_unbind(&msg_tcpip); } void tcpip_run(void *arg) { RTIME p1, p2, p3; PacketHandler packet; Poco::Net::SocketAddress server_addr(SERVER_PORT); Poco::Net::ServerSocket server_sock(server_addr); Poco::Net::Socket::SocketList connectedSockList; connectedSockList.push_back(server_sock); TCP_Packet_Task packet_task; TCP_Packet_Task packet_task_send; void *msg; RTIME tcp_cycle_ns = 8000e3; rt_task_set_periodic(nullptr, TM_NOW, tcp_cycle_ns); //ms while(true) { rt_task_wait_period(nullptr); if(break_flag) break; p1 = rt_timer_read(); Poco::Net::Socket::SocketList readList(connectedSockList.begin(), connectedSockList.end()); Poco::Net::Socket::SocketList writeList(connectedSockList.begin(), connectedSockList.end()); Poco::Net::Socket::SocketList exceptList(connectedSockList.begin(), connectedSockList.end()); Poco::Timespan timeout; if( Poco::Net::Socket::select(readList, writeList, exceptList, timeout) != 0 && system_ready ) { Poco::Net::Socket::SocketList delSockList; for (auto& readSock : readList) { if (server_sock == readSock) { auto newSock = server_sock.acceptConnection(); connectedSockList.push_back(newSock); //std::cout << "New Client connected" << std::endl; } else { auto n = ((Poco::Net::StreamSocket*)&readSock)->receiveBytes(packet_task.data, sizeof(TCP_Packet_Task)); if (n > 0) { packet_task_send = 
packet_task;  // keep a copy to echo back to the client
// Forward the received packet to the control task via the RT queue.
msg = rt_queue_alloc(&msg_tcpip, sizeof(TCP_Packet_Task));
if(msg == nullptr) rt_printf("rt_queue_alloc Failed to allocate\n");
memcpy(msg, &packet_task.data, sizeof(TCP_Packet_Task));
rt_queue_send(&msg_tcpip, msg, sizeof(TCP_Packet_Task), Q_NORMAL);
((Poco::Net::StreamSocket*)&readSock)->sendBytes(packet_task_send.data, sizeof(TCP_Packet_Task));
}
else
{
// receiveBytes returned 0/negative: the peer closed the connection.
//std::cout << "Client Disconnected" << std::endl;
delSockList.push_back(readSock);
}
}
}
// Drop any sockets whose clients disconnected this cycle.
for (auto& delSock : delSockList)
{
auto delIter = std::find_if(connectedSockList.begin(),connectedSockList.end(),[&delSock](auto& sock){return delSock == sock ? true : false;});
if (delIter != connectedSockList.end())
{
connectedSockList.erase(delIter);
//std::cout << "Remove the Client from connectedSockList" << std::endl;
}
}
}
// TCP loop timing statistics (p3 holds the previous cycle's end time).
p3 = p2;
p2 = rt_timer_read();
calculation_time_tcp = (long)(p2 - p1);
double_dt_tcp = (static_cast<double>(p2 - p3))*1e-3; // us
if ( worst_time_tcp < calculation_time_tcp ) worst_time_tcp = calculation_time_tcp;
if( calculation_time_tcp >= tcp_cycle_ns ) { fault_count_tcp++; worst_time_tcp = 0; }
}
}

// Periodic console status task: prints timing statistics, per-joint state and
// forward kinematics while the system is ready, otherwise slave diagnostics.
void print_run(void *arg)
{
long stick=0;
int count=0;
rt_printf("\nPlease WAIT at least %i (s) until the system getting ready...\n", WAKEUP_TIME);

/* rt_task_set_periodic arguments: &task (NULL = self), start time,
 * period (here: 500e6 ns = 500 ms). */
RTIME PrintPeriod = 500e6;
rt_task_set_periodic(nullptr, TM_NOW, PrintPeriod);
while (true)
{
rt_task_wait_period(nullptr); //wait for next cycle
if(break_flag) break;
if ( system_ready )
{
rt_printf("Time=%0.2fs\n", double_gt);
rt_printf("DesiredTask=%0.2fus, Calculation= %0.2fus, WorstCalculation= %0.2fus, RTFault=%d, EcatDown=%d\n", double_dt, static_cast<double>(calculation_time)*1e-3, static_cast<double>(worst_time)*1e-3, fault_count, down_count);
#if defined(_TCPIP_ON_)
rt_printf("DesiredTask(tcp)=%0.2fus, Calculation(tcp)= %0.2fus, WorstCalculation(tcp)= %0.2fus, RTFault(tcp)=%d\n", double_dt_tcp, static_cast<double>(calculation_time_tcp)*1e-3,
static_cast<double>(worst_time_tcp)*1e-3, fault_count_tcp);
#endif
rt_printf("\nIndex1:0x%02X, Index2:0x%02X, SubIndex:0x%02X", ControlIndex1, ControlIndex2, ControlSubIndex);
// Per-joint actual/target positions, velocities and torques.
for(int j=0; j<DUAL_ARM_DOF; ++j)
{
rt_printf("\t \nID: %d,", j+1);
#if defined(_DEBUG_)
//rt_printf(" StatWord: 0x%04X, ", StatusWord[j]);
//rt_printf(" DeviceState: %d, ", DeviceState[j]);
rt_printf(" ModeOfOp: %d,", ModeOfOperationDisplay[j]);
//rt_printf("\n");
#endif
rt_printf("\tActPos(Deg): %0.2lf,", ActualPos_Rad(j)*RADtoDEG);
rt_printf("\tTarPos(Deg): %0.2lf,", TargetPos_Rad(j)*RADtoDEG);
//rt_printf("\tActPos(inc): %d,", ActualPos[j]);
//rt_printf("\n");
rt_printf("\tActVel(Deg/s): %0.1lf,", ActualVel_Rad(j)*RADtoDEG);
rt_printf("\tTarVel(Deg/s): %0.1lf,", TargetVel_Rad(j)*RADtoDEG);
//rt_printf("\tActVel(inc/s): %d,", ActualVel[j]);
//rt_printf("\n");
rt_printf("\tActTor(%): %d,", ActualTor[j]);
rt_printf("\tCtrlTor(Nm): %0.1lf", TargetToq(j));
//rt_printf("\tTarTor(%): %d", TargetTor[j]);
//rt_printf("\n");
}
rt_printf("\n");
rt_printf("\nForward Kinematics -->");
// Per-chain task-space pose, target and error (orientation in degrees).
for(int cNum = 0; cNum < NumChain; cNum++)
{
rt_printf("\n Num:%d: x:%0.3lf, y:%0.3lf, z:%0.3lf, u:%0.3lf, v:%0.3lf, w:%0.3lf",cNum, ForwardPos[cNum](0), ForwardPos[cNum](1), ForwardPos[cNum](2), ForwardOri[cNum](0)*RADtoDEG, ForwardOri[cNum](1)*RADtoDEG, ForwardOri[cNum](2)*RADtoDEG);
rt_printf("\n Num:%d: dx:%0.3lf, dy:%0.3lf, dz:%0.3lf, du:%0.3lf, dv:%0.3lf, dw:%0.3lf",cNum, TargetPos_Task(6*cNum+3), TargetPos_Task(6*cNum+4), TargetPos_Task(6*cNum+5), TargetPos_Task(6*cNum)*RADtoDEG, TargetPos_Task(6*cNum+1)*RADtoDEG, TargetPos_Task(6*cNum+2)*RADtoDEG);
rt_printf("\n Num:%d: e_x:%0.3lf, e_y:%0.3lf, e_z:%0.3lf, e_u:%0.3lf, e_v:%0.3lf, e_w:%0.3lf\n",cNum, ErrorPos_Task(6*cNum+3), ErrorPos_Task(6*cNum+4), ErrorPos_Task(6*cNum+5), ErrorPos_Task(6*cNum)*RADtoDEG, ErrorPos_Task(6*cNum+1)*RADtoDEG, ErrorPos_Task(6*cNum+2)*RADtoDEG);
//rt_printf("\n Manipulability: Task:%0.2lf, Orient:%0.2lf", TaskCondNumber[cNum], OrientCondNumber[cNum]);
}
rt_printf("\n\n");
}
else
{
// System not ready yet: count elapsed seconds and print slave diagnostics.
if ( ++count >= roundl(static_cast<double>(NSEC_PER_SEC)/static_cast<double>(PrintPeriod)) )
{
++stick;
count=0;
}
if ( count==0 )
{
rt_printf("\nReady Time: %i sec", stick);
rt_printf("\nMaster State: %s, AL state: 0x%02X, ConnectedSlaves : %d", ecatmaster.GetEcatMasterLinkState().c_str(), ecatmaster.GetEcatMasterState(), ecatmaster.GetConnectedSlaves());
for(int i=0; i<((int)ecatmaster.GetConnectedSlaves()-1); i++)
{
rt_printf("\nID: %d , SlaveState: 0x%02X, SlaveConnection: %s, SlaveNMT: %s ", i, ecatmaster.GetSlaveState(i), ecatmaster.GetSlaveConnected(i).c_str(), ecatmaster.GetSlaveNMT(i).c_str());
rt_printf(" SlaveStatus : %s,", DeviceState[i].c_str());
rt_printf(" StatWord: 0x%04X, ", StatusWord[i]);
}
rt_printf("\n");
}
}
}
}

// Non-blocking keyboard hit test: temporarily puts stdin into raw,
// non-blocking mode; returns 1 if a key is pending (it is pushed back
// with ungetc so the caller can read it), else 0.
int kbhit()
{
struct termios oldt, newt;
int ch;
int oldf;
tcgetattr(STDIN_FILENO, &oldt);
newt = oldt;
newt.c_lflag &= ~(ICANON | ECHO);
tcsetattr(STDIN_FILENO, TCSANOW, &newt);
oldf = fcntl(STDIN_FILENO, F_GETFL, 0);
fcntl(STDIN_FILENO, F_SETFL, oldf | O_NONBLOCK);
ch = getchar();
tcsetattr(STDIN_FILENO, TCSANOW, &oldt);
fcntl(STDIN_FILENO, F_SETFL, oldf);
if(ch != EOF)
{
ungetc(ch, stdin);
return 1;
}
return 0;
}

// Periodic keyboard-event task (1000e3 ns = 1 ms); the 't'/'k' cases
// currently do nothing.
void event_run(void *arg)
{
int key_event=0;
rt_task_set_periodic(nullptr, TM_NOW, 1000e3);
while(true)
{
rt_task_wait_period(nullptr);
if(break_flag) break;
if(system_ready)
{
if(kbhit())
{
key_event = getchar();
rt_printf("\nReceived Data %c\n", key_event);
switch(key_event)
{
case 't':
case 'k':
break;
default:
break;
}
}
}
}
}

/****************************************************************************/
// Signal handler: sets break_flag so all task loops exit, deletes the RT
// tasks, then deactivates the EtherCAT master.
void signal_handler(int signum)
{
rt_printf("\nSignal Interrupt: %d", signum);
break_flag=true;
#if defined(_KEYBOARD_ON_)
rt_printf("\nEvent RTTask Closing....");
rt_task_delete(&event_task);
rt_printf("\nEvent RTTask Closing Success....");
#endif
#if defined(_TCPIP_ON_)
rt_printf("\nTCPIP RTTask Closing....");
rt_task_delete(&tcpip_task);
rt_printf("\nTCPIP RTTask Closing Success....");
#endif
#if defined(_PRINT_ON_)
rt_printf("\nConsolPrint RTTask Closing....");
rt_task_delete(&print_task);
rt_printf("\nConsolPrint RTTask Closing Success....");
#endif
rt_printf("\nControl RTTask Closing....");
rt_task_delete(&RTArm_task);
rt_printf("\nControl RTTask Closing Success....");
rt_printf("\n\n\t !!RT-DualArm Client System Stopped!! \n");
ecatmaster.deactivate();
}

/****************************************************************************/
// Entry point: installs signal handlers, locks memory, configures the
// EtherCAT master and spawns the RT tasks.
int main(int argc, char **argv)
{
signal(SIGHUP, signal_handler);
signal(SIGINT, signal_handler);
signal(SIGQUIT, signal_handler);
signal(SIGIOT, signal_handler);
signal(SIGFPE, signal_handler);
signal(SIGKILL, signal_handler); // NOTE(review): SIGKILL cannot be caught; this call has no effect
signal(SIGSEGV, signal_handler);
signal(SIGTERM, signal_handler);

/* Avoids memory swapping for this program */
mlockall( MCL_CURRENT | MCL_FUTURE );

// Perform auto-init of rt_print buffers if the task doesn't do so
rt_print_auto_init(1);

// TO DO: Specify the cycle period (cycle_ns) here, or use default value
//cycle_ns = 500e3; // nanosecond -> 2kHz
cycle_ns = 1000e3; // nanosecond -> 1kHz
//cycle_ns = 1250e3; // nanosecond -> 800Hz
//cycle_ns = 2000e3; // nanosecond -> 500Hz

#if defined(_ECAT_ON_)
for(int SlaveNum=0; SlaveNum < ELMO_TOTAL; SlaveNum++)
{
ecatmaster.addSlave(0, SlaveNum, &ecat_elmo[SlaveNum]);
}
ecatmaster.activateWithDC(0, cycle_ns);
#endif

// RTArm_task: create and start
rt_printf("\n-- Now running rt tasks ...\n");

rt_queue_create(&msg_tcpip, "tcp_queue", sizeof(TCP_Packet_Task)*5, 40, Q_FIFO|Q_SHARED);

#if defined(_TCPIP_ON_)
rt_task_create(&tcpip_task, "TCPIP_proc", 0, 30, 0);
rt_task_start(&tcpip_task, &tcpip_run, nullptr);
#endif

#if defined(_PRINT_ON_)
rt_task_create(&print_task, "Console_proc", 0, 20, 0);
rt_task_start(&print_task, &print_run, nullptr);
#endif

rt_task_create(&RTArm_task, "Control_proc", 1024*1024*4, 99, 0); // MUST SET at least 4MB stack-size (MAXIMUM Stack-size ; 8192 kbytes)
rt_task_start(&RTArm_task, &RTRArm_run,
nullptr);

#if defined(_KEYBOARD_ON_)
rt_task_create(&event_task, "Event_proc", 0, 80, 0);
rt_task_start(&event_task, &event_run, nullptr);
#endif

// Must pause here: block until a signal arrives; signal_handler() then
// deletes the RT tasks and sets break_flag.
pause();

// task delete check: ensure the EtherCAT master is released on exit.
ecatmaster.deactivate();

return 0;
}
#!/bin/bash
# Launch the opsica demo: the cloud, two data owners and a querier, each in
# its own xterm. Pass -s to skip key generation (drops the querier's -g flag).

# $(...) is preferred over legacy backticks; quote to survive paths with spaces.
BINDIR="$(pwd)/build/demo"
SRCDIR="$(pwd)/demo"
TMPDIR=/tmp/opsica
KEYOPT=-g

while getopts s OPT; do
    case $OPT in
        "s" ) KEYOPT="" ;;
    esac
done

CONFIG=${SRCDIR}/querier/config.txt
INFILE_A=${SRCDIR}/dataownerA/datasetA2_1.txt
INFILE_B=${SRCDIR}/dataownerB/datasetB2_1.txt
OUTDIR_A=${TMPDIR}/A
OUTDIR_B=${TMPDIR}/B

# mkdir -p is a no-op when the directory already exists, so no existence
# check is needed.
mkdir -p "${OUTDIR_A}" "${OUTDIR_B}"

# Each xterm keeps an interactive shell open after its program exits so the
# output stays visible.
xterm -T "cloud"      -e "/bin/bash -c 'cd ${BINDIR}/cloud && ./cloud; exec /bin/bash -i'"&
xterm -T "dataownerA" -e "/bin/bash -c 'cd ${BINDIR}/dataownerA && ./dataownerA -i ${INFILE_A} -o ${OUTDIR_A}; exec /bin/bash -i'"&
xterm -T "dataownerB" -e "/bin/bash -c 'cd ${BINDIR}/dataownerB && ./dataownerB -i ${INFILE_B} -o ${OUTDIR_B}; exec /bin/bash -i'"&
xterm -T "querier"    -e "/bin/bash -c 'cd ${BINDIR}/querier && ./querier ${KEYOPT} -c ${CONFIG}; exec /bin/bash -i'"&
#ifndef PORT2_H
#define PORT2_H

// Module initialisation (presumably must run before Port2_echo() —
// confirm against the implementation).
extern void Port2_init();

// Attempt to interrupt a Port2 peripheral and wait for an interrupt in return.
// Returns the round trip response time of the peripheral (including the libnds
// interrupt dispatch latency) in bus clock ticks, if there is a response.
// Returns -1 if there is no response.
extern int Port2_echo();

#endif
class Graph:
    """Directed graph whose edges carry a relationship-type label.

    Edges are stored as an adjacency mapping:
    ``parent_key -> [(relationship_type, child_key), ...]``.
    """

    def __init__(self):
        # Adjacency list; keys appear lazily on first add_edge call.
        self.edges = {}

    def add_edge(self, relationship_type, parent_key, child_key):
        """Append a typed edge from parent_key to child_key.

        Duplicate edges are allowed (matching the original behaviour).
        """
        # setdefault replaces the explicit "if key not in dict" initialisation.
        self.edges.setdefault(parent_key, []).append((relationship_type, child_key))


def add_edges_to_graph(graph, relationships):
    """Add every (relationship_type, parent_key, child_key) triple to graph."""
    # Unpack directly in the loop header instead of indexing a temp variable.
    for relationship_type, parent_key, child_key in relationships:
        graph.add_edge(relationship_type, parent_key, child_key)
/**
 * Work around image cross-origin (CORS) issues by routing image URLs
 * through a proxy endpoint.
 * @export
 * @param {string} api proxy endpoint prefix the image URL is appended to
 * @return {*} a function mapping an image URL to its proxied form
 */
export function imageProxy(api: string) {
  return function (url: string) {
    // An empty input maps to an empty output.
    if (url == '') {
      return '';
    }
    // Inline base64 data URIs need no proxying; everything else is
    // URL-encoded and appended to the proxy endpoint.
    return /^data:image\/\w+;base64,/.test(url)
      ? url
      : api + encodeURIComponent(url);
  };
}
# Report over a post's content: total word count plus a case-insensitive
# histogram of word occurrences (punctuation stripped).
class PostReport < Struct.new(:word_count, :word_histogram)
  # Build a PostReport for +post+ (any object responding to #content).
  def self.generate(post)
    PostReport.new(
      # Word_count: number of whitespace-separated tokens.
      post.content.split.map { |word| word.gsub(/\W/, '') }.count,
      # Word_histogram: lowercased, punctuation-stripped word frequencies.
      calc_histogram(post)
    )
  end

  # Frequency of each normalised (downcased, non-word chars removed) token.
  def self.calc_histogram(post)
    (post
      .content
      .split
      .map { |word| word.gsub(/\W/, '') }
      .map(&:downcase)
      .group_by { |word| word }
      .transform_values(&:size))
  end

  # BUG FIX: a bare `private` does not apply to `def self.` class methods;
  # private_class_method actually hides calc_histogram as intended.
  private_class_method :calc_histogram
end
module QuickSearch
  # QuickSearch searcher for the WorldCat Knowledge Base API.
  class WorldCatKnowledgeBaseSearcher < QuickSearch::Searcher
    # Perform the HTTP GET and parse the JSON body into @response.
    def search
      raw_response = @http.get(base_url, parameters, headers)
      @response = JSON.parse(raw_response.body)
    end

    # Map the parsed response entries to OpenStruct results (memoised).
    # NOTE(review): the guard reads `results_list` while the cache is written
    # to @results_list — verify the superclass defines an attr_reader linking
    # the two; otherwise the memoisation never takes effect.
    def results # rubocop:disable Metrics/MethodLength
      if results_list
        results_list
      else
        @results_list = []
        @response['entries'].each do |value|
          result = OpenStruct.new
          result.title = value['title']
          # NOTE(review): assumes the third link is the desired one —
          # confirm against the API response shape.
          result.link = value['links'][2]['href']
          result.author = value['kb:publisher']
          result.date = published(value)
          @results_list << result
        end
        @results_list
      end
    end

    def base_url
      QuickSearch::Engine::WORLD_CAT_KNOWLEDGE_BASE_CONFIG['base_url']
    end

    # Query-string parameters for the knowledge-base search request.
    def parameters
      {
        'q' => http_request_queries['not_escaped'],
        'itemsPerPage' => '3',
        'institution_id' => QuickSearch::Engine::WORLD_CAT_KNOWLEDGE_BASE_CONFIG['institution_id'],
        'wskey' => QuickSearch::Engine::WORLD_CAT_KNOWLEDGE_BASE_CONFIG['wskey']
      }
    end

    def headers
      {
        'Accept' => 'application/json'
      }
    end

    # Total result count as reported by the OpenSearch response.
    def total
      @response['os:totalResults']
    end

    def loaded_link
      QuickSearch::Engine::WORLD_CAT_KNOWLEDGE_BASE_CONFIG['loaded_link'] +
        http_request_queries['uri_escaped']
    end

    # Year (as 'YYYY') parsed from the entry's 'published' timestamp.
    def published(value)
      datetime = value['published']
      d = Time.zone.parse(datetime)
      d.strftime('%Y')
    end
  end
end
#!/usr/bin/env zsh #bsub < hpcSeq96cells.sh ./startHpcCom96cells.sh ./startHpcCom96cellsInterval.sh ./startHpcPar96cells.sh ./startHpcPar96cellsInterval.sh
#pragma once #include <array> #include "queue.h" #define LOG_BLOCK_SIZE 1024 #define LOG_BUFFER_SIZE 1024 using block = std::array<uint8_t, LOG_BLOCK_SIZE>; using queue = spsc_queue<block, LOG_BUFFER_SIZE>;
#!/bin/bash
# Provision a device with Node.js (latest v4.x LTS) and the Arrow TiSense
# AWS IoT example. Expects BASE_DEVICE_DIR, DEVICE_ARCH, DEVICE_USER and
# ARROW_DIR to be set by the calling environment — TODO confirm.

#check to see if online, if not online we cant do anything
if sudo ping -q -c 1 -W 1 google.com >/dev/null; then
#online, continue
echo -e "Online..."
else
echo -e "No internet connection, please follow the reconnect instructions before continuning..."
exit 1
fi

TISENSE="aws-iot-tisense-js"
DEFAULT_SENSE_VERSION="1.0.0"

#ask for arrow dragonboard version to install
echo -e "TiSense Version to install ($DEFAULT_SENSE_VERSION is the default):"
read pVersion
if [ "$pVersion" != "" ] ; then
DEFAULT_SENSE_VERSION=$pVersion
fi

echo "***Configuring system with additional dependencies..."

#we need a higher version of nodejs
sudo apt-get remove node nodejs npm
sudo apt-get update
sudo apt-get upgrade

#remove some stuff to free up room
sudo apt-get remove oxygen-icon-theme opencv-data chromium-browser oxygen-icon-theme openjdk-7-jre-headless ti-llvm-3.3
#clean up
sudo apt-get autoremove

sudo apt-get install locales resolvconf bluetooth bluez curl unzip python virtualenv oracle-java8-installer
#clean up
sudo apt-get autoremove

#check to see if online, if not online we cant do anything
if sudo ping -q -c 1 -W 1 google.com >/dev/null; then
#online, continue
echo -e "Online..."
else
echo -e "No internet connection, please follow the reconnect instructions before continuning..."
exit 1
fi

cd $BASE_DEVICE_DIR

#see if node exists.
#TODO (:gtam)
#LTS
#https://nodejs.org/dist/latest-v4.x/
#Stable
#https://nodejs.org/dist/latest/
#we will install the LTS version to keep it simple
# Scrape the dist listing for the archive matching this device architecture,
# then pull the x.y.z version number out of the archive file name.
NODE_LIB_ARCHIVE_FILE=`wget -q https://nodejs.org/dist/latest-v4.x/ -O - | grep $DEVICE_ARCH.tar.xz | awk -F\" '{print $2}'`
NODE_VERSION=`echo $NODE_LIB_ARCHIVE_FILE | sed 's/^.*[^0-9]\([0-9]*\.[0-9]*\.[0-9]*\).*$/\1/'`

echo "***Installing NodeJS $NODE_VERSION..."

if [ ! -d tmp ] ; then
mkdir tmp
fi
cd tmp
wget https://nodejs.org/dist/latest-v4.x/$NODE_LIB_ARCHIVE_FILE
tar -xf $NODE_LIB_ARCHIVE_FILE
if [ ! -d $BASE_DEVICE_DIR/share ] ; then
mkdir -p $BASE_DEVICE_DIR/share
fi
mv node-v$NODE_VERSION-linux-$DEVICE_ARCH $BASE_DEVICE_DIR/share

#dont even check - just remove
sudo rm -Rf /usr/local/bin/node
sudo rm -Rf /usr/local/bin/npm
sudo rm -Rf /usr/local/bin/grunt

#create the symlinks
sudo ln -s $BASE_DEVICE_DIR/share/node-v$NODE_VERSION-linux-$DEVICE_ARCH/bin/node /usr/local/bin/node
sudo ln -s $BASE_DEVICE_DIR/share/node-v$NODE_VERSION-linux-$DEVICE_ARCH/lib/node_modules/npm/bin/npm-cli.js /usr/local/bin/npm

#fix the permissions
sudo chown -R $DEVICE_USER:$DEVICE_USER $BASE_DEVICE_DIR/share
sudo chmod 777 /usr/local/bin/node
sudo chmod 777 /usr/local/bin/npm

cd $BASE_DEVICE_DIR
rm -Rf tmp

#link it if it doesnt exist - especially if there is no bash profile
npm install -g grunt-cli
sudo ln -s $BASE_DEVICE_DIR/share/node-v$NODE_VERSION-linux-$DEVICE_ARCH/lib/node_modules/grunt-cli/bin/grunt /usr/local/bin/grunt
sudo chmod 777 /usr/local/bin/grunt

echo "***Installing Arrow TiSense Example version $DEFAULT_SENSE_VERSION (tisense)"

# navigate to the base directory
cd $BASE_DEVICE_DIR

#-------------------
# install/update tisense demo
#-------------------
if [ ! -d "$ARROW_DIR/$TISENSE" ]; then
mkdir -p $ARROW_DIR
cd $ARROW_DIR
git clone https://github.com/ArrowElectronics/$TISENSE.git $TISENSE
fi

# Update the checkout and pin it to the requested release tag.
cd $BASE_DEVICE_DIR/$ARROW_DIR/$TISENSE
git pull
git checkout tags/$DEFAULT_SENSE_VERSION

#reset the path
cd $BASE_DEVICE_DIR
/**
 * Build an id string by incrementing `correlativo` and optionally
 * prepending `prefix`.
 * @param {string} [prefix] optional prefix; falsy values yield the bare id
 * @param {number} correlativo counter value; the id is correlativo + 1
 * @returns {string} the generated id
 */
export function uniqueId(prefix, correlativo) {
  // Number(...) mirrors the ToNumber coercion the original `++` performed.
  const id = String(Number(correlativo) + 1);
  return prefix ? prefix + id : id;
}
/**
 * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.model.option.pricing.fourier;

/**
 * Marker interface for a characteristic exponent with value at -i equal to 0.
 * That is E[exp(x)] = 1, where E[] is the expectation and x is the random
 * variable — the martingale condition used in Fourier option pricing.
 */
public interface MartingaleCharacteristicExponent extends CharacteristicExponent {

}
<reponame>pjunior/titanium
// Smoke tests for the Titanium.App namespace: verifies the top-level API
// functions exist and that the apivalidator test app's configured values
// (id, name, version, update URL) are exposed correctly.
testSuite("Titanium.App API tests", "dummy.html", {
  run: function() {
    // The namespace objects themselves must exist.
    test("Titanium.App exists", function() {
      assertNotNull(Titanium);
      assertNotNull(Titanium.App);
    });

    // Every expected top-level API member must be defined.
    test("Titanium.App top-level API", function() {
      assertNotNull(Titanium.App);
      assertNotNull(Titanium.App.getID);
      assertNotNull(Titanium.App.getName);
      assertNotNull(Titanium.App.getVersion);
      assertNotNull(Titanium.App.getUpdateURL);
      assertNotNull(Titanium.App.getGUID);
      assertNotNull(Titanium.App.appURLToPath);
      assertNotNull(Titanium.App.arguments);
      assertNotNull(Titanium.App.exit);
      assertNotNull(Titanium.App.loadProperties);
      assertNotNull(Titanium.version);
      assertNotNull(Titanium.platform);
    });

    // Values must match the test application's configuration.
    test("Titanium.App top-level data", function() {
      assertEquals(Titanium.App.getID(), "com.titaniumapp.apivalidator");
      assertEquals(Titanium.App.getName(), "apivalidator");
      assertEquals(Titanium.App.getVersion(), "0.1");
      assertEquals(Titanium.App.getUpdateURL(), "http://updatesite.titaniumapp.com");
      assert(Titanium.App.arguments.length >= 1);
      assert(Titanium.platform in {'win32':1, 'osx':1, 'linux':1});
    });
  }
});
<reponame>mariosky/protoboard<gh_stars>1-10
#!/usr/bin/env python
# Classic "tipper" fuzzy inference system: service and food quality map to a
# tip percentage via three fuzzy rules. Built on the project-local FIS module.
from FIS import *

# Variables
service = LinguisticVariable('service')
service.addMF('poor',MF.Gaussian(1.5,0.0))
service.addMF('good',MF.Gaussian(1.5,5.0))
service.addMF('excelent',MF.Gaussian(1.5,10))

food = LinguisticVariable('food')
food.addMF('rancid',MF.Trapezoidal(0.0, 0.0, 1.0, 3.0))
food.addMF('delicious',MF.Trapezoidal(7.0, 9.0, 10.0, 10.0))

# Output variable: tip in the 0-30 range.
tip = LinguisticVariable('tip', type = 'out', range = (0,30))
tip.addMF('cheap',MF.Triangular(0.0,5.0,10.0))
tip.addMF('average',MF.Triangular(10.0,15.0,20.0))
tip.addMF('generous',MF.Triangular(20.0,25.0,30.0))

# Rules
# r1: poor service OR rancid food -> cheap tip
r1 = FuzzyRule()
r1.antecedent.append(FuzzyOperator('or',FuzzyProposition(service,service.mfs['poor']),FuzzyProposition(food,food.mfs['rancid'])))
r1.consequent.append(FuzzyProposition(tip,tip.mfs['cheap']))

# r2: good service -> average tip
r2 = FuzzyRule()
r2.antecedent.append(FuzzyProposition(service,service.mfs['good']))
r2.consequent.append(FuzzyProposition(tip,tip.mfs['average']))

# r3: excellent service OR delicious food -> generous tip
r3 = FuzzyRule()
r3.antecedent.append(FuzzyOperator('or',FuzzyProposition(service,service.mfs['excelent']),FuzzyProposition(food,food.mfs['delicious'])))
r3.consequent.append(FuzzyProposition(tip,tip.mfs['generous']))

reglas = [r1,r2,r3]
fis = FIS(reglas)

# NOTE(review): this shadows the built-in eval() at module level — consider
# renaming if the module is ever star-imported elsewhere.
def eval(s, f):
    """Evaluate the FIS for service score s and food score f."""
    service.current_value = s
    food.current_value = f
    return fis.eval()

if __name__ == '__main__':
    eval(5,5)
<reponame>scenarioo/scenarioo-js<filename>src/docuWriter/docuWriter.js<gh_stars>1-10
import path from 'path';
import fs from 'fs';
import del from 'del';
import isUndefined from 'lodash/isUndefined';
import isArray from 'lodash/isArray';
import isString from 'lodash/isString';
import omit from 'lodash/omit';
import pick from 'lodash/pick';
import merge from 'lodash/merge';
import Q from 'q';
import mkdirp from 'mkdirp';
import {leadingZeros, encodeFileName} from './utils';
import store from '../scenariooStore';
import identifierSanitizer from './identifierSanitizer';
import entityValidator from './entityValidator';
import xmlWriter from './xmlWriter';
import pageNameExtractor from './pageNameExtractor';

// Target directory for the current build; set by start(), read by every
// save* function below.
let buildOutputDir;

/**
 * @namespace docuWriter
 */
const docuWriter = {
  cleanBuild,
  registerPageNameFunction,
  start,
  saveUseCase,
  saveBuild,
  saveScenario,
  saveStep
};

export default docuWriter;

/**
 * Use this to register your custom pageName function.
 * Scenarioo will pass in a node.js url object.
 *
 * @func docuWriter#registerPageNameFunction
 */
export function registerPageNameFunction(pageNameFunction) {
  pageNameExtractor.registerCustomExtractionFunction(pageNameFunction);
}

/**
 * Initializes the writer and also saves the branch.xml file.
 * Is invoked by the reporter at the beginning of the test run
 *
 * @func docuWriter#start
 * @param {object} branch
 * @param {string} buildname
 * @param {string} scenariooTargetDirectory
 * @param {object} options
 * @returns {Promise}
 */
export function start(branch, buildname, scenariooTargetDirectory, options) {
  entityValidator.validateBranch(branch);
  // NOTE(review): stored on `this` (the module's export object when invoked
  // as docuWriter.start) rather than a module-local — confirm callers always
  // invoke it as a method.
  this.branch = branch;
  this.branch.name = identifierSanitizer.sanitize(branch.name);

  const buildDirName = encodeFileName(identifierSanitizer.sanitize(buildname));

  // generate directories and write branch.xml
  buildOutputDir = path.join(scenariooTargetDirectory, encodeFileName(this.branch.name), buildDirName);

  return cleanBuildOnStartIfEnabled(buildOutputDir, options)
    .then(() => xmlWriter.writeXmlFile('branch', this.branch, path.resolve(path.join(scenariooTargetDirectory, encodeFileName(this.branch.name)), 'branch.xml')));
}

/**
 * Synchronously deletes the output directory of the build described by
 * options (targetDirectory, branchName, buildName, disableScenariooLogOutput).
 */
export function cleanBuild(options) {
  var scenariooTargetDirectory = path.resolve(options.targetDirectory);
  var buildOutputDir = path.join(scenariooTargetDirectory, encodeFileName(identifierSanitizer.sanitize(options.branchName)), encodeFileName(identifierSanitizer.sanitize(options.buildName)));
  if (!options.disableScenariooLogOutput) {
    console.log('Cleaning build output directory for scenarioo documentation of this build: ' + buildOutputDir);
  }
  del.sync(buildOutputDir);
}

/**
 * cleans specified build directory if required by options
 *
 * @param buildOutputDir
 * @param options
 * @returns {Promise}
 */
function cleanBuildOnStartIfEnabled(buildOutputDir, options) {
  if (options && options.cleanBuildOnStart) {
    if (!options.disableScenariooLogOutput) {
      console.log('Cleaning build output directory for scenarioo documentation of this build: ' + buildOutputDir);
    }
    return del(buildOutputDir);
  } else {
    // Nothing to clean: resolve immediately so callers can always chain.
    return Q.when(true);
  }
}

/**
 * invoked by the jasmine reporter at the end of the test run
 * @func docuWriter#saveBuild
 * @param {object} build
 * @returns {Promise}
 */
export function saveBuild(build) {
entityValidator.validateBuild(build); build.name = identifierSanitizer.sanitize(build.name); return xmlWriter.writeXmlFile('build', build, path.join(buildOutputDir, 'build.xml')); } /** * Saves the given useCase to the scenarioo file structure. * Invoked by the reporter at the end of each use case. * * @func docuWriter#saveUseCase * @param {object} useCase * @returns {Promise} */ export function saveUseCase(useCase) { if (isUndefined(buildOutputDir)) { throw 'Cannot save use case. No outputDirectory specified. docuWriter.start(branch, build, targetDir) not invoked?'; } // pick known attributes from given useCase object (user might choose to store other state on the usecase) const useCaseToSave = pick(useCase, ['name', 'description', 'status', 'labels']); entityValidator.validateUseCase(useCaseToSave); const absUseCasePath = path.resolve(buildOutputDir, encodeFileName(useCaseToSave.name)); useCase.name = identifierSanitizer.sanitize(useCaseToSave.name); return xmlWriter.writeXmlFile('useCase', useCaseToSave, path.join(absUseCasePath, 'usecase.xml')); } /** * Saves the given scenario according to the scenarioo file structure. * Invoked by the reporter at the end of each scenario. * * @func docuWriter#saveScenario * @param {object} currentScenario * @param {string} useCaseName * @returns {Promise} */ export function saveScenario(currentScenario, useCaseName) { if (isUndefined(buildOutputDir)) { throw 'Cannot save use scenario. No outputDirectory specified. 
docuWriter.start(branch, build, targetDir) not invoked?'; } // pick known attributes from given scenario object (user might choose to store other state on the scenario) const scenarioToSave = pick(currentScenario, ['name', 'description', 'status', 'labels']); entityValidator.validateScenario(scenarioToSave); const absScenarioPath = path.resolve( buildOutputDir, encodeFileName(useCaseName), encodeFileName(scenarioToSave.name) ); scenarioToSave.name = identifierSanitizer.sanitize(scenarioToSave.name); return xmlWriter.writeXmlFile('scenario', scenarioToSave, path.join(absScenarioPath, 'scenario.xml')); } /** * Saves a step (xml plus screenshot) * * This method can be used in protractor tests directly to define a step explicitly and will be invoked asynchronous in the event queue. * To be invoked in your e2e tests or in your page objects or somehow hooked into protractors click and other important interaction functions. * * @func docuWriter#saveStep * @param {string} [stepTitle] A text to display as caption for this step * @param {object} [additionalProperties] * @param {string[]} [additionalProperties.state] * @param {string[]} [additionalProperties.labels] * @param {object[]} [additionalProperties.screenAnnotations] * @returns {Promise} The returned promise will resolve to an object containing the saved step object, the path to the step xml file as well as the path to the screenshot file */ export function saveStep(stepTitle, additionalProperties) { if (!isString(stepTitle)) { additionalProperties = stepTitle; stepTitle = ''; } // Because this is invoked by the e2e test, // we have to access the store directly from here. if (isUndefined(buildOutputDir)) { // if you disable scenario documentation generation (e.g. via environment variable in your protractor config) // this will still be invoked, since "saveStep(..)" is called from within your tests. // in this case, just do nothing. 
return Q.when(true);
}

// Current step coordinates come straight from the scenarioo store.
const currentScenario = {
  useCaseName: store.getCurrentUseCase().name,
  scenarioName: store.getCurrentScenario().name,
  stepCounter: store.incrementStepCounter()
};

const absScenarioPath = path.resolve(
  buildOutputDir,
  encodeFileName(currentScenario.useCaseName),
  encodeFileName(currentScenario.scenarioName)
);

// Screenshot and step xml are written concurrently.
const screenshotPromise = saveScreenshot(currentScenario.stepCounter, absScenarioPath);
const stepXmlPromise = writeStepXml(stepTitle, currentScenario, absScenarioPath, additionalProperties);
return Q.all([stepXmlPromise, screenshotPromise]).then(results => {
  return {
    step: results[0].step,
    xmlPath: results[0].file,
    screenshotPath: results[1]
  };
});
}

/**
 * Fetches the url and the htmlSource from the current page
 * (uses the protractor/webdriver globals `browser` and `by`).
 *
 * @ignore
 * @returns {Promise}
 */
function getStepDataFromWebpage() {
  var currentUrlPromise = browser.getCurrentUrl();
  var htmlSourcePromise = browser.getPageSource();
  var visibleTextPromise = browser.findElement(by.css('body')).getText();

  return currentUrlPromise
    .then(currentUrl => {
      return htmlSourcePromise
        .then(htmlSource => {
          return visibleTextPromise
            .then(visibleText => {
              return {
                url: currentUrl,
                source: htmlSource,
                visibleText: visibleText
              };
            });
        });
    });
}

// Derive a sanitized page name from the current browser URL.
function getPageNameFromUrl(urlString) {
  return identifierSanitizer.sanitize(pageNameExtractor.getPageNameFromUrl(urlString));
}

/**
 * writes step xml file (000.xml, 001.xml, etc.)
 */
function writeStepXml(stepTitle, currentScenario, absScenarioPath, additionalProperties) {
  return getStepDataFromWebpage()
    .then(browserData => {
      const currentStepCounter = leadingZeros(currentScenario.stepCounter);
      const pageName = getPageNameFromUrl(browserData.url);
      // Base step payload: page, description, page source and visible text.
      const stepData = {
        page: {
          name: pageName
        },
        stepDescription: {
          index: currentScenario.stepCounter,
          title: stepTitle,
          screenshotFileName: `${currentStepCounter}.png`
        },
        html: {
          htmlSource: browserData.source
        },
        metadata: {
          visibleText: browserData.visibleText
        }
      };

      // now let's add additional properties that were passed in by the developer
      if (additionalProperties && additionalProperties.labels) {
        stepData.stepDescription.labels = additionalProperties.labels;
      }
      if (additionalProperties && additionalProperties.screenAnnotations && isArray(additionalProperties.screenAnnotations)) {
        // x/y/width/height are folded into a nested `region` object.
        stepData.screenAnnotations = additionalProperties.screenAnnotations.map(annotation => {
          return merge({},
            omit(annotation, ['x', 'y', 'width', 'height']),
            {
              region: pick(annotation, ['x', 'y', 'width', 'height'])
            });
        });
      }
      if (additionalProperties && additionalProperties.status) {
        stepData.stepDescription.status = additionalProperties.status;
      }

      const xmlFileName = path.join(absScenarioPath, 'steps', currentStepCounter + '.xml');

      return xmlWriter.writeXmlFile('step', stepData, xmlFileName)
        .then(() => {
          return {
            file: xmlFileName,
            step: stepData
          };
        });
    });
}

// Take a browser screenshot and write it to <scenario>/screenshots/<NNN>.png.
function saveScreenshot(stepCounter, absScenarioPath) {
  const screenShotDir = path.resolve(absScenarioPath, 'screenshots');
  const screenShotFileName = path.resolve(screenShotDir, leadingZeros(stepCounter) + '.png');

  return browser.takeScreenshot()
    .then(data => (
      // recursively create the directory for our new screenshot
      Q.nfcall(mkdirp, screenShotDir)
        .then(() => (
          // then save screenshot file
          Q.nfcall(fs.writeFile, screenShotFileName, data, 'base64')
            .then(() => screenShotFileName)
        ))
    ));
}
import pact from "./pact";

// Dual CommonJS/ES-module entry point:
// assigning to module.exports makes `require("...")` return the pact object
// directly (no `.default` indirection for CJS consumers), while the
// `export default` below serves ES-module / TypeScript importers.
module.exports = exports = pact;

export default pact;

// Re-export the public API surface alongside the default export.
export * from "./verifier";
export * from "./server";
export * from "./publisher";
export * from "./broker";
export * from "./stub";
#!/usr/bin/env bash
# Gracefully stop the locally-run nginx instance rooted at ${root_path},
# then follow its error log (blocks until interrupted with Ctrl-C).

root_path=/Users/titan/repository/lol

# Make sure the log directory exists so the `tail -f` below cannot fail.
mkdir -p "${root_path}/logs"

# Validate the configuration / prefix first; if this fails, the paths are
# wrong and sending the quit signal with the same arguments would fail too.
sudo nginx -t -p "${root_path}" -c "${root_path}/config/nginx.conf" || exit 1

# Ask the nginx master process to shut down gracefully.
sudo nginx -s quit -p "${root_path}" -c "${root_path}/config/nginx.conf"
echo "nginx stop"
echo -e "===========================================\n\n"

# Keep streaming the error log so shutdown problems are immediately visible.
tail -f "${root_path}/logs/error.log"
/**********************************************************************************************************
    NAME:           index_definition.sql
    SYNOPSIS:       Displays the definition of indexes; useful to audit indexes across
                    servers & environments
    DEPENDENCIES:   The following dependencies are required to execute this script:
                    - SQL Server 2005 or newer
    AUTHOR:         <NAME>, http://sqlfool.com
    CREATED:        2012-10-15
    VERSION:        1.0
    LICENSE:        Apache License v2
    ----------------------------------------------------------------------------
    DISCLAIMER:
    This code and information are provided "AS IS" without warranty of any kind,
    either expressed or implied, including but not limited to the implied
    warranties or merchantability and/or fitness for a particular purpose.
    ----------------------------------------------------------------------------
    ---------------------------------------------------------------------------------------------------------
    -- DATE      VERSION      AUTHOR              DESCRIPTION                                               --
    ---------------------------------------------------------------------------------------------------------
       20150619  1.0          Michelle Ufford     Open Sourced on GitHub
**********************************************************************************************************/

-- Single database
--
-- The CTE produces one row per (index, column) pairing; the outer query then
-- collapses those rows back to one row per index, using the
-- STUFF(... FOR XML PATH('')) trick to build comma-separated column lists.
WITH indexCTE AS (
    SELECT st.object_id AS objectID
        , st.name AS tableName
        , si.index_id AS indexID
        , si.name AS indexName
        , si.type_desc AS indexType
        , sc.column_id AS columnID
        -- Append ' DESC' to descending key columns so the output reproduces the ordering.
        , sc.name + CASE WHEN sic.is_descending_key = 1 THEN ' DESC' ELSE '' END AS columnName
        , sic.key_ordinal AS ordinalPosition
        -- key_ordinal > 0 identifies key columns; included columns have key_ordinal = 0.
        , CASE WHEN sic.is_included_column = 0 AND key_ordinal > 0 THEN sc.name ELSE NULL END AS indexKeys
        , CASE WHEN sic.is_included_column = 1 THEN sc.name ELSE NULL END AS includedColumns
        , sic.partition_ordinal AS partitionOrdinal
        , CASE WHEN sic.partition_ordinal > 0 THEN sc.name ELSE NULL END AS partitionColumns
        , si.is_primary_key AS isPrimaryKey
        , si.is_unique AS isUnique
        , si.is_unique_constraint AS isUniqueConstraint
        , si.has_filter AS isFilteredIndex
        , COALESCE(si.filter_definition, '') AS filterDefinition
    FROM sys.tables AS st
    INNER JOIN sys.indexes AS si ON si.object_id = st.object_id
    INNER JOIN sys.index_columns AS sic ON sic.object_id=si.object_id AND sic.index_id=si.index_id
    INNER JOIN sys.columns AS sc ON sc.object_id = sic.object_id and sc.column_id = sic.column_id
)
SELECT DISTINCT @@SERVERNAME AS ServerName
    , DB_NAME() AS DatabaseName
    , tableName
    , indexName
    , indexType
    -- Comma-separated key columns, in key order; STUFF strips the leading ', '.
    , STUFF((
        SELECT ', ' + indexKeys
        FROM indexCTE
        WHERE objectID = cte.objectID
            AND indexID = cte.indexID
            AND indexKeys IS NOT NULL
        ORDER BY ordinalPosition
        FOR XML PATH(''), TYPE).value('.','varchar(max)'),1,1,'') AS indexKeys
    -- Comma-separated INCLUDE columns ('' when the index has none).
    , COALESCE(STUFF((
        SELECT ', ' + includedColumns
        FROM indexCTE
        WHERE objectID = cte.objectID
            AND indexID = cte.indexID
            AND includedColumns IS NOT NULL
        ORDER BY columnID
        FOR XML PATH(''), TYPE).value('.','varchar(max)'),1,1,''), '') AS includedColumns
    -- Partitioning columns, in partition ordinal order ('' when not partitioned).
    , COALESCE(STUFF((
        SELECT ', ' + partitionColumns
        FROM indexCTE
        WHERE objectID = cte.objectID
            AND indexID = cte.indexID
            AND partitionColumns IS NOT NULL
        ORDER BY partitionOrdinal
        FOR XML PATH(''), TYPE).value('.','varchar(max)'),1,1,''), '') AS partitionKeys
    , isPrimaryKey
    , isUnique
    , isUniqueConstraint
    , isFilteredIndex
    -- NOTE(review): spelled FilterDefinition here vs filterDefinition in the CTE;
    -- resolves only under a case-insensitive collation (the SQL Server default).
    , FilterDefinition
FROM indexCTE AS cte
WHERE tableName = 'your_example'  -- replace with the table to audit
ORDER BY tableName
    , indexName;

/*********************************************************************************************************/

-- All databases
--
-- Same query as above, executed in every database via sp_foreachdb and
-- accumulated into #IndexAudit for cross-server / cross-environment diffing.
-- NOTE: sp_foreachdb is not a built-in procedure; it must be installed
-- (community replacement for the undocumented sp_MSforeachdb).
IF OBJECT_ID('tempdb..#IndexAudit') IS NOT NULL
    DROP TABLE #IndexAudit;

CREATE TABLE #IndexAudit (
      serverName SYSNAME
    , databaseName SYSNAME
    , tableName VARCHAR(128)
    , indexName VARCHAR(128)
    , indexType NVARCHAR(60)
    , indexKeys VARCHAR(8000)
    , includedColumns VARCHAR(8000)
    , partitionColumns VARCHAR(8000)
    , isPrimaryKey BIT
    , isUnique BIT
    , isUniqueConstraint BIT
    , isFilteredIndex BIT
    , FilterDefinition VARCHAR(8000)
);

-- All single quotes inside the dynamic batch are doubled ('') for escaping.
EXECUTE sp_foreachdb 'USE ?;
WITH indexCTE AS (
    SELECT st.object_id AS objectID
        , st.name AS tableName
        , si.index_id AS indexID
        , si.type_desc AS indexType
        , si.name AS indexName
        , sc.column_id AS columnID
        , sc.name + CASE WHEN sic.is_descending_key = 1 THEN '' DESC'' ELSE '''' END AS columnName
        , sic.key_ordinal AS ordinalPosition
        , CASE WHEN sic.is_included_column = 0 AND key_ordinal > 0 THEN sc.name ELSE NULL END AS indexKeys
        , CASE WHEN sic.is_included_column = 1 THEN sc.name ELSE NULL END AS includedColumns
        , sic.partition_ordinal AS partitionOrdinal
        , CASE WHEN sic.partition_ordinal > 0 THEN sc.name ELSE NULL END AS partitionColumns
        , si.is_primary_key AS isPrimaryKey
        , si.is_unique AS isUnique
        , si.is_unique_constraint AS isUniqueConstraint
        , si.has_filter AS isFilteredIndex
        , COALESCE(si.filter_definition, '''') AS filterDefinition
    FROM sys.tables AS st
    INNER JOIN sys.indexes AS si ON si.object_id = st.object_id
    INNER JOIN sys.index_columns AS sic ON sic.object_id=si.object_id AND sic.index_id=si.index_id
    INNER JOIN sys.columns AS sc ON sc.object_id = sic.object_id and sc.column_id = sic.column_id
)
INSERT INTO #IndexAudit
SELECT DISTINCT @@SERVERNAME AS ServerName
    , DB_NAME() AS DatabaseName
    , tableName
    , indexName
    , indexType
    , STUFF((
        SELECT '', '' + indexKeys
        FROM indexCTE
        WHERE objectID = cte.objectID
            AND indexID = cte.indexID
            AND indexKeys IS NOT NULL
        ORDER BY ordinalPosition
        FOR XML PATH(''''), TYPE).value(''.'',''varchar(max)''),1,1,'''') AS indexKeys
    , COALESCE(STUFF((
        SELECT '', '' + includedColumns
        FROM indexCTE
        WHERE objectID = cte.objectID
            AND indexID = cte.indexID
            AND includedColumns IS NOT NULL
        ORDER BY columnID
        FOR XML PATH(''''), TYPE).value(''.'',''varchar(max)''),1,1,''''), '''') AS includedColumns
    , COALESCE(STUFF((
        SELECT '', '' + partitionColumns
        FROM indexCTE
        WHERE objectID = cte.objectID
            AND indexID = cte.indexID
            AND partitionColumns IS NOT NULL
        ORDER BY partitionOrdinal
        FOR XML PATH(''''), TYPE).value(''.'',''varchar(max)''),1,1,''''), '''') AS partitionKeys
    , isPrimaryKey
    , isUnique
    , isUniqueConstraint
    , isFilteredIndex
    , FilterDefinition
FROM indexCTE AS cte
ORDER BY tableName
    , indexName;
';

-- For multi-server testing, dump results to a temp table and compare tables
SELECT *
FROM #IndexAudit
WHERE databaseName NOT IN ('tempdb', 'master', 'msdb', 'model')
ORDER BY serverName
    , databaseName
    , tableName
    , indexName;
#!/usr/bin/env bash
# Compile the thrift API for the Python service

# Pull in the shared variables (e.g. $THRIFT_DIR) used by the thrift compile scripts.
. "$(dirname "$0")/.compile-thrift-include"

# Output location for the generated Python bindings, relative to $THRIFT_DIR.
TARGET="../concourse-driver-python"
PACKAGE=$TARGET

# Abort if the thrift sources directory cannot be entered; otherwise the
# compile (and $TARGET, which is relative) would run against the wrong place.
cd "$THRIFT_DIR" || exit 1

# Run the thrift compile
if ! thrift -out "$TARGET" -gen py concourse.thrift; then
    exit 1
fi

echo "Finished compiling the Thrift API for Python to "$(cd "$PACKAGE" && pwd)
exit 0
/******************************************************************************* * Copyright (C) 2016 Maxim Integrated Products, Inc., All Rights Reserved. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL MAXIM INTEGRATED BE LIABLE FOR ANY CLAIM, DAMAGES * OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. * * Except as contained in this notice, the name of Maxim Integrated * Products, Inc. shall not be used except as stated in the Maxim Integrated * Products, Inc. Branding Policy. * * The mere transfer of this software does not imply any licenses * of trade secrets, proprietary technology, copyrights, patents, * trademarks, maskwork rights, or any other form of intellectual * property whatsoever. Maxim Integrated Products, Inc. retains all * ownership rights. 
*******************************************************************************
*/

/* Registers, bit masks and bit positions for the MAX32xxx Flash Controller (FLC). */

#ifndef _MXC_FLC_REGS_H_
#define _MXC_FLC_REGS_H_

#ifdef __cplusplus
extern "C" {
#endif

#include <stdint.h>

/* If types are not defined elsewhere (CMSIS) define them here */
#ifndef __IO
#define __IO volatile
#endif
#ifndef __I
#define __I  volatile const
#endif
#ifndef __O
#define __O  volatile
#endif

/* Codes written to CTRL.ERASE_CODE to arm the corresponding erase operation. */
#define MXC_V_FLC_ERASE_CODE_PAGE_ERASE ((uint8_t)0x55)
#define MXC_V_FLC_ERASE_CODE_MASS_ERASE ((uint8_t)0xAA)
/* Key written to CTRL.FLSH_UNLOCK to enable flash write/erase. */
#define MXC_V_FLC_FLSH_UNLOCK_KEY ((uint8_t)0x2)

/*
   Typedefed structure(s) for module registers (per instance or section) with direct 32-bit
   access to each register in module.

   The rsvXXX members pad the gaps between documented registers so that each
   field sits at its stated offset (names encode the starting offset).

      Offset          Register Description
   =============   ============================================================================
*/
typedef struct {
    __IO uint32_t faddr;            /*  0x0000          Flash Operation Address                                       */
    __IO uint32_t fckdiv;           /*  0x0004          Flash Clock Pulse Divisor                                     */
    __IO uint32_t ctrl;             /*  0x0008          Flash Control Register                                        */
    __I uint32_t rsv00C[6];         /*  0x000C-0x0020                                                                 */
    __IO uint32_t intr;             /*  0x0024          Flash Controller Interrupt Flags and Enable/Disable 0         */
    __I uint32_t rsv028[2];         /*  0x0028-0x002C                                                                 */
    __IO uint32_t fdata;            /*  0x0030          Flash Operation Data Register                                 */
    __I uint32_t rsv034[7];         /*  0x0034-0x004C                                                                 */
    __IO uint32_t perform;          /*  0x0050          Flash Performance Settings                                    */
    __I uint32_t rsv054[11];        /*  0x0054-0x007C                                                                 */
    __IO uint32_t status;           /*  0x0080          Security Status Flags                                         */
    __I uint32_t rsv084;            /*  0x0084                                                                        */
    __IO uint32_t security;         /*  0x0088          Flash Controller Security Settings                            */
    __I uint32_t rsv08C[4];         /*  0x008C-0x0098                                                                 */
    __IO uint32_t bypass;           /*  0x009C          Status Flags for DSB Operations                               */
    __I uint32_t rsv0A0[24];        /*  0x00A0-0x00FC                                                                 */
    __IO uint32_t user_option;      /*  0x0100          Used to set DSB Access code and Auto-Lock in info block       */
    __I uint32_t rsv104[15];        /*  0x0104-0x013C                                                                 */
    __IO uint32_t ctrl2;            /*  0x0140          Flash Control Register 2                                      */
    __IO uint32_t intfl1;           /*  0x0144          Interrupt Flags Register 1                                    */
    __IO uint32_t inten1;           /*  0x0148          Interrupt Enable/Disable Register 1                           */
    __I uint32_t rsv14C[9];         /*  0x014C-0x016C                                                                 */
    __IO uint32_t bl_ctrl;          /*  0x0170          Bootloader Control Register                                   */
    __IO uint32_t twk;              /*  0x0174          FLC TWK Cycle Count                                           */
    __I uint32_t rsv178;            /*  0x0178                                                                        */
    __IO uint32_t slm;              /*  0x017C          Sleep Mode Register                                           */
} mxc_flc_regs_t;

/*
   Register offsets for module FLC (byte offsets from the FLC base address;
   they mirror the struct layout above).
*/
#define MXC_R_FLC_OFFS_FADDR       ((uint32_t)0x00000000UL)
#define MXC_R_FLC_OFFS_FCKDIV      ((uint32_t)0x00000004UL)
#define MXC_R_FLC_OFFS_CTRL        ((uint32_t)0x00000008UL)
#define MXC_R_FLC_OFFS_INTR        ((uint32_t)0x00000024UL)
#define MXC_R_FLC_OFFS_FDATA       ((uint32_t)0x00000030UL)
#define MXC_R_FLC_OFFS_PERFORM     ((uint32_t)0x00000050UL)
#define MXC_R_FLC_OFFS_STATUS      ((uint32_t)0x00000080UL)
#define MXC_R_FLC_OFFS_SECURITY    ((uint32_t)0x00000088UL)
#define MXC_R_FLC_OFFS_BYPASS      ((uint32_t)0x0000009CUL)
#define MXC_R_FLC_OFFS_USER_OPTION ((uint32_t)0x00000100UL)
#define MXC_R_FLC_OFFS_CTRL2       ((uint32_t)0x00000140UL)
#define MXC_R_FLC_OFFS_INTFL1      ((uint32_t)0x00000144UL)
#define MXC_R_FLC_OFFS_INTEN1      ((uint32_t)0x00000148UL)
#define MXC_R_FLC_OFFS_BL_CTRL     ((uint32_t)0x00000170UL)
#define MXC_R_FLC_OFFS_TWK         ((uint32_t)0x00000174UL)
#define MXC_R_FLC_OFFS_SLM         ((uint32_t)0x0000017CUL)

/*
   Field positions and masks for module FLC.
   _POS macros give the bit position of a field; the matching unsuffixed
   macro is the field mask already shifted to that position.
*/

/* FADDR: flash operation address (22-bit). */
#define MXC_F_FLC_FADDR_FADDR_POS 0
#define MXC_F_FLC_FADDR_FADDR ((uint32_t)(0x003FFFFFUL << MXC_F_FLC_FADDR_FADDR_POS))

/* FCKDIV: flash clock pulse divisor (7-bit). */
#define MXC_F_FLC_FCKDIV_FCKDIV_POS 0
#define MXC_F_FLC_FCKDIV_FCKDIV ((uint32_t)(0x0000007FUL << MXC_F_FLC_FCKDIV_FCKDIV_POS))

/* CTRL: flash operation control. */
#define MXC_F_FLC_CTRL_WRITE_POS 0
#define MXC_F_FLC_CTRL_WRITE ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL_WRITE_POS))
#define MXC_F_FLC_CTRL_MASS_ERASE_POS 1
#define MXC_F_FLC_CTRL_MASS_ERASE ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL_MASS_ERASE_POS))
#define MXC_F_FLC_CTRL_PAGE_ERASE_POS 2
#define MXC_F_FLC_CTRL_PAGE_ERASE ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL_PAGE_ERASE_POS))
/* ERASE_CODE takes MXC_V_FLC_ERASE_CODE_PAGE_ERASE / _MASS_ERASE (defined above). */
#define MXC_F_FLC_CTRL_ERASE_CODE_POS 8
#define MXC_F_FLC_CTRL_ERASE_CODE ((uint32_t)(0x000000FFUL << MXC_F_FLC_CTRL_ERASE_CODE_POS))
#define MXC_F_FLC_CTRL_INFO_BLOCK_UNLOCK_POS 16
#define MXC_F_FLC_CTRL_INFO_BLOCK_UNLOCK ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL_INFO_BLOCK_UNLOCK_POS))
#define MXC_F_FLC_CTRL_WRITE_ENABLE_POS 17
#define MXC_F_FLC_CTRL_WRITE_ENABLE ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL_WRITE_ENABLE_POS))
#define MXC_F_FLC_CTRL_PENDING_POS 24
#define MXC_F_FLC_CTRL_PENDING ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL_PENDING_POS))
#define MXC_F_FLC_CTRL_INFO_BLOCK_VALID_POS 25
#define MXC_F_FLC_CTRL_INFO_BLOCK_VALID ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL_INFO_BLOCK_VALID_POS))
#define MXC_F_FLC_CTRL_AUTO_INCRE_MODE_POS 27
#define MXC_F_FLC_CTRL_AUTO_INCRE_MODE ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL_AUTO_INCRE_MODE_POS))
/* FLSH_UNLOCK takes MXC_V_FLC_FLSH_UNLOCK_KEY (defined above). */
#define MXC_F_FLC_CTRL_FLSH_UNLOCK_POS 28
#define MXC_F_FLC_CTRL_FLSH_UNLOCK ((uint32_t)(0x0000000FUL << MXC_F_FLC_CTRL_FLSH_UNLOCK_POS))

/* INTR: interrupt flags (_IF) and enables (_IE). */
#define MXC_F_FLC_INTR_STARTED_IF_POS 0
#define MXC_F_FLC_INTR_STARTED_IF ((uint32_t)(0x00000001UL << MXC_F_FLC_INTR_STARTED_IF_POS))
#define MXC_F_FLC_INTR_FAILED_IF_POS 1
#define MXC_F_FLC_INTR_FAILED_IF ((uint32_t)(0x00000001UL << MXC_F_FLC_INTR_FAILED_IF_POS))
#define MXC_F_FLC_INTR_STARTED_IE_POS 8
#define MXC_F_FLC_INTR_STARTED_IE ((uint32_t)(0x00000001UL << MXC_F_FLC_INTR_STARTED_IE_POS))
#define MXC_F_FLC_INTR_FAILED_IE_POS 9
#define MXC_F_FLC_INTR_FAILED_IE ((uint32_t)(0x00000001UL << MXC_F_FLC_INTR_FAILED_IE_POS))

/* PERFORM: performance settings. */
#define MXC_F_FLC_PERFORM_DELAY_SE_EN_POS 0
#define MXC_F_FLC_PERFORM_DELAY_SE_EN ((uint32_t)(0x00000001UL << MXC_F_FLC_PERFORM_DELAY_SE_EN_POS))
#define MXC_F_FLC_PERFORM_FAST_READ_MODE_EN_POS 8
#define MXC_F_FLC_PERFORM_FAST_READ_MODE_EN ((uint32_t)(0x00000001UL << MXC_F_FLC_PERFORM_FAST_READ_MODE_EN_POS))

/* STATUS: security status flags. */
#define MXC_F_FLC_STATUS_JTAG_LOCK_WINDOW_POS 0
#define MXC_F_FLC_STATUS_JTAG_LOCK_WINDOW ((uint32_t)(0x00000001UL << MXC_F_FLC_STATUS_JTAG_LOCK_WINDOW_POS))
#define MXC_F_FLC_STATUS_JTAG_LOCK_STATIC_POS 1
#define MXC_F_FLC_STATUS_JTAG_LOCK_STATIC ((uint32_t)(0x00000001UL << MXC_F_FLC_STATUS_JTAG_LOCK_STATIC_POS))
#define MXC_F_FLC_STATUS_AUTO_LOCK_POS 3
#define MXC_F_FLC_STATUS_AUTO_LOCK ((uint32_t)(0x00000001UL << MXC_F_FLC_STATUS_AUTO_LOCK_POS))
#define MXC_F_FLC_STATUS_TRIM_UPDATE_DONE_POS 29
#define MXC_F_FLC_STATUS_TRIM_UPDATE_DONE ((uint32_t)(0x00000001UL << MXC_F_FLC_STATUS_TRIM_UPDATE_DONE_POS))
#define MXC_F_FLC_STATUS_INFO_BLOCK_VALID_POS 30
#define MXC_F_FLC_STATUS_INFO_BLOCK_VALID ((uint32_t)(0x00000001UL << MXC_F_FLC_STATUS_INFO_BLOCK_VALID_POS))

/* SECURITY: security settings. */
#define MXC_F_FLC_SECURITY_DEBUG_DISABLE_POS 0
#define MXC_F_FLC_SECURITY_DEBUG_DISABLE ((uint32_t)(0x000000FFUL << MXC_F_FLC_SECURITY_DEBUG_DISABLE_POS))
#define MXC_F_FLC_SECURITY_MASS_ERASE_LOCK_POS 8
#define MXC_F_FLC_SECURITY_MASS_ERASE_LOCK ((uint32_t)(0x0000000FUL << MXC_F_FLC_SECURITY_MASS_ERASE_LOCK_POS))
#define MXC_F_FLC_SECURITY_SECURITY_LOCK_POS 28
#define MXC_F_FLC_SECURITY_SECURITY_LOCK ((uint32_t)(0x0000000FUL << MXC_F_FLC_SECURITY_SECURITY_LOCK_POS))

/* BYPASS: DSB (destructive bypass) operation status flags. */
#define MXC_F_FLC_BYPASS_DESTRUCT_BYPASS_ERASE_POS 0
#define MXC_F_FLC_BYPASS_DESTRUCT_BYPASS_ERASE ((uint32_t)(0x00000001UL << MXC_F_FLC_BYPASS_DESTRUCT_BYPASS_ERASE_POS))
#define MXC_F_FLC_BYPASS_SUPERWIPE_ERASE_POS 1
#define MXC_F_FLC_BYPASS_SUPERWIPE_ERASE ((uint32_t)(0x00000001UL << MXC_F_FLC_BYPASS_SUPERWIPE_ERASE_POS))
#define MXC_F_FLC_BYPASS_DESTRUCT_BYPASS_COMPLETE_POS 2
#define MXC_F_FLC_BYPASS_DESTRUCT_BYPASS_COMPLETE ((uint32_t)(0x00000001UL << MXC_F_FLC_BYPASS_DESTRUCT_BYPASS_COMPLETE_POS))
#define MXC_F_FLC_BYPASS_SUPERWIPE_COMPLETE_POS 3
#define MXC_F_FLC_BYPASS_SUPERWIPE_COMPLETE ((uint32_t)(0x00000001UL << MXC_F_FLC_BYPASS_SUPERWIPE_COMPLETE_POS))

/* CTRL2: flash control register 2. */
#define MXC_F_FLC_CTRL2_FLASH_LVE_POS 0
#define MXC_F_FLC_CTRL2_FLASH_LVE ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL2_FLASH_LVE_POS))
#define MXC_F_FLC_CTRL2_FRC_FCLK1_ON_POS 1
#define MXC_F_FLC_CTRL2_FRC_FCLK1_ON ((uint32_t)(0x00000001UL << MXC_F_FLC_CTRL2_FRC_FCLK1_ON_POS))
#define MXC_F_FLC_CTRL2_BYPASS_AHB_FAIL_POS 8
#define MXC_F_FLC_CTRL2_BYPASS_AHB_FAIL ((uint32_t)(0x000000FFUL << MXC_F_FLC_CTRL2_BYPASS_AHB_FAIL_POS))

/* INTFL1 / INTEN1: interrupt flags register 1 and its matching enables
   (same bit layout in both registers). */
#define MXC_F_FLC_INTFL1_SRAM_ADDR_WRAPPED_POS 0
#define MXC_F_FLC_INTFL1_SRAM_ADDR_WRAPPED ((uint32_t)(0x00000001UL << MXC_F_FLC_INTFL1_SRAM_ADDR_WRAPPED_POS))
#define MXC_F_FLC_INTFL1_INVALID_FLASH_ADDR_POS 1
#define MXC_F_FLC_INTFL1_INVALID_FLASH_ADDR ((uint32_t)(0x00000001UL << MXC_F_FLC_INTFL1_INVALID_FLASH_ADDR_POS))
#define MXC_F_FLC_INTFL1_FLASH_READ_LOCKED_POS 2
#define MXC_F_FLC_INTFL1_FLASH_READ_LOCKED ((uint32_t)(0x00000001UL << MXC_F_FLC_INTFL1_FLASH_READ_LOCKED_POS))
#define MXC_F_FLC_INTFL1_TRIM_UPDATE_DONE_POS 3
#define MXC_F_FLC_INTFL1_TRIM_UPDATE_DONE ((uint32_t)(0x00000001UL << MXC_F_FLC_INTFL1_TRIM_UPDATE_DONE_POS))
#define MXC_F_FLC_INTEN1_SRAM_ADDR_WRAPPED_POS 0
#define MXC_F_FLC_INTEN1_SRAM_ADDR_WRAPPED ((uint32_t)(0x00000001UL << MXC_F_FLC_INTEN1_SRAM_ADDR_WRAPPED_POS))
#define MXC_F_FLC_INTEN1_INVALID_FLASH_ADDR_POS 1
#define MXC_F_FLC_INTEN1_INVALID_FLASH_ADDR ((uint32_t)(0x00000001UL << MXC_F_FLC_INTEN1_INVALID_FLASH_ADDR_POS))
#define MXC_F_FLC_INTEN1_FLASH_READ_LOCKED_POS 2
#define MXC_F_FLC_INTEN1_FLASH_READ_LOCKED ((uint32_t)(0x00000001UL << MXC_F_FLC_INTEN1_FLASH_READ_LOCKED_POS))
#define MXC_F_FLC_INTEN1_TRIM_UPDATE_DONE_POS 3
#define MXC_F_FLC_INTEN1_TRIM_UPDATE_DONE ((uint32_t)(0x00000001UL << MXC_F_FLC_INTEN1_TRIM_UPDATE_DONE_POS))

#ifdef __cplusplus
}
#endif

#endif /* _MXC_FLC_REGS_H_ */
// code snippet
import Foundation

// Get name and age from the user.
// Each read aborts with fatalError when stdin is closed (e.g. EOF),
// and the age read also aborts when the input is not a valid integer.
print("What is your name?")
guard let name = readLine() else {
    fatalError("Unable to read from the standard input.")
}

print("What is your age?")
guard let ageString = readLine(), let age = Int(ageString) else {
    fatalError("Unable to read from the standard input.")
}

// Print the greeting
print("Hi \(name), your age is \(age)")
<reponame>michael-gann/larder import React from "react"; import ReactDOM from "react-dom"; import "./index.css"; import App from "./App"; import { Provider } from "react-redux"; import configureStore from "./store/reducers/rootReducer.reducer"; import * as userActions from "./store/reducers/users.reducer"; import * as categoryActions from "./store/reducers/categories.reducer"; import * as measurementActions from "./store/reducers/measurements.reducer"; import * as ingredientActions from "./store/reducers/ingredients.reducer"; const store = configureStore(); store.dispatch(userActions.authenticateUser()); store.dispatch(categoryActions.categories()); store.dispatch(measurementActions.measurements()); store.dispatch(ingredientActions.ingredients()); ReactDOM.render( <React.StrictMode> <Provider store={store}> <App /> </Provider> </React.StrictMode>, document.getElementById("root") );
-- Temperatures ranked 6th through 10th highest (descending order).
WITH ranked AS (
    SELECT temperatures,
           ROW_NUMBER() OVER (ORDER BY temperatures DESC) AS row_num
    FROM temperatures
)
SELECT temperatures
FROM ranked
WHERE row_num BETWEEN 6 AND 10
import javafx.application.Application; import javafx.scene.Scene; import javafx.scene.layout.GridPane; import javafx.scene.web.WebView; import javafx.stage.Stage; import javafx.webengine_debugger.JavaFXWebEngineDebugger; import javafx.webengine_debugger.JavaFXWebEngineDebuggerFactory; public class TestMultipleViews extends Application{ public static void main(String[] args) { launch(); } @Override public void start(Stage primaryStage) throws Exception { GridPane root=new GridPane(); String urls[]=new String[]{ "https://html5test.com/", "https://www.youtube.com/watch?v=XH0CSzdHwg0", "https://github.com/riccardobl" }; for(int i=0;i<3;i++){ WebView wv=new WebView(); wv.getEngine().load(urls[i]); JavaFXWebEngineDebugger debugger = JavaFXWebEngineDebuggerFactory.create(wv.getEngine(),0); System.out.println("["+i+"] chrome-devtools://devtools/bundled/inspector.html?ws=127.0.0.1:"+debugger.getPort()); root.add(wv,i,0); } primaryStage.setScene(new Scene(root)); primaryStage.show(); } }
<gh_stars>0 import { Unknown } from './unknown'; export declare type Void = Unknown; /** * Void is an alias for Unknown * * @deprecated Please use Unknown instead */ export declare const Void: Unknown;
<filename>targets/TARGET_Maxim/TARGET_MAX32630/device/wdt2_regs.h /** * @file * @brief Registers, Bit Masks and Bit Positions for the WDT2 Peripheral Module. */ /* **************************************************************************** * Copyright (C) 2016 Maxim Integrated Products, Inc., All Rights Reserved. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL MAXIM INTEGRATED BE LIABLE FOR ANY CLAIM, DAMAGES * OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. * * Except as contained in this notice, the name of Maxim Integrated * Products, Inc. shall not be used except as stated in the Maxim Integrated * Products, Inc. Branding Policy. * * The mere transfer of this software does not imply any licenses * of trade secrets, proprietary technology, copyrights, patents, * trademarks, maskwork rights, or any other form of intellectual * property whatsoever. Maxim Integrated Products, Inc. retains all * ownership rights. 
* * $Date: 2016-10-10 19:54:34 -0500 (Mon, 10 Oct 2016) $ * $Revision: 24678 $ * *************************************************************************** */ /* **** Includes **** */ #include <stdint.h> /* Define to prevent redundant inclusion */ #ifndef _MXC_WDT2_REGS_H_ #define _MXC_WDT2_REGS_H_ #ifdef __cplusplus extern "C" { #endif ///@cond /* If types are not defined elsewhere (CMSIS) define them here */ #ifndef __IO #define __IO volatile #endif #ifndef __I #define __I volatile const #endif #ifndef __O #define __O volatile #endif #ifndef __RO #define __RO volatile const #endif ///@endcond /** * @ingroup wdt2 * @defgroup wdt2_registers WDT2 Registers * @brief Registers, Bit Masks and Bit Positions * @{ */ /* Typedefed structure(s) for module registers (per instance or section) with direct 32-bit access to each register in module. */ /** * Structure type to access the WDT2 Registers, see #MXC_WDT2 to get a pointer to the WDT2 register structure. * @note This is an always-on watchdog timer, it operates in all modes of operation. */ typedef struct { __IO uint32_t ctrl; /**< WDT2_CTRL Register - WDT Control Register */ __IO uint32_t clear; /**< WDT2_CLEAR Register - WDT Clear Register to prevent a WDT Reset (Feed Dog) */ __IO uint32_t flags; /**< WDT2_FLAGS Register - WDT Interrupt and Reset Flags */ __IO uint32_t enable; /**< WDT2_ENABLE Register - WDT Reset and Interrupt Enable/Disable Controls */ __RO uint32_t rsv010; /**< <em><b>RESERVED, DO NOT MODIFY</b></em>. */ __IO uint32_t lock_ctrl; /**< WDT2_LOCK_CTRL Register - Lock for Control Register */ } mxc_wdt2_regs_t; /**@} end of group wdt2_registers.*/ /* Register offsets for module WDT2. */ /** * @ingroup wdt2_registers * @defgroup WDT2_Register_Offsets Register Offsets * @brief Watchdog Timer 2 Register Offsets from the WDT2 Base Peripheral Address. * @details Use #MXC_WDT2 for the WDT2 Base Peripheral Address. 
* @{ */ #define MXC_R_WDT2_OFFS_CTRL ((uint32_t)0x00000000UL) /**< WDT2_CTRL Offset: <tt>0x0000</tt> */ #define MXC_R_WDT2_OFFS_CLEAR ((uint32_t)0x00000004UL) /**< WDT2_CLEAR Offset: <tt>0x0004</tt> */ #define MXC_R_WDT2_OFFS_FLAGS ((uint32_t)0x00000008UL) /**< WDT2_FLAGS Offset: <tt>0x0008</tt> */ #define MXC_R_WDT2_OFFS_ENABLE ((uint32_t)0x0000000CUL) /**< WDT2_ENABLE Offset: <tt>0x000C</tt> */ #define MXC_R_WDT2_OFFS_LOCK_CTRL ((uint32_t)0x00000014UL) /**< WDT2_LOCK_CTRL Offset: <tt>0x0014</tt> */ /**@} end of group WDT2_Register_Offsets */ /* Field positions and masks for module WDT2. */ /** * @ingroup wdt2_registers * @defgroup WDT2_CTRL_Register WDT2_CTRL Register * @brief Field Positions and Bit Masks for the WDT2_CTRL register * @{ */ #define MXC_F_WDT2_CTRL_INT_PERIOD_POS 0 /**< INT_PERIOD Field Position */ #define MXC_F_WDT2_CTRL_INT_PERIOD ((uint32_t)(0x0000000FUL << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< INT_PERIOD Field Mask - This field is used to set the interrupt period on the WDT. */ #define MXC_F_WDT2_CTRL_RST_PERIOD_POS 4 /**< RST_PERIOD Field Position */ #define MXC_F_WDT2_CTRL_RST_PERIOD ((uint32_t)(0x0000000FUL << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< RST_PERIOD Field Mask - This field sets the time after an * interrupt period has expired before the device resets. If the * INT_PERIOD Flag is cleared prior to the RST_PERIOD expiration, * the device will not reset. 
*/ #define MXC_F_WDT2_CTRL_EN_TIMER_POS 8 /**< EN_TIMER Field Position */ #define MXC_F_WDT2_CTRL_EN_TIMER ((uint32_t)(0x00000001UL << MXC_F_WDT2_CTRL_EN_TIMER_POS)) /**< EN_TIMER Field Mask */ #define MXC_F_WDT2_CTRL_EN_CLOCK_POS 9 /**< EN_CLOCK Field Position */ #define MXC_F_WDT2_CTRL_EN_CLOCK ((uint32_t)(0x00000001UL << MXC_F_WDT2_CTRL_EN_CLOCK_POS)) /**< EN_CLOCK Field Mask */ #define MXC_F_WDT2_CTRL_EN_TIMER_SLP_POS 10 /**< WAIT_PERIOD Field Position */ #define MXC_F_WDT2_CTRL_EN_TIMER_SLP ((uint32_t)(0x00000001UL << MXC_F_WDT2_CTRL_EN_TIMER_SLP_POS)) /**< WAIT_PERIOD Field Mask */ /**@} end of group WDT2_CTRL */ /** * @ingroup wdt2_registers * @defgroup WDT2_FLAGS_Register WDT2_FLAGS Register * @brief Field Positions and Bit Masks for the WDT2_FLAGS register. Watchdog Timer 2 Flags for Interrupts and Reset. * @{ */ #define MXC_F_WDT2_FLAGS_TIMEOUT_POS 0 /**< TIMEOUT Flag Position */ #define MXC_F_WDT2_FLAGS_TIMEOUT ((uint32_t)(0x00000001UL << MXC_F_WDT2_FLAGS_TIMEOUT_POS)) /**< TIMEOUT Flag Mask - if this flag is set it indicates the Watchdog Timer 2 timed out. */ #define MXC_F_WDT2_FLAGS_RESET_OUT_POS 2 /**< RESET_OUT Flag Position */ #define MXC_F_WDT2_FLAGS_RESET_OUT ((uint32_t)(0x00000001UL << MXC_F_WDT2_FLAGS_RESET_OUT_POS)) /**< RESET_FLAG Flag Mask - This flag indicates that the watchdog timer timed out and the reset period elapsed without the timer being cleared. This will result in a system restart. */ /**@} end of group WDT2_FLAGS */ /** * @ingroup wdt2_registers * @defgroup WDT2_ENABLE_Register WDT2_ENABLE Register * @brief Field Positions and Bit Masks for the WDT2_ENABLE register. 
* @{ */ #define MXC_F_WDT2_ENABLE_TIMEOUT_POS 0 /**< ENABLE_TIMEOUT Field Position */ #define MXC_F_WDT2_ENABLE_TIMEOUT ((uint32_t)(0x00000001UL << MXC_F_WDT2_ENABLE_TIMEOUT_POS)) /**< ENABLE_TIMEOUT Field Mask */ #define MXC_F_WDT2_ENABLE_RESET_OUT_POS 2 /**< ENABLE_RESET_OUT Field Position */ #define MXC_F_WDT2_ENABLE_RESET_OUT ((uint32_t)(0x00000001UL << MXC_F_WDT2_ENABLE_RESET_OUT_POS)) /**< ENABLE_RESET_OUT Field Mask */ /**@} end of group WDT2_ENABLE */ /** * @ingroup wdt2_registers * @defgroup WDT2_LOCK_CTRL_Register WDT2_LOCK_CTRL Register * @brief The WDT2_LOCK_CTRL register controls read/write access to the \ref WDT2_CTRL_Register. * @{ */ #define MXC_F_WDT2_LOCK_CTRL_WDLOCK_POS 0 /**< WDLOCK Field's position in the WDT2_LOCK_CTRL register. */ #define MXC_F_WDT2_LOCK_CTRL_WDLOCK ((uint32_t)(0x000000FFUL << MXC_F_WDT2_LOCK_CTRL_WDLOCK_POS)) /**< WDLOCK Field mask for the WDT2_LOCK_CTRL register. Reading a value of */ /**@} end of group WDT2_ENABLE */ /* Field values and shifted values for module WDT2. */ /** * @ingroup WDT2_CTRL_Register * @defgroup WDT2_CTRL_field_values WDT2_CTRL Register Field and Shifted Field Values * @brief Field values and Shifted Field values for the WDT2_CTRL register. * @details Shifted field values are field values shifted to the loacation of the field in the register. */ /** * @ingroup WDT2_CTRL_field_values * @defgroup WDT2_CTRL_INT_PERIOD_Value Watchdog Timer Interrupt Period * @brief Sets the duration of the watchdog interrupt period. * @details The INT_PERIOD field sets the duration of the watchdog interrupt * period, which is the time period from the WDT2 being * enabled/cleared until the WDT2 flag, #MXC_F_WDT2_FLAGS_TIMEOUT, is * set. * The values defined are in the number of watchdog clock cycles. 
* @{ */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_25_NANO_CLKS ((uint32_t)(0x00000000UL)) /**< Interupt Period of \f$ 2^{25} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_24_NANO_CLKS ((uint32_t)(0x00000001UL)) /**< Interupt Period of \f$ 2^{24} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_23_NANO_CLKS ((uint32_t)(0x00000002UL)) /**< Interupt Period of \f$ 2^{23} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_22_NANO_CLKS ((uint32_t)(0x00000003UL)) /**< Interupt Period of \f$ 2^{22} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_21_NANO_CLKS ((uint32_t)(0x00000004UL)) /**< Interupt Period of \f$ 2^{21} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_20_NANO_CLKS ((uint32_t)(0x00000005UL)) /**< Interupt Period of \f$ 2^{20} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_19_NANO_CLKS ((uint32_t)(0x00000006UL)) /**< Interupt Period of \f$ 2^{19} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_18_NANO_CLKS ((uint32_t)(0x00000007UL)) /**< Interupt Period of \f$ 2^{18} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_17_NANO_CLKS ((uint32_t)(0x00000008UL)) /**< Interupt Period of \f$ 2^{17} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_16_NANO_CLKS ((uint32_t)(0x00000009UL)) /**< Interupt Period of \f$ 2^{16} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_15_NANO_CLKS ((uint32_t)(0x0000000AUL)) /**< Interupt Period of \f$ 2^{15} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_14_NANO_CLKS ((uint32_t)(0x0000000BUL)) /**< Interupt Period of \f$ 2^{14} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_13_NANO_CLKS ((uint32_t)(0x0000000CUL)) /**< Interupt Period of \f$ 2^{13} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_12_NANO_CLKS ((uint32_t)(0x0000000DUL)) /**< Interupt Period of \f$ 2^{12} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_11_NANO_CLKS ((uint32_t)(0x0000000EUL)) /**< Interupt Period of 
\f$ 2^{11} \f$ WDT2 CLK Cycles */ #define MXC_V_WDT2_CTRL_INT_PERIOD_2_10_NANO_CLKS ((uint32_t)(0x0000000FUL)) /**< Interupt Period of \f$ 2^{10} \f$ WDT2 CLK Cycles */ /**@} end of group WDT2_CTRL_INT_PERIOD_Value */ /** * @ingroup WDT2_CTRL_field_values * @defgroup WDT2_CTRL_INT_PERIOD_Shifted Watchdog Timer Interrupt Period Shifted Values * @brief Shifted values for the \ref WDT2_CTRL_INT_PERIOD_Value * @details The shifted value is * shifted to align with the fields location in the WDT2_CTRL register. * @{ */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_25_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_25_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_25_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_24_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_24_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_24_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_23_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_23_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_23_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_22_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_22_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_22_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_21_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_21_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_21_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_20_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_20_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_20_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_19_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_19_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for 
#MXC_V_WDT2_CTRL_INT_PERIOD_2_19_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_18_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_18_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_18_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_17_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_17_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_17_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_16_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_16_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_16_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_15_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_15_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_15_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_14_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_14_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_14_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_13_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_13_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_13_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_12_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_12_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_12_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_11_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_11_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_11_NANO_CLKS */ #define MXC_S_WDT2_CTRL_INT_PERIOD_2_10_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_INT_PERIOD_2_10_NANO_CLKS << MXC_F_WDT2_CTRL_INT_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_INT_PERIOD_2_10_NANO_CLKS */ /**@} end of group 
WDT2_CTRL_INT_PERIOD_Shifted */ /** * @ingroup WDT2_CTRL_field_values * @defgroup WDT2_CTRL_RST_PERIOD_Value Watchdog Timer Reset Period * @brief Sets the duration of the watchdog reset period. * @details The RST_PERIOD field sets the duration of the watchdog reset * period, which is the time period from the WDT being * enabled/cleared until the WDT2 flag, #MXC_F_WDT2_CTRL_RST_PERIOD is * set. * The values defined are in the number of watchdog clock cycles. * @{ */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_25_NANO_CLKS ((uint32_t)(0x00000000UL)) /**< Reset Period of \f$ 2^{25} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_24_NANO_CLKS ((uint32_t)(0x00000001UL)) /**< Reset Period of \f$ 2^{24} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_23_NANO_CLKS ((uint32_t)(0x00000002UL)) /**< Reset Period of \f$ 2^{23} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_22_NANO_CLKS ((uint32_t)(0x00000003UL)) /**< Reset Period of \f$ 2^{22} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_21_NANO_CLKS ((uint32_t)(0x00000004UL)) /**< Reset Period of \f$ 2^{21} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_20_NANO_CLKS ((uint32_t)(0x00000005UL)) /**< Reset Period of \f$ 2^{20} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_19_NANO_CLKS ((uint32_t)(0x00000006UL)) /**< Reset Period of \f$ 2^{19} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_18_NANO_CLKS ((uint32_t)(0x00000007UL)) /**< Reset Period of \f$ 2^{18} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_17_NANO_CLKS ((uint32_t)(0x00000008UL)) /**< Reset Period of \f$ 2^{17} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_16_NANO_CLKS ((uint32_t)(0x00000009UL)) /**< Reset Period of \f$ 2^{16} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_15_NANO_CLKS ((uint32_t)(0x0000000AUL)) /**< Reset Period of \f$ 2^{15} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_14_NANO_CLKS ((uint32_t)(0x0000000BUL)) 
/**< Reset Period of \f$ 2^{14} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_13_NANO_CLKS ((uint32_t)(0x0000000CUL)) /**< Reset Period of \f$ 2^{13} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_12_NANO_CLKS ((uint32_t)(0x0000000DUL)) /**< Reset Period of \f$ 2^{12} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_11_NANO_CLKS ((uint32_t)(0x0000000EUL)) /**< Reset Period of \f$ 2^{11} \f$ WDT2 CLK CYCLES */ #define MXC_V_WDT2_CTRL_RST_PERIOD_2_10_NANO_CLKS ((uint32_t)(0x0000000FUL)) /**< Reset Period of \f$ 2^{10} \f$ WDT2 CLK CYCLES */ /**@} end of group WDT2_CTRL_RST_PERIOD_Value */ /** * @ingroup WDT2_CTRL_field_values * @defgroup WDT2_CTRL_RST_PERIOD_Shifted Watchdog Timer Reset Period Shifted Values * @brief Shifted values for the \ref WDT2_CTRL_RST_PERIOD_Value * @details These values are shifted to align with the field's location in the WDT2_CTRL register. * @{ */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_25_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_25_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_25_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_24_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_24_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_24_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_23_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_23_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_23_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_22_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_22_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_22_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_21_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_21_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_21_NANO_CLKS */ 
#define MXC_S_WDT2_CTRL_RST_PERIOD_2_20_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_20_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_20_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_19_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_19_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_19_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_18_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_18_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_18_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_17_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_17_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_17_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_16_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_16_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_16_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_15_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_15_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_15_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_14_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_14_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_14_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_13_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_13_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_13_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_12_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_12_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_12_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_11_NANO_CLKS 
((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_11_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_11_NANO_CLKS */ #define MXC_S_WDT2_CTRL_RST_PERIOD_2_10_NANO_CLKS ((uint32_t)(MXC_V_WDT2_CTRL_RST_PERIOD_2_10_NANO_CLKS << MXC_F_WDT2_CTRL_RST_PERIOD_POS)) /**< Shifted Field Value for #MXC_V_WDT2_CTRL_RST_PERIOD_2_10_NANO_CLKS */ /**@} end of group WDT2_CTRL_RST_PERIOD_Shifted */ /** * @ingroup WDT2_LOCK_CTRL_Register * @defgroup WDT2_LOCK_field_values Watchdog Timer WDT2_LOCK field values * @brief Lock/Unlock values for the watchdog timer \ref WDT2_CTRL_Register. * @{ */ #define MXC_V_WDT2_LOCK_KEY 0x24 /**< Writing this value to the WDT2_LOCK field of the \ref WDT2_LOCK_CTRL_Register \b locks the \ref WDT2_CTRL_Register making it read only. */ #define MXC_V_WDT2_UNLOCK_KEY 0x42 /**< Writing this value to the WDT2_LOCK field of the \ref WDT2_LOCK_CTRL_Register \b unlocks the \ref WDT2_CTRL_Register making it read/write. */ /**@} end of group WDT2_LOCK_field_values */ ///@cond /** * @internal * @ingroup WDT2_CLEAR_Register * @defgroup WDT2_CLEAR_field_values Watchdog Timer Clear Sequence Values * @brief Writing the sequence of #MXC_V_WDT2_RESET_KEY_0, #MXC_V_WDT2_RESET_KEY_1 to the \ref WDT2_CLEAR_Register will clear/reset the watchdog timer count. * @note The values #MXC_V_WDT2_RESET_KEY_0, #MXC_V_WDT2_RESET_KEY_1 must be written sequentially to the \ref WDT2_CLEAR_Register to clear the watchdog counter. * @{ */ #define MXC_V_WDT2_RESET_KEY_0 0xA5 /**< First value to write to the \ref WDT2_CLEAR_Register to perform a WDT2 clear. */ #define MXC_V_WDT2_RESET_KEY_1 0x5A /**< Second value to write to the \ref WDT2_CLEAR_Register to perform a WDT2 clear. */ /** * @} end of group WDT2_CLEAR_field_values * @endinternal */ ///@endcond /**@} wdt2_registers*/ #ifdef __cplusplus } #endif #endif /* _MXC_WDT2_REGS_H_ */
#!/bin/bash
# In this file the authentication tokens were censored from the requests.

# Fetch the full achievements (grades) list as JSON.
curl 'https://campus.tum.de/tumonline/ee/rest/slc.xm.ac/achievements?$orderBy=acDate=descnf' \
  -H 'Connection: keep-alive' \
  -H 'Accept: application/json' \
  -H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36' \
  -H 'Accept-Language: de' \
  -H 'sec-ch-ua-mobile: ?0' \
  -H 'Authorization: Bearer <CENSORED>=' \
  -H 'Content-Type: application/json' \
  -H 'Sec-Fetch-Site: same-origin' \
  -H 'Sec-Fetch-Mode: cors' \
  -H 'Sec-Fetch-Dest: empty' \
  -H 'Referer: https://campus.tum.de/tumonline/ee/ui/ca2/app/desktop/' \
  -H 'Cookie: <CENSORED!>' \
  --compressed > grades.json

# Extract a list of ids and write it to ids.txt.
python3 extract_ids.py

# For every course, fetch the statistics html page.
# while/read avoids word-splitting surprises of `for id in $(cat ids.txt)`,
# and the redirect target is quoted in case an id ever contains odd characters.
mkdir -p stats
while IFS= read -r id; do
  curl "https://campus.tum.de/tumonline/pl/ui/\$ctx;design=ca2;header=max;lang=de/WBEXAMSTATISTICS.wbStart?pOrgNr=&pStpSpNr=$id" \
    -H 'Connection: keep-alive' \
    -H 'sec-ch-ua: "Chromium";v="92", " Not A;Brand";v="99", "Google Chrome";v="92"' \
    -H 'sec-ch-ua-mobile: ?0' \
    -H 'Upgrade-Insecure-Requests: 1' \
    -H 'User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36' \
    -H 'Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9' \
    -H 'Sec-Fetch-Site: same-origin' \
    -H 'Sec-Fetch-Mode: navigate' \
    -H 'Sec-Fetch-Dest: iframe' \
    -H 'Referer: https://campus.tum.de/tumonline/ee/ui/ca2/app/desktop/' \
    -H 'Accept-Language: en-US,en;q=0.9,de-DE;q=0.8,de;q=0.7,he-IL;q=0.6,he;q=0.5,no;q=0.4' \
    -H 'Cookie: <CENSORED!>' \
    --compressed > "stats/$id.html"
done < ids.txt
/*******************************************************************************
 * Copyright (c) 2015 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package net.wasdev.gameon.concierge;

import net.wasdev.gameon.room.common.Room;

/**
 * A directed link between two rooms: the connection leaves {@code startRoom}
 * through the entrance named {@code startingEntrance} and arrives at
 * {@code endRoom}.
 */
public class Connection {

	private Room startRoom;
	private Room endRoom;
	private String startingEntrance;

	/**
	 * @param startingRoom     the room the connection leaves from
	 * @param endRoom          the room the connection arrives at
	 * @param startingEntrance the entrance of the start room this connection uses
	 */
	public Connection(Room startingRoom, Room endRoom, String startingEntrance) {
		this.startRoom = startingRoom;
		this.endRoom = endRoom;
		this.startingEntrance = startingEntrance;
	}

	/** @return the room this connection starts from */
	public Room getStartRoom() {
		return startRoom;
	}

	/** @return the room this connection leads to */
	public Room getEndRoom() {
		return endRoom;
	}

	/** @return the entrance of the start room used by this connection */
	public String getStartingEntrance() {
		return startingEntrance;
	}
}
#ifndef NLIB_CONDITION_H
#define NLIB_CONDITION_H

#include "Mutex.h"

// Wrapper around a pthread condition variable tied to one MutexLock.
// The constructor/destructor manage the pthread_cond_t lifetime; callers are
// expected to hold the associated mutex when calling wait()/waitForSeconds().
class Condition
{
public:
    // Binds this condition to the caller's mutex and initializes the
    // underlying pthread_cond_t with default attributes.
    explicit Condition( MutexLock& mutex)
        : m_nMutex(mutex)
    {
        pthread_cond_init(&m_nCond, NULL);
    }

    // Destroys the underlying pthread_cond_t.
    ~Condition()
    {
        pthread_cond_destroy(&m_nCond);
    }

    // Blocks until the condition is signaled/broadcast.
    void wait()
    {
        // (translated from the original Chinese comments:)
        // While the guard object exists, the mutex's recorded owner-thread id
        // is cleared (0); when the guard is destroyed, the owner id is set to
        // the thread running the guard's destructor.
        // NOTE(review): this describes MutexLock::UnassignGuard's contract,
        // which is defined elsewhere — confirm in Mutex.h.
        MutexLock::UnassignGuard ug(m_nMutex);
        pthread_cond_wait( &m_nCond, m_nMutex.getPthreadMutex());
    }

    // Waits with a timeout; defined out of line.
    // NOTE(review): return-value semantics (timed out vs. signaled) are not
    // visible from this header — verify at the definition site.
    bool waitForSeconds( double seconds);

    // Wakes one waiter.
    void notify()
    {
        pthread_cond_signal(&m_nCond);
    }

    // Wakes all waiters.
    void notifyAll()
    {
        pthread_cond_broadcast(&m_nCond);
    }

private:
    MutexLock& m_nMutex;    // associated mutex (not owned)
    pthread_cond_t m_nCond; // underlying pthread condition variable
};

#endif
# yoda_transform.py
from typing import List


def yodlify(text: str) -> str:
    """Return the Yoda-speak version of ``text``.

    The transformation reverses the order of the whitespace-separated
    words; the words themselves are left untouched.

    Args:
        text: The input text to be transformed.

    Returns:
        The Yoda-speak version of the input text.
    """
    return " ".join(reversed(text.split()))


def yodl(text_list: List[str]) -> List[str]:
    """Apply :func:`yodlify` to every string in ``text_list``.

    Args:
        text_list: The list of input strings to be transformed.

    Returns:
        A new list with each entry transformed to Yoda-speak.
    """
    return list(map(yodlify, text_list))


__author__ = "<Your Name>"
__email__ = "<Your Email>"
__license__ = "MIT"
__all__ = ["yodl", "yodlify"]
# set sudo to work whether logged in as root user or non-root user
if [[ ${EUID} == 0 ]]; then export SUDO=""; else export SUDO="sudo"; fi

# Install `expect`, used below to drive gpg's interactive trust prompt.
# Output (and any "already installed" noise) is suppressed.
${SUDO} apt-get -y install expect > /dev/null 2>&1

# Key fingerprint produced by an earlier step and stored alongside this script.
FINGERPRINT=$(cat FINGERPRINT.txt)

# Drive `gpg --edit-key <fingerprint>` non-interactively:
# answer the "trust" menu with "5" (ultimate trust) and confirm with "y",
# then wait for gpg to exit. All gpg output is suppressed.
expect -c "spawn gpg --edit-key \
${FINGERPRINT} \
trust quit; send \"5\ry\r\"; expect eof" \
> /dev/null 2>&1

echo "Success! PGP keypair marked as trusted"
<gh_stars>0 import _ from 'lodash'; import React from 'react'; import PropTypes from 'prop-types'; import { FormattedMessage } from 'react-intl'; import SafeHTMLMessage from '@folio/react-intl-safe-html'; import compose from 'compose-function'; import { MultiSelectionFilter, SearchAndSort, withTags } from '@folio/stripes/smart-components'; import { Tags as ERMTags } from '@folio/stripes-erm-components'; import { Accordion, FilterAccordionHeader } from '@folio/stripes/components'; import getSASParams from '@folio/stripes-erm-components/lib/getSASParams'; import { CalloutContext, stripesConnect } from '@folio/stripes/core'; import ViewDirectoryEntry from '../components/ViewDirectoryEntry'; import EditDirectoryEntry from '../components/EditDirectoryEntry'; import packageInfo from '../../package'; import { parseFilters, deparseFilters } from '../util/parseFilters'; const INITIAL_RESULT_COUNT = 100; const searchableIndexes = [ { label: 'Search all fields', value: 'name,tags.value,symbols.symbol' }, { label: 'Name', value: 'name' }, { label: 'Tags', value: 'tags.value' }, { label: 'Symbols', value: 'symbols.symbol' }, ]; const appDetails = { directory: { title: 'Directory', visibleColumns: [ 'fullyQualifiedName', 'type', 'tagSummary', 'symbolSummary' ], }, }; class DirectoryEntries extends React.Component { static manifest = Object.freeze({ custprops: { type: 'okapi', path: 'directory/custprops', params: { perPage: '100' }, shouldRefresh: () => false, }, dirents: { type: 'okapi', path: 'directory/entry', params: getSASParams({ searchKey: 'name', filterKeys: { 'tags': 'tags.value', 'type': 'type.value' }, columnMap: { 'fullyQualifiedName': 'name', 'tagSummary': 'tags.value', 'symbolSummary': 'symbols.symbol', }, }), records: 'results', recordsRequired: '%{resultCount}', perRequest: 100, limitParam: 'perPage', resultCount: { initialValue: INITIAL_RESULT_COUNT }, throwErrors: false }, namingAuthorities: { type: 'okapi', path: 'directory/namingAuthority', params: { perPage: 
'100' }, }, refdata: { type: 'okapi', path: 'directory/refdata', params: { perPage: '100' }, }, selectedRecord: { type: 'okapi', path: 'directory/entry/${selectedRecordId}', // eslint-disable-line no-template-curly-in-string fetch: false, // XXX do not do this: see comments at https://openlibraryenvironment.atlassian.net/browse/PR-841 // PUT: { // headers: { // 'Accept': 'application/json', // 'Content-Type': 'application/json', // }, // }, }, services: { type: 'okapi', path: 'directory/service', params: { filters: 'status.value=managed', perPage: '100', sort: 'id' }, throwErrors: false, resourceShouldRefresh: true, }, directoryTags: { type: 'okapi', path: 'directory/tags', params: { perPage: '100' }, }, resultCount: { initialValue: INITIAL_RESULT_COUNT }, // If this (query) isn't here, then we get this.props.parentMutator.query is undefined in the UI query: {}, selectedRecordId: { initialValue: '' }, }); static contextType = CalloutContext; static propTypes = { resources: PropTypes.shape({ query: PropTypes.shape({ qindex: PropTypes.string, }), directoryTags: PropTypes.shape({ records: PropTypes.array, }), custprops: PropTypes.object, symbols: PropTypes.object, namingAuthorities: PropTypes.object, refdata: PropTypes.shape({ records: PropTypes.arrayOf(PropTypes.shape({ values: PropTypes.array, })), }), }), mutator: PropTypes.object, stripes: PropTypes.shape({ logger: PropTypes.shape({ log: PropTypes.func, }), }), } constructor(props) { super(props); this.onClose = this.onClose.bind(this); } onClose() { this.toggleModal(false); } onChangeIndex = (e) => { const qindex = e.target.value; this.props.stripes.logger.log('action', `changed query-index to '${qindex}'`); this.props.mutator.query.update({ qindex }); } handleCreate = (record) => { const { mutator } = this.props; mutator.dirents.POST(record) .then((newRecord) => { this.context.sendCallout({ message: <SafeHTMLMessage id="ui-directory.create.callout" values={{ name: newRecord.name }} /> }); mutator.query.update({ 
_path: `/directory/entries/view/${newRecord.id}`, layer: '', }); }) .catch(response => { response.json() .then(error => this.context.sendCallout({ type: 'error', message: <SafeHTMLMessage id="ui-directory.create.callout.error" values={{ err: error.message }} /> })) .catch(() => this.context.sendCallout({ type: 'error', message: <SafeHTMLMessage id="ui-directory.create.callout.error" values={{ err: '' }} /> })); }); }; handleUpdate = (record) => { this.props.mutator.selectedRecordId.replace(record.id); return this.props.mutator.selectedRecord.PUT(record); } renderFiltersFromData = (options) => { const { resources, mutator } = this.props; const byName = parseFilters(_.get(resources.query, 'filters')); const values = { tags: byName.tags || [], type: byName.type || [], }; const setFilterState = (group) => { if (group.values === null) { delete byName[group.name]; } else { byName[group.name] = group.values; } mutator.query.update({ filters: deparseFilters(byName) }); }; const clearGroup = (name) => setFilterState({ name, values: [] }); const renderGenericFilterSelection = (filterName) => { return ( <Accordion label={<FormattedMessage id={`ui-directory.filter.${filterName}`} />} id={filterName} name={filterName} separator={false} header={FilterAccordionHeader} displayClearButton={values[filterName].length > 0} onClearFilter={() => clearGroup(filterName)} > <MultiSelectionFilter name={filterName} dataOptions={options[filterName]} selectedValues={values[filterName]} onChange={setFilterState} /> </Accordion> ); }; return ( <> {renderGenericFilterSelection('type')} {renderGenericFilterSelection('tags')} </> ); } renderFilters = () => { const { resources } = this.props; const tags = ((resources.directoryTags || {}).records || []).map(obj => ({ value: obj.value, label: obj.normValue })); const type = resources.refdata?.records?.filter(obj => obj.desc === 'DirectoryEntry.Type')[0]?.values || []; return this.renderFiltersFromData({ tags, type }); }; render() { const { mutator, 
resources } = this.props; const helperApps = { tags: ERMTags }; const path = '/directory/entries'; packageInfo.stripes.route = path; packageInfo.stripes.home = path; const { visibleColumns } = appDetails.directory; return ( <> <SearchAndSort key="dirents" objectName="dirents" packageInfo={packageInfo} searchableIndexes={searchableIndexes} selectedIndex={_.get(this.props.resources.query, 'qindex')} onChangeIndex={this.onChangeIndex} initialResultCount={INITIAL_RESULT_COUNT} resultCountIncrement={INITIAL_RESULT_COUNT} getHelperComponent={(name) => helperApps[name]} getHelperResourcePath={(helper, id) => `directory/entry/${id}`} viewRecordComponent={ViewDirectoryEntry} editRecordComponent={EditDirectoryEntry} viewRecordPerms="module.directory.enabled" newRecordPerms="ui-directory.create" onCreate={this.handleCreate} detailProps={{ onCreate: this.handleCreate, onUpdate: this.handleUpdate }} parentResources={{ ...resources, records: resources.dirents, services: resources.services, custprops: _.get(resources, 'custprops.records', []), }} parentMutator={{ query: mutator.query, resultCount: mutator.resultCount, }} showSingleResult visibleColumns={visibleColumns} columnMapping={{ fullyQualifiedName: <FormattedMessage id="ui-directory.entries.name" />, type: <FormattedMessage id="ui-directory.entries.type" />, tagSummary: <FormattedMessage id="ui-directory.entries.tagSummary" />, symbolSummary: <FormattedMessage id="ui-directory.entries.symbolSummary" />, }} columnWidths={{ fullyQualifiedName: '40%', type: '20%', tagSummary: '20%', symbolSummary: '20%', }} resultsFormatter={{ type: a => a.type?.label || '', tagSummary: a => a.tagSummary || '', symbolSummary: a => a.symbolSummary || '', }} renderFilters={this.renderFilters} /> </> ); } } export default compose( stripesConnect, withTags, )(DirectoryEntries);
"""Leetcode 105. Construct Binary Tree from Preorder and Inorder Traversal Medium URL: https://leetcode.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/ Given preorder and inorder traversal of a tree, construct the binary tree. Note: You may assume that duplicates do not exist in the tree. For example, given preorder = [3,9,20,15,7] inorder = [9,3,15,20,7] Return the following binary tree: 3 / \ 9 20 / \ 15 7 """ # Definition for a binary tree node. class TreeNode(object): def __init__(self, val): self.val = val self.left = None self.right = None class SolutionPreorderFirstInorderRootRecur(object): def _build(self, pre_start, pre_end, in_start, in_end, in_val_pos, preorder, inorder): if pre_start > pre_end or in_start > in_end: return None # Preorder's first is root. root = TreeNode(preorder[pre_start]) # In inorder, get root's pos for separating left and right. in_root_pos = in_val_pos[root.val] # Compute the number of left from root. in_n_left = in_root_pos - in_start # Build binary trees from root's left and right. root.left = self._build(pre_start + 1, pre_start + in_n_left, in_start, in_root_pos - 1, in_val_pos, preorder, inorder) root.right = self._build(pre_start + in_n_left + 1, pre_end, in_root_pos + 1, in_end, in_val_pos, preorder, inorder) return root def buildTree(self, preorder, inorder): """ :type preorder: List[int] :type inorder: List[int] :rtype: TreeNode - Preorder's first is root - In inorder, get root's position. - Then we can separate the the remaining data into left and right. preorder = [3,9,20,15,7] ^ l r r r inorder = [9,3,15,20,7] l ^ r r r Time complexity: O(n), where n is the number of nodes. Space complexity: O(n). """ # Create dict for inorder node->pos. in_val_pos = {v: i for (i, v) in enumerate(inorder)} # Build binary tree by recursion. 
pre_start, pre_end = 0, len(preorder) - 1 in_start, in_end = 0, len(inorder) - 1 return self._build(pre_start, pre_end, in_start, in_end, in_val_pos, preorder, inorder) def main(): # Output: [3, 9, 20, 15, 7] # 3 # / \ # 9 20 # / \ # 15 7 preorder = [3, 9, 20, 15, 7] inorder = [9, 3, 15, 20, 7] root = SolutionPreorderFirstInorderRootRecur().buildTree(preorder, inorder) print (root.val, root.left.val, root.right.val, root.right.left.val, root.right.right.val) if __name__ == '__main__': main()
import mongo from 'mongodb'; const mongodb = mongo.MongoClient; const url = 'mongodb://localhost:27017/tempDatabase'; var shoppingDAC = function() { const getShoppingList = function (callback) { mongodb.connect(url, function (err, db) { const collection = db.collection('shoppingList'); collection.find({}).sort({ingredient:1}).toArray(function (err, results) { callback(results); } ); }); } const deleteShoppingList = function (callback) { mongodb.connect(url, function (err, db) { const collection = db.collection('shoppingList'); collection.remove({}, function(err, results) { callback(results); } ); }); } const insertItemInShoppingList = (item, callback) => { mongodb.connect(url, function (err, db) { const collection = db.collection('shoppingList'); collection.insert(item, (err, results) => { callback(results); }) }); } const insertItemsInShoppingList = (orderList, callback) => { console.log(`orderList`); console.log(orderList); mongodb.connect(url, function (err, db) { const collection = db.collection('shoppingList'); const nameList = []; for (let item of orderList) { nameList.push(item.ingredient); } collection.find({ingredient: {$exists:true, $in:nameList}}).toArray((err, results) => { // for (let name of nameList) { for (let item of results) { let index = nameList.indexOf(item.ingredient); if (index > -1) { nameList.splice(index, 1); } } // } let insertList = []; for (let item of orderList) { if (nameList.indexOf(item.ingredient) > -1) { insertList.push(item); } } if (insertList.length > 0) { console.log(`inserting ${insertList.length} ingredients`); console.log(insertList); collection.insertMany(insertList, () => { callback(insertList); }); } else { callback(insertList); } }) }); } const findItemInShoppingList = (item, callback) => { mongodb.connect(url, function (err, db) { const collection = db.collection('shoppingList'); collection.findOne(item, (err, results)=> { callback(results); }); }); } const deleteItemFromShoppingList = (item, callback) => { 
mongodb.connect(url,function(err, db) { const collection = db.collection('shoppingList'); collection.remove(item, (err) => { if (err) { callback(err); return; } getShoppingList((results) => { callback(results); }) }); }); } return { getShoppingList : getShoppingList, deleteShoppingList : deleteShoppingList, insertItemInShoppingList : insertItemInShoppingList, insertItemsInShoppingList : insertItemsInShoppingList, findItemInShoppingList : findItemInShoppingList, deleteItemFromShoppingList : deleteItemFromShoppingList }; } module.exports = shoppingDAC;
<reponame>Misssusu/simple-style-react-ui import React from "react"; import Demo from "../demo"; const code = require('!!raw-loader!./install.code.txt'); const InstallDemo: React.FunctionComponent = () => { return( <div> <h2>可通过 npm 或者 yarn 来安装</h2> <Demo code={code.default}></Demo> </div> ) } export default InstallDemo
/**
 * Returns `str` with every character that appears in `chars` removed.
 *
 * @param {string} str - Input string.
 * @param {string[]} chars - Characters to strip from the input.
 * @returns {string} The input with all banned characters removed.
 */
function removeChars(str, chars) {
  // Set membership is O(1), so the whole pass is O(str.length + chars.length)
  // instead of rescanning `chars` for every input character.
  const banned = new Set(chars);
  let result = '';
  for (let i = 0; i < str.length; i++) {
    if (!banned.has(str[i])) result += str[i];
  }
  return result;
}

const result = removeChars('Hello', ['l', 'H']);
console.log(result); // "eo"
from celery import Celery
import os

# (translated from the original Chinese comments)
# Register the Django settings module in the process environment up front:
# if any task or helper used by this Celery app touches Django settings,
# this variable must already be set.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "meiduo_mall.settings.dev")

# 1. Create the Celery client; the first positional argument is the app's name.
celery_cli = Celery('meiduo_mall')

# 2. Load Celery configuration (e.g. where the task queue lives) from the
#    given module path.
celery_cli.config_from_object('celery_tasks.config')

# 3. Auto-register tasks: each list entry is the dotted import path of a
#    package containing task definitions.
celery_cli.autodiscover_tasks([
    'celery_tasks.sms','celery_tasks.send_email',
])
/*
 * Copyright (C) 2012-2014 <NAME>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package info.archinnov.achilles.internal.context.facade;

import java.lang.reflect.Method;
import java.util.List;
import java.util.Set;
import com.google.common.base.Optional;
import info.archinnov.achilles.internal.context.ConfigurationContext;
import info.archinnov.achilles.internal.metadata.holder.EntityMeta;
import info.archinnov.achilles.internal.metadata.holder.PropertyMeta;
import info.archinnov.achilles.type.ConsistencyLevel;
import info.archinnov.achilles.type.Options;

/**
 * Read-only facade over the state carried through a persistence operation:
 * the target entity and its metadata, the per-call {@link Options}
 * (TTL, timestamp, consistency, CAS conditions) and the global
 * {@link ConfigurationContext}.
 */
public interface PersistentStateHolder {

    /** Whether the target entity is a clustered counter entity. */
    public boolean isClusteredCounter();

    /** Metadata describing the target entity's mapping. */
    public EntityMeta getEntityMeta();

    /** Metadata for the entity's primary-key property. */
    public PropertyMeta getIdMeta();

    /** The entity instance this operation works on (see also {@link #setEntity}). */
    public Object getEntity();

    /** Replaces the entity instance this operation works on. */
    public void setEntity(Object entity);

    /** The entity's class, cast to the caller's expected type. */
    public <T> Class<T> getEntityClass();

    /** The entity's primary-key value. */
    public Object getPrimaryKey();

    /** The per-call options bundle backing the accessors below. */
    public Options getOptions();

    // Individual option accessors; each is absent when not set for this call.
    public Optional<Integer> getTtl();

    public Optional<Long> getTimestamp();

    public Optional<ConsistencyLevel> getConsistencyLevel();

    public Optional<com.datastax.driver.core.ConsistencyLevel> getSerialConsistencyLevel();

    /** CAS (lightweight transaction) conditions attached to this call. */
    public List<Options.CASCondition> getCasConditions();

    /** True when at least one CAS condition is attached. */
    public boolean hasCasConditions();

    /** Listener for CAS results, if one was registered. */
    public Optional getCASResultListener();

    /** Getter methods of the entity, excluding counter properties. */
    public Set<Method> getAllGettersExceptCounters();

    /** Metadata for all counter properties of the entity. */
    public List<PropertyMeta> getAllCountersMeta();

    /** Global (per-PersistenceManager) configuration. */
    public ConfigurationContext getConfigContext();
}
package com.twitter.finatra.tests.conversions

import com.twitter.finatra.conversions.boolean._
import com.twitter.inject.Test

/**
 * Exercises the `RichBoolean` enrichment from
 * `com.twitter.finatra.conversions.boolean`:
 *  - `#option` wraps the block's value in `Some` when the boolean is true,
 *    and yields `None` when it is false;
 *  - `#onTrue` / `#onFalse` run their side-effecting block only for the
 *    matching boolean value, and return the original boolean either way.
 */
class BooleanConversionsTest extends Test {

  "RichBoolean" should {
    "#option when true" in {
      true.option { 1 } should equal(Some(1))
    }
    "#option when false" in {
      false.option { 1 } should equal(None)
    }
    "trigger positive onTrue" in {
      var triggered = false
      true.onTrue {triggered = true} should equal(true)
      triggered should equal(true)
    }
    "trigger negative onTrue" in {
      var triggered = false
      false.onTrue {triggered = true} should equal(false)
      triggered should equal(false)
    }
    "trigger positive onFalse" in {
      var triggered = false
      false.onFalse {triggered = true} should equal(false)
      triggered should equal(true)
    }
    "trigger negative onFalse" in {
      var triggered = false
      true.onFalse {triggered = true} should equal(true)
      triggered should equal(false)
    }
  }
}
import AsyncValidator from "async-validator";

// Form-validation helpers built on async-validator, used by the evan-form
// component. Errors are surfaced both through the callback and (optionally)
// via a toast on the first failing rule.
const utils = {
  // Validates every prop that has a rule in `rules` against `model`.
  // callback(valid: boolean, errors: array|null); options.showMessage
  // controls whether the first error is also shown as a toast.
  validate: (model, rules, callback, options) => {
    const initOptions = {
      showMessage: true,
    };
    options = Object.assign({}, initOptions, options || {});
    // (translated) If there are no rules to validate, invoke the callback
    // immediately with success.
    if ((!rules || rules.length === 0) && callback) {
      callback(true, null);
      return true;
    }
    let errors = [];
    const props = Object.keys(rules);
    let count = 0;
    for (let i in props) {
      const prop = props[i];
      const value = utils.getValueByProp(model, prop);
      utils.validateItem(rules, prop, value, (err) => {
        if (err && err.length > 0) {
          errors = errors.concat(err);
        }
        // (translated) Validation may be asynchronous: only fire the final
        // callback once every prop's validation has completed.
        count++;
        if (count === props.length) {
          if (errors.length > 0) {
            if (options.showMessage) {
              utils.showToast(errors[0].message);
            }
            callback(false, errors);
          } else {
            callback(true, null);
          }
        }
      });
    }
  },
  // Same as `validate`, but only for the given prop(s). `props` may be a
  // single prop path or an array of them.
  validateField: (model, rules, props, callback, options) => {
    const initOptions = {
      showMessage: true,
    };
    options = Object.assign({}, initOptions, options || {});
    props = [].concat(props);
    if (props.length === 0) {
      return;
    }
    let errors = [];
    let count = 0;
    for (let i in props) {
      const prop = props[i];
      const value = utils.getValueByProp(model, prop);
      utils.validateItem(rules, prop, value, (err) => {
        if (err && err.length > 0) {
          errors = errors.concat(err);
        }
        // (translated) Async validation: only callback after all props finish.
        count++;
        if (count === props.length) {
          if (errors.length > 0) {
            if (options.showMessage) {
              utils.showToast(errors[0].message);
            }
            callback(false, errors);
          } else {
            callback(true, null);
          }
        }
      });
    }
  },
  // Runs async-validator for a single prop. `callback` receives the error
  // array (or undefined when the prop is valid / there are no rules).
  validateItem(rules, prop, value, callback) {
    if (!rules || JSON.stringify(rules) === "{}") {
      if (callback instanceof Function) {
        callback();
      }
      return true;
    }
    const propRules = [].concat(rules[prop] || []);
    // Rules serialized with a string `pattern` are revived into RegExp here.
    // NOTE(review): this mutates the caller's rule objects in place — confirm
    // that is intended.
    propRules.forEach((rule) => {
      if (rule.pattern) {
        rule.pattern = new RegExp(rule.pattern);
      }
    });
    const descriptor = {
      [prop]: propRules,
    };
    const validator = new AsyncValidator(descriptor);
    const model = {
      [prop]: value,
    };
    validator.validate(
      model,
      {
        firstFields: true,
      },
      (errors) => {
        callback(errors);
      }
    );
  },
  // Resolves a dotted/bracketed prop path (e.g. "a.b[0].c") against `obj`.
  // String leaf values are trimmed; returns null when the path is unreachable.
  getValueByProp: (obj, prop) => {
    let tempObj = obj;
    // Normalize "a[0].b" into "a.0.b" before splitting on dots.
    prop = prop.replace(/\[(\w+)\]/g, ".$1").replace(/^\./, "");
    let keyArr = prop.split(".");
    let i = 0;
    for (let len = keyArr.length; i < len - 1; ++i) {
      if (!tempObj) break;
      let key = keyArr[i];
      if (key in tempObj) {
        tempObj = tempObj[key];
      } else {
        break;
      }
    }
    return tempObj
      ? typeof tempObj[keyArr[i]] === "string"
        ? tempObj[keyArr[i]].trim()
        : tempObj[keyArr[i]]
      : null;
  },
  // Shows `message` as a plain (icon-less) toast via the uni-app API.
  showToast: (message) => {
    uni.showToast({
      title: message,
      icon: "none",
    });
  },
};

export default utils;
package frc.robot.gamestate.routine; import edu.wpi.first.wpilibj.Timer; /** * Derived class of AutonSegment that acts as a delay during an autonomous period. */ public class AutonTimer extends AutonSegment { // Timer util class used to determine time passed. private Timer timer; // The delay amount in seconds private double timerDelaySeconds; /** * Constructor for AutonTimer * * @param timerDelaySeconds Delay amount in seconds */ public AutonTimer(double timerDelaySeconds) { this.timerDelaySeconds = timerDelaySeconds; timer = new Timer(); } @Override public void autonomousInit() { System.out.println("AutonTimer.autonomousInit"); // Start the timer (counts up) timer.start(); } @Override public void autonomousPeriodic() { // If the timer has met or exceeded the amount of delay specified, // Reset the timer, stop the timer, and indicate to the Autonomous logic // that this segment is complete. if(timer.get() >= timerDelaySeconds) { timer.reset(); timer.stop(); complete = true; } } }
var db = require('../../db'); var users = {}; users.invalid = { id: 'thisisastring', username: 1919 }; module.exports = function(cb) { db.select().from('users').orderBy('id') .then(function(rows) { users.valid = rows; cb(null, { valid: rows, invalid: { id: 'thisisastring', username: 1919 } }); }) .catch(cb); };
// Print the squares of 0..9 (0, 1, 4, ..., 81), one per line.
// NOTE(review): relies on `cout`/`endl` already being in scope
// (e.g. `using namespace std;` earlier in the file) — confirm.
for (int i = 0; i < 10; ++i) {
    cout << i*i << endl;
}
/*--------------------------------------------------------- * Copyright 2021 The Go Authors. All rights reserved. * Licensed under the MIT License. See LICENSE in the project root for license information. *--------------------------------------------------------*/ import path = require('path'); import { DocumentSymbol, FileType, Uri, TextDocument, SymbolKind, Range, Position } from 'vscode'; import { packagePathToGoModPathMap } from '../../src/goModules'; import { MockTestWorkspace } from '../mocks/MockTest'; // eslint-disable-next-line @typescript-eslint/no-unused-vars export function getSymbols_Regex(doc: TextDocument, token: unknown): Thenable<DocumentSymbol[]> { const syms: DocumentSymbol[] = []; const range = new Range(new Position(0, 0), new Position(0, 0)); doc.getText().replace(/^func (Test|Benchmark|Example)([A-Z]\w+)(\(.*\))/gm, (m, type, name, details) => { syms.push(new DocumentSymbol(type + name, details, SymbolKind.Function, range, range)); return m; }); return Promise.resolve(syms); } export function populateModulePathCache(workspace: MockTestWorkspace) { function walk(dir: Uri, modpath?: string) { const dirs: Uri[] = []; for (const [name, type] of workspace.fs.dirs.get(dir.toString())) { const uri = dir.with({ path: path.join(dir.path, name) }); if (type === FileType.Directory) { dirs.push(uri); } else if (name === 'go.mod') { modpath = dir.path; } } packagePathToGoModPathMap[dir.path] = modpath || ''; for (const dir of dirs) { walk(dir, modpath); } } // prevent getModFolderPath from actually doing anything; for (const pkg in packagePathToGoModPathMap) { delete packagePathToGoModPathMap[pkg]; } walk(Uri.file('/')); }
#--
# Copyright (c) 2011 <NAME>, <NAME>, Geni Inc
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#++

# Maps numeric IP ranges (low..high) to country records imported from a
# CSV geolocation dump.
class Tr8n::IpLocation < ActiveRecord::Base
  set_table_name :tr8n_ip_locations

  # SQL fragment matching rows whose country is absent (stored as 'ZZZ').
  def self.no_country_clause
    %q{COALESCE(country, 'ZZZ') = 'ZZZ'}
  end

  # Look up the location record whose [low, high] range contains the given
  # IP (string or integer). Returns a frozen blank record when the IP is
  # unknown or unparseable, so callers always get an object.
  def self.find_by_ip(ip)
    ip = case ip
    when String
      Tr8n::IpAddress.new(ip).to_i
    else
      ip.to_i
    end
    first(:conditions => ['low <= ? AND ? <= high', ip, ip]) || new.freeze
  rescue ArgumentError
    puts "Invalid ip: #{ip}" unless Rails.env.test?
    new.freeze
  end

  # A record is blank when it was never persisted or carries the 'ZZZ'
  # no-country sentinel.
  # NOTE(review): this checks the `cntry` column while no_country_clause
  # checks `country` — confirm the intended column.
  def blank?
    new_record? || 'ZZZ' == cntry
  end

  # Replace the whole table with the contents of a CSV file (path or IO).
  # Lines starting with '#' and blank lines are skipped; quotes are
  # stripped before splitting on commas.
  # NOTE(review): uses Iconv (removed in Ruby 2.0) to transcode the
  # country column — confirm the runtime still provides it.
  def self.import_from_file(file, opts=nil)
    opts ||= {:verbose => false}
    puts "Deleting old records..." if opts[:verbose]
    delete_all
    puts "Done." if opts[:verbose]
    puts "Importing new records..." if opts[:verbose]
    file = File.open(file) if file.is_a?(String)
    file.each_line do |line|
      next if line =~ /^\s*\#|^\s*$/
      line.chomp!.tr!('"\'','')
      values = line.split(',')
      create!(
        :low => values[0],
        :high => values[1],
        :registry => values[2],
        :assigned => Time.at(values[3].to_i),
        :ctry => values[4],
        :cntry => values[5],
        :country => Iconv.conv('UTF-8', 'ISO_8859-1', values[6])
      )
      $stdout << '.' if opts[:verbose]
    end
    puts "Done." if opts[:verbose]
  end
end
# Run the PHPUnit example suites (basic, out-of-order, coverage) from the
# directory this script lives in.
# FIX: exit on the first failing command — previously a failing suite was
# silently masked and the script always exited 0.
set -e

BASE_DIR="$(pwd)/$(dirname "$0")"

# FIX: quote paths so the script survives directories containing spaces.
cd "$BASE_DIR"
composer install

## Basic test
cd "$BASE_DIR/basic_test"
../vendor/bin/phpunit *.php --verbose --log-junit all-tests.xml

## Out of order test
cd "$BASE_DIR/out_of_order_test"
../vendor/bin/phpunit *.php --verbose --log-junit all-tests.xml

## Coverage test
cd "$BASE_DIR/coverage_test"
../vendor/bin/phpunit --coverage-clover coverage.xml coverageTest.php --filter testOne
#!/bin/bash
# FIX: shebang changed from /bin/sh — this script uses bash-only features
# (`function` keyword, [[ ]], `read -n 1`, `read -s`) that break under POSIX sh.

##### VARS ##############
red=$(tput setaf 1)
green=$(tput setaf 2)
yellow=$(tput setaf 3)
blue=$(tput setaf 4)
normal=$(tput sgr0)

##### HELPERS ###########
# Block until the user presses a key.
function waitToContinue() {
    printf "\npress any key to continue..."
    read -n 1
}

# Substitute the variables listed in $1 (colon-separated '$NAME' list) into
# generated/values-${ENV}.yaml in place, via a .tmp file.
function replace() {
    export VARS="$1"
    cat generated/values-${ENV}.yaml | envsubst "$VARS" > generated/values-${ENV}.yaml.tmp
    [[ $? = 1 ]] && printFailureAndExit "Replacing environment variables"
    mv generated/values-${ENV}.yaml.tmp generated/values-${ENV}.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Overriding 'generated/values-${ENV}"
}

function printUsageAndExit() {
    printf "\nUsage: ./day-2-generator.sh [CONFIGFILE]\n"
    exit 1
}

# $1 = config file path, $2 = color code.
function printHeader() {
    printf "${2}################################################################################\n"
    printf "# Name: Day-2-Operations Generator\n"
    printf "# Description: TODO\n"
    printf "# Author: gattma,fhochleitner\n"
    printf "# Version: v1.0\n"
    printf "# Documentation: https://gepardec.atlassian.net/wiki/spaces/G/pages/2393276417/Day-2-Operations\n"
    printf "# Configuration: ${1}\n"
    printf "################################################################################${normal}\n\n"
}

# $1 = title (centered in an 80-column banner), $2 = color code.
function printActionHeader() {
    printf "\n${2}################################################################################\n"
    printf "%*s\n" $(((${#1}+80)/2)) "${2}${1}"
    printf "################################################################################${normal}\n"
}

function printSuccess() {
    printf '\033[79`%s\n' "${green}OK${normal}"
}

function printFailure() {
    printf '\033[75`%s\n' "${red}FAILED${normal}"
}

function printFailureAndExit() {
    printf "${1} ${red}FAILED${normal}"
    exit 1
}

##### SURVEY ############
# Create (or reuse) the sealed-secrets key pair for ${ENV}.
function generateCertificate() {
    printActionHeader "GENERATE SEALED SECRETS CERTIFICATE" $yellow
    if [[ -f "generated/${ENV}.key" ]] && [[ -f "generated/${ENV}.crt" ]]; then
        printf "Existing Certififactes found for Cluster: ${ENV}...\n"
        printf "Skipping generation and use existing files..."
        printSuccess
    else
        printf "Generating certificate and private key..."
        openssl req -x509 -nodes -newkey rsa:4096 -keyout "generated/${ENV}.key" -out "generated/${ENV}.crt" -subj "/CN=sealed-secret/O=sealed-secret" >> /dev/null
        [[ $? = 0 ]] && printSuccess || printFailureAndExit "Generating"
    fi
}

# Ansible-vault-encrypt the certificate and key so they can be pasted into
# the spoke inventory's vault.yaml. Prompts for the vault password.
function encryptSealedSecretCertificateForAnsibleVault() {
    printActionHeader "ENCRYPT SEALED SECRET CERTIFICATE FOR VAULT" $yellow
    printf "Enter Ansible-Vault-Password: "
    read -s password
    echo ${password} >> vault.password
    printf "\nEncrypting certificate for sealed secret..."
    ansible-vault encrypt_string --vault-password-file vault.password --name 'sealedSecretCertificate' -- "$(cat generated/${ENV}.crt | base64 -w0)" > generated/${ENV}-crt-vault.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Encrypting"
    printf "\nCopy the following part (red) into inventory-spoke-gepaplexx-${ENV}/group-vars/all/vault.yaml\n"
    printf "${red}$(cat generated/${ENV}-crt-vault.yaml)${normal}"
    waitToContinue
    printActionHeader "ENCRYPT SEALED SECRET KEY FOR VAULT" $yellow
    printf "Encrypting key for sealed secret..."
    ansible-vault encrypt_string --vault-password-file vault.password --name 'sealedSecretPrivateKey' -- "$(cat generated/${ENV}.key | base64 -w0)" > generated/${ENV}-key-vault.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Encrypting"
    printf "\nCopy the following part (red) into inventory-spoke-gepaplexx-${ENV}/group-vars/all/vault.yaml\n"
    printf "${red}$(cat generated/${ENV}-key-vault.yaml)${normal}"
    waitToContinue
    rm vault.password
}

# Strip the google identity-provider entries from the values file.
# NOTE(review): `sed -i .bak` is BSD/macOS syntax; GNU sed needs `-i.bak`.
function removeIdentityProvGoogle() {
    printf "Disable Identity Provider 'GOOGLE'..."
    sed -i .bak \
        '/name: "google.clientSecret"/,+1d;/name: "google.clientId"/,+1d;/name: "google.restrDomain"/,+1d' \
        generated/values-${ENV}.yaml
    rm generated/*.bak
    export GOOGLE_ENABLE=false
    replace '$GOOGLE_ENABLE'
}

# Strip the git identity-provider entries from the values file.
function removeIdentityProvGit() {
    printf "Disable Identity Provider 'GIT'..."
    sed -i .bak \
        '/name: "git.clientSecret"/,+1d;/name: "git.clientId"/,+1d;/name: "git.restrOrgs"/,+1d' \
        generated/values-${ENV}.yaml
    rm generated/*.bak
    export GIT_ENABLE=false
    replace '$GIT_ENABLE'
}

# Seal the google OAuth credentials with kubeseal and write the encrypted
# values into the values file.
function configureIdentityProvGoogle() {
    printf "generating sealed secret values for google oauth identity provider..."
    export GOOGLE_CLIENTSECRET=$(printf "$GOOGLE_CLIENTSECRET" | base64 -w0)
    export GOOGLE_CLIENTID=$(printf "$GOOGLE_CLIENTID" | base64 -w0)
    export GOOGLE_RESTRICTED_DOMAIN=$(printf "$GOOGLE_RESTRICTED_DOMAIN" | base64 -w0)
    cat templates/secret-ip-google.yaml.TEMPLATE \
        | envsubst '$GOOGLE_CLIENTSECRET:$GOOGLE_CLIENTID:$GOOGLE_RESTRICTED_DOMAIN' \
        | kubeseal --cert generated/${ENV}.crt -o yaml > generated/google-oauth-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Generating"
    printf "Replacing parameters in values-${ENV}.yaml..."
    export GOOGLE_CLIENTSECRET=$(cat generated/google-oauth-secret.yaml | grep clientSecret | cut -d ':' -f 2 | xargs)
    export GOOGLE_CLIENTID=$(cat generated/google-oauth-secret.yaml | grep clientId | cut -d ':' -f 2 | xargs)
    export GOOGLE_RESTRDOMAIN=$(cat generated/google-oauth-secret.yaml | grep restrDomain | cut -d ':' -f 2 | xargs)
    export GOOGLE_ENABLE=true
    replace '$GOOGLE_CLIENTSECRET:$GOOGLE_CLIENTID:$GOOGLE_RESTRDOMAIN:$GOOGLE_ENABLE'
    printf "Cleanup..."
    rm generated/google-oauth-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Cleanup"
}

# Seal the git OAuth credentials with kubeseal and write the encrypted
# values into the values file.
function configureIdentityProvGit() {
    printf "Generating sealed secret values for git oauth identity provider..."
    export GIT_CLIENTSECRET=$(printf "$GIT_CLIENTSECRET" | base64 -w0)
    export GIT_CLIENTID=$(printf "$GIT_CLIENTID" | base64 -w0)
    export GIT_RESTRICTED_ORGS=$(printf "$GIT_RESTRICTED_ORGS" | base64 -w0)
    cat templates/secret-ip-git.yaml.TEMPLATE \
        | envsubst '$GIT_CLIENTSECRET:$GIT_CLIENTID:$GIT_RESTRICTED_ORGS' \
        | kubeseal --cert generated/${ENV}.crt -o yaml > generated/github-oauth-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Generating"
    printf "Replacing parameters in values-${ENV}.yaml..."
    export GIT_CLIENTSECRET=$(cat generated/github-oauth-secret.yaml | grep clientSecret | cut -d ':' -f 2 | xargs)
    export GIT_CLIENTID=$(cat generated/github-oauth-secret.yaml | grep clientId | cut -d ':' -f 2 | xargs)
    export GIT_RESTRORGS=$(cat generated/github-oauth-secret.yaml | grep restrOrgs | cut -d ':' -f 2 | xargs)
    export GIT_ENABLE=true
    replace '$GIT_CLIENTSECRET:$GIT_CLIENTID:$GIT_RESTRORGS:$GIT_ENABLE'
    printf "Cleanup..."
    rm generated/github-oauth-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Cleanup"
}

# Fill in console URL and base64 slack channel for the cluster updater.
function configureClusterUpdater() {
    printf "Generating values for cluster updater..."
    export ENV=${ENV}
    export CONSOLE_URL=$CONSOLE_URL
    export SLACK_B64=$(printf "$SLACK_CHANNEL_CU" | base64 -w0)
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Generating"
    printf "Replacing parameters in values-${ENV}.yaml..."
    replace '$SLACK_B64:$CONSOLE_URL:${ENV}'
}

# Seal the alertmanager config (default template or user-supplied file) and
# the CICD workflow-repository git credentials.
function configureClusterConfig() {
    printf "Generating sealed secret value for alertmanager..."
    if [ -z $ALERTMANAGER_CONFIG ]
    then
        export ENV=${ENV}
        export SLACK_CHANNEL_AM=$SLACK_CHANNEL_AM
        cat templates/default-alertmanager.yaml.TEMPLATE \
            | envsubst '${ENV}:$SLACK_CHANNEL_AM' > generated/alertmanager.yaml
        AM_YAML=generated/alertmanager.yaml
    else
        AM_YAML=$ALERTMANAGER_CONFIG
    fi
    export ALERTMANAGER_CONFIG=$(cat $AM_YAML | base64 -w0)
    cat templates/secret-alertmanager.yaml.TEMPLATE \
        | envsubst '$ALERTMANAGER_CONFIG' \
        | kubeseal --cert generated/${ENV}.crt -o yaml > generated/alertmanager-secret.yaml
    export GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_USERNAME=$( printf "$GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_USERNAME" | base64 -w0)
    export GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_SSHPRIVATEKEY=$GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_SSHPRIVATEKEY
    export GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_ENABLED=$GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_ENABLED
    cat templates/secret-cicd-repository-git.yaml.TEMPLATE \
        | envsubst '$GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_USERNAME:$GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_SSHPRIVATEKEY:$GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_ENABLED' \
        | kubeseal --cert generated/${ENV}.crt -o yaml > generated/cicd-repository-git-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Generating"
    printf "Replacing parameters in values-${ENV}.yaml..."
    export ENCRYPTED_YAML=$(cat generated/alertmanager-secret.yaml | grep alertmanager.yaml | cut -d ':' -f 2 | xargs)
    export GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_USERNAME=$(cat generated/cicd-repository-git-secret.yaml | grep username | cut -d ':' -f 2 | xargs)
    export GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_SSHPRIVATEKEY=$(cat generated/cicd-repository-git-secret.yaml | grep sshPrivateKey | cut -d ':' -f 2 | xargs)
    replace '$ENCRYPTED_YAML:$GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_USERNAME:$GITHUB_CICD_TOOLS_WORKFLOWREPOSITORY_SSHPRIVATEKEY'
    printf "Cleanup..."
    rm generated/alertmanager-secret.yaml
    rm generated/alertmanager.yaml
    rm generated/cicd-repository-git-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Cleanup"
}

# Toggle the Rook/Ceph deployment flag.
function configureRookCeph() {
    [[ ${ENABLE_ROOK_CEPH} = true ]] && printf "Enable Rook/Ceph deployment..." || printf "Disable Rook/Ceph deployment...";
    export ENABLE_ROOK_CEPH=$ENABLE_ROOK_CEPH
    replace '$ENABLE_ROOK_CEPH'
}

# Toggle the ClusterLogging deployment flag.
function configureClusterLogging() {
    [[ ${ENABLE_CLUSTER_LOGGING} = true ]] && printf "Enable ClusterLogging deployment..." || printf "Disable ClusterLogging deployment...";
    export ENABLE_CLUSTER_LOGGING=$ENABLE_CLUSTER_LOGGING
    replace '$ENABLE_CLUSTER_LOGGING'
}

# Configure cert-manager: API server URL, Route53 solver credentials
# (sealed), and the issued certificate names.
function configureClusterCertificates() {
    printf "Replace custom url for certificate patches..."
    export APISERVER_CUSTOMURL=$APISERVER_CUSTOMURL
    replace '$APISERVER_CUSTOMURL'
    printf "Configure cluster issuer solvers..."
    export SOLVERS_DNS_ZONE=$SOLVERS_DNS_ZONE
    export SOLVERS_ACCESSKEYID=$SOLVERS_ACCESSKEYID
    export SOLVERS_SECRETNAME=$SOLVERS_SECRETNAME
    export SOLVERS_SECRETACCESSKEY=$(printf "$SOLVERS_SECRETACCESSKEY" | base64 -w0)
    cat templates/secret-route53-credentials-secret.yaml.TEMPLATE \
        | envsubst '$SOLVERS_SECRETACCESSKEY:$ENV' \
        | kubeseal --cert generated/${ENV}.crt -o yaml > generated/route-53-credentials-secret.yaml
    export SOLVERS_SECRETACCESSKEY=$(cat generated/route-53-credentials-secret.yaml | grep secret-access-key | cut -d ':' -f 2 | xargs)
    replace '$SOLVERS_DNS_ZONE:$SOLVERS_ACCESSKEYID:$SOLVERS_SECRETNAME:$SOLVERS_SECRETACCESSKEY'
    printf "Configure cluster issuer certificates..."
    export CERTIFICATES_DEFAULTINGRESS=$CERTIFICATES_DEFAULTINGRESS
    export CERTIFICATES_CONSOLE=$CERTIFICATES_CONSOLE
    export CERTIFICATES_API=$CERTIFICATES_API
    replace '$CERTIFICATES_DEFAULTINGRESS:$CERTIFICATES_CONSOLE:$CERTIFICATES_API'
    printf "Cleanup..."
    rm generated/route-53-credentials-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Cleanup"
}

# Set the console route hostname.
function configureConsolePatches() {
    printf "Replace hostname..."
    export ROUTE_HOSTNAME=$ROUTE_HOSTNAME
    replace '$ROUTE_HOSTNAME'
}

# Configure CICD tool hostnames and seal the postgres credentials.
function configureGepaplexxCicdTools() {
    printf "Configure Gepaplexx CICD tools hostnames..."
    export GEPAPLEXX_CICD_TOOLS_ARGOCD_ROUTE_HOSTNAME=${GEPAPLEXX_CICD_TOOLS_ARGOCD_ROUTE_HOSTNAME}
    export GEPAPLEXX_CICD_TOOLS_ROLLOUTS_ROUTE_HOSTNAME=${GEPAPLEXX_CICD_TOOLS_ROLLOUTS_ROUTE_HOSTNAME}
    export GEPAPLEXX_CICD_TOOLS_WORKFLOWS_ROUTE_HOSTNAME=${GEPAPLEXX_CICD_TOOLS_WORKFLOWS_ROUTE_HOSTNAME}
    replace '$GEPAPLEXX_CICD_TOOLS_ARGOCD_ROUTE_HOSTNAME:$GEPAPLEXX_CICD_TOOLS_ROLLOUTS_ROUTE_HOSTNAME:$GEPAPLEXX_CICD_TOOLS_WORKFLOWS_ROUTE_HOSTNAME'
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Generating"
    printf "Replacing parameters in values-${ENV}.yaml..."
    printf "generating sealed secret for cicd-tools postgres db..."
    cat templates/secret-postgresql-creds.yaml.TEMPLATE \
        | envsubst '$GEPAPLEXX_CICD_TOOLS_PSQL_PASSWORD:$GEPAPLEXX_CICD_TOOLS_PSQL_POSTGRES_PASSWORD' \
        | kubeseal --cert generated/${ENV}.crt -o yaml > generated/postgreql-creds-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Generating"
    printf "Replacing parameters in values-${ENV}.yaml..."
    export GEPAPLEXX_CICD_TOOLS_PSQL_PASSWORD=$(cat generated/postgreql-creds-secret.yaml | grep password | grep -v postgres-password | cut -d ':' -f 2 | xargs)
    export GEPAPLEXX_CICD_TOOLS_PSQL_POSTGRES_PASSWORD=$(cat generated/postgreql-creds-secret.yaml | grep postgres-password | cut -d ':' -f 2 | xargs)
    replace '$GEPAPLEXX_CICD_TOOLS_PSQL_PASSWORD:$GEPAPLEXX_CICD_TOOLS_PSQL_POSTGRES_PASSWORD'
    printf "Cleanup..."
    rm generated/postgreql-creds-secret.yaml
    [[ $? = 0 ]] && printSuccess || printFailureAndExit "Cleanup"
}

# Verify every required CLI tool is on PATH before doing any work.
function checkPrerequisites() {
    ok=true
    printf "envsubst is installed..."
    which envsubst >> /dev/null
    [[ $? = 0 ]] && printSuccess || { ok=false; printFailure; }
    printf "ansible is installed..."
    which ansible >> /dev/null
    [[ $? = 0 ]] && printSuccess || { ok=false; printFailure; }
    printf "ansible-vault is installed..."
    which ansible-vault >> /dev/null
    [[ $? = 0 ]] && printSuccess || { ok=false; printFailure; }
    printf "kubeseal is installed..."
    which kubeseal >> /dev/null
    [[ $? = 0 ]] && printSuccess || { ok=false; printFailure; }
    printf "openssl is installed..."
    which openssl >> /dev/null
    [[ $? = 0 ]] && printSuccess || { ok=false; printFailure; }
    [[ $ok = false ]] && { printf "\n${red}Check failed! Please install the necessary tools.${normal}"; exit 1; }
}

function main() {
    [[ -z ${1} ]] && { printUsageAndExit; }
    [[ ! -f ${1} ]] && { echo "File not found: '${1}'"; exit 1; }
    printHeader "${1}" $blue
    # Source the user-supplied config (ENV, hostnames, credentials, flags).
    . ${1}
    printActionHeader "CHECK PREREQUISITES" $yellow
    checkPrerequisites
    mkdir -p generated
    cp templates/day-2-ops-values.yaml.TEMPLATE generated/values-${ENV}.yaml
    [[ $? = 1 ]] && printFailureAndExit "Copying day-2-ops-values-TEMPLATE"
    waitToContinue
    generateCertificate
    encryptSealedSecretCertificateForAnsibleVault
    printActionHeader "CONFIGURE IDENTITY PROVIDER" $yellow
    [[ ${ENABLE_GOOGLE_IP} = true ]] && configureIdentityProvGoogle || removeIdentityProvGoogle;
    [[ ${ENABLE_GIT_IP} = true ]] && configureIdentityProvGit || removeIdentityProvGit;
    printActionHeader "CONFIGURE CLUSTER UPDATER" $yellow
    configureClusterUpdater
    printActionHeader "CONFIGURE CLUSTER CONFIG" $yellow
    configureClusterConfig
    printActionHeader "CONFIGURE ROOK/CEPH INSTANCE" $yellow
    configureRookCeph
    printActionHeader "CONFIGURE CLUSTER LOGGING" $yellow
    configureClusterLogging
    printActionHeader "CONFIGURE CLUSTER CERTIFICATES" $yellow
    configureClusterCertificates
    printActionHeader "CONFIGURE CONSOLE PATCHES" $yellow
    configureConsolePatches
    printActionHeader "CONFIGURE GEPAPLEXX CICD TOOLS" $yellow
    configureGepaplexxCicdTools
    printActionHeader "SUMMARY" $green
    printf "Successfully generated values for environment '${ENV}': generated/values-${ENV}.yaml\n"
    printf "Copy these file to https://github.com/gepaplexx/gp-helm-chart-development/tree/main/day-2-operations/gp-cluster-setup/values \n"
}

# 1 .. Config file
main "${1}"
#!/usr/bin/env bash testdir=$(readlink -f $(dirname $0)) rootdir=$(readlink -f $testdir/../../..) source $rootdir/test/common/autotest_common.sh source $rootdir/test/iscsi_tgt/common.sh # $1 = "iso" - triggers isolation mode (setting up required environment). # $2 = test type posix or vpp. defaults to posix. iscsitestinit $1 $2 rpc_py="$rootdir/scripts/rpc.py" fio_py="$rootdir/scripts/fio.py" # Namespaces are NOT used here on purpose. This test requires changes to detect # ifc_index for interface that was put into namespace. Needed for net_interface_add_ip_address. ISCSI_APP="$rootdir/app/iscsi_tgt/iscsi_tgt" NETMASK=127.0.0.0/24 MIGRATION_ADDRESS=127.0.0.2 function kill_all_iscsi_target() { for ((i = 0; i < 2; i++)); do rpc_addr="/var/tmp/spdk${i}.sock" $rpc_py -s $rpc_addr kill_instance SIGTERM done } function rpc_config() { # $1 = RPC server address # $2 = Netmask $rpc_py -s $1 add_initiator_group $INITIATOR_TAG $INITIATOR_NAME $2 $rpc_py -s $1 bdev_malloc_create 64 512 } function rpc_add_target_node() { $rpc_py -s $1 net_interface_add_ip_address 1 $MIGRATION_ADDRESS $rpc_py -s $1 iscsi_create_portal_group $PORTAL_TAG $MIGRATION_ADDRESS:$ISCSI_PORT $rpc_py -s $1 iscsi_create_target_node target1 target1_alias 'Malloc0:0' $PORTAL_TAG:$INITIATOR_TAG 64 -d } timing_enter ip_migration echo "Running ip migration tests" for ((i = 0; i < 2; i++)); do timing_enter start_iscsi_tgt_$i rpc_addr="/var/tmp/spdk${i}.sock" # TODO: run the different iSCSI instances on non-overlapping CPU masks $ISCSI_APP -r $rpc_addr -i $i -m $ISCSI_TEST_CORE_MASK --wait-for-rpc & pid=$! echo "Process pid: $pid" trap 'kill_all_iscsi_target; exit 1' SIGINT SIGTERM EXIT waitforlisten $pid $rpc_addr $rpc_py -s $rpc_addr iscsi_set_options -o 30 -a 64 $rpc_py -s $rpc_addr start_subsystem_init echo "iscsi_tgt is listening. Running tests..." 
timing_exit start_iscsi_tgt_$i rpc_config $rpc_addr $NETMASK trap 'kill_all_iscsi_target; iscsitestfini $1 $2; exit 1' \ SIGINT SIGTERM EXIT done rpc_first_addr="/var/tmp/spdk0.sock" rpc_add_target_node $rpc_first_addr sleep 1 iscsiadm -m discovery -t sendtargets -p $MIGRATION_ADDRESS:$ISCSI_PORT sleep 1 iscsiadm -m node --login -p $MIGRATION_ADDRESS:$ISCSI_PORT # fio tests for multi-process $fio_py -p iscsi -i 4096 -d 32 -t randrw -r 10 & fiopid=$! sleep 5 $rpc_py -s $rpc_first_addr kill_instance SIGTERM rpc_second_addr="/var/tmp/spdk1.sock" rpc_add_target_node $rpc_second_addr wait $fiopid trap - SIGINT SIGTERM EXIT iscsicleanup $rpc_py -s $rpc_second_addr kill_instance SIGTERM iscsitestfini $1 $2 report_test_completion "iscsi_ip_migration" timing_exit ip_migration
#!/bin/bash
# Generate the hooks/post-refresh file for a DNS plugin
# Usage: bash generate_dnsplugins_postrefreshhook.sh path/to/dns/plugin
# For example, from the certbot home directory:
#   tools/snap/generate_dnsplugins_postrefreshhook.sh certbot-dns-dnsimple
set -eu

PLUGIN_PATH=$1

mkdir -p "${PLUGIN_PATH}/snap/hooks"
cat <<EOF > "${PLUGIN_PATH}/snap/hooks/post-refresh"
#!/bin/sh -e
# This file is generated automatically and should not be edited manually.

# get certbot version
if [ ! -f "\$SNAP/certbot-shared/certbot-version.txt" ]; then
    echo "No certbot version available; not doing version comparison check" >> "\$SNAP_DATA/debuglog"
    exit 0
fi
cb_installed=\$(cat \$SNAP/certbot-shared/certbot-version.txt)
# get required certbot version for plugin. certbot version must be at least the plugin's
# version. note that this is not the required version in setup.py, but the version number itself.
cb_required=\$(grep -oP "version = '\K.*(?=')" \$SNAP/setup.py)
# BUG FIX: initialize exit_code — when the version check below succeeds, the
# "|| exit_code=\$?" branch never runs, and the generated hook would test an
# unset variable ("[ "" -eq 1 ]" -> "integer expression expected" on stderr).
exit_code=0
\$SNAP/bin/python3 -c "import sys; from packaging import version; sys.exit(1) if\
 version.parse('\$cb_installed') < version.parse('\$cb_required') else sys.exit(0)" || exit_code=\$?
if [ "\$exit_code" -eq 1 ]; then
    echo "Certbot is version \$cb_installed but needs to be at least \$cb_required before" \\
        "this plugin can be updated; will try again on next refresh."
    exit 1
fi
EOF
/**
 * Convert a total day count into a "Y years, W weeks, D days" string
 * (years of 365 days, then whole weeks, then leftover days).
 *
 * BUG FIX: the original declared `let days = weekdays % 7`, which redeclares
 * the `days` parameter in the same scope — a SyntaxError that prevented the
 * function from ever being defined. The inner value now has its own name.
 *
 * @param {number} days non-negative whole number of days
 * @returns {string} e.g. "2 years, 10 weeks, 0 days"
 */
function daysToYWD(days) {
  const years = Math.floor(days / 365);
  const daysAfterYears = days % 365;
  const weeks = Math.floor(daysAfterYears / 7);
  const remainingDays = daysAfterYears % 7;
  return `${years} years, ${weeks} weeks, ${remainingDays} days`;
}
// Define the DnaDef type representing the definition of a DNA sequence pub struct DnaDef { // Define the fields of the DnaDef type as per the requirements // For example: // name: String, // sequence: String, } // Implement the missing methods for the DnaDefBuf struct to manage DNA definitions pub struct DnaDefBuf { dna_defs: CasBufFreshSync<DnaDef>, } impl DnaDefBuf { // Implement methods to manage DNA definitions // For example: // pub fn add_dna_definition(&mut self, dna_def: DnaDef) { // self.dna_defs.push(dna_def); // } // pub fn get_dna_definition(&self, index: usize) -> Option<&DnaDef> { // self.dna_defs.get(index) // } // pub fn remove_dna_definition(&mut self, index: usize) -> Option<DnaDef> { // self.dna_defs.remove(index) // } } // Implement any additional methods required for managing DNA definitions in the DnaStore trait pub trait DnaStore { // Define methods for managing DNA definitions // For example: // fn add_dna_definition(&mut self, dna_def: DnaDef); // fn get_dna_definition(&self, index: usize) -> Option<&DnaDef>; // fn remove_dna_definition(&mut self, index: usize) -> Option<DnaDef>; }
<gh_stars>10-100 /* * Copyright (c) 2004-2021, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ package org.hisp.dhis.android.core.domain.aggregated.data.internal; import org.hisp.dhis.android.core.data.dataset.DataSetSamples; import org.hisp.dhis.android.core.data.utils.FillPropertiesTestUtils; import org.hisp.dhis.android.core.dataset.DataSet; import org.junit.Before; import org.junit.Test; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import java.util.Date; import static com.google.common.truth.Truth.assertThat; import static org.mockito.Mockito.when; public class AggregatedDataSyncLastUpdatedCalculatorShould { private DataSet dataSet = DataSetSamples.getDataSet(); private int dataElementsHash = 1111111; private int organisationUnitsHash = 22222222; private int pastPeriods = 5; private Date syncLastUpdated = FillPropertiesTestUtils.parseDate("2018-01-01T15:08:27.882"); private Date expectedLastUpdated = FillPropertiesTestUtils.parseDate("2017-12-31T15:08:27.882"); @Mock private AggregatedDataSyncHashHelper hashHelper; private AggregatedDataSync syncValue = AggregatedDataSync.builder() .dataSet(dataSet.uid()) .periodType(dataSet.periodType()) .pastPeriods(pastPeriods) .futurePeriods(dataSet.openFuturePeriods()) .dataElementsHash(dataElementsHash) .organisationUnitsHash(organisationUnitsHash) .lastUpdated(syncLastUpdated) .build(); private AggregatedDataSyncLastUpdatedCalculator calculator; @Before public void setUp() throws Exception { MockitoAnnotations.initMocks(this); when(hashHelper.getDataSetDataElementsHash(dataSet)).thenReturn(dataElementsHash); calculator = new AggregatedDataSyncLastUpdatedCalculator(hashHelper); } @Test public void return_null_if_sync_value_null() { Date lastUpdated = calculator.getLastUpdated(null, dataSet, 3, 5, 0); assertThat(lastUpdated).isNull(); } @Test public void return_expected_last_updated_if_same_values() { Date lastUpdated = calculator.getLastUpdated(syncValue, dataSet, pastPeriods, dataSet.openFuturePeriods(), organisationUnitsHash); assertThat(lastUpdated).isEqualTo(expectedLastUpdated); } @Test 
public void return_null_if_organisation_units_hash_changed() { Date lastUpdated = calculator.getLastUpdated(syncValue, dataSet, pastPeriods, dataSet.openFuturePeriods(), 33333); assertThat(lastUpdated).isNull(); } @Test public void return_null_if_data_set_elements_hash_changed() { when(hashHelper.getDataSetDataElementsHash(dataSet)).thenReturn(77777); Date lastUpdated = calculator.getLastUpdated(syncValue, dataSet, pastPeriods, dataSet.openFuturePeriods(), organisationUnitsHash); assertThat(lastUpdated).isNull(); } @Test public void return_null_if_future_periods_are_increased() { Date lastUpdated = calculator.getLastUpdated(syncValue, dataSet, pastPeriods, dataSet.openFuturePeriods() + 1, organisationUnitsHash); assertThat(lastUpdated).isNull(); } @Test public void return_expected_last_updated_if_future_periods_are_decreased() { Date lastUpdated = calculator.getLastUpdated(syncValue, dataSet, pastPeriods, dataSet.openFuturePeriods() - 1, organisationUnitsHash); assertThat(lastUpdated).isEqualTo(expectedLastUpdated); } @Test public void return_null_if_past_periods_are_increased() { Date lastUpdated = calculator.getLastUpdated(syncValue, dataSet, pastPeriods + 1, dataSet.openFuturePeriods(), organisationUnitsHash); assertThat(lastUpdated).isNull(); } @Test public void return_expected_past_updated_if_future_periods_are_decreased() { Date lastUpdated = calculator.getLastUpdated(syncValue, dataSet, pastPeriods - 1, dataSet.openFuturePeriods(), organisationUnitsHash); assertThat(lastUpdated).isEqualTo(expectedLastUpdated); } }
import commons from 'feathers-hooks-common'; import { restrictToOwner } from 'feathers-authentication-hooks'; import local from '@feathersjs/authentication-local'; import { toChecksumAddress } from 'web3-utils'; import isOwner from './hooks/isOwner'; import notifyOfChange from '../../hooks/notifyOfChange'; import sanitizeAddress from '../../hooks/sanitizeAddress'; import setAddress from '../../hooks/setAddress'; import { updatedAt, createdAt } from '../../hooks/timestamps'; const normalizeId = () => context => { if (context.id) { context.id = toChecksumAddress(context.id); } return context; }; // ToDo: figure out what ownerField means in this context const restrict = [ // normalizeId(), restrictToOwner({ idField: '_id', ownerField: '_id', }), ]; const address = [ setAddress('address'), sanitizeAddress('address', { required: true, validate: true }), ]; // ToDo: Sort out realtime updates to other models const notifyParents = [ // { // service: 'pools', // parentField: 'ownerId', // childField: '_id', // watchFields: ['avatar', 'name'], // } ]; // TODO write a hook to prevent overwriting a non-zero giverId with 0 module.exports = { before: { all: [], find: [], get: [], create: [local.hooks.hashPassword(), commons.discard('_id'), createdAt], update: [...restrict, commons.stashBefore(), updatedAt], patch: [...restrict, commons.stashBefore(), updatedAt], remove: [commons.disallow()], }, after: { all: [ commons.when(hook => hook.params.provider), local.hooks.protect('password') ], //commons.discard('_id') find: [ commons.iff( commons.isNot(isOwner), commons.discard('email')), ], get: [ commons.iff( commons.isNot(isOwner), commons.discard('email')), ], create: [], update: [notifyOfChange(...notifyParents)], patch: [notifyOfChange(...notifyParents)], remove: [notifyOfChange(...notifyParents)], }, error: { all: [], find: [], get: [], create: [], update: [], patch: [], remove: [], }, };
def float_to_string(number, ndigits=2):
    """Round ``number`` and return it as a string.

    Uses the built-in :func:`round`, which applies banker's rounding and does
    not zero-pad the result (e.g. ``2.0`` -> ``"2.0"``, not ``"2.00"``).

    :param number: numeric value to convert
    :param ndigits: decimal places to round to (default 2, preserving the
        original hard-coded behaviour)
    :return: the rounded value converted to a string
    """
    return str(round(number, ndigits))
<gh_stars>0 exports.seed = function(knex) { return knex('recipesinstructions').insert([ { recipeid: 1, instructionsid: 3 }, { recipeid: 1, instructionsid: 4 }, { recipeid: 4, instructionsid: 2 }, { recipeid: 3, instructionsid: 1 }, { recipeid: 2, instructionsid: 5 }, { recipeid: 2, instructionsid: 15 }, { recipeid: 3, instructionsid: 16 }, { recipeid: 4, instructionsid: 14 }, { recipeid: 5, instructionsid: 13 }, { recipeid: 6, instructionsid: 12 }, { recipeid: 5, instructionsid: 11 } ]); };
// dva model for the "restree" namespace: a static demo tree of
// { key, title, children? } nodes plus a `drop` reducer.
export default {
  namespace: 'restree',
  // NOTE(review): `key` values 1 and 2 repeat several times below — tree node
  // keys are normally unique; confirm whether the duplicates are intentional
  // placeholder data.
  state: [
    {
      key: 0,
      title: "x",
      children: [{
        key: 3,
        title: "华或多或少"
      },
      {
        key: 4,
        title: "大大师傅"
      }
      ]
    },
    {
      key: 1,
      title: "sfddssdfds"
    },
    {
      key: 2,
      title: "啦啦啦啦啦"
    },
    {
      key: 1,
      title: "sfddssdfds"
    },
    {
      key: 2,
      title: "啦啦啦啦啦"
    },
    {
      key: 1,
      title: "sfddssdfds"
    },
    {
      key: 2,
      title: "啦啦啦啦啦"
    },
    {
      key: 1,
      title: "sfddssdfds"
    },
    {
      key: 2,
      title: "啦啦啦啦啦"
    }
  ],
  reducers: {
    // Replaces the whole tree with a fixed "reordered" layout.
    // NOTE(review): the dropped `task` payload is currently ignored.
    drop(state, { payload: task }) {
      return [
        {
          key: 0,
          title: "新的结构",
          children: [{
            key: 3,
            title: "华或多或少"
          },
          {
            key: 4,
            title: "大大师傅"
          },
          {
            key: 2,
            title: "啦啦啦啦啦"
          }
          ]
        },
        {
          key: 1,
          title: "重置过顺序啦"
        }
      ]
    },
  }
};
<gh_stars>0 import { ui } from "../ui/layaMaxUI"; import ProgressView from "../widget/ProgressView"; import { AppConfig } from "../AppConfig"; import Log from "../utils/Log"; export default class LoadPage extends ui.page.LoadPageUI { private static self: LoadPage = null; private progressBar: ProgressView = null; private subpackage = [ 'res', 'comp', 'level', 'sound' ]; private stateFlag = []; private subLen = 0; //加载索引值 private loadIndex = 0; public min: number = 0; public max: number = 100; //总共移动距离:460 当前位置 public progress: number = 0; private callBack: Function = null; constructor() { super(); this.progressBar = new ProgressView(); this.progressBarLayout.addChild(this.progressBar); this.subLen = this.subpackage.length; } public setCallBack(callBack: Function) { this.callBack = callBack; } public static getSelf(callBack?: Function): LoadPage { if (LoadPage.self == null) { LoadPage.self = new LoadPage(); } if (callBack) { LoadPage.self.setCallBack(callBack); LoadPage.self.loadRes(callBack); } return LoadPage.self; } private totalProgress: Array<number> = [0, 0, 0]; public loadRes(callBack: Function): void { this.callBack = callBack; this.setProgress(0); let thiz = this; thiz.loadIndex = 0; let isSuccess: boolean = true; if (!AppConfig.isWX || this.subLen == 0) { thiz.onCompelete(isSuccess); return; } //加载分包 for (let i = 0; i < this.subLen; i++) { if (thiz.stateFlag[i]) { thiz.loadIndex++; // thiz.loadProgress.value = thiz.loadIndex / thiz.subLen; // thiz.loadText.text = '正在玩命加载...' 
+ thiz.loadIndex + '/' + thiz.subLen; if (thiz.loadIndex == thiz.subLen) { thiz.onCompelete(isSuccess); } continue; } let loadTask = wx.loadSubpackage({ name: this.subpackage[i], // name 可以填 name 或者 root success: (res) => { // 分包加载成功后通过 success 回调 Log.d(res); thiz.stateFlag[i] = true; }, fail: (res) => { // 分包加载失败通过 fail 回调 isSuccess = false; thiz.stateFlag[i] = false; }, complete() { thiz.loadIndex++; // thiz.loadProgress.value = thiz.loadIndex / thiz.subLen; if (thiz.loadIndex == thiz.subLen) { // Laya.timer.once(2000, thiz, thiz.onCompelete, [true]); thiz.onCompelete(isSuccess); } } }); loadTask.onProgressUpdate(res => { Log.d('下载进度 i=>' + i, res.progress) Log.d('已经下载的数据长度', res.totalBytesWritten) Log.d('预期需要下载的数据总长度', res.totalBytesExpectedToWrite) thiz.totalProgress[i] = res.totalBytesWritten / res.totalBytesExpectedToWrite * 100; let per = (thiz.totalProgress[0] + thiz.totalProgress[1] + thiz.totalProgress[2]) / 300; // thiz.loadProgress.value = (thiz.totalProgress[0] + thiz.totalProgress[1] + thiz.totalProgress[2]) / 300; thiz.setProgress(per * 100); }) } } public setProgress(progress: number, min?: number, max?: number) { if (!isNaN(min)) { this.min = min; } if (!isNaN(max)) { this.max = max; } this.progress = progress; //根据百分比,计算应该移动的距离 //该移动的距离 this.progressBar.setProgress(this.progress); // this.progressBar.x = moveX; this.loadText.text = '正在玩命加载...' + Math.round(progress) + '%'; } /** * 加载结束 * * @private * @memberof LoadPage */ private onCompelete(flag: boolean): void { if (this.callBack) { this.callBack(flag); } } onDisable() { Log.d("移出舞台"); LoadPage.self = null; this.destroy(); } }
/*-
 * ========================LICENSE_START=================================
 * O-RAN-SC
 * %%
 * Copyright (C) 2020 Nordix Foundation
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ========================LICENSE_END===================================
 */

package org.oransc.enrichment.controllers.producer;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;

import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;

import org.oransc.enrichment.controllers.ErrorResponse;
import org.oransc.enrichment.controllers.VoidResponse;
import org.oransc.enrichment.controllers.consumer.ConsumerCallbacks;
import org.oransc.enrichment.controllers.producer.ProducerRegistrationInfo.ProducerEiTypeRegistrationInfo;
import org.oransc.enrichment.repository.EiJob;
import org.oransc.enrichment.repository.EiJobs;
import org.oransc.enrichment.repository.EiProducer;
import org.oransc.enrichment.repository.EiProducers;
import org.oransc.enrichment.repository.EiType;
import org.oransc.enrichment.repository.EiTypes;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

/**
 * REST API used by EI (Enrichment Information) producers to register
 * themselves and their EI types, and to inspect their jobs and status.
 * Registration/deregistration is propagated to consumers via
 * {@link ConsumerCallbacks}.
 */
@SuppressWarnings("squid:S2629") // Invoke method(s) only conditionally
@RestController("ProducerController")
@Api(tags = {ProducerConsts.PRODUCER_API_NAME})
public class ProducerController {

    private final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    private static Gson gson = new GsonBuilder().create();

    @Autowired
    private EiJobs eiJobs;

    @Autowired
    private EiTypes eiTypes;

    @Autowired
    private EiProducers eiProducers;

    @Autowired
    ProducerCallbacks producerCallbacks;

    @Autowired
    ConsumerCallbacks consumerCallbacks;

    /** Lists the identifiers of all registered EI types. */
    @GetMapping(path = ProducerConsts.API_ROOT + "/eitypes", produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "EI type identifiers", notes = "")
    @ApiResponses(
        value = { //
            @ApiResponse(
                code = 200,
                message = "EI type identifiers",
                response = String.class,
                responseContainer = "List"), //
        })
    public ResponseEntity<Object> getEiTypeIdentifiers( //
    ) {
        List<String> result = new ArrayList<>();
        for (EiType eiType : this.eiTypes.getAllEiTypes()) {
            result.add(eiType.getId());
        }
        return new ResponseEntity<>(gson.toJson(result), HttpStatus.OK);
    }

    /** Returns one EI type, or 404 when the id is unknown. */
    @GetMapping(path = ProducerConsts.API_ROOT + "/eitypes/{eiTypeId}", produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "Individual EI type", notes = "")
    @ApiResponses(
        value = { //
            @ApiResponse(code = 200, message = "EI type", response = ProducerEiTypeInfo.class), //
            @ApiResponse(
                code = 404,
                message = "Enrichment Information type is not found",
                response = ErrorResponse.ErrorInfo.class)})
    public ResponseEntity<Object> getEiType( //
        @PathVariable("eiTypeId") String eiTypeId) {
        try {
            EiType t = this.eiTypes.getType(eiTypeId);
            ProducerEiTypeInfo info = toEiTypeInfo(t);
            return new ResponseEntity<>(gson.toJson(info), HttpStatus.OK);
        } catch (Exception e) {
            return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
        }
    }

    /** Lists the identifiers of all registered EI producers. */
    @GetMapping(path = ProducerConsts.API_ROOT + "/eiproducers", produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "EI producer identifiers", notes = "")
    @ApiResponses(
        value = { //
            @ApiResponse(
                code = 200,
                message = "EI producer identifiers",
                response = String.class,
                responseContainer = "List"), //
        })
    public ResponseEntity<Object> getEiProducerIdentifiers( //
    ) {
        List<String> result = new ArrayList<>();
        for (EiProducer eiProducer : this.eiProducers.getAllProducers()) {
            result.add(eiProducer.getId());
        }
        return new ResponseEntity<>(gson.toJson(result), HttpStatus.OK);
    }

    /** Returns one producer's registration info, or 404 when unknown. */
    @GetMapping(
        path = ProducerConsts.API_ROOT + "/eiproducers/{eiProducerId}",
        produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "Individual EI producer", notes = "")
    @ApiResponses(
        value = { //
            @ApiResponse(code = 200, message = "EI jobs", response = ProducerRegistrationInfo.class), //
            @ApiResponse(
                code = 404,
                message = "Enrichment Information producer is not found",
                response = ErrorResponse.ErrorInfo.class)})
    public ResponseEntity<Object> getEiProducer( //
        @PathVariable("eiProducerId") String eiProducerId) {
        try {
            EiProducer p = this.eiProducers.getProducer(eiProducerId);
            ProducerRegistrationInfo info = toEiProducerRegistrationInfo(p);
            return new ResponseEntity<>(gson.toJson(info), HttpStatus.OK);
        } catch (Exception e) {
            return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
        }
    }

    /** Lists every EI job served by one producer (across all its types). */
    @GetMapping(
        path = ProducerConsts.API_ROOT + "/eiproducers/{eiProducerId}/eijobs",
        produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "EI job definitions", notes = "EI job definitions for one EI producer")
    @ApiResponses(
        value = { //
            @ApiResponse(code = 200, message = "EI jobs", response = ProducerJobInfo.class, responseContainer = "List"), //
            @ApiResponse(
                code = 404,
                message = "Enrichment Information producer is not found",
                response = ErrorResponse.ErrorInfo.class)})
    public ResponseEntity<Object> getEiProducerJobs( //
        @PathVariable("eiProducerId") String eiProducerId) {
        try {
            EiProducer producer = this.eiProducers.getProducer(eiProducerId);
            Collection<ProducerJobInfo> producerJobs = new ArrayList<>();
            for (EiType type : producer.getEiTypes()) {
                for (EiJob eiJob : this.eiJobs.getJobsForType(type)) {
                    ProducerJobInfo request = new ProducerJobInfo(eiJob);
                    producerJobs.add(request);
                }
            }
            return new ResponseEntity<>(gson.toJson(producerJobs), HttpStatus.OK);
        } catch (Exception e) {
            return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
        }
    }

    /** Reports whether one producer is ENABLED (available) or DISABLED. */
    @GetMapping(
        path = ProducerConsts.API_ROOT + "/eiproducers/{eiProducerId}/status",
        produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "EI producer status")
    @ApiResponses(
        value = { //
            @ApiResponse(code = 200, message = "EI jobs", response = ProducerStatusInfo.class), //
            @ApiResponse(
                code = 404,
                message = "Enrichment Information producer is not found",
                response = ErrorResponse.ErrorInfo.class)})
    public ResponseEntity<Object> getEiProducerStatus( //
        @PathVariable("eiProducerId") String eiProducerId) {
        try {
            EiProducer producer = this.eiProducers.getProducer(eiProducerId);
            return new ResponseEntity<>(gson.toJson(producerStatusInfo(producer)), HttpStatus.OK);
        } catch (Exception e) {
            return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
        }
    }

    // Maps the producer's availability flag to the API's operational state enum.
    private ProducerStatusInfo producerStatusInfo(EiProducer producer) {
        ProducerStatusInfo.OperationalState opState =
            producer.isAvailable() ? ProducerStatusInfo.OperationalState.ENABLED
                : ProducerStatusInfo.OperationalState.DISABLED;
        return new ProducerStatusInfo(opState);
    }

    /**
     * Registers or replaces a producer. On replace, the old registration is
     * detached from its types first; types left without any producer are
     * purged, and consumers are notified of both the removal and the new
     * registration. Returns 201 on create, 200 on update.
     */
    @PutMapping(
        path = ProducerConsts.API_ROOT + "/eiproducers/{eiProducerId}",
        produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "Individual EI producer", notes = "")
    @ApiResponses(
        value = { //
            @ApiResponse(code = 201, message = "Producer created", response = VoidResponse.class), //
            @ApiResponse(code = 200, message = "Producer updated", response = VoidResponse.class)}//
    )
    public ResponseEntity<Object> putEiProducer( //
        @PathVariable("eiProducerId") String eiProducerId, //
        @RequestBody ProducerRegistrationInfo registrationInfo) {
        try {
            EiProducer previousDefinition = this.eiProducers.get(eiProducerId);
            if (previousDefinition != null) {
                for (EiType type : previousDefinition.getEiTypes()) {
                    type.removeProducer(previousDefinition);
                }
            }

            EiProducer producer = registerProducer(eiProducerId, registrationInfo);
            if (previousDefinition != null) {
                purgeTypes(previousDefinition.getEiTypes());
                this.consumerCallbacks.notifyConsumersProducerDeleted(previousDefinition);
            }
            this.consumerCallbacks.notifyConsumersProducerAdded(producer);
            return new ResponseEntity<>(previousDefinition == null ? HttpStatus.CREATED : HttpStatus.OK);
        } catch (Exception e) {
            return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
        }
    }

    // Removes types that no longer have any producer serving them.
    private void purgeTypes(Collection<EiType> types) {
        for (EiType type : types) {
            if (type.getProducerIds().isEmpty()) {
                this.eiTypes.remove(type);
            }
        }
    }

    /** Deregisters one producer and notifies consumers; 204 on success, 404 when unknown. */
    @DeleteMapping(
        path = ProducerConsts.API_ROOT + "/eiproducers/{eiProducerId}",
        produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiOperation(value = "Individual EI producer", notes = "")
    @ApiResponses(
        value = { //
            @ApiResponse(code = 200, message = "Not used", response = VoidResponse.class),
            @ApiResponse(code = 204, message = "Producer deleted", response = VoidResponse.class),
            @ApiResponse(code = 404, message = "Producer is not found", response = ErrorResponse.ErrorInfo.class)})
    public ResponseEntity<Object> deleteEiProducer(@PathVariable("eiProducerId") String eiProducerId) {
        try {
            final EiProducer producer = this.eiProducers.getProducer(eiProducerId);
            this.eiProducers.deregisterProducer(producer, this.eiTypes, this.eiJobs);
            this.consumerCallbacks.notifyConsumersProducerDeleted(producer);
            return new ResponseEntity<>(HttpStatus.NO_CONTENT);
        } catch (Exception e) {
            return ErrorResponse.create(e, HttpStatus.NOT_FOUND);
        }
    }

    // Looks up or creates the EI type; consumers are notified only for new types.
    private EiType registerType(ProducerEiTypeRegistrationInfo typeInfo) {
        EiType type = this.eiTypes.get(typeInfo.eiTypeId);
        if (type == null) {
            type = new EiType(typeInfo.eiTypeId, typeInfo.jobDataSchema);
            this.eiTypes.put(type);
            this.consumerCallbacks.notifyConsumersTypeAdded(type);
        }
        return type;
    }

    // Factory kept package-private (overridable in tests).
    EiProducer createProducer(Collection<EiType> types, String producerId, ProducerRegistrationInfo registrationInfo) {
        return new EiProducer(producerId, types, registrationInfo.jobCallbackUrl,
            registrationInfo.producerSupervisionCallbackUrl);
    }

    // Builds the producer, links it to each declared type, stores it,
    // and restarts any jobs the producer should be serving.
    private EiProducer registerProducer(String producerId, ProducerRegistrationInfo registrationInfo) {
        ArrayList<EiType> typesForProducer = new ArrayList<>();
        EiProducer producer = createProducer(typesForProducer, producerId, registrationInfo);
        for (ProducerEiTypeRegistrationInfo typeInfo : registrationInfo.types) {
            EiType type = registerType(typeInfo);
            typesForProducer.add(type);
            type.addProducer(producer); //
        }
        this.eiProducers.put(producer);
        producerCallbacks.restartJobs(producer, this.eiJobs);
        return producer;
    }

    ProducerRegistrationInfo toEiProducerRegistrationInfo(EiProducer p) {
        Collection<ProducerEiTypeRegistrationInfo> types = new ArrayList<>();
        for (EiType type : p.getEiTypes()) {
            types.add(toEiTypeRegistrationInfo(type));
        }
        return new ProducerRegistrationInfo(types, p.getJobCallbackUrl(), p.getProducerSupervisionCallbackUrl());
    }

    private ProducerEiTypeRegistrationInfo toEiTypeRegistrationInfo(EiType type) {
        return new ProducerEiTypeRegistrationInfo(type.getJobDataSchema(), type.getId());
    }

    private ProducerEiTypeInfo toEiTypeInfo(EiType t) {
        return new ProducerEiTypeInfo(t.getJobDataSchema(), t.getProducerIds());
    }
}
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'ui_fiscal_icfefetuarpagamentoformatado.ui'
#
# Created: Mon Nov 24 22:25:42 2014
#      by: pyside-uic 0.2.15 running on PySide 1.2.2
#
# WARNING! All changes made in this file will be lost!
# NOTE(review): despite the warning above, __init__ and the two click handlers
# look hand-written (pyside-uic does not emit them) — regenerating the form
# would discard them; confirm before re-running pyside-uic.

from PySide import QtCore, QtGui
from pydaruma.pydaruma import iCFEfetuarPagamentoFormatado_ECF_Daruma
from scripts.fiscal.retornofiscal import tratarRetornoFiscal

class Ui_ui_FISCAL_iCFEfetuarPagamentoFormatado(QtGui.QWidget):
    """Small dialog that sends a formatted payment (forma + valor) to the
    Daruma fiscal printer API and routes the result through
    tratarRetornoFiscal."""

    def __init__(self):
        super(Ui_ui_FISCAL_iCFEfetuarPagamentoFormatado, self).__init__()
        self.setupUi(self)
        # Wire the two buttons to their handlers.
        self.pushButtonEnviar.clicked.connect(self.on_pushButtonEnviar_clicked)
        self.pushButtonCancelar.clicked.connect(self.on_pushButtonCancelar_clicked)

    def on_pushButtonEnviar_clicked(self):
        """Read the form fields and submit the payment to the fiscal printer."""
        StrFPGTO = self.lineEditFormaPGTO.text()
        StrValor = self.lineEditValor.text()
        tratarRetornoFiscal(iCFEfetuarPagamentoFormatado_ECF_Daruma(StrFPGTO,StrValor), self)

    def on_pushButtonCancelar_clicked(self):
        """Close the dialog without sending anything."""
        self.close()

    def setupUi(self, ui_FISCAL_iCFEfetuarPagamentoFormatado):
        """Generated layout: a 2x2 grid (labels + line edits) above an
        Enviar/Cancelar button row; fixed 309x132 window."""
        ui_FISCAL_iCFEfetuarPagamentoFormatado.setObjectName("ui_FISCAL_iCFEfetuarPagamentoFormatado")
        ui_FISCAL_iCFEfetuarPagamentoFormatado.resize(309, 132)
        ui_FISCAL_iCFEfetuarPagamentoFormatado.setMinimumSize(QtCore.QSize(309, 132))
        ui_FISCAL_iCFEfetuarPagamentoFormatado.setMaximumSize(QtCore.QSize(309, 132))
        self.verticalLayout = QtGui.QVBoxLayout(ui_FISCAL_iCFEfetuarPagamentoFormatado)
        self.verticalLayout.setObjectName("verticalLayout")
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setObjectName("gridLayout")
        self.labelForma = QtGui.QLabel(ui_FISCAL_iCFEfetuarPagamentoFormatado)
        self.labelForma.setObjectName("labelForma")
        self.gridLayout.addWidget(self.labelForma, 0, 0, 1, 1)
        self.lineEditFormaPGTO = QtGui.QLineEdit(ui_FISCAL_iCFEfetuarPagamentoFormatado)
        self.lineEditFormaPGTO.setMaximumSize(QtCore.QSize(100, 16777215))
        self.lineEditFormaPGTO.setObjectName("lineEditFormaPGTO")
        self.gridLayout.addWidget(self.lineEditFormaPGTO, 0, 1, 1, 1)
        self.labelValor = QtGui.QLabel(ui_FISCAL_iCFEfetuarPagamentoFormatado)
        self.labelValor.setObjectName("labelValor")
        self.gridLayout.addWidget(self.labelValor, 1, 0, 1, 1)
        self.lineEditValor = QtGui.QLineEdit(ui_FISCAL_iCFEfetuarPagamentoFormatado)
        self.lineEditValor.setMaximumSize(QtCore.QSize(70, 25))
        self.lineEditValor.setObjectName("lineEditValor")
        self.gridLayout.addWidget(self.lineEditValor, 1, 1, 1, 1)
        self.verticalLayout.addLayout(self.gridLayout)
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem)
        self.pushButtonEnviar = QtGui.QPushButton(ui_FISCAL_iCFEfetuarPagamentoFormatado)
        self.pushButtonEnviar.setObjectName("pushButtonEnviar")
        self.horizontalLayout.addWidget(self.pushButtonEnviar)
        self.pushButtonCancelar = QtGui.QPushButton(ui_FISCAL_iCFEfetuarPagamentoFormatado)
        self.pushButtonCancelar.setObjectName("pushButtonCancelar")
        self.horizontalLayout.addWidget(self.pushButtonCancelar)
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout.addItem(spacerItem1)
        self.verticalLayout.addLayout(self.horizontalLayout)

        self.retranslateUi(ui_FISCAL_iCFEfetuarPagamentoFormatado)
        QtCore.QMetaObject.connectSlotsByName(ui_FISCAL_iCFEfetuarPagamentoFormatado)

    def retranslateUi(self, ui_FISCAL_iCFEfetuarPagamentoFormatado):
        """Generated: sets the (Portuguese) display strings for every widget."""
        ui_FISCAL_iCFEfetuarPagamentoFormatado.setWindowTitle(QtGui.QApplication.translate("ui_FISCAL_iCFEfetuarPagamentoFormatado", "iCFEfetuarPagamentoFormatado_ECF_Daruma", None, QtGui.QApplication.UnicodeUTF8))
        self.labelForma.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEfetuarPagamentoFormatado", "Forma Pagto:", None, QtGui.QApplication.UnicodeUTF8))
        self.labelValor.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEfetuarPagamentoFormatado", "Valor:", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButtonEnviar.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEfetuarPagamentoFormatado", "Enviar", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButtonCancelar.setText(QtGui.QApplication.translate("ui_FISCAL_iCFEfetuarPagamentoFormatado", "Cancelar", None, QtGui.QApplication.UnicodeUTF8))
// Page-wide jQuery wiring: datepickers, clickable table rows, show/hide toggles.
(function ($) {
    // Attach jQuery UI datepicker with ISO-style dates.
    $(".datepicker").datepicker({ dateFormat: 'yy-mm-dd' });

    // Rows carrying a `link` attribute navigate on click.
    $("table.selectable-row tbody tr").click(function () {
        window.document.location = $(this).attr("link");
    });

    // Reveal the element(s) whose class is named in data-ikariera-show.
    $(".ikariera-show").click(function () {
        var a = $(this).attr("data-ikariera-show");
        $("." + a).show();
    });
})(jQuery);

$(document).ready(function () {
    // Enhance selects with the Chosen widget.
    $(".chosen-select").chosen();

    // Lazily load fragments whose URL is given in ajaxLoadingUrl.
    $(".automaticAjaxLoad").each(function () {
        var url = $(this).attr("ajaxLoadingUrl");
        if (url != "") {
            $(this).load(url);
        }
    });
});

$(document).ready(function(){
    // .doc1 starts hidden; .hide1 toggles it with a 1s animation.
    // NOTE(review): the second `if` never fires right after hide(1000) because
    // the element is still visible while animating — this happens to behave
    // like a toggle; $(".doc1").toggle(1000) would state the intent directly.
    $(".doc1").hide(0);
    $(".hide1").click(function(){
        if($(".doc1").is(":visible")){
            $(".doc1").hide(1000);
        }
        if($(".doc1").is(":hidden")){
            $(".doc1").show(1000);
        }
    });
});

/*
showHotPositionForm($("#hotPosition"));

function showHotPositionForm(selector) {
    if (selector.is(":checked")) {
        $("#hotPositionHidden").show();
    } else {
        $("#hotPositionHidden").hide();
    }
}
*/
<gh_stars>1-10 package sample; import com.sun.javafx.css.StyleCacheEntry; import com.sun.javafx.geom.Shape; import javafx.scene.layout.Pane; import javafx.scene.paint.Color; import javafx.scene.shape.Line; import java.io.IOException; import java.util.Arrays; public class ShapeLine { private double[] arrStart; private Color color; //private double cutOne; //private double cutTwo; /** * Конструктор - создание нового объекта с определенными значениями * @param arrPlace - координаты начальной и конечной позиции отрезка * */ ShapeLine(double[] arrPlace, Color color){ //double cutOne, double cutTwo this.arrStart = Arrays.copyOf(arrPlace, arrPlace.length); this.color = color; //this.cutOne = cutOne; //this.cutTwo = cutTwo; } Line drawLine(double[] arrPlace){ Line line = new Line(arrPlace[0], arrPlace[1], arrPlace[2], arrPlace[3]); line.setStroke(color); line.setStrokeWidth(1); return line; } Line moveLine(double[] arrPlace, double[] arrOffset) { double dxOne, dyOne, dxTwo, dyTwo; dxOne = arrOffset[0];// - arrPlace[0]; dyOne = arrOffset[1];// - arrPlace[1]; //dxTwo = arrOffset[2] - arrPlace[2]; //dyTwo = arrOffset[3] - arrPlace[3]; Line line = new Line(arrPlace[0] + dxOne, arrPlace[1] + dyOne, arrPlace[2] + dxOne, arrPlace[3] + dyOne); line.setStroke(color); line.setStrokeWidth(1); return line; } Line scaleLine(double[] arrPlace, double size, double[] centerPlace) { double x1, y1, x2, y2; x1 = arrPlace[0] * size + (1 - size) * centerPlace[0]; x2 = arrPlace[2] * size + (1 - size) * centerPlace[0]; y1 = arrPlace[1] * size + (1 - size) * centerPlace[1]; y2 = arrPlace[3] * size + (1 - size) * centerPlace[1]; Line line = new Line(x1, y1, x2, y2); line.setStroke(color); line.setStrokeWidth(1); return line; } Line rotateLine(double[] arrPlace, double angle, double[] centerPlace) { double x1, y1, x2, y2; x1 = centerPlace[0] + (arrPlace[0] - centerPlace[0]) * Math.cos(angle) + (arrPlace[1] - centerPlace[1]) * Math.sin(angle); x2 = centerPlace[0] + (arrPlace[2] - centerPlace[0]) * 
Math.cos(angle) + (arrPlace[3] - centerPlace[1]) * Math.sin(angle); y1 = centerPlace[1] + (arrPlace[1] - centerPlace[1]) * Math.cos(angle) + (arrPlace[0] - centerPlace[1]) * Math.sin(angle); y2 = centerPlace[1] + (arrPlace[3] - centerPlace[1]) * Math.cos(angle) + (arrPlace[2] - centerPlace[1]) * Math.sin(angle); Line line = new Line(x1, y1, x2, y2); line.setStroke(color); line.setStrokeWidth(1); return line; } }
# Build the report jar with Gradle and install it as bin/main.jar.
mkdir -p bin
# makeJar produces build/libs/driver-report-all-0.1.0.jar; the copy and chmod
# only run if the preceding step succeeded (&& chain).
./gradlew makeJar && cp build/libs/driver-report-all-0.1.0.jar bin/main.jar && chmod 777 bin/main.jar
echo "Done."
echo
# only init if installed.
# Cache fasd's init output (it is slow to generate) and source the cache.
fasd_cache="$HOME/.fasd-init-bash"
# Regenerate when the fasd binary is newer than the cache, or the cache is
# missing/empty.
# NOTE(review): `eval "$(...)" >| cache` evaluates the init code and redirects
# eval's (empty) output into the cache — the upstream snippet writes
# `fasd --init ... >| "$fasd_cache"` instead, so this cache likely stays
# empty and init re-runs every shell; confirm against the fasd README.
if [ "$(command -v fasd)" -nt "$fasd_cache" -o ! -s "$fasd_cache" ]; then
  eval "$(fasd --init posix-alias zsh-hook zsh-ccomp zsh-ccomp-install zsh-wcomp zsh-wcomp-install)" >| "$fasd_cache"
fi
source "$fasd_cache"
unset fasd_cache

# jump to recently used items
alias a=' fasd -a'  # any
alias s=' fasd -si' # show / search / select
alias d=' fasd -d'  # directory
alias f=' fasd -f'  # file
alias z=' fasd_cd -d'     # cd, same functionality as j in autojump
alias zz=' fasd_cd -d -i' # interactive directory jump

# open matches in MacVim / open respectively
alias vz='z -e mvim'
alias vzz='zz -e mvim'
alias va='a -e mvim'
alias vs='a -e mvim'
alias vd='d -e mvim'
alias vf='f -e mvim'
alias o='a -e open'
'use strict';

// SystemJS-transpiled Aurelia plugin entry for the froala-editor wrapper.
// NOTE(review): this file looks machine-generated from an ES-module source;
// prefer editing the original source rather than this output.
System.register(['./froala-editor-config'], function (_export, _context) {
  "use strict";

  var Config;

  // Aurelia plugin hook: resolves the shared Config instance, lets the host
  // app customize it via configCallback, then registers the editor element
  // as a global resource.
  function configure(aurelia, configCallback) {
    var instance = aurelia.container.get(Config);

    if (configCallback !== undefined && typeof configCallback === 'function') {
      configCallback(instance);
    }

    aurelia.globalResources('./froala-editor');
  }

  _export('configure', configure);

  return {
    setters: [function (_froalaEditorConfig) {
      Config = _froalaEditorConfig.Config;
    }],
    execute: function () {}
  };
});
# Interactive shell setup: PATH, prompt, history, aliases and helpers.
PATH=$HOME/bin:$PATH
export PATH

# Prompt shows the last command's exit status, e.g. "[0]$ ".
PS1="[\$?]\$ "
export PS1

HISTCONTROL=ignoreboth
HISTIGNORE=ls:ll:la:cnl:d:pd:xd
HISTTIMEFORMAT='%F %T '

alias l='ls -rt'
alias ll='ls -lrt'
alias la='ls -Aort'
alias lla='ls -Alort'
alias d='dirs -v'
alias pd=pushd
alias xd=popd

# careful move and copy
alias cmv='mv -i'
alias ccp='cp -i'

# enable core dumps
alias commando='ulimit -c unlimited'

# pwd
p() {
    echo ${PWD-$(pwd)}
}

# who and where am i?
wami() {
    local h=${HOSTNAME-$(hostname -s)}
    local u=${USER-$(id -n $EUID)}
    local p=${PWD-$(pwd)}
    echo "${u}@${h}${p}"
}

# chdir and list
cnl() {
    if [[ $# -gt 1 ]]; then
        return 1
    fi
    if cd ${1:-${HOME}}; then
        ls -rt
    fi
}

# start in the background and disown
disavow() {
    # FIX: the original used `$* 2>&1 >/dev/null &`, which sends stderr to the
    # terminal (2>&1 binds before stdout is redirected) — both streams are now
    # silenced. "$@" preserves argument word boundaries, and the old
    # `[[ $? -eq 0 ]]` check was dropped: after `&` it was always 0.
    "$@" >/dev/null 2>&1 &
    disown $!
}

# search and destory process by name
sad() {
    local match
    match=$(pgrep -u $UID -lfi $*)
    if [[ ! -z $match ]]; then
        echo $match
        read -p "Kill with prejudice (yes/NO)? " ans
        case "$ans" in
            # NOTE(review): ${match%% *} kills only the FIRST matched pid even
            # when several processes matched — confirm whether that is intended.
            yes|Yes|YES) kill -9 ${match%% *};;
        esac
        return 0
    fi
    return 1
}
#!/bin/bash

##
## Sense Collector - start-device-details.sh
##
## Wrapper that validates environment configuration, then runs
## exec-device-details.sh in a fixed-interval polling loop.
##

##
## Set Specific Variables
##

collector_type="device-details"

##
## Sense-Collector Details (shared helpers: echo_* colors, process_start, ...)
##

source sense-collector-details.sh

##
## Set Variables from Environmental Variables
##

debug=$SENSE_COLLECTOR_DEBUG
debug_curl=$SENSE_COLLECTOR_DEBUG_CURL
debug_sleeping=$SENSE_COLLECTOR_DEBUG_SLEEPING
host_hostname=$SENSE_COLLECTOR_HOST_HOSTNAME
influxdb_password=$SENSE_COLLECTOR_INFLUXDB_PASSWORD
influxdb_url=$SENSE_COLLECTOR_INFLUXDB_URL
influxdb_username=$SENSE_COLLECTOR_INFLUXDB_USERNAME
poll_interval=$SENSE_COLLECTOR_DEVICE_DETAILS_POLL_INTERVAL
sense_monitor_id=$SENSE_COLLECTOR_MONITOR_ID
sense_token=$SENSE_COLLECTOR_TOKEN
threads=$SENSE_COLLECTOR_THREADS

##
## Check for required intervals — fall back to defaults when unset.
## NOTE(review): the poll-interval default reads
## SENSE_COLLECTOR_DEVICE_DETAILS_POLL_INTERVAL above but exports
## SENSE_COLLECTOR_POLL_INTERVAL below — the names differ; confirm which
## variable downstream scripts expect.
##

if [ -z "${poll_interval}" ]; then echo "${echo_bold}${echo_color_random}${collector_type}:${echo_normal} ${echo_bold}SENSE_COLLECTOR_DEVICE_DETAILS_POLL_INTERVAL${echo_normal} environmental variable not set. Defaulting to ${echo_bold}60${echo_normal} seconds."; poll_interval="60"; export SENSE_COLLECTOR_POLL_INTERVAL="60"; fi

if [ -z "${host_hostname}" ]; then echo "${echo_bold}${echo_color_random}${collector_type}:${echo_normal} ${echo_bold}SENSE_COLLECTOR_HOST_HOSTNAME${echo_normal} environmental variable not set. Defaulting to ${echo_bold}sense-collector${echo_normal}."; host_hostname="sense-collector"; export SENSE_COLLECTOR_HOST_HOSTNAME="sense-collector"; fi

if [ -z "${threads}" ]; then echo "${echo_bold}${echo_color_random}${collector_type}:${echo_normal} ${echo_bold}SENSE_COLLECTOR_THREADS${echo_normal} environmental variable not set. Defaulting to ${echo_bold}4${echo_normal} threads."; threads="4"; export SENSE_COLLECTOR_THREADS="4"; fi

## Dump effective configuration (includes credentials!) when debugging.
if [ "$debug" == "true" ]

then

echo "$(date) - Starting Sense Collector (start-device-details.sh) - https://github.com/lux4rd0/sense-collector

Debug Environmental Variables

debug=${debug}
debug_curl=${debug_curl}
host_hostname=${host_hostname}
influxdb_password=${influxdb_password}
influxdb_url=${influxdb_url}
influxdb_username=${influxdb_username}
poll_interval=${poll_interval}
sense_monitor_id=${sense_monitor_id}
sense_token=${sense_token}"

fi

##
## Send Startup Event Timestamp to InfluxDB
##

process_start

##
## Curl Command — verbose when SENSE_COLLECTOR_DEBUG_CURL=true, quiet otherwise.
##

if [ "$debug_curl" == "true" ]; then curl=( ); else curl=( --silent --show-error --fail ); fi

##
## Start Sense Device Details Loop: run the collector, then sleep whatever is
## left of the poll interval (measured with nanosecond timestamps).
## NOTE(review): if one iteration takes longer than poll_interval, `delay`
## goes negative and `sleep` will error — confirm whether that case matters.
##

while ( true ); do

before=$(date +%s%N)

./exec-device-details.sh

after=$(date +%s%N)

delay=$(echo "scale=4;(${poll_interval}-($after-$before) / 1000000000)" | bc)

if [ "$debug_sleeping" == "true" ]; then echo "${echo_bold}${echo_color_random}${collector_type}:${echo_normal} Sleeping: ${delay} seconds"; fi

sleep "$delay"

done
import INegativeScoreParams from "../../types/INegativeScoreParams";
import { Modifier } from "../../types/Modifier";

// Nutri-Score "negative" (N) points: energy + sugar + saturated fat + sodium.
// Drinks use their own, stricter energy/sugar scales; added-fat products score
// saturated fat as a ratio of total fat. All thresholds below are the
// regulatory constants of the Nutri-Score algorithm — do not edit casually.
export function computeNegativepoints(params: INegativeScoreParams, modifier: Modifier) {
    const energyScore = modifier === Modifier.DRINK
        ? _computeEnergyPointsForDrinks(params.energy)
        : _computeEnergyPoints(params.energy);
    const sugarScore = modifier === Modifier.DRINK
        ? _computeSugarScoreForDrinks(params.sugar)
        : _computeSugarScore(params.sugar);
    const fatScore = modifier === Modifier.FAT
        ? _computeSaturatedFatPointsForFats(params.totalFat, params.saturatedFat)
        : _computeSaturatedFatPoints(params.saturatedFat);
    return energyScore + sugarScore + fatScore + _computeSodiumScore(params.salt);
}

// Energy in kJ per 100 g; 0-10 points in 335 kJ steps.
function _computeEnergyPoints(energy: number) {
    if (energy > 3350) { return 10; }
    if (energy > 3015) { return 9; }
    if (energy > 2680) { return 8; }
    if (energy > 2345) { return 7; }
    if (energy > 2010) { return 6; }
    if (energy > 1675) { return 5; }
    if (energy > 1340) { return 4; }
    if (energy > 1005) { return 3; }
    if (energy > 670) { return 2; }
    if (energy > 335) { return 1; }
    return 0;
}

// Energy scale for beverages: 0-10 points in 30 kJ steps.
function _computeEnergyPointsForDrinks(energy: number) {
    if (energy <= 0) { return 0; }
    if (energy <= 30) { return 1; }
    if (energy <= 60) { return 2; }
    if (energy <= 90) { return 3; }
    if (energy <= 120) { return 4; }
    if (energy <= 150) { return 5; }
    if (energy <= 180) { return 6; }
    if (energy <= 210) { return 7; }
    if (energy <= 240) { return 8; }
    if (energy <= 270) { return 9; }
    return 10;
}

// Saturated fat in g per 100 g; one point per started gram above 1 g, capped at 10.
function _computeSaturatedFatPoints(value: number) {
    if (value > 10) { return 10; }
    if (value > 9) { return 9; }
    if (value > 8) { return 8; }
    if (value > 7) { return 7; }
    if (value > 6) { return 6; }
    if (value > 5) { return 5; }
    if (value > 4) { return 4; }
    if (value > 3) { return 3; }
    if (value > 2) { return 2; }
    if (value > 1) { return 1; }
    return 0;
}

// Added-fat products: score the saturated/total fat ratio (percent) instead.
function _computeSaturatedFatPointsForFats(totalFat: number, saturatedFat: number) {
    if (totalFat === 0 && saturatedFat === 0) {
        return 0;
    }

    // Looks like the reference implementation rounds the ratio
    let ratio = _round(saturatedFat / totalFat * 100);
    if (ratio < 10) { return 0; }
    if (ratio < 16) { return 1; }
    if (ratio < 22) { return 2; }
    if (ratio < 28) { return 3; }
    if (ratio < 34) { return 4; }
    if (ratio < 40) { return 5; }
    if (ratio < 46) { return 6; }
    if (ratio < 52) { return 7; }
    if (ratio < 58) { return 8; }
    if (ratio < 64) { return 9; }
    return 10;
}

// Rounds up only when the fractional part exceeds 0.95 — mirrors the quirk of
// the reference implementation (see comment in _computeSaturatedFatPointsForFats).
function _round(value: number) {
    const decimals = value % 1;
    if (decimals > 0.95) {
        return Math.round(value);
    }
    return value;
}

// Sodium in mg per 100 g (converted from salt); 0-10 points in 90 mg steps.
function _computeSodiumScore(salt: number) {
    const value = _saltToSodium(salt);
    if (value > 900) { return 10; }
    if (value > 810) { return 9; }
    if (value > 720) { return 8; }
    if (value > 630) { return 7; }
    if (value > 540) { return 6; }
    if (value > 450) { return 5; }
    if (value > 360) { return 4; }
    if (value > 270) { return 3; }
    if (value > 180) { return 2; }
    if (value > 90) { return 1; }
    return 0;
}

// Sugars in g per 100 g; 0-10 points in 4.5 g steps (with legacy rounding in
// the upper brackets: 36/31/27/22.5 ...).
function _computeSugarScore(value: number) {
    if (value > 45) { return 10; }
    if (value > 40) { return 9; }
    if (value > 36) { return 8; }
    if (value > 31) { return 7; }
    if (value > 27) { return 6; }
    if (value > 22.5) { return 5; }
    if (value > 18) { return 4; }
    if (value > 13.5) { return 3; }
    if (value > 9) { return 2; }
    if (value > 4.5) { return 1; }
    return 0;
}

// Sugar scale for beverages: 0-10 points in 1.5 g steps.
function _computeSugarScoreForDrinks(value: number) {
    if (value <= 0) { return 0; }
    if (value <= 1.5) { return 1; }
    if (value <= 3) { return 2; }
    if (value <= 4.5) { return 3; }
    if (value <= 6) { return 4; }
    if (value <= 7.5) { return 5; }
    if (value <= 9) { return 6; }
    if (value <= 10.5) { return 7; }
    if (value <= 12) { return 8; }
    if (value <= 13.5) { return 9; }
    return 10;
}

// Salt (g) -> sodium (mg): sodium is 1/2.5 of salt by mass.
function _saltToSodium(salt: number) {
    return (salt / 2.5) * 1000;
}

export default { computeNegativepoints };
/**
 * Copyright (C) 2011 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.maths.lowlevelapi.functions.utilities;

import java.util.Arrays;

import org.apache.commons.lang.Validate;

/**
 * Reverses Vectors. The {@code inPlace} overloads mutate the argument; the
 * {@code stateless} overloads leave it untouched and return a reversed copy.
 */
public class Reverse {

  /**
   * Reverses a vector in place
   * @param v1 the vector to be reversed
   */
  public static void inPlace(int[] v1) {
    Validate.notNull(v1);
    int tmp;
    final int half = v1.length / 2;
    final int len = v1.length - 1;
    // Swap symmetric pairs; the middle element (odd length) stays put.
    for (int i = 0; i < half; i++) {
      tmp = v1[len - i];
      v1[len - i] = v1[i];
      v1[i] = tmp;
    }
  }

  /**
   * Reverses a vector in place
   * @param v1 the vector to be reversed
   */
  public static void inPlace(long[] v1) {
    Validate.notNull(v1);
    long tmp;
    final int half = v1.length / 2;
    final int len = v1.length - 1;
    for (int i = 0; i < half; i++) {
      tmp = v1[len - i];
      v1[len - i] = v1[i];
      v1[i] = tmp;
    }
  }

  /**
   * Reverses a vector in place
   * @param v1 the vector to be reversed
   */
  public static void inPlace(float[] v1) {
    Validate.notNull(v1);
    float tmp;
    final int half = v1.length / 2;
    final int len = v1.length - 1;
    for (int i = 0; i < half; i++) {
      tmp = v1[len - i];
      v1[len - i] = v1[i];
      v1[i] = tmp;
    }
  }

  /**
   * Reverses a vector in place
   * @param v1 the vector to be reversed
   */
  public static void inPlace(double[] v1) {
    Validate.notNull(v1);
    double tmp;
    final int half = v1.length / 2;
    final int len = v1.length - 1;
    for (int i = 0; i < half; i++) {
      tmp = v1[len - i];
      v1[len - i] = v1[i];
      v1[i] = tmp;
    }
  }

  /**
   * Returns a reversed copy of a vector, leaving the original untouched.
   * (Javadoc previously said "in place", which described the wrong overload.)
   * @param v1 the vector to be reversed
   * @return r a reversed copy of v1
   */
  public static int[] stateless(int[] v1) {
    Validate.notNull(v1);
    int[] r = Arrays.copyOf(v1, v1.length);
    inPlace(r);
    return r;
  }

  /**
   * Returns a reversed copy of a vector, leaving the original untouched.
   * @param v1 the vector to be reversed
   * @return r a reversed copy of v1
   */
  public static long[] stateless(long[] v1) {
    Validate.notNull(v1);
    long[] r = Arrays.copyOf(v1, v1.length);
    inPlace(r);
    return r;
  }

  /**
   * Returns a reversed copy of a vector, leaving the original untouched.
   * @param v1 the vector to be reversed
   * @return r a reversed copy of v1
   */
  public static float[] stateless(float[] v1) {
    Validate.notNull(v1);
    float[] r = Arrays.copyOf(v1, v1.length);
    inPlace(r);
    return r;
  }

  /**
   * Returns a reversed copy of a vector, leaving the original untouched.
   * @param v1 the vector to be reversed
   * @return r a reversed copy of v1
   */
  public static double[] stateless(double[] v1) {
    Validate.notNull(v1);
    double[] r = Arrays.copyOf(v1, v1.length);
    inPlace(r);
    return r;
  }
}
/**
 * Generates short sequential string identifiers of the form "ID-NNNN".
 * Registered as a root-level singleton.
 */
@Injectable({
  providedIn: 'root'
})
export class IdService {
  // Shared across every instance so ids never repeat within one app session.
  private static idCounter: number = 0;

  constructor() { }

  /**
   * Returns the next id in the sequence.
   * The numeric part is zero-padded to a minimum of four digits
   * (it grows past four digits once the counter exceeds 9999).
   */
  generateId(): string {
    const next = IdService.idCounter++;
    return `ID-${String(next).padStart(4, '0')}`;
  }
}
// n numbers spaced x apart (programmers problem 1-17)
// 2019.03.08

#include<vector>
using namespace std;

/**
 * Returns the first n multiples of x: {x, 2x, ..., n*x}.
 *
 * @param x the step between consecutive values (may be negative or zero)
 * @param n the number of values to produce (n >= 0; returns empty for n == 0)
 * @return a vector of n values of type long long
 */
vector<long long> solution(int x, int n) {
    vector<long long> answer;
    answer.reserve(n);
    // BUG FIX: the accumulator was declared int, so it overflowed for large
    // x * n even though the result vector holds long long. Accumulate in
    // long long so every element is exact.
    long long step = x;
    for (int i = 0; i < n; i++) {
        answer.push_back(step);
        step += x;
    }
    return answer;
}
#!/bin/bash
# Compile-checks every test/*.ha file: feeds it through ./comp, then verifies
# clang can compile the emitted LLVM IR. On success the intermediates are
# removed; on failure they are left behind for inspection.

FILES="test/*.ha"
COUNT=0
OK=0
# (the original script declared an unused "Wrongs=0" counter; removed)

for f in $FILES
do
  COUNT=$((COUNT + 1))
  # strip the extension to get the base name (keeps the original cut semantics)
  name=$(echo "$f" | cut -f 1 -d '.')
  echo "Testing $name"
  # feed the source directly instead of the useless "cat | ./comp" pipe
  ./comp -noBin < "$f" > "$f.output"
  # assumes ./comp always writes its IR to a.ll — TODO confirm
  mv a.ll "$name.ll"
  if clang "$name.ll" -c -o "$name.out"
  then
    rm "$f.output" "$name.ll" "$name.out"
    echo "OK"
    OK=$((OK + 1))
  else
    echo "Wrong"
  fi
done

echo "$OK OK of $COUNT in total"
package kbasesearchengine.system;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.constructor.SafeConstructor;

import kbasesearchengine.common.ObjectJsonPath;
import kbasesearchengine.parse.ObjectParseException;
import kbasesearchengine.system.ObjectTypeParsingRules.Builder;
import kbasesearchengine.tools.Utils;

/** Utilities for creating {@link ObjectTypeParsingRules} from various data sources.
 *
 * The expected source is a YAML/JSON mapping with top-level keys such as
 * "storage-type", "storage-object-type", "global-object-type", "ui-type-name"
 * and a "versions" list; each version entry may define sub-object rules and a
 * list of indexing rules.
 *
 * @author <EMAIL>
 *
 */
public class ObjectTypeParsingRulesFileParser {

    //TODO TEST

    /** Create a set of ObjectTypeParsingRules version instances from a file.
     *
     * The rules will have the same storage object type, search type, and ui name.
     *
     * TODO document the file structure.
     * @param file the file containing the parsing rules.
     * @return a new set of parsing rules ordered by version.
     * @throws IOException if an IO error occurs reading the file.
     * @throws TypeParseException if the file contains erroneous parsing rules.
     */
    public static List<ObjectTypeParsingRules> fromFile(final File file)
            throws IOException, TypeParseException {
        // try-with-resources guarantees the stream is closed even on parse errors
        try (final InputStream is = new FileInputStream(file)) {
            return fromStream(is, file.toString());
        }
    }

    /** Create a set of ObjectTypeParsingRules versions from a stream.
     *
     * The rules will have the same storage object type, search type, and ui name.
     *
     * @param is the stream to parse.
     * @param sourceInfo information about the source of the stream, usually a file name.
     * @return a new set of parsing rules ordered by version.
     * @throws IOException if an IO error occurs reading the stream.
     * @throws TypeParseException if the stream contains erroneous parsing rules.
     */
    // NOTE(review): instance method delegating to the static implementation —
    // presumably non-static so callers can mock/inject it; confirm before changing.
    public List<ObjectTypeParsingRules> parseStream(
            final InputStream is,
            final String sourceInfo)
            throws IOException, TypeParseException {
        return fromStream(is, sourceInfo);
    }

    private static List<ObjectTypeParsingRules> fromStream(InputStream is, String sourceInfo)
            throws IOException, TypeParseException {
        // buffer the stream if the caller didn't
        if (!(is instanceof BufferedInputStream)) {
            is = new BufferedInputStream(is);
        }
        // SafeConstructor restricts YAML to standard Java types (no arbitrary
        // object instantiation from untrusted input)
        final Yaml yaml = new Yaml(new SafeConstructor());
        final Object predata;
        try {
            predata = yaml.load(is);
        } catch (Exception e) {
            // wtf snakeyaml authors, not using checked exceptions is bad enough, but not
            // documenting any exceptions and overriding toString so you can't tell what
            // exception is being thrown is something else
            throw new TypeParseException(String.format("Error parsing source %s: %s %s",
                    sourceInfo, e.getClass(), e.getMessage()), e);
        }
        if (!(predata instanceof Map)) {
            throw new TypeParseException(
                    "Expected mapping in top level YAML/JSON in source: " + sourceInfo);
        }
        @SuppressWarnings("unchecked")
        Map<String, Object> obj = (Map<String, Object>) predata;
        return fromObject(obj, sourceInfo);
    }

    //TODO CODE should look at json schema for validating the object data prior to building objects, avoids a lot of typechecking code
    private static List<ObjectTypeParsingRules> fromObject(
            final Map<String, Object> obj,
            final String sourceInfo)
            throws TypeParseException {
        try {
            final String storageCode = (String)obj.get("storage-type");
            final String type = (String)obj.get("storage-object-type");
            if (Utils.isNullOrEmpty(storageCode)) {
                throw new ObjectParseException(getMissingKeyParseMessage("storage-type"));
            }
            if (Utils.isNullOrEmpty(type)) {
                throw new ObjectParseException(getMissingKeyParseMessage("storage-object-type"));
            }
            final StorageObjectType storageType = new StorageObjectType(storageCode, type);
            //TODO CODE better error if missing elements
            final String searchType = (String) obj.get("global-object-type");
            final String uiTypeName = (String) obj.get("ui-type-name");
            @SuppressWarnings("unchecked")
            final List<Map<String, Object>> versions =
                    (List<Map<String, Object>>) obj.get("versions");
            final List<ObjectTypeParsingRules> ret = new LinkedList<>();
            // version numbers are assigned by position in the list, starting at 1
            for (int i = 0; i < versions.size(); i++) {
                final Builder builder = ObjectTypeParsingRules.getBuilder(
                        new SearchObjectType(searchType, i + 1), storageType)
                        .withNullableUITypeName(uiTypeName);
                ret.add(processVersion(builder, versions.get(i)));
            }
            return ret;
        } catch (ObjectParseException | IllegalArgumentException | NullPointerException e) {
            // NPE is caught deliberately: e.g. a missing "versions" key surfaces here
            // and is rewrapped with the source info attached
            throw new TypeParseException(String.format("Error in source %s: %s",
                    sourceInfo, e.getMessage()), e);
        }
    }

    /** Apply one entry of the "versions" list to a builder and build the rules. */
    private static ObjectTypeParsingRules processVersion(
            final Builder builder,
            final Map<String, Object> versionObj)
            throws ObjectParseException {
        final String subType = (String) versionObj.get("inner-sub-type");
        if (!Utils.isNullOrEmpty(subType)) {
            builder.toSubObjectRule(
                    subType,
                    //TODO CODE add checks to ensure these exist
                    getPath((String) versionObj.get("path-to-sub-objects")),
                    getPath((String) versionObj.get("primary-key-path")));
        }
        // throw exception if the other subobj values exist?
        // Indexing
        @SuppressWarnings("unchecked")
        List<Map<String, Object>> indexingRules =
                (List<Map<String, Object>>) versionObj.get("indexing-rules");
        if (indexingRules != null) {
            for (Map<String, Object> rulesObj : indexingRules) {
                builder.withIndexingRule(buildRule(rulesObj));
            }
        }
        return builder.build();
    }

    /** Translate one "indexing-rules" mapping into an IndexingRules instance.
     * A rule is path-based when "path" is set, otherwise source-key based. */
    private static IndexingRules buildRule(
            final Map<String, Object> rulesObj)
            throws ObjectParseException {
        final String path = (String) rulesObj.get("path");
        final String keyName = (String) rulesObj.get("key-name");
        final IndexingRules.Builder irBuilder;
        if (Utils.isNullOrEmpty(path)) {
            final String sourceKey = (String)rulesObj.get("source-key");
            irBuilder = IndexingRules.fromSourceKey(sourceKey, keyName);
        } else {
            //TODO CODE throw exception if sourceKey != null?
            irBuilder = IndexingRules.fromPath(new ObjectJsonPath(path));
            if (!Utils.isNullOrEmpty(keyName)) {
                irBuilder.withKeyName(keyName);
            }
        }
        if (getBool((Boolean) rulesObj.get("from-parent"))) {
            irBuilder.withFromParent();
        }
        if (getBool(rulesObj.get("full-text"))) {
            irBuilder.withFullText();
        }
        final String keywordType = (String)rulesObj.get("keyword-type");
        if (!Utils.isNullOrEmpty(keywordType)) {
            //TODO CODE throw an error if fullText is true?
            irBuilder.withKeywordType(keywordType);
        }
        final String transform = (String) rulesObj.get("transform");
        if (!Utils.isNullOrEmpty(transform)) {
            final String subObjectIDKey = (String) rulesObj.get("subobject-id-key");
            final String targetObjectType = (String) rulesObj.get("target-object-type");
            final Integer targetObjectTypeVersion =
                    (Integer) rulesObj.get("target-object-type-version");
            // "name.property" syntax: split into the transform name and its property
            final String[] tranSplt = transform.split("\\.", 2);
            final String transProp = tranSplt.length == 1 ? null : tranSplt[1];
            irBuilder.withTransform(Transform.unknown(tranSplt[0], transProp,
                    targetObjectType, targetObjectTypeVersion, subObjectIDKey));
        }
        if (getBool(rulesObj.get("not-indexed"))) {
            irBuilder.withNotIndexed();
        }
        irBuilder.withNullableDefaultValue(rulesObj.get("optional-default-value"));
        irBuilder.withNullableUIName((String) rulesObj.get("ui-name"));
        if (getBool(rulesObj.get("ui-hidden"))) {
            irBuilder.withUIHidden();
        }
        irBuilder.withNullableUILinkKey((String) rulesObj.get("ui-link-key"));
        return irBuilder.build();
    }

    /** Null-safe truthiness: null means false; throws ClassCastException on non-Boolean. */
    private static boolean getBool(final Object putativeBool) {
        //TODO CODE precheck cast exception
        return putativeBool != null && (Boolean) putativeBool;
    }

    private static String getMissingKeyParseMessage(final String key) {
        return String.format("Missing key %s", key);
    }

    /** Null-passthrough ObjectJsonPath construction. */
    private static ObjectJsonPath getPath(String path) throws ObjectParseException {
        return path == null ? null : new ObjectJsonPath(path);
    }
}
# frozen_string_literal: true

require "spec_helper"

# Smoke test: the gem must expose a non-nil VERSION constant.
RSpec.describe Dry::ElasticModel do
  it "has a version number" do
    version = Dry::ElasticModel::VERSION
    expect(version).to_not be nil
  end
end
The best way to optimize a website for faster loading speed is to minify the HTML, CSS, and JavaScript files, reduce the size of images, enable browser caching, and reduce the number of HTTP requests. To minify the HTML, CSS, and JavaScript files, these files can be run through online tools that will remove comments and unnecessary whitespace, as well as optimize certain elements of the code. To reduce the size of images, the images should be cropped and resized to no more than the necessary dimensions, and saved in a file format, such as JPEG, that maintains good quality while keeping the file size small. GIF files are best used for images with fewer than 256 colors, and PNG files are best used for images that have many colors. Browser caching allows the browser to locally store certain elements of a website, so that those elements do not need to be downloaded from the server each time the page is visited. This can help a website load faster for returning visitors. Finally, reducing the number of HTTP requests will help to speed up loading times. This can be done by combining multiple CSS and JavaScript files into one, by loading certain elements asynchronously, or by deferring the loading of certain elements until the rest of the page has already loaded.
# frozen_string_literal: true

require 'grpc'
require_relative '../lib/graphd'

# Memoized low-level gRPC stub for the local dgraph instance.
# if timeout is exceeded will throw `GRPC::DeadlineExceeded`
def client_stub
  @client_stub ||= Graphd::ClientStub.new('localhost:9080', timeout: 1)
end

# Memoized high-level client wrapping the given stub.
def client(client_stub)
  @client ||= Graphd::Client.new(client_stub)
end

# Connects and prints the server's reported version.
def run
  graph = client(client_stub)
  p graph.check_version
end

run
-- Report the row counts of the products and accounts tables, one row each.
-- BUG FIX: plain UNION removes duplicate rows, so if both tables happened to
-- contain the same number of rows only ONE row was returned. UNION ALL keeps
-- both rows unconditionally.
-- NOTE(review): the second alias is cosmetic — in standard SQL the result
-- column is labeled by the FIRST select's alias; confirm the consumer does not
-- rely on per-row labels.
SELECT COUNT(*) AS 'Number of Products' FROM products
UNION ALL
SELECT COUNT(*) AS 'Number of Accounts' FROM accounts;
import React, {useState} from 'react'; import {RenderInput, RenderSearchResults, RenderSearchBar} from './components'; const App = () => { // state to store search results const [searchResults, setSearchResults] = useState([]); // state to store search queries const [inputValues, setInputValues] = useState({ name: '', email: '', phoneNumber: '', jobTitle: '', department: '' }); const searchEmployees = (inputValues) => { // search employees and set the results to the state }; return ( <> <RenderSearchBar inputValues={inputValues} setInputValues={setInputValues} submitSearch={searchEmployees} /> <RenderInput values={inputValues} setValues={setInputValues} /> <RenderSearchResults results={searchResults} /> </> ) } export default App;
(function(cornerstoneTools) {

    'use strict';

    /**
     * Strokes a circle outline on a 2D canvas context.
     *
     * Generalized: the radius, previously hard-coded to 6, is now an optional
     * trailing parameter defaulting to 6, so existing callers are unaffected.
     *
     * @param {CanvasRenderingContext2D} context the target drawing context
     * @param {{x: number, y: number}} start the circle's center
     * @param {string} color stroke style
     * @param {number} lineWidth stroke width in pixels
     * @param {number} [radius=6] circle radius in pixels
     */
    function drawCircle(context, start, color, lineWidth, radius) {
        var handleRadius = (radius === undefined) ? 6 : radius;
        context.beginPath();
        context.strokeStyle = color;
        context.lineWidth = lineWidth;
        context.arc(start.x, start.y, handleRadius, 0, 2 * Math.PI);
        context.stroke();
    }

    // Module exports
    cornerstoneTools.drawCircle = drawCircle;

})(cornerstoneTools);
import HomeIcon from '@mui/icons-material/Home';
import { Box, Button, ButtonGroup, IconButton, Stack, Typography } from '@mui/material';
import React, { FC } from 'react';
import { useNavigate } from 'react-router-dom';

import * as Sound from './Sound';
import * as Visuals from './Visuals';

// Interactive demo page: buttons trigger Tone.js sound demos (./Sound) and
// Hydra visual effects (./Visuals) rendered into the #visuals canvas below.
const Playground: FC = () => {
  const navigate = useNavigate();

  // home icon navigates back to the root route
  const handleClickPlayground = () => navigate('/');

  // NOTE(review): "playSquence" and "playEffectts" look like typos, but they
  // must match the actual exports of ./Sound (not visible here) — confirm
  // there before renaming either side.
  const handlePlaySimple = () => Sound.playSimple();
  const handlePlayTime = () => Sound.playTimeControl();
  const handlePlaySequence = () => Sound.playSquence();
  const handlePlayScheduling = () => Sound.playScheduling();
  const handlePlayInstrument = () => Sound.playInstruments();
  const handlePlaySamples = () => Sound.playSamples();
  const handlePlaySampler = () => Sound.playSampler();
  const handlePlayEffects = () => Sound.playEffectts();
  const handlePlaySignal = () => Sound.playSignal();

  const handleStartHydra = () => Visuals.start();
  const handlefx1 = () => Visuals.fx1();
  const handlefx2 = () => Visuals.fx2();

  // NOTE(review): alignItems="left" is not a valid flexbox value (browsers
  // ignore it and fall back to the default); "flex-start" is likely intended.
  return (
    <Stack alignItems="center" justifyContent="center" gap={7} pt={1}>
      <Stack direction="row" alignItems="left" width="100%" px={2}>
        <IconButton color="info" onClick={handleClickPlayground} size="small">
          <HomeIcon fontSize="inherit" />
        </IconButton>
      </Stack>
      <Typography align="center" color="secondary" component="h1" variant="h1">
        Sensing Sugar
      </Typography>
      <Box>
        <Stack justifyContent="center" gap={2}>
          <Typography align="center" color="secondary" component="h2" variant="h4">
            Tone JS
          </Typography>
          <ButtonGroup variant="outlined" aria-label="outlined button group">
            <Button onClick={handlePlaySimple}>Simple</Button>
            <Button onClick={handlePlayTime}>Time</Button>
            <Button onClick={handlePlaySequence}>Sequence</Button>
            <Button onClick={handlePlayScheduling}>Scheduling</Button>
            <Button onClick={handlePlayInstrument}>Instrument</Button>
            <Button onClick={handlePlaySamples}>Samples</Button>
            <Button onClick={handlePlaySampler}>Sampler</Button>
            <Button onClick={handlePlayEffects}>Effects</Button>
            <Button onClick={handlePlaySignal}>Signal</Button>
          </ButtonGroup>
        </Stack>
      </Box>
      <Box>
        <Stack justifyContent="center" gap={2}>
          <Typography align="center" color="secondary" component="h2" variant="h4">
            Hydra
          </Typography>
          <Box margin="auto">
            <Button onClick={handleStartHydra} variant="outlined">
              Start
            </Button>
          </Box>
          <Box margin="auto">
            <ButtonGroup variant="outlined" aria-label="outlined button group">
              <Button onClick={handlefx1}>fx1</Button>
              <Button onClick={handlefx2}>fx2</Button>
            </ButtonGroup>
          </Box>
          <Box>
            {/* full-viewport canvas behind the UI; Hydra draws into it by id */}
            <canvas
              id="visuals"
              width={1280}
              height={1024}
              style={{
                position: 'absolute',
                top: 0,
                left: 0,
                zIndex: -1,
              }}
            />
          </Box>
        </Stack>
      </Box>
    </Stack>
  );
};

export default Playground;