text
stringlengths
1
1.05M
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

# Launch the Samza checkpoint tool via the shared run-class.sh bootstrapper.
# FIX: quote "$(dirname "$0")" and use "$@" instead of $@ so that an install
# path containing spaces and multi-word CLI arguments survive word splitting.
exec "$(dirname "$0")"/run-class.sh org.apache.samza.checkpoint.CheckpointTool "$@"
# Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from datetime import datetime
from typing import Sequence, Union

from qf_lib.common.enums.expiration_date_field import ExpirationDateField
from qf_lib.common.enums.frequency import Frequency
from qf_lib.common.enums.price_field import PriceField
from qf_lib.common.tickers.tickers import Ticker
from qf_lib.common.utils.miscellaneous.to_list_conversion import convert_to_list
from qf_lib.containers.futures.future_tickers.future_ticker import FutureTicker
from qf_lib.data_providers.helpers import chain_tickers_within_range
from qf_lib.data_providers.preset_data_provider import PresetDataProvider
from qf_lib.data_providers.data_provider import DataProvider


class PrefetchingDataProvider(PresetDataProvider):
    """
    Optimises running of the DataProvider by pre-fetching all the data at startup and then using the cached data
    instead of sending over-the-network requests every time the data is requested. If not all data requested
    is available the ValueError will be raised.

    Parameters
    -----------
    data_provider: DataProvider
        data provider used to download the data
    tickers: Ticker, Sequence[Ticker]
        one or a list of tickers, used further to download the futures contracts related data.
        The list can contain either Tickers or FutureTickers. In case of the Tickers, simply the given fields are
        being downloaded and stored using the PresetDataProvider. In case of the FutureTickers, the future chain
        tickers and their corresponding prices are being downloaded and stored.
    fields: PriceField, Sequence[PriceField]
        fields that should be downloaded
    start_date: datetime
        first date to be downloaded
    end_date: datetime
        last date to be downloaded
    frequency: Frequency
        frequency of the data
    """

    def __init__(self, data_provider: DataProvider,
                 tickers: Union[Ticker, Sequence[Ticker]],
                 fields: Union[PriceField, Sequence[PriceField]],
                 start_date: datetime, end_date: datetime, frequency: Frequency):
        # Convert fields into list in order to return a QFDataArray as the result of get_price function
        fields, _ = convert_to_list(fields, PriceField)

        # Convert the tickers to list and remove duplicates (dict.fromkeys preserves the original order)
        tickers, _ = convert_to_list(tickers, Ticker)
        tickers = list(dict.fromkeys(tickers))

        # Split the universe: FutureTickers need their expiration chains resolved first
        future_tickers = [ticker for ticker in tickers if isinstance(ticker, FutureTicker)]
        non_future_tickers = [ticker for ticker in tickers if not isinstance(ticker, FutureTicker)]

        exp_dates = None
        # NOTE: all_tickers aliases non_future_tickers; the extend() below mutates both,
        # which is harmless here as non_future_tickers is not used afterwards
        all_tickers = non_future_tickers

        if future_tickers:
            exp_dates = data_provider.get_futures_chain_tickers(future_tickers, ExpirationDateField.all_dates())

            # Filter out all these specific future contracts, which expired before start_date
            for ft in future_tickers:
                all_tickers.extend(chain_tickers_within_range(ft, exp_dates[ft], start_date, end_date))

        # Single bulk download; the PresetDataProvider base class serves all later queries from this cache
        data_array = data_provider.get_price(all_tickers, fields, start_date, end_date, frequency)

        super().__init__(data=data_array,
                         exp_dates=exp_dates,
                         start_date=start_date, end_date=end_date,
                         frequency=frequency)
#!/bin/bash

# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# A library of helper functions and constants for the local config.

# Use the config file specified in $KUBE_CONFIG_FILE, or default to
# config-default.sh.
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/../..
source "${KUBE_ROOT}/cluster/vsphere/config-common.sh"
source "${KUBE_ROOT}/cluster/vsphere/${KUBE_CONFIG_FILE-"config-default.sh"}"

# Detect the IP for the master
#
# Assumed vars:
#   MASTER_NAME
# Vars set:
#   KUBE_MASTER
#   KUBE_MASTER_IP
function detect-master {
  KUBE_MASTER=${MASTER_NAME}
  # Only query govc when the IP was not supplied by the environment/config.
  if [[ -z "${KUBE_MASTER_IP-}" ]]; then
    KUBE_MASTER_IP=$(govc vm.ip ${MASTER_NAME})
  fi
  if [[ -z "${KUBE_MASTER_IP-}" ]]; then
    echo "Could not detect Kubernetes master node. Make sure you've launched a cluster with 'kube-up.sh'" >&2
    exit 1
  fi
  echo "Using master: $KUBE_MASTER (external IP: $KUBE_MASTER_IP)"
}

# Detect the information about the minions
#
# Assumed vars:
#   MINION_NAMES
# Vars set:
#   KUBE_MINION_IP_ADDRESS (array)
function detect-minions {
  KUBE_MINION_IP_ADDRESSES=()
  for (( i=0; i<${#MINION_NAMES[@]}; i++)); do
    local minion_ip=$(govc vm.ip ${MINION_NAMES[$i]})
    if [[ -z "${minion_ip-}" ]] ; then
      echo "Did not find ${MINION_NAMES[$i]}" >&2
    else
      echo "Found ${MINION_NAMES[$i]} at ${minion_ip}"
      KUBE_MINION_IP_ADDRESSES+=("${minion_ip}")
    fi
  done
  if [[ -z "${KUBE_MINION_IP_ADDRESSES-}" ]]; then
    echo "Could not detect Kubernetes minion nodes. Make sure you've launched a cluster with 'kube-up.sh'" >&2
    exit 1
  fi
}

# Append a handler to an existing trap for the given signal instead of
# replacing it. The eval/sh trick extracts the current handler (field 3 of
# the `trap -p` output) so the new handler can be chained after it.
function trap-add {
  local handler="$1"
  local signal="${2-EXIT}"
  local cur

  cur="$(eval "sh -c 'echo \$3' -- $(trap -p ${signal})")"
  if [[ -n "${cur}" ]]; then
    handler="${cur}; ${handler}"
  fi

  trap "${handler}" ${signal}
}

# Verify that the govc CLI (vSphere client) is installed and reachable.
function verify-prereqs {
  which "govc" >/dev/null || {
    echo "Can't find govc in PATH, please install and retry."
    echo ""
    echo " go install github.com/vmware/govmomi/govc"
    echo ""
    exit 1
  }
}

# Make sure an ssh-agent is running and holds at least one identity; starts a
# session-scoped agent (killed on EXIT) if none is reachable.
function verify-ssh-prereqs {
  local rc
  rc=0
  ssh-add -L 1> /dev/null 2> /dev/null || rc="$?"
  # "Could not open a connection to your authentication agent."
  if [[ "${rc}" -eq 2 ]]; then
    eval "$(ssh-agent)" > /dev/null
    trap-add "kill ${SSH_AGENT_PID}" EXIT
  fi

  rc=0
  ssh-add -L 1> /dev/null 2> /dev/null || rc="$?"
  # "The agent has no identities."
  if [[ "${rc}" -eq 1 ]]; then
    # Try adding one of the default identities, with or without passphrase.
    ssh-add || true
  fi

  # Expect at least one identity to be available.
  if ! ssh-add -L 1> /dev/null 2> /dev/null; then
    echo "Could not find or add an SSH identity."
    echo "Please start ssh-agent, add your identity, and retry."
    exit 1
  fi
}

# Create a temp dir that'll be deleted at the end of this bash session.
#
# Vars set:
#   KUBE_TEMP
function ensure-temp-dir {
  if [[ -z ${KUBE_TEMP-} ]]; then
    KUBE_TEMP=$(mktemp -d -t kubernetes.XXXXXX)
    trap-add 'rm -rf "${KUBE_TEMP}"' EXIT
  fi
}

# Verify and find the various tar files that we are going to use on the server.
#
# Vars set:
#   SERVER_BINARY_TAR
#   SALT_TAR
function find-release-tars {
  SERVER_BINARY_TAR="${KUBE_ROOT}/server/kubernetes-server-linux-amd64.tar.gz"
  if [[ ! -f "$SERVER_BINARY_TAR" ]]; then
    SERVER_BINARY_TAR="${KUBE_ROOT}/_output/release-tars/kubernetes-server-linux-amd64.tar.gz"
  fi
  if [[ ! -f "$SERVER_BINARY_TAR" ]]; then
    echo "!!! Cannot find kubernetes-server-linux-amd64.tar.gz"
    exit 1
  fi

  SALT_TAR="${KUBE_ROOT}/server/kubernetes-salt.tar.gz"
  if [[ ! -f "$SALT_TAR" ]]; then
    SALT_TAR="${KUBE_ROOT}/_output/release-tars/kubernetes-salt.tar.gz"
  fi
  if [[ ! -f "$SALT_TAR" ]]; then
    echo "!!! Cannot find kubernetes-salt.tar.gz"
    exit 1
  fi
}

# Take the local tar files and upload them to the master.
#
# Assumed vars:
#   MASTER_NAME
#   SERVER_BINARY_TAR
#   SALT_TAR
function upload-server-tars {
  local vm_ip
  vm_ip=$(govc vm.ip "${MASTER_NAME}")

  kube-ssh ${vm_ip} "mkdir -p /home/kube/cache/kubernetes-install"

  local tar
  for tar in "${SERVER_BINARY_TAR}" "${SALT_TAR}"; do
    kube-scp ${vm_ip} "${tar}" "/home/kube/cache/kubernetes-install/${tar##*/}"
  done
}

# Ensure that we have a password created for validating to the master. Will
# read from $HOME/.kubernetes_auth if available.
#
# Vars set:
#   KUBE_USER
#   KUBE_PASSWORD
function get-password {
  local file="$HOME/.kubernetes_auth"
  if [[ -r "$file" ]]; then
    KUBE_USER=$(cat "$file" | python -c 'import json,sys;print json.load(sys.stdin)["User"]')
    KUBE_PASSWORD=$(cat "$file" | python -c 'import json,sys;print json.load(sys.stdin)["Password"]')
    return
  fi
  KUBE_USER=admin
  KUBE_PASSWORD=$(python -c 'import string,random; print "".join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(16))')

  # Store password for reuse.
  cat << EOF > "$file"
{
  "User": "$KUBE_USER",
  "Password": "$KUBE_PASSWORD"
}
EOF
  chmod 0600 "$file"
}

# Run command over ssh
function kube-ssh {
  local host="$1"
  shift
  ssh ${SSH_OPTS-} "kube@${host}" "$@" 2> /dev/null
}

# Copy file over ssh
function kube-scp {
  local host="$1"
  local src="$2"
  local dst="$3"
  scp ${SSH_OPTS-} "${src}" "kube@${host}:${dst}"
}

# Instantiate a generic kubernetes virtual machine (master or minion)
#
# Usage:
#   kube-up-vm VM_NAME [options to pass to govc vm.create]
#
# Example:
#   kube-up-vm "vm-name" -c 2 -m 4096
#
# Assumed vars:
#   DISK
#   GUEST_ID
function kube-up-vm {
  local vm_name="$1"
  shift

  govc vm.create \
    -debug \
    -disk="${DISK}" \
    -g="${GUEST_ID}" \
    -link=true \
    "$@" \
    "${vm_name}"

  # Retrieve IP first, to confirm the guest operations agent is running.
  govc vm.ip "${vm_name}" > /dev/null

  govc guest.mkdir \
    -vm="${vm_name}" \
    -p \
    /home/kube/.ssh

  # Seed the VM with every identity currently held by the local ssh-agent.
  ssh-add -L > "${KUBE_TEMP}/${vm_name}-authorized_keys"

  govc guest.upload \
    -vm="${vm_name}" \
    -f \
    "${KUBE_TEMP}/${vm_name}-authorized_keys" \
    /home/kube/.ssh/authorized_keys
}

# Kick off a local script on a kubernetes virtual machine (master or minion)
#
# Usage:
#   kube-run VM_NAME LOCAL_FILE
function kube-run {
  local vm_name="$1"
  local file="$2"
  local dst="/tmp/$(basename "${file}")"
  govc guest.upload -vm="${vm_name}" -f -perm=0755 "${file}" "${dst}"

  local vm_ip
  vm_ip=$(govc vm.ip "${vm_name}")

  # Detach the script so this helper returns immediately; output goes to
  # ${dst}.out / ${dst}.err on the VM.
  kube-ssh ${vm_ip} "nohup sudo ${dst} < /dev/null 1> ${dst}.out 2> ${dst}.err &"
}

# Instantiate a kubernetes cluster
#
# Assumed vars:
#   KUBE_ROOT
#   <Various vars set in config file>
function kube-up {
  verify-ssh-prereqs
  find-release-tars
  ensure-temp-dir

  get-password
  python "${KUBE_ROOT}/third_party/htpasswd/htpasswd.py" \
    -b -c "${KUBE_TEMP}/htpasswd" "$KUBE_USER" "$KUBE_PASSWORD"
  local htpasswd
  htpasswd=$(cat "${KUBE_TEMP}/htpasswd")

  echo "Starting master VM (this can take a minute)..."

  # Build the master bootstrap script by concatenating env var definitions
  # with the (comment-stripped) template scripts.
  (
    echo "#! /bin/bash"
    echo "readonly MY_NAME=${MASTER_NAME}"
    grep -v "^#" "${KUBE_ROOT}/cluster/vsphere/templates/hostname.sh"
    echo "cd /home/kube/cache/kubernetes-install"
    echo "readonly MASTER_NAME='${MASTER_NAME}'"
    echo "readonly NODE_INSTANCE_PREFIX='${INSTANCE_PREFIX}-minion'"
    echo "readonly PORTAL_NET='${PORTAL_NET}'"
    echo "readonly ENABLE_NODE_MONITORING='${ENABLE_NODE_MONITORING:-false}'"
    echo "readonly ENABLE_NODE_LOGGING='${ENABLE_NODE_LOGGING:-false}'"
    echo "readonly LOGGING_DESTINATION='${LOGGING_DESTINATION:-}'"
    echo "readonly ENABLE_CLUSTER_DNS='${ENABLE_CLUSTER_DNS:-false}'"
    echo "readonly DNS_SERVER_IP='${DNS_SERVER_IP:-}'"
    echo "readonly DNS_DOMAIN='${DNS_DOMAIN:-}'"
    echo "readonly SERVER_BINARY_TAR='${SERVER_BINARY_TAR##*/}'"
    echo "readonly SALT_TAR='${SALT_TAR##*/}'"
    echo "readonly MASTER_HTPASSWD='${htpasswd}'"
    grep -v "^#" "${KUBE_ROOT}/cluster/vsphere/templates/create-dynamic-salt-files.sh"
    grep -v "^#" "${KUBE_ROOT}/cluster/vsphere/templates/install-release.sh"
    grep -v "^#" "${KUBE_ROOT}/cluster/vsphere/templates/salt-master.sh"
  ) > "${KUBE_TEMP}/master-start.sh"

  kube-up-vm ${MASTER_NAME} -c ${MASTER_CPU-1} -m ${MASTER_MEMORY_MB-1024}
  upload-server-tars
  kube-run ${MASTER_NAME} "${KUBE_TEMP}/master-start.sh"

  # Print master IP, so user can log in for debugging.
  detect-master
  echo

  echo "Starting minion VMs (this can take a minute)..."

  # Minions are brought up in parallel; each subshell is collected below.
  for (( i=0; i<${#MINION_NAMES[@]}; i++)); do
    (
      echo "#! /bin/bash"
      echo "readonly MY_NAME=${MINION_NAMES[$i]}"
      grep -v "^#" "${KUBE_ROOT}/cluster/vsphere/templates/hostname.sh"
      echo "KUBE_MASTER=${KUBE_MASTER}"
      echo "KUBE_MASTER_IP=${KUBE_MASTER_IP}"
      echo "MINION_IP_RANGE=${MINION_IP_RANGES[$i]}"
      grep -v "^#" "${KUBE_ROOT}/cluster/vsphere/templates/salt-minion.sh"
    ) > "${KUBE_TEMP}/minion-start-${i}.sh"

    (
      kube-up-vm "${MINION_NAMES[$i]}" -c ${MINION_CPU-1} -m ${MINION_MEMORY_MB-1024}
      kube-run "${MINION_NAMES[$i]}" "${KUBE_TEMP}/minion-start-${i}.sh"
    ) &
  done

  local fail=0
  local job
  for job in $(jobs -p); do
    wait "${job}" || fail=$((fail + 1))
  done
  if (( $fail != 0 )); then
    echo "${fail} commands failed. Exiting." >&2
    exit 2
  fi

  # Print minion IPs, so user can log in for debugging.
  detect-minions
  echo

  echo "Waiting for master and minion initialization."
  echo
  echo " This will continually check to see if the API for kubernetes is reachable."
  echo " This might loop forever if there was some uncaught error during start up."
  echo

  printf "Waiting for ${KUBE_MASTER} to become available..."
  until curl --insecure --user "${KUBE_USER}:${KUBE_PASSWORD}" --max-time 5 \
    --fail --output /dev/null --silent "https://${KUBE_MASTER_IP}/api/v1beta1/pods"; do
    printf "."
    sleep 2
  done
  printf " OK\n"

  local i
  for (( i=0; i<${#MINION_NAMES[@]}; i++)); do
    printf "Waiting for ${MINION_NAMES[$i]} to become available..."
    until curl --max-time 5 \
      --fail --output /dev/null --silent "http://${KUBE_MINION_IP_ADDRESSES[$i]}:10250/healthz"; do
      printf "."
      sleep 2
    done
    printf " OK\n"
  done

  echo
  echo "Sanity checking cluster..."

  sleep 5

  # Basic sanity checking
  local i
  for (( i=0; i<${#MINION_NAMES[@]}; i++)); do
    # Make sure docker is installed
    kube-ssh "${KUBE_MINION_IP_ADDRESSES[$i]}" which docker > /dev/null || {
      echo "Docker failed to install on ${MINION_NAMES[$i]}. Your cluster is unlikely" >&2
      echo "to work correctly. Please run ./cluster/kube-down.sh and re-create the" >&2
      echo "cluster. (sorry!)" >&2
      exit 1
    }
  done

  echo
  echo "Kubernetes cluster is running. The master is running at:"
  echo
  echo " https://${KUBE_MASTER_IP}"
  echo
  echo "The user name and password to use is located in ~/.kubernetes_auth."
  echo

  local kube_cert=".kubecfg.crt"
  local kube_key=".kubecfg.key"
  local ca_cert=".kubernetes.ca.crt"

  # Pull the client credentials off the master and write ~/.kubernetes_auth
  # with a restrictive umask so the key material is never world-readable.
  (
    umask 077

    kube-ssh "${KUBE_MASTER_IP}" sudo cat /srv/kubernetes/kubecfg.crt >"${HOME}/${kube_cert}" 2>/dev/null
    kube-ssh "${KUBE_MASTER_IP}" sudo cat /srv/kubernetes/kubecfg.key >"${HOME}/${kube_key}" 2>/dev/null
    kube-ssh "${KUBE_MASTER_IP}" sudo cat /srv/kubernetes/ca.crt >"${HOME}/${ca_cert}" 2>/dev/null

    cat << EOF > ~/.kubernetes_auth
{
  "User": "$KUBE_USER",
  "Password": "$KUBE_PASSWORD",
  "CAFile": "$HOME/$ca_cert",
  "CertFile": "$HOME/$kube_cert",
  "KeyFile": "$HOME/$kube_key"
}
EOF

    chmod 0600 ~/.kubernetes_auth "${HOME}/${kube_cert}" \
      "${HOME}/${kube_key}" "${HOME}/${ca_cert}"
  )
}

# Delete a kubernetes cluster
function kube-down {
  govc vm.destroy ${MASTER_NAME} &

  for (( i=0; i<${#MINION_NAMES[@]}; i++)); do
    govc vm.destroy ${MINION_NAMES[i]} &
  done

  wait
}

# Update a kubernetes cluster with latest source
function kube-push {
  verify-ssh-prereqs
  find-release-tars

  detect-master
  upload-server-tars

  # Stream the push script straight into an ssh session on the master.
  (
    echo "#! /bin/bash"
    echo "cd /home/kube/cache/kubernetes-install"
    echo "readonly SERVER_BINARY_TAR='${SERVER_BINARY_TAR##*/}'"
    echo "readonly SALT_TAR='${SALT_TAR##*/}'"
    grep -v "^#" "${KUBE_ROOT}/cluster/vsphere/templates/install-release.sh"
    echo "echo Executing configuration"
    echo "sudo salt '*' mine.update"
    echo "sudo salt --force-color '*' state.highstate"
  ) | kube-ssh "${KUBE_MASTER_IP}"

  get-password

  echo
  echo "Kubernetes cluster is running. The master is running at:"
  echo
  echo " https://${KUBE_MASTER_IP}"
  echo
  echo "The user name and password to use is located in ~/.kubernetes_auth."
  echo
}

# Execute prior to running tests to build a release if required for env
function test-build-release {
  echo "TODO"
}

# Execute prior to running tests to initialize required structure
function test-setup {
  echo "TODO"
}

# Execute after running tests to perform any required clean-up
function test-teardown {
  echo "TODO"
}

function setup-monitoring {
  echo "TODO"
}

function teardown-monitoring {
  echo "TODO"
}

function setup-logging {
  echo "TODO: setup logging"
}

function teardown-logging {
  echo "TODO: teardown logging"
}
class UserModel
{
    // Other methods and properties

    /**
     * Remove the current user's details from the backing store.
     *
     * Placeholder: the actual deletion (database query or API call) is not
     * implemented yet. Once implemented it should report the outcome, e.g.:
     *
     *   if ($this->deleteFromDatabase()) {
     *       return true;   // deletion succeeded
     *   }
     *   return false;      // deletion failed
     */
    public function setDelete()
    {
        // Deletion logic for the user details goes here.
    }

    // Other methods
}
var util = require('util');
var EventEmitter = require('events').EventEmitter;
var requestHandler = require('dvp-ardscommon/RequestHandler.js');
var dbConn = require('dvp-dbmodels');
var messageFormatter = require('dvp-common/CommonMessageGenerator/ClientMessageJsonFormatter.js');
var logger = require('dvp-common/LogHandler/CommonLogHandler.js').logger;
var redisHandler = require('dvp-ardscommon/RedisHandler.js');

// Fan out a state lookup for every request in the list and emit one 'result'
// per request, followed by a single 'end' once all lookups have returned.
var SplitAndGetStatus = function (logKey, requestlist) {
    // FIX: was `new EventEmiter()` (misspelled) — a guaranteed ReferenceError
    // the first time this function ran; the imported binding is EventEmitter.
    var e = new EventEmitter();
    process.nextTick(function () {
        var count = 0;
        var reqlistCount = requestlist.length;
        for (var i in requestlist) {
            var val = requestlist[i];
            console.log(" " + i + ": " + val);
            var requestObj = val.Obj;
            var requestVid = val.Vid;
            requestHandler.GetRequestState(logKey, requestObj.Company, requestObj.Tenant, requestObj.SessionId, function (err, reqstate) {
                if (err) {
                    console.log(err);
                }
                // `count` is shared by all callbacks: results are paired with
                // requests in completion order, not dispatch order.
                e.emit('result', requestlist[count].Obj, requestlist[count].Vid, reqstate);
                count++;
                if (reqlistCount === count) {
                    console.log("end", count);
                    e.emit('end');
                }
            });
        }
    });
    return (e);
};

// Look up all requests matching the given tags and resolve each one's
// current state; calls back with [{Request, Status, Vid}, ...].
var SearchRequestByTags = function (logkey, searchTags, callback) {
    requestHandler.SearchRequestByTags(logkey, searchTags, function (err, requestlist) {
        if (err) {
            console.log(err);
            callback(err, []);
        } else {
            var returnlist = [];
            if (requestlist.length > 0) {
                var gobtk = SplitAndGetStatus(logkey, requestlist);
                gobtk.on('result', function (requestObj, requestVid, reqstate) {
                    var obj = {
                        Request: requestObj,
                        Status: reqstate,
                        Vid: requestVid
                    };
                    returnlist.push(obj);
                });
                gobtk.on('end', function () {
                    callback(null, returnlist);
                });
            } else {
                callback(null, returnlist);
            }
        }
    });
};

// All requests for one company/tenant.
var GetAllRequests = function (logkey, company, tenant, callback) {
    var searchTags = ["company_" + company, "tenant_" + tenant];
    SearchRequestByTags(logkey, searchTags, function (err, returnlist) {
        callback(err, returnlist);
    });
};

// Requests for one company/tenant, narrowed by server type and request type.
var GetRequestFilterByClassTypeCategory = function (logkey, company, tenant, reqServer, reqType, callback) {
    var searchTags = ["company_" + company, "tenant_" + tenant, "serverType_" + reqServer, "requestType_" + reqType];
    SearchRequestByTags(logkey, searchTags, function (err, returnlist) {
        callback(err, returnlist);
    });
};

// Group QUEUED/TRYING requests by queue id: [{Queue, Items: [...]}, ...].
var GetAllQueueDetails = function (logkey, company, tenant, callback) {
    GetAllRequests(logkey, company, tenant, function (err, requestlist) {
        var returnlist = [];
        if (err) {
            console.log(err);
        } else {
            if (requestlist.length > 0) {
                // returnlist doubles as an array and a queue-id keyed map so
                // existing groups can be found in O(1) while preserving order.
                requestlist.reduce(function (result, o) {
                    if (o.Status == "QUEUED" || o.Status == "TRYING") {
                        var unit = o.Request.QueueId;
                        if (!(unit in returnlist)) {
                            returnlist.push(returnlist[unit] = {
                                Queue: o.Request.QueueId,
                                Items: [o]
                            });
                        } else {
                            returnlist[unit].Items.push(o);
                        }
                    }
                    return result;
                }, { arr: [] }).arr;
            }
        }
        callback(err, returnlist);
    });
};

// Same grouping as GetAllQueueDetails, but filtered by server/request type
// and restricted to QUEUED requests only.
var GetQueueDetailsFilterByClassTypeCategory = function (logkey, company, tenant, reqServer, reqType, callback) {
    var searchTags = ["company_" + company, "tenant_" + tenant, "serverType_" + reqServer, "requestType_" + reqType];
    SearchRequestByTags(logkey, searchTags, function (err, requestlist) {
        var returnlist = [];
        if (err) {
            console.log(err);
        } else {
            if (requestlist.length > 0) {
                requestlist.reduce(function (result, o) {
                    if (o.Status == "QUEUED") {
                        var unit = o.Request.QueueId;
                        if (!(unit in returnlist)) {
                            returnlist.push(returnlist[unit] = {
                                Queue: o.Request.QueueId,
                                Items: [o]
                            });
                        } else {
                            returnlist[unit].Items.push(o);
                        }
                    }
                    return result;
                }, { arr: [] }).arr;
            }
        }
        callback(err, returnlist);
    });
};

// Linear scan for the first element whose `field` equals `value`;
// returns undefined when no match is found.
var FilterObjFromArray = function (itemArray, field, value) {
    var resultObj;
    for (var i = 0; i < itemArray.length; i++) {
        var item = itemArray[i];
        if (item[field] == value) {
            resultObj = item;
            break;
        }
    }
    return resultObj;
};

// Resolve a queue id to its display name from the QueueNameHash in Redis.
var GenerateQueueName = function (logKey, queueId, callback) {
    redisHandler.GetHashValue(logKey, "QueueNameHash", queueId, function (err, name) {
        if (err) {
            callback(err, null);
        } else {
            callback(null, name);
        }
    });
};

// Replace summary.Queue (a "<settingId>-<priority>" style id) with the human
// readable queue name stored in Redis, appending "-P<priority>" when the
// priority is non-zero. On any failure the summary passes through unchanged.
var SetQueueName = function (summary, callback) {
    var queue = summary.Queue.replace(/-/g, ":");
    var queueParams = queue.split(':');
    var queuePriority = queueParams.pop();
    var queueSettingId = queueParams.join(':');
    redisHandler.GetHashValue("GetQueueName", "QueueNameHash", queueSettingId, function (err, result) {
        if (err) {
            callback(summary);
        } else {
            if (result) {
                var queueSetting = JSON.parse(result);
                if (queueSetting && queueSetting.QueueName) {
                    var queueName = queueSetting.QueueName;
                    queueName = (queuePriority !== '0') ? util.format('%s-P%s', queueName, queuePriority) : queueName;
                    summary.Queue = queueName;
                }
            }
            callback(summary);
        }
    });
};

// Resolve the queue name for every summary of one day; emits 'endSummary'
// with (date, newSummaries) once all lookups finish.
var ExtractSummary = function (date, summaries) {
    var e = new EventEmitter();
    process.nextTick(function () {
        var count = 0;
        var newSummaries = [];
        for (var i in summaries) {
            SetQueueName(summaries[i], function (newSummary) {
                count++;
                newSummaries.push(newSummary);
                if (count == summaries.length) {
                    e.emit('endSummary', date, newSummaries);
                }
            });
        }
    });
    return (e);
};

// Resolve queue names across all days of a daily summary; calls back with
// [{Date, Summary}, ...] once every day's lookups complete.
var ExtractDailySummary = function (dailySummary, callback) {
    //var e = new EventEmitter();
    //process.nextTick(function () {
    var count = 0;
    var newDailySummary = [];
    for (var i in dailySummary) {
        var es = ExtractSummary(dailySummary[i].Date, dailySummary[i].Summary);
        es.on('endSummary', function (date, summary) {
            count++;
            newDailySummary.push({ Date: date, Summary: summary });
            if (count == dailySummary.length) {
                callback(newDailySummary);
            }
        });
    }
    //});
    //return (e);
};

// Build per-day, per-queue summaries (queued/answered/dropped counts, SLA)
// from the Dashboard_DailySummaries table and write the JSON response to
// `callback` (an HTTP response object — .end() is called on it).
var GetDailySummaryRecords = function (tenant, company, summaryFromDate, summaryToDate, callback) {
    // NOTE(review): the query interpolates tenant/company/date values directly
    // into SQL — a SQL injection risk; should use bound query replacements.
    dbConn.SequelizeConn.query("SELECT * FROM \"Dashboard_DailySummaries\" WHERE \"Company\" = '" + company + "' and \"Tenant\" = '" + tenant + "' and \"SummaryDate\"::date >= date '" + summaryFromDate + "' and \"SummaryDate\"::date <= date '" + summaryToDate + "' and \"WindowName\" in (SELECT \"WindowName\" FROM \"Dashboard_DailySummaries\" WHERE \"WindowName\" = 'QUEUE' or \"WindowName\" = 'QUEUEDROPPED' or \"WindowName\" = 'QUEUEANSWERED')", { type: dbConn.SequelizeConn.QueryTypes.SELECT })
        .then(function (records) {
            if (records && records.length > 0) {
                logger.info('[DVP-ARDSMonitoring.GetDailySummaryRecords] - [%s] - [PGSQL] - Data found - %s-[%s]', tenant, company, JSON.stringify(records));
                // First pass: bucket raw records by date, then by queue id.
                var Queues = [];
                for (var i in records) {
                    var record = records[i];
                    var queueDateInfo = FilterObjFromArray(Queues, "queueDate", record.SummaryDate.toDateString());
                    if (!queueDateInfo) {
                        queueDateInfo = { queueDate: record.SummaryDate.toDateString(), queueInfos: [] };
                        Queues.push(queueDateInfo);
                    }
                    var queueInfo = FilterObjFromArray(queueDateInfo.queueInfos, "queueId", record.Param1);
                    if (queueInfo) {
                        queueInfo.records.push(record);
                    } else {
                        queueDateInfo.queueInfos.push({ queueId: record.Param1, records: [record] });
                    }
                }
                // Second pass: merge each queue's QUEUE/QUEUEANSWERED/QUEUEDROPPED
                // windows into one summary object per queue per day.
                var DailySummary = [];
                for (var t in Queues) {
                    var date = Queues[t];
                    for (var j in date.queueInfos) {
                        var reqQueue = date.queueInfos[j];
                        var queue = FilterObjFromArray(reqQueue.records, "WindowName", "QUEUE");
                        var queueAnswered = FilterObjFromArray(reqQueue.records, "WindowName", "QUEUEANSWERED");
                        var queueDropped = FilterObjFromArray(reqQueue.records, "WindowName", "QUEUEDROPPED");
                        var summary = {};
                        if (queue) {
                            var summaryDate = FilterObjFromArray(DailySummary, "Date", queue.SummaryDate.toDateString());
                            if (!summaryDate) {
                                summaryDate = { Date: queue.SummaryDate.toDateString(), Summary: [] };
                                DailySummary.push(summaryDate);
                            }
                            summary.Queue = queue.Param1;
                            summary.Date = queue.SummaryDate;
                            summary.TotalQueued = queue.TotalCount;
                            summary.TotalQueueTime = queue.TotalTime;
                            summary.MaxTime = queue.MaxTime;
                            summary.QueueAnswered = 0;
                            summary.QueueDropped = 0;
                            summary.ThresholdValue = queue.ThresholdValue;
                            if (summary.TotalQueued > 0) {
                                summary.AverageQueueTime = summary.TotalQueueTime / summary.TotalQueued;
                                var sla = ((summary.TotalQueued - queue.ThresholdValue) / summary.TotalQueued) * 100;
                                summary.SLA = sla;
                            }
                            if (queueAnswered) {
                                summary.QueueAnswered = queueAnswered.TotalCount;
                            }
                            if (queueDropped) {
                                summary.QueueDropped = queueDropped.TotalCount;
                            }
                            summaryDate.Summary.push(summary);
                        }
                    }
                }
                ExtractDailySummary(DailySummary, function (dailySummaryWithQueueName) {
                    var jsonString = messageFormatter.FormatMessage(undefined, "SUCCESS", true, dailySummaryWithQueueName);
                    callback.end(jsonString);
                });
            } else {
                logger.error('[DVP-ARDSMonitoring.GetDailySummaryRecords] - [PGSQL] - No record found for %s - %s ', tenant, company);
                var jsonString = messageFormatter.FormatMessage(new Error('No record'), "No records found", false, undefined);
                callback.end(jsonString);
            }
        }).error(function (err) {
            logger.error('[DVP-ARDSMonitoring.GetDailySummaryRecords] - [%s] - [%s] - [PGSQL] - Error in searching.-[%s]', tenant, company, err);
            var jsonString = messageFormatter.FormatMessage(err, "EXCEPTION", false, undefined);
            callback.end(jsonString);
        });
};

// Per-hour SLA threshold breakdown for one day; writes the JSON response to
// `callback` (.end() is called on it).
var GetQueueSlaHourlyBreakDownRecords = function (tenant, company, summaryFromDate, callback) {
    // NOTE(review): string-built SQL — same injection risk as above.
    dbConn.SequelizeConn.query("SELECT t1.\"Param1\" as \"Queue\", t1.\"TotalCount\", t2.\"BreakDown\", t2.\"ThresholdCount\", t2.\"SummaryDate\", t2.\"Hour\", round((t2.\"ThresholdCount\"::numeric/t1.\"TotalCount\"::numeric) *100,2) as \"Average\" FROM \"Dashboard_DailySummaries\" t1, \"Dashboard_ThresholdBreakDowns\" t2 WHERE t1.\"Company\"='" + company + "' AND t1.\"Tenant\"='" + tenant + "' AND t1.\"Param1\"=t2.\"Param1\" AND t1.\"WindowName\"='QUEUE' AND t1.\"SummaryDate\"::date = date '" + summaryFromDate + "' AND t2.\"SummaryDate\"::date = date '" + summaryFromDate + "' ORDER BY t2.\"Hour\", t1.\"Param1\"", { type: dbConn.SequelizeConn.QueryTypes.SELECT })
        .then(function (records) {
            if (records && records.length > 0) {
                logger.info('[DVP-ARDSMonitoring.GetQueueSlaHourlyBreakDownRecords] - [%s] - [PGSQL] - Data found - %s-[%s]', tenant, company, JSON.stringify(records));
                var count = 0;
                var newSummaries = [];
                for (var i in records) {
                    SetQueueName(records[i], function (newSummary) {
                        count++;
                        if (newSummary) {
                            // "gt"/"lt" markers in BreakDown encode whether the
                            // bucket is above or below the SLA threshold.
                            newSummary.SlaViolated = "True";
                            if (newSummary.BreakDown && newSummary.BreakDown.indexOf("gt") > -1) {
                                newSummary.BreakDown = newSummary.BreakDown.replace("-gt", " <");
                            }
                            if (newSummary.BreakDown && newSummary.BreakDown.indexOf("lt") > -1) {
                                newSummary.SlaViolated = "False";
                                newSummary.BreakDown = newSummary.BreakDown.replace("lt-", " <");
                            }
                            newSummaries.push(newSummary);
                        }
                        if (count == records.length) {
                            var jsonString = messageFormatter.FormatMessage(undefined, "SUCCESS", true, newSummaries);
                            callback.end(jsonString);
                        }
                    });
                }
            } else {
                logger.error('[DVP-ARDSMonitoring.GetQueueSlaHourlyBreakDownRecords] - [PGSQL] - No record found for %s - %s ', tenant, company);
                var jsonString = messageFormatter.FormatMessage(new Error('No record'), "No records found", false, undefined);
                callback.end(jsonString);
            }
        }).error(function (err) {
            logger.error('[DVP-ARDSMonitoring.GetQueueSlaHourlyBreakDownRecords] - [%s] - [%s] - [PGSQL] - Error in searching.-[%s]', tenant, company, err);
            var jsonString = messageFormatter.FormatMessage(err, "EXCEPTION", false, undefined);
            callback.end(jsonString);
        });
};

// SLA breakdown for one day, aggregated per queue across hours; writes the
// JSON response to `callback` (.end() is called on it).
var GetQueueSlaBreakDownRecords = function (tenant, company, summaryFromDate, callback) {
    // NOTE(review): string-built SQL — same injection risk as above.
    dbConn.SequelizeConn.query("SELECT t1.\"Param1\" as \"Queue\", t1.\"TotalCount\", t2.\"BreakDown\", t2.\"ThresholdCount\", t2.\"SummaryDate\", t2.\"Hour\", round((t2.\"ThresholdCount\"::numeric/t1.\"TotalCount\"::numeric) *100,2) as \"Average\" FROM \"Dashboard_DailySummaries\" t1, \"Dashboard_ThresholdBreakDowns\" t2 WHERE t1.\"Company\"='" + company + "' AND t1.\"Tenant\"='" + tenant + "' AND t1.\"Param1\"=t2.\"Param1\" AND t1.\"WindowName\"='QUEUE' AND t1.\"SummaryDate\"::date = date '" + summaryFromDate + "' AND t2.\"SummaryDate\"::date = date '" + summaryFromDate + "' ORDER BY t2.\"Hour\", t1.\"Param1\"", { type: dbConn.SequelizeConn.QueryTypes.SELECT })
        .then(function (records) {
            if (records && records.length > 0) {
                logger.info('[DVP-ARDSMonitoring.GetQueueSlaHourlyBreakDownRecords] - [%s] - [PGSQL] - Data found - %s-[%s]', tenant, company, JSON.stringify(records));
                var count = 0;
                var newSummaries = [];
                for (var i in records) {
                    SetQueueName(records[i], function (newSummary) {
                        count++;
                        if (newSummary) {
                            newSummary.SlaViolated = "True";
                            if (newSummary.BreakDown && newSummary.BreakDown.indexOf("gt") > -1) {
                                newSummary.BreakDown = newSummary.BreakDown.replace("-gt", " <");
                            }
                            if (newSummary.BreakDown && newSummary.BreakDown.indexOf("lt") > -1) {
                                newSummary.SlaViolated = "False";
                                newSummary.BreakDown = newSummary.BreakDown.replace("lt-", " <");
                            }
                            // Merge buckets for a queue already seen; otherwise append.
                            var queue = FilterObjFromArray(newSummaries, 'Queue', newSummary.Queue);
                            if (queue) {
                                var timeRange = FilterObjFromArray(newSummaries, 'BreakDown', newSummary.BreakDown);
                                if (timeRange) {
                                    timeRange.ThresholdCount = timeRange.ThresholdCount + newSummary.ThresholdCount;
                                    timeRange.Average = (timeRange.ThresholdCount / timeRange.TotalCount) * 100;
                                } else {
                                    newSummaries.push(newSummary);
                                }
                            } else {
                                newSummaries.push(newSummary);
                            }
                        }
                        if (count == records.length) {
                            var jsonString = messageFormatter.FormatMessage(undefined, "SUCCESS", true, newSummaries);
                            callback.end(jsonString);
                        }
                    });
                }
            } else {
                logger.error('[DVP-ARDSMonitoring.GetQueueSlaHourlyBreakDownRecords] - [PGSQL] - No record found for %s - %s ', tenant, company);
                var jsonString = messageFormatter.FormatMessage(new Error('No record'), "No records found", false, undefined);
                callback.end(jsonString);
            }
        }).error(function (err) {
            logger.error('[DVP-ARDSMonitoring.GetQueueSlaHourlyBreakDownRecords] - [%s] - [%s] - [PGSQL] - Error in searching.-[%s]', tenant, company, err);
            var jsonString = messageFormatter.FormatMessage(err, "EXCEPTION", false, undefined);
            callback.end(jsonString);
        });
};

module.exports.GetAllRequests = GetAllRequests;
module.exports.GetRequestFilterByClassTypeCategory = GetRequestFilterByClassTypeCategory;
module.exports.GetAllQueueDetails = GetAllQueueDetails;
module.exports.GetQueueDetailsFilterByClassTypeCategory = GetQueueDetailsFilterByClassTypeCategory;
module.exports.GetDailySummaryRecords = GetDailySummaryRecords;
module.exports.GenerateQueueName = GenerateQueueName;
module.exports.GetQueueSlaHourlyBreakDownRecords = GetQueueSlaHourlyBreakDownRecords;
module.exports.GetQueueSlaBreakDownRecords = GetQueueSlaBreakDownRecords;
#!/bin/bash
# Build the UI image, start the container, then follow its logs.
# FIX: without set -e a failed build or start was silently ignored and the
# script went on to tail logs of a container that may not even exist.
set -e
./build_ui.sh
./start_ui.sh
docker container logs --follow streamtagger_ui
class Admin::TopicsController < Admin::BaseController
  before_action :set_topic, only: [:show, :edit, :update, :destroy]

  # GET /admin/topics — newest first, paginated.
  def index
    @topics = Topic.order(id: :desc).page(params[:page])
  end

  def show
  end

  def new
    @topic = Topic.new
  end

  def create
    @topic = Topic.new topic_params
    if @topic.save
      redirect_to admin_topic_url(@topic), notice: t('flash.topic_is_successfully_created')
    else
      render 'update_form'
    end
  end

  def edit
  end

  def update
    if @topic.update topic_params
      redirect_to admin_topic_url(@topic), notice: t('flash.topic_is_successfully_updated')
    else
      render 'update_form'
    end
  end

  # FIX: the action body was empty — the topic loaded by set_topic was never
  # destroyed and no response was rendered. Delete it and return to the index.
  def destroy
    @topic.destroy
    redirect_to admin_topics_url, notice: t('flash.topic_is_successfully_destroyed')
  end

  private

  # Strong parameters: only whitelisted topic attributes are mass-assignable.
  def topic_params
    params.require(:topic).permit(:name, :slug, :description)
  end

  def set_topic
    @topic = Topic.find params[:id]
  end
end
import math


def fast_loop(num):
    """Accumulate sqrt(i*i + 1) for i in [0, int(num)), starting from 1.

    Behaviorally identical to the original index loop, expressed as a
    generator expression fed to sum(); returns the int 1 when num < 1.
    """
    return 1 + sum(math.sqrt(i * i + 1) for i in range(int(num)))
// Text parser to identify embedded IP addresses
string input = "172.16.254.1";
// Four dot-separated groups of 1-3 digits. NOTE(review): matches syntactic
// shape only — out-of-range octets such as "999.999.999.999" also match;
// add per-octet range validation if strict IPv4 checking is required.
string pattern = @"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"; // Regex to match an IP address
// Regex.Match scans anywhere in the string (pattern is unanchored), so
// addresses embedded inside longer text are also found.
Match match = Regex.Match(input, pattern);
if (match.Success)
{
    Console.WriteLine("IP address embedded in text.");
}
/*
 * Copyright © 2020 Lisk Foundation
 *
 * See the LICENSE file at the top-level directory of this distribution
 * for licensing information.
 *
 * Unless otherwise agreed in a custom licensing agreement with the Lisk Foundation,
 * no part of this software, including this file, may be copied, modified,
 * propagated, or distributed except according to the terms contained in the
 * LICENSE file.
 *
 * Removal or modification of this copyright notice is prohibited.
 */
import * as bufferArray from '../../src/objects/buffer_array';

// Unit tests for the Buffer-array helpers. Fix: several `it()` descriptions
// contradicted the asserted expectation (ContainsSome with a partial match
// expects true; the second OrderByLex case expects false because the input
// is NOT ordered) — descriptions now state what is actually asserted.
describe('buffer arrays', () => {
	describe('bufferArrayIncludes', () => {
		it('should return true if array includes the value', () => {
			expect(
				bufferArray.bufferArrayIncludes(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					Buffer.from('target'),
				),
			).toBeTrue();
		});

		it('should return false if array does not include the value', () => {
			expect(
				bufferArray.bufferArrayIncludes(
					[Buffer.from('val2'), Buffer.from('val1')],
					Buffer.from('target'),
				),
			).toBeFalse();
		});
	});

	describe('bufferArrayContains', () => {
		it('should return true if array contains the values', () => {
			expect(
				bufferArray.bufferArrayContains(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					[Buffer.from('target'), Buffer.from('val1')],
				),
			).toBeTrue();
		});

		it('should return false if array contains the partial value', () => {
			expect(
				bufferArray.bufferArrayContains(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					[Buffer.from('target'), Buffer.from('val4')],
				),
			).toBeFalse();
		});

		it('should return false if array does not contain the value', () => {
			expect(
				bufferArray.bufferArrayContains(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					[Buffer.from('val3'), Buffer.from('val4')],
				),
			).toBeFalse();
		});
	});

	describe('bufferArrayContainsSome', () => {
		it('should return true if array contains all the values', () => {
			expect(
				bufferArray.bufferArrayContainsSome(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					[Buffer.from('target'), Buffer.from('val1')],
				),
			).toBeTrue();
		});

		it('should return true if array contains some of the values', () => {
			expect(
				bufferArray.bufferArrayContainsSome(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					[Buffer.from('target'), Buffer.from('val4')],
				),
			).toBeTrue();
		});

		it('should return false if array does not contain any of the values', () => {
			expect(
				bufferArray.bufferArrayContainsSome(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					[Buffer.from('val3'), Buffer.from('val4')],
				),
			).toBeFalse();
		});
	});

	describe('bufferArrayEqual', () => {
		it('should return true if array are equal', () => {
			expect(
				bufferArray.bufferArrayEqual(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
				),
			).toBeTrue();
		});

		it('should return false if array are not equal', () => {
			expect(
				bufferArray.bufferArrayEqual(
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
					[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val11')],
				),
			).toBeFalse();
		});
	});

	describe('bufferArraySubtract', () => {
		it('should remove matching buffer', () => {
			const result = bufferArray.bufferArraySubtract(
				[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
				[Buffer.from('val2'), Buffer.from('val1')],
			);
			expect(result).toHaveLength(1);
			expect(result).toEqual([Buffer.from('target')]);
		});

		it('should not remove if none of them matches', () => {
			const result = bufferArray.bufferArraySubtract(
				[Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')],
				[Buffer.from('val3'), Buffer.from('val4')],
			);
			expect(result).toHaveLength(3);
		});
	});

	describe('bufferArrayOrderByLex', () => {
		it('should not mutate the original array', () => {
			const original = [Buffer.from('val2'), Buffer.from('target'), Buffer.from('val1')];
			bufferArray.bufferArrayOrderByLex(original);
			expect(original[0]).toEqual(Buffer.from('val2'));
		});

		it('should return true if ordered lexicographically', () => {
			expect(
				bufferArray.bufferArrayOrderByLex([
					Buffer.from('target'),
					Buffer.from('val1'),
					Buffer.from('val1'),
					Buffer.from('val2'),
				]),
			).toBeTrue();
		});

		it('should return false if not ordered lexicographically', () => {
			expect(
				bufferArray.bufferArrayOrderByLex([
					Buffer.from('val1'),
					Buffer.from('target'),
					Buffer.from('val1'),
					Buffer.from('val2'),
				]),
			).toBeFalse();
		});
	});

	describe('bufferArrayUniqueItems', () => {
		it('should return false if array contain duplicate buffer', () => {
			expect(
				bufferArray.bufferArrayUniqueItems([
					Buffer.from('val1'),
					Buffer.from('val2'),
					Buffer.from('val1'),
				]),
			).toBeFalse();
		});

		it('should return false if array contain reference to same instance', () => {
			const instance = Buffer.from('val1');
			expect(
				bufferArray.bufferArrayUniqueItems([
					Buffer.from('val2'),
					instance,
					Buffer.from('target'),
					instance,
				]),
			).toBeFalse();
		});

		it('should return true if all items are unique', () => {
			expect(
				bufferArray.bufferArrayUniqueItems([
					Buffer.from('val1'),
					Buffer.from('val2'),
					Buffer.from('target'),
				]),
			).toBeTrue();
		});
	});
});
module.exports = [{ "layout": "./charts.hbs", "filename": "./zoom_line.html", "data": { "title": "Zoom Line Chart", "type": "zoom_line" } },{ "layout": "./charts.hbs", "filename": "./zoom_line_multi.html", "data": { "title": "Zoom Line Multi Chart", "type": "zoom_line_multi" } },{ "layout": "./charts.hbs", "filename": "./zoom_scatter.html", "data": { "title": "Zoom Scatter Chart", "type": "zoom_scatter" } },{ "layout": "./charts.hbs", "filename": "./zoom_area_stack.html", "data": { "title": "Zoom Area Stack Chart", "type": "zoom_area_stack" } },{ "layout": "./charts.hbs", "filename": "./zoom_area_stack2.html", "data": { "title": "Zoom Area Stack Chart", "type": "zoom_area_stack2" } },{ "layout": "./charts.hbs", "filename": "./zoom_bar.html", "data": { "title": "Zoom Bar Chart", "type": "zoom_bar" } },{ "layout": "./charts.hbs", "filename": "./zoom_bar_group.html", "data": { "title": "Zoom Bar Group Chart", "type": "zoom_bar_group" } },{ "layout": "./charts.hbs", "filename": "./zoom_bar_stack.html", "data": { "title": "Zoom Bar Stack Chart", "type": "zoom_bar_stack" } }]
#!/bin/sh java -Dfile.encoding=UTF-8 -Xmx8G -jar /opt/spotlight/dbpedia-spotlight-1.0.0.jar /opt/spotlight/pt http://0.0.0.0:80/rest
import { Injectable } from '@angular/core'; import { Observable, of, throwError } from 'rxjs'; import { map, catchError } from 'rxjs/operators'; import { Contact } from '../models/contact'; import { AuthService } from './auth.service'; import { HttpClient, HttpHeaders } from '@angular/common/http'; @Injectable({ providedIn: 'root' }) export class ContactService { private apiUrl = 'api/contacts'; // Replace with actual API endpoint constructor(private authService: AuthService, private http: HttpClient) {} getContacts(): Observable<Contact[]> { if (this.authService.isAuthenticated()) { return this.http.get<Contact[]>(this.apiUrl).pipe( catchError((error: any) => { console.error('Error fetching contacts', error); return throwError('Error fetching contacts'); }) ); } else { return throwError('User is not authenticated'); } } addContact(contact: Contact): Observable<Contact> { if (this.authService.isAuthenticated()) { return this.http.post<Contact>(this.apiUrl, contact).pipe( catchError((error: any) => { console.error('Error adding contact', error); return throwError('Error adding contact'); }) ); } else { return throwError('User is not authenticated'); } } updateContact(contact: Contact): Observable<Contact> { if (this.authService.isAuthenticated()) { const url = `${this.apiUrl}/${contact.id}`; return this.http.put<Contact>(url, contact).pipe( catchError((error: any) => { console.error('Error updating contact', error); return throwError('Error updating contact'); }) ); } else { return throwError('User is not authenticated'); } } deleteContact(contactId: string): Observable<void> { if (this.authService.isAuthenticated()) { const url = `${this.apiUrl}/${contactId}`; return this.http.delete<void>(url).pipe( catchError((error: any) => { console.error('Error deleting contact', error); return throwError('Error deleting contact'); }) ); } else { return throwError('User is not authenticated'); } } }
# Run the Protégé history QA/validation tool configured by -g: it compares the
# previous (-p) and current (-u) Thesaurus against the concept (-c) and EVS
# (-e) history files, writing the report to -o. All file arguments are
# file:/// URIs — replace the placeholders with real paths.
java -jar ./build/historyvalidation.jar -g ./config/ProtegeHistoryQA.properties -p file:///URI_of_previous_Thesaurus -u file:///URI_of_current_Thesaurus -c file:///URI_of_concept_history -e file:///URI_of_evs_history -o file:///URI_of_output_file
<filename>lib/stak.rb
require 'stak/version'
require 'stak/helper'
require 'stak/router'
require 'stak/config'
require 'stak/controller'
require 'stak/mapper'

module Stak
  # Rack entry point of the framework: each request is resolved through the
  # Router and the matched endpoint is invoked with the Rack env.
  class Application
    attr_reader :router

    def initialize
      @router = Stak::Router.new
    end

    # Rack interface: find the matching callable and delegate the request.
    def call(env)
      mapp(env).call(env)
    end

    # Resolve the request path + HTTP verb (lower-cased symbol, e.g. :get)
    # to an endpoint via the router.
    def mapp(env)
      router.match(env['PATH_INFO'], env['REQUEST_METHOD'].downcase.to_sym, env)
    end
  end
end
# Debugging helper for the 'fr:' Redis namespace: echo each command (-x),
# list the keys, then dump the 'fr:mystream:x' stream two ways — XREAD from
# id 0 and XRANGE over the full id range (- to +).
set -x
# NOTE(review): KEYS is O(N) over the whole keyspace — fine for debugging,
# avoid against production instances (prefer SCAN).
redis-cli keys 'fr:*'
redis-cli xread streams fr:mystream:x 0
redis-cli xrange fr:mystream:x - +
<filename>fame-server/src/main/java/com/designre/blog/model/dto/SiteConfig.java
package com.designre.blog.model.dto;

import lombok.Builder;
import lombok.Data;

/**
 * Site-wide configuration DTO; Lombok generates getters/setters/equals/
 * hashCode ({@code @Data}) and a builder ({@code @Builder}).
 */
@Data
@Builder
public class SiteConfig {

    // Basic site metadata (page titles / SEO tags).
    private String title;
    private String description;
    private String keywords;

    // Outbound e-mail settings; emailSend toggles sending entirely.
    private boolean emailSend;
    private String emailHost;
    private Integer emailPort;
    private String emailUsername;
    private String emailPassword;
}
package sma.rhythmtapper.game.models;

import java.io.Serializable;

import sma.rhythmtapper.game.models.Skill.Skill;

/**
 * Card model: appeal stats (vocal/visual/dance), life, color type and its
 * two skills. Serializable so it can be persisted/passed between components.
 */
public class Card implements Serializable {
    // NOTE(review): fields are package-private — presumably read directly by
    // sibling classes in this package; confirm before tightening visibility.
    int vocal;
    int visual;
    int dance;
    CenterSkill centerSkill;
    Skill skill;
    ColorType colorType;
    int life;

    public Card(ColorType colorType, int voc, int vis, int dan, int life, CenterSkill cs, Skill sk) {
        this.colorType = colorType;
        this.vocal = voc;
        this.visual = vis;
        this.dance = dan;
        this.life = life;
        this.centerSkill = cs;
        this.skill = sk;
    }

    // Neutral placeholder used instead of null card references.
    public static final Card nullCard = new Card(ColorType.ANY, 0, 0, 0, 0, CenterSkill.nullCenterSkill, Skill.nullSkill);
}
#!/bin/sh
# This file was copied from the HBase source code, and slightly adapted
# by NGDATA.
# This file is used to generate the annotation of package info that
# records the user, url, revision and timestamp.
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

unset LANG
unset LC_CTYPE

# Usage: saveVersion.sh <version> <outputDirectory> <sourceRoot>
version="$1"
outputDirectory="$2"
sourceRoot="$3"

user=$(whoami)
date=$(date)
cwd=$(pwd)

# Pick revision/url in priority order: explicit build hash, svn checkout,
# git checkout, then an "Unknown" fallback. Fix: the directory tests are now
# quoted so paths containing spaces no longer word-split.
if [ -n "${COMPONENT_HASH}" ]; then
  revision="${COMPONENT_HASH}"
  url="http://github.com/cloudera/hbase-indexer"
elif [ -d "$sourceRoot/.svn" ]; then
  revision=$(svn info | sed -n -e 's/Last Changed Rev: \(.*\)/\1/p')
  url=$(svn info | sed -n -e 's/URL: \(.*\)/\1/p')
elif [ -d "$sourceRoot/.git" ]; then
  revision=$(git log -1 --pretty=format:"%H")
  hostname=$(hostname)
  url="git://${hostname}${cwd}"
else
  revision="Unknown"
  url="file://$cwd"
fi

# Emit the annotated package-info.java consumed by the build.
mkdir -p "$outputDirectory/com/ngdata/hbaseindexer"
cat >"$outputDirectory/com/ngdata/hbaseindexer/package-info.java" <<EOF
/*
 * Generated by src/saveVersion.sh
 */
@VersionAnnotation(version="$version", revision="$revision", user="$user", date="$date", url="$url")
package com.ngdata.hbaseindexer;
EOF
# Full-system backup: -a archive mode, -A preserve ACLs, -X preserve xattrs,
# -v verbose. NOTE: the --exclude={...} list relies on shell brace expansion
# (bash/zsh — it will not expand under POSIX sh). /home is excluded as well;
# back it up separately if user data matters.
rsync -aAXv --exclude={"/dev/*","/proc/*","/sys/*","/tmp/*","/run/*","/mnt/*","/media/*","/lost+found","/home"} / /mnt/Storage/Backups/asus_arch_04282016/
#pragma once #include <cstdint> #include <vector> #include "capstone/capstone.h" #include "analyzers_code/BaseCodeAnalyzer.hpp" struct Block; struct Symbol; class RetpolineCA : public BaseCodeAnalyzer { public: RetpolineCA(cs_arch arch, cs_mode mode); int run(cs_insn insn, const Block *block, const Symbol *call_sym) override; int process_results() override; private: bool m_has_pause; bool m_has_lfence; bool m_retpoline; std::vector<uint64_t> m_call_sites; uint64_t m_block_addr; };
# Render the shared proposal partial (proposals/_proposal.json.jbuilder)
# for the single proposal loaded by the controller.
json.partial! "proposals/proposal", proposal: @proposal
#!/bin/bash
# Rebuild Pometo from scratch: delete the generated lexer/parser so rebar3
# regenerates them, then compile and start an Erlang shell running the
# `runner:run` entry point.
# -f: don't print an error / fail when the generated files are already absent.
rm -f src/pometo_lexer.erl
rm -f src/pometo_parser.erl
rebar3 compile && erl -pa _build/default/lib/pometo/ebin/ -pa _build/default/lib/base16/ebin -s runner run
# Alternative entry point kept for reference:
#rebar3 compile && erl -pa _build/default/lib/pometo/ebin/ -pa _build/default/lib/base16/ebin -s runner noodle
#!/bin/sh npm i vscode-jsonrpc@6.0.0-next.5 npm i vscode-languageclient@7.0.0-next.9 npm i vscode-languageserver@7.0.0-next.7 npm i vscode-languageserver-protocol@3.16.0-next.7 npm i vscode-languageserver-types@3.16.0-next.3 cp -r ../Server/bin/Debug/net5.0 ./server npm install npm run compile vsce package
#!/bin/sh
# $1==appVersion
# $2==tmpFolder, gets created before this script is called and deleted afterwards
# Abort on the first failing step so a failed download can never be
# "installed" (the original ran every step unconditionally), and quote the
# temp-folder argument everywhere so spaced paths don't word-split.
set -e
wget -q "https://github.com/gopasspw/gopass/releases/download/v$1/gopass-$1-linux-amd64.tar.gz" -O "$2/gopass-$1-linux-amd64.tar.gz"
mkdir "$2/extracted"
sudo tar -xzf "$2/gopass-$1-linux-amd64.tar.gz" -C "$2/extracted"
sudo install -D -g root -o root -m 755 "$2/extracted/gopass" "/usr/local/bin/gopass"
# install SRC DEST: copies SRC to DEST, changes DEST permissions, owners in one command
# install -D: create all leading components of DEST except the last
# install -g root: change group ownership of DEST
# install -o root: change user ownership of DEST
# install -m XXX: set permissions
import { Component, ElementRef } from '@angular/core';
/* tslint:disable:no-unused-variable */
import { ComponentFixture, TestBed } from '@angular/core/testing';

import { SfeirBadgeDirective } from './sfeir-badge.directive';

// Stub ElementRef so the directive can be constructed without a real DOM node.
export class MockElementRef extends ElementRef {
  constructor() {
    super(null);
  }
}

// Stub renderer that swallows property updates made by the directive.
export class MockRenderer {
  setElementProperty() {}
}

// Minimal host component whose template is injected per test.
@Component({ selector: 'test-badge-directive', template: `` })
export class HostComponentForBadgeDirective {
  person = {};
}

// const MANAGER_BADGE_HTML = '<i class="material-icons">supervisor_account</i>';

// NOTE(review): the suite is disabled (xdescribe) and each spec is a failing
// placeholder (expect(true).toBe(false)) — these are deliberate TODOs, not
// accidental breakage.
xdescribe('SfeirBadgeDirective', () => {
  beforeEach(() => {
    TestBed.configureTestingModule({
      declarations: [HostComponentForBadgeDirective, SfeirBadgeDirective]
    });
  });

  it('should create an instance of sfeir-badge', () => {
    const fixture = createTestComponent('<div sfeir-badge></div>');
    console.log(fixture);
    expect(true).toBe(false);
  });

  it('should not add badge icon when isManager === false', () => {
    // todo
    expect(true).toBe(false);
  });

  it('should add badge icon when isManager === true', () => {
    // todo
    expect(true).toBe(false);
  });
});

// Build a host fixture with the caller-supplied template applied.
function createTestComponent(template: string): ComponentFixture<HostComponentForBadgeDirective> {
  return TestBed.overrideComponent(HostComponentForBadgeDirective, { set: { template } }).createComponent(
    HostComponentForBadgeDirective
  );
}
'use strict'; // https://stackoverflow.com/a/14873282 var erf = function(x) { // save the sign of x var sign = x >= 0 ? 1 : -1; x = Math.abs(x); // constants var a1 = 0.254829592; var a2 = -0.284496736; var a3 = 1.421413741; var a4 = -1.453152027; var a5 = 1.061405429; var p = 0.3275911; // A&S formula 7.1.26 var t = 1 / (1 + p * x); var y = 1 - ((((a5 * t + a4) * t + a3) * t + a2) * t + a1) * t * Math.exp(-x * x); return sign * y; // erf(-x) = -erf(x); }; var tScoreToPercentile = function(tScore) { return 50 * erf(Math.SQRT2 * (tScore - 50) / 20) + 50; }; // https://stackoverflow.com/a/69510308 var erfinv = function(x) { var a = 0.147; var b = 2 / (Math.PI * a) + Math.log(1 - Math.pow(x, 2)) / 2; var sqrt1 = Math.sqrt(Math.pow(b, 2) - Math.log(1 - Math.pow(x, 2)) / a); var sqrt2 = Math.sqrt(sqrt1 - b); // https://stackoverflow.com/a/9079549 return sqrt2 * (x ? x < 0 ? -1 : 1 : 0); // sqrt2 * Math.sign(x) }; var percentileToTScore = function(percentile) { return 10 * Math.SQRT2 * erfinv(percentile / 50 - 1) + 50; }; var tScoreInput = $('#t-score'); var toPercentile = $('#to-percentile'); var percentileInput = $('#percentile'); var toTScore = $('#to-t-score'); var convertToPercentile = function() { var tScore = +tScoreInput.val(); var percentile = tScoreToPercentile(tScore); percentileInput.val(percentile); }; toPercentile.click(function() { convertToPercentile(); toPercentile.focus(); }); tScoreInput.keyup(function(e) { if (e.key === 'Enter' || e.keyCode === 13) { convertToPercentile(); } }); var convertToTScore = function() { var percentile = +percentileInput.val(); var tScore = percentileToTScore(percentile); tScoreInput.val(tScore); }; toTScore.click(function() { convertToTScore(); toTScore.focus(); }); percentileInput.keyup(function(e) { if (e.key === 'Enter' || e.keyCode === 13) { convertToTScore(); } }); $(document).on('touchstart', $.noop);
<reponame>RoCci/FrontendStarter-Boilerplate<gh_stars>0
'use strict';

import gulp from 'gulp';
import del from 'del';

// App settings resolved via `rc` (.apprc files / environment overrides).
const config = require( 'rc' )( 'app' );

// Remove the entire build output directory.
gulp.task( 'clean', function() {
    return del( [ config.distDir ] );
} );

// Remove only the compiled test bundle from the build output.
gulp.task( 'clean:test', function() {
    return del( [ config.distDir + config.assetDir + 'js/main.spec.js' ] );
} );
import React, { Component } from 'react';
import { getLedetekst } from '@navikt/digisyfo-npm';

// External link panel to nav.no's payout-dates page. Swaps its SVG icon for
// a hover variant on mouse enter/leave via component state.
export default class Lenke extends Component {
    constructor(props) {
        super(props);
        // ikon: filename of the currently displayed icon.
        this.state = {
            ikon: 'utbetalinger.svg',
        };
        this.onMouseEnter = this.onMouseEnter.bind(this);
        this.onMouseLeave = this.onMouseLeave.bind(this);
    }

    onMouseEnter() {
        this.setState({
            ikon: 'utbetalinger--hover.svg',
        });
    }

    onMouseLeave() {
        this.setState({
            ikon: 'utbetalinger.svg',
        });
    }

    render() {
        const URL = 'https://www.nav.no/no/NAV+og+samfunn/Kontakt+NAV/Utbetalinger/Utbetalinger/Utbetalingsdatoer%2C+feriepenger+og+skattetrekk?kap=499628';
        return (<a
            onMouseEnter={this.onMouseEnter}
            onMouseLeave={this.onMouseLeave}
            href={URL}
            target="_blank"
            className="inngangspanel inngangspanel--ekstern blokk--l">
            <span className="inngangspanel__ikon">
                <img alt="" className="js-ikon" src={`${process.env.REACT_APP_CONTEXT_ROOT}/img/svg/${this.state.ikon}`} />
            </span>
            <div className="inngangspanel__innhold">
                <h2 className="inngangspanel__tittel">{getLedetekst('soknader.sykepenger.tittel')}</h2>
            </div>
        </a>);
    }
}
#!/usr/bin/env bash
#
#   Copyright 2017 Marco Vermeulen
#
#   Licensed under the Apache License, Version 2.0 (the "License");
#   you may not use this file except in compliance with the License.
#   You may obtain a copy of the License at
#
#       http://www.apache.org/licenses/LICENSE-2.0
#
#   Unless required by applicable law or agreed to in writing, software
#   distributed under the License is distributed on an "AS IS" BASIS,
#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#   See the License for the specific language governing permissions and
#   limitations under the License.
#

# Self-update SDKMAN by streaming the remote selfupdate script into bash.
# Pass "force" as $1 to bypass the up-to-date check.
function __sdk_selfupdate {
	local force_selfupdate

	force_selfupdate="$1"

	if [[ "$SDKMAN_AVAILABLE" == "false" ]]; then
		echo "This command is not available while offline."
	elif [[ "$SDKMAN_REMOTE_VERSION" == "$SDKMAN_VERSION" && "$force_selfupdate" != "force" ]]; then
		echo "No update available at this time."
	else
		# Exported so the downloaded installer inherits debug/beta settings.
		export sdkman_debug_mode
		export sdkman_beta_channel
		__sdkman_secure_curl "${SDKMAN_CANDIDATES_API}/selfupdate?beta=${sdkman_beta_channel}" | bash
	fi

	unset SDKMAN_FORCE_SELFUPDATE
}

# Offer an upgrade at most once per day: the mtime of the delay_upgrade
# marker file gates the prompt (find -mtime +1 = older than a day).
function __sdkman_auto_update {
	local remote_version version delay_upgrade

	remote_version="$1"
	version="$2"
	delay_upgrade="${SDKMAN_DIR}/var/delay_upgrade"

	if [[ -n "$(find "$delay_upgrade" -mtime +1)" && "$remote_version" != "$version" ]]; then
		echo ""
		echo ""
		__sdkman_echo_yellow "ATTENTION: A new version of SDKMAN is available..."
		echo ""
		__sdkman_echo_no_colour "The current version is $remote_version, but you have $version."
		echo ""

		# sdkman_auto_selfupdate skips the interactive prompt entirely.
		if [[ "$sdkman_auto_selfupdate" != "true" ]]; then
			__sdkman_echo_confirm "Would you like to upgrade now? (Y/n): "
			# NOTE(review): `read` without -r mangles backslashes — harmless
			# for a Y/n answer, but worth confirming with project style.
			read upgrade
		fi

		# Default (empty answer, or auto mode) is to upgrade.
		if [[ -z "$upgrade" ]]; then
			upgrade="Y"
		fi

		if [[ "$upgrade" == "Y" || "$upgrade" == "y" ]]; then
			__sdk_selfupdate
			unset upgrade
		else
			__sdkman_echo_no_colour "Not upgrading today..."
		fi

		# Reset the once-a-day timer regardless of the answer.
		touch "$delay_upgrade"
	fi
}
// (C) Copyright <NAME> 2021 // Use, modification and distribution are subject to the // Boost Software License, Version 1.0. (See accompanying file // LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // See http://www.boost.org/libs/config for more information. // MACRO: BOOST_NO_CXX20_HDR_CONCEPTS // TITLE: C++20 <concepts> header is either not present or too broken to be used // DESCRIPTION: The compiler does not support the C++20 header <concepts> #include <concepts> namespace boost_no_cxx20_hdr_concepts { using std::same_as; using std::derived_from; using std::convertible_to; using std::common_reference_with; using std::common_with; using std::integral; using std::signed_integral; using std::unsigned_integral; using std::floating_point; using std::assignable_from; using std::swappable; using std::swappable_with; using std::destructible; using std::constructible_from; using std::default_initializable; using std::move_constructible; using std::copy_constructible; using std::equality_comparable; using std::equality_comparable_with; using std::totally_ordered; using std::totally_ordered_with; using std::movable; using std::copyable; using std::semiregular; using std::regular; using std::invocable; using std::regular_invocable; using std::predicate; using std::relation; using std::equivalence_relation; using std::strict_weak_order; using std::swap; int test() { return 0; } }
/*
 * Copyright (c) 2014 Spotify AB.
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.spotify.docker;

import com.spotify.docker.client.AnsiProgressHandler;
import com.spotify.docker.client.DockerClient;

import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.logging.Log;
import org.junit.Test;

import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

/**
 * Tests for {@code Utils.parseImageName} — splitting {@code [registry/][library/]image[:tag]}
 * into a {image, tag} pair — and for {@code Utils.pushImage} delegation.
 * (The "SEPERATOR" spelling in the constant names is pre-existing and kept
 * as-is; renaming would be purely cosmetic.)
 */
public class UtilsTest {

  private static final String TAG = "tag";
  private static final String IMAGE = "image";
  private static final String IMAGE_WITH_SEPERATOR = "image:";
  private static final String IMAGE_WITH_LIBRARY = "library/image";
  private static final String IMAGE_FROM_REGISTRY = "registry:80/library/image";
  private static final String IMAGE_WITH_TAG = "image:tag";
  private static final String IMAGE_FROM_LIB_WITH_TAG = "library/image:tag";
  private static final String IMAGE_FROM_REG_WITH_TAG = "registry:80/library/image:tag";

  @Test
  public void testParseImageName() throws MojoExecutionException {
    String[] result = Utils.parseImageName(IMAGE);
    assertThat(result).containsExactly(IMAGE, null);
  }

  // A trailing ':' with no tag yields a null tag, not an empty string.
  @Test
  public void testParseImageNameWithSeperator() throws MojoExecutionException {
    String[] result = Utils.parseImageName(IMAGE_WITH_SEPERATOR);
    assertThat(result).containsExactly(IMAGE, null);
  }

  @Test
  public void testParseImageNameWithTag() throws MojoExecutionException {
    String[] result = Utils.parseImageName(IMAGE_WITH_TAG);
    assertThat(result).containsExactly(IMAGE, TAG);
  }

  // The library/registry prefix stays part of the image name.
  @Test
  public void testParseImageNameWithLibrary() throws MojoExecutionException {
    String[] result = Utils.parseImageName(IMAGE_WITH_LIBRARY);
    assertThat(result).containsExactly(IMAGE_WITH_LIBRARY, null);
  }

  @Test
  public void testParseImageNameWithLibraryAndTag() throws MojoExecutionException {
    String[] result = Utils.parseImageName(IMAGE_FROM_LIB_WITH_TAG);
    assertThat(result).containsExactly(IMAGE_WITH_LIBRARY, TAG);
  }

  // Only the LAST ':' separates the tag, so "registry:80/..." is preserved.
  @Test
  public void testParseImageNameFromRegistryAndTag() throws MojoExecutionException {
    String[] result = Utils.parseImageName(IMAGE_FROM_REG_WITH_TAG);
    assertThat(result).containsExactly(IMAGE_FROM_REGISTRY, TAG);
  }

  // pushImage should delegate to DockerClient.push with an ANSI progress handler.
  @Test
  public void testPushImage() throws Exception {
    DockerClient dockerClient = mock(DockerClient.class);
    Log log = mock(Log.class);
    final DockerBuildInformation buildInfo = mock(DockerBuildInformation.class);
    Utils.pushImage(dockerClient, IMAGE, log, buildInfo, 0, 1);
    verify(dockerClient).push(eq(IMAGE), any(AnsiProgressHandler.class));
  }
}
import React from 'react'; import PropTypes from 'prop-types'; import FormStyle from './style'; export const Form = (props) => { return ( <FormStyle id="form-group">{props.children}</FormStyle> ) } export default Form
#!/usr/bin/env bash
#===============================================================================
# Combine multiple branches, which match the search criteria, into one new
# branch (typically to merge a pile of dependabot PRs at once).
#
# Requirements: gh (github-cli), git
#===============================================================================

set -o nounset  # Treat unset variables as an error
set -o pipefail # Exit when a command in a pipeline fails

# Fixes vs. the original: the usage text dropped the leading letter of
# "search" and advertised the wrong default branch name
# ("Update-dependencies" instead of the actual "Update-repo-dependencies").
function print_usage() {
    cat <<EOF
usage: ./combine-prs.sh [OPTIONS]

Options:
-b  default head branch to be checkout from. Default: master.
-c  feature branch name to be used. Default: Update-repo-dependencies.
-s  search criteria for branches to be considered in the combination. Default: 'dependabot-'.
*   show this usage.
EOF
    exit 1
}

function run() {
    local RED='\033[0;31m'
    local GREEN='\033[0;32m'
    local YELLOW='\033[0;33m'
    local NC='\033[0m' # No Color

    # Parse arguments
    local base_branch="master"
    local combine_branch_name="Update-repo-dependencies"
    local search_branch_name="dependabot-"
    while getopts ":b:c:s:" option; do
        case "${option}" in
        b) base_branch=${OPTARG} ;;
        c) combine_branch_name=${OPTARG} ;;
        s) search_branch_name=${OPTARG} ;;
        *) print_usage ;;
        esac
    done
    shift $((OPTIND - 1))

    # Update and checkout to new branch
    local current_branch
    current_branch=$(git branch | grep -F '*' | cut -d' ' -f2)
    if [[ "$current_branch" != "$combine_branch_name" ]]; then
        git stash
        git checkout "$base_branch"
        git pull --ff-only
        # The combine branch may not exist yet on a first run; ignore the
        # delete failure instead of printing a spurious error.
        git branch -D "$combine_branch_name" 2>/dev/null || true
        git checkout -b "$combine_branch_name"
        echo ""
    fi

    # Search matching PRs and apply each one as a patch (falling back to a
    # merge with the 'theirs' strategy when the patch does not apply cleanly).
    local pr_count
    local id
    local msg
    pr_count=$(gh pr list | grep -c "$search_branch_name")
    echo -e "${GREEN}about to apply ${pr_count} PRs${NC}"
    gh pr list | grep "$search_branch_name" | while read -r pr; do
        id=$(echo "$pr" | cut -f1 | xargs)
        msg=$(echo "$pr" | cut -f2 | xargs)
        echo -e "${GREEN}try to apply pr #${id}...${NC}"
        if gh pr diff "$id" | git apply; then
            git commit --all --no-verify --message "$msg"
            echo -e "${GREEN}pr #${id}: '${msg}' apply successfully${NC}\n"
        else
            echo -e "${RED}failed to apply pull request, try with merge with 'theirs' strategy${NC}"
            git merge "origin/$(echo "$pr" | cut -f3 | xargs)" \
                --message "$msg" \
                --strategy-option theirs \
                --verbose
            echo -e "${YELLOW}merge pr #${id}: '${msg}'${NC}"
        fi
        echo ""
    done

    # Let the user squash/reword the freshly applied commits.
    git rebase --interactive "HEAD~${pr_count}"
}

##############
# RUN SCRIPT #
##############
run "$@"
<filename>src/provisioner/provisioner/errors.go
package provisioner

// TimeoutError signals that a provisioning operation exceeded its deadline.
type TimeoutError struct{}

// Error implements the error interface.
func (t *TimeoutError) Error() string {
	return "timeout error"
}
def wrap(func):
    """Return a transparent pass-through wrapper around *func*.

    The wrapper forwards all positional and keyword arguments and copies the
    wrapped function's metadata (``__doc__``, ``__name__``, ``__dict__``)
    onto itself, in the style of ``functools.wraps``.

    Bug fix: the original used ``==`` (a no-op comparison) instead of ``=``
    when copying ``__doc__`` and ``__name__``, so wrappers silently kept the
    generic name ``call`` and no docstring.
    """
    def call(*args, **kwargs):
        return func(*args, **kwargs)
    call.__doc__ = func.__doc__
    call.__name__ = func.__name__
    call.__dict__.update(func.__dict__)
    return call
'use strict';

// RuleTester coverage for no-branch-in-dependencies: the rule flags
// GitHub-branch style specifiers ("owner/repo") inside the dependency
// sections of package.json, with `keys` and `ignore` options to tune scope.
const { RuleTester } = require('eslint');
const rule = require('../../../lib/rules/no-branch-in-dependencies');
const preprocess = require('../../helpers/preprocess');

new RuleTester().run('no-branch-in-dependencies', rule, preprocess({
    valid: [
        // Pinned semver versions are fine in any dependency section.
        { code: '{ "dependencies": { "lodash": "1.2.3" } }', filename: 'package.json' },
        { code: '{ "devDependencies": { "lodash": "1.2.3" } }', filename: 'package.json' },
        { code: '{ "optionalDependencies": { "lodash": "1.2.3" } }', filename: 'package.json' },
        // Keys outside the dependency sections are not checked.
        { code: '{ "foo": { "lodash": "lodash/lodash" } }', filename: 'package.json' },
        // `keys` replaces the default set of checked sections.
        { code: '{ "dependencies": { "lodash": "lodash/lodash" } }', filename: 'package.json', options: [{ keys: ['foo'] }] },
        // `ignore` exempts specific package names.
        { code: '{ "dependencies": { "lodash": "lodash/lodash" } }', filename: 'package.json', options: [{ ignore: ['lodash'] }] },
        // Only package.json files are linted.
        { code: '{ "dependencies": { "lodash": "lodash/lodash" } }', filename: 'not-package.json' }
    ],
    invalid: [
        { code: '{ "dependencies": { "lodash": "lodash/lodash" } }', filename: 'package.json', errors: [{ message: 'Don\'t use branches.', type: 'Literal' }] },
        { code: '{ "devDependencies": { "lodash": "lodash/lodash" } }', filename: 'package.json', errors: [{ message: 'Don\'t use branches.', type: 'Literal' }] },
        { code: '{ "optionalDependencies": { "lodash": "lodash/lodash" } }', filename: 'package.json', errors: [{ message: 'Don\'t use branches.', type: 'Literal' }] }
    ]
}));
package org.gbif.converters.parser.xml.identifier;

import java.util.UUID;

import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;

/**
 * A {@link UniqueIdentifier} backed by an identifier string supplied by the
 * publisher, scoped to a dataset via its key.
 */
@EqualsAndHashCode
@Getter
@AllArgsConstructor
public class PublisherProvidedUniqueIdentifier implements UniqueIdentifier {

  // Dataset that scopes the publisher-provided identifier (@NonNull: the
  // Lombok constructor rejects null).
  @NonNull private final UUID datasetKey;

  // Raw identifier as provided by the publisher (@NonNull as above).
  @NonNull private final String publisherProvidedIdentifier;

  // NOTE(review): @Getter already generates getDatasetKey(); Lombok skips
  // generation when an explicit method exists, so this override is redundant
  // but harmless.
  @Override
  public UUID getDatasetKey() {
    return datasetKey;
  }

  /** Key string built by {@link OccurrenceKeyHelper#buildKey}. */
  @Override
  public String getUniqueString() {
    return OccurrenceKeyHelper.buildKey(this);
  }

  /** Key string built by {@link OccurrenceKeyHelper#buildUnscopedKey}. */
  @Override
  public String getUnscopedUniqueString() {
    return OccurrenceKeyHelper.buildUnscopedKey(this);
  }
}
<filename>tests/testAPI.py<gh_stars>1-10
from zillowAPI import zillow
from zillowAPI import ZillowDataType
from zillowAPI import ZillowAPI
from zillowAPI import ZillowError
import unittest
import xml.etree.ElementTree as ET


class ZillowTest(unittest.TestCase):
    """Live integration tests for the Zillow API wrapper.

    NOTE(review): these tests hit the real Zillow web service — they require a
    valid key in tests/apiKeys and network access, and they depend on live
    data for specific zpids.
    """

    def setUp(self):
        # Read the API key from the first line of the local key file.
        with open('tests/apiKeys', 'r') as f:
            self.apikey = f.readline()
        self.apikey = self.apikey.strip('\n')

    def test_get_data_through_get_zestimate(self):
        """Low-level get_data should return the raw XML response as a string."""
        url = 'GetZestimate.htm'
        payload = {'zws-id': self.apikey, 'zpid': 2146876387}
        z = zillow()
        r = z.get_data(url, payload)
        self.assertIsInstance(r, str, "return message is not string")
        self.assertTrue(r.startswith('<?xml'))
        # Dump the raw response for manual inspection.
        with open('tests/temp', 'w') as f:
            f.write(r)

    def test_GetZestimate(self):
        """GetZestimate returns a ZestimateData with typed sub-objects."""
        a = zillow().GetZestimate(self.apikey, zpid=2146876387, rentzestimate=True)
        self.assertIsInstance(a, ZillowAPI.ZestimateData)
        self.assertIsInstance(a.address, ZillowDataType.address)
        self.assertIsInstance(a.zestimate, ZillowDataType.zestimate)
        self.assertIsInstance(a.rent_zestimate, ZillowDataType.rent_zestimate)
        self.assertIsInstance(a.links, ZillowDataType.links)
        #make sure the raw response text is saved
        self.assertIsInstance(a.text, str)
        self.assertTrue(int(a.zpid) == 2146876387)
        self.assertTrue(a.text.startswith('<?xml'))
        # An invalid zpid must raise ZillowRequestError with code 500.
        with self.assertRaises(ZillowError.ZillowRequestError) as cm:
            zillow().GetZestimate(self.apikey, 214687638711, True)
        RE = cm.exception
        self.assertTrue(RE.code == 500)

    def test_GetSearchResults(self):
        """GetSearchResults returns typed results; only the top-level object keeps .text."""
        a = zillow().GetSearchResults(self.apikey, '2114 B<NAME>', 'Seattle, WA', True)
        self.assertIsInstance(a, ZillowAPI.SearchResultData)
        self.assertIsInstance(a.results, list)
        ins = a.results[0]
        self.assertIsInstance(ins.address, ZillowDataType.address)
        self.assertIsInstance(ins.zestimate, ZillowDataType.zestimate)
        self.assertIsInstance(ins.rent_zestimate, ZillowDataType.rent_zestimate)
        self.assertIsInstance(ins.links, ZillowDataType.links)
        # Individual results must NOT carry the raw response text.
        with self.assertRaises(AttributeError):
            getattr(ins, 'text')
        self.assertIsInstance(a.text, str)
        self.assertTrue(a.text.startswith('<?xml'))

    def test_GetComps(self):
        """GetComps returns the principal plus the requested number of scored comps."""
        a = zillow().GetComps(self.apikey, 48749425, 5, True)
        self.assertIsInstance(a, ZillowAPI.ComparableResult)
        self.assertIsInstance(a.text, str)
        self.assertIsInstance(a.principal, ZillowAPI.ZestimateData)
        self.assertIsInstance(a.comps, list)
        self.assertTrue(len(a.comps) == 5)
        self.assertIsInstance(a.comps[0], ZillowAPI.ComparableData)
        comp_instance = a.comps[0]
        self.assertIsInstance(comp_instance.score, float)

    def test_GetDeepComps(self):
        """GetDeepComps mirrors GetComps with the 'deep' data variants."""
        a = zillow().GetDeepComps(self.apikey, 48749425, 5, True)
        self.assertIsInstance(a, ZillowAPI.DeepComparableResult)
        principal = a.principal
        self.assertIsInstance(principal, ZillowAPI.DeepZestimateData)
        comps = a.comps
        self.assertIsInstance(comps, list)
        self.assertIsInstance(comps[0], ZillowAPI.DeepComparableData)
        self.assertIsInstance(comps[0].score, float)

    def test_GetDeepSearch(self):
        """GetDeepSearch returns DeepZestimateData results for an address query."""
        a = zillow().GetDeepSearch(self.apikey, '2114 B<NAME>ve', 'Seattle, WA')
        self.assertIsInstance(a, ZillowAPI.DeepSearchResultData)
        self.assertIsInstance(a.results, list)
        self.assertIsInstance(a.results[0], ZillowAPI.DeepZestimateData)

    def test_GetUpdatedPropertyDetails(self):
        """GetUpdatedPropertyDetails returns an UpdatedPropertyDetails object."""
        a = zillow().GetUpdatedPropertyDetails(self.apikey, 48749425)
        self.assertIsInstance(a, ZillowAPI.UpdatedPropertyDetails)


class DataTypeTest(unittest.TestCase):
    """Offline tests for the XML-backed data-type wrappers, using a fixture file."""

    def setUp(self):
        # Parse the canned GetZestimate response shipped with the tests.
        self.etree = ET.parse('tests/getZestimate.xml')

    def test_class_address(self):
        address = ZillowDataType.address(self.etree.find('./response/address'))
        self.assertIsInstance(address.longitude, float)
        self.assertIsInstance(address.latitude, float)
        self.assertIsInstance(address.zipcode, int)
        self.assertIsInstance(address.street, str)
        # NOTE(review): duplicated assertion on address.street — possibly one
        # was meant to check a different attribute (e.g. city/state).
        self.assertIsInstance(address.street, str)

    def test_class_links(self):
        links = ZillowDataType.links(self.etree.find('./response/links'))
        self.assertIsInstance(links.home_details, str)
        self.assertTrue(links.home_details.startswith('http://'))
        self.assertIsInstance(links.map_this_home, str)
        self.assertIsInstance(links.similar_sales, str)

    def test_class_zestimate(self):
        zestimate = ZillowDataType.zestimate(self.etree.find('./response/zestimate'))
        self.assertIsInstance(zestimate.zestimate, int)
        self.assertIsInstance(zestimate.last_updated, str)
        self.assertIsInstance(zestimate.percentile, str)
        self.assertIsInstance(zestimate.valuation_high, str)
        self.assertIsInstance(zestimate.valuation_low, str)

    def test_class_rent_zestimate(self):
        # The fixture has no rentzestimate node, so construction must fail.
        with self.assertRaises(AttributeError):
            rent_zestimate = ZillowDataType.rent_zestimate(self.etree.find('./response/rentzestimate'))
/** * Copyright 2014 isandlaTech * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.psem2m.isolates.services.dirs; import java.io.File; /** * Describes a simple file finder * * @author <NAME> */ public interface IFileFinderSvc { /** * Tries to find the given file in the platform folders * * @param aBaseFile * Base file reference (aFileName could be relative to it) * @param aFileName * The file to found (uses its absolute path then its name) * @return All found files with the given information, null if none found */ File[] find(File aBaseFile, String aFileName); /** * Tries to find the given file name in the platform folders. * * Tries in the home, then in the base and finally without prefix (for * complete paths). The file name must be a path from the root of a PSEM2M * base folder (home or base), a complete path or a path relative to the * working directory. * * @param aFileName * Name of the file to look for * @return All found files with the given name, null if none found */ File[] find(String aFileName); }
def generate_update_statement(params, table_name="table_name"):
    """Build a parameterized SQL ``UPDATE`` statement for the given columns.

    Each entry of *params* becomes a ``col=%(col)s`` pyformat placeholder, so
    the *values* are bound safely by the DB driver at execution time.

    SECURITY NOTE: the column names themselves (and *table_name*) are
    interpolated directly into the SQL text — they must come from trusted
    code, never from user input.

    :param params: iterable of column names to update
    :param table_name: target table (defaults to the original hard-coded
        ``"table_name"`` for backward compatibility)
    :return: the SQL statement, e.g. ``UPDATE t SET a=%(a)s, b=%(b)s``
    """
    assignments = ", ".join(f"{param}=%({param})s" for param in params)
    return f"UPDATE {table_name} SET {assignments}"
#!/bin/bash -xe
# Build a static nanomsg, build the pinba2 MySQL storage engine against it,
# and copy the resulting plugin into the local MySQL plugin directory.
# Expects sources pre-staged under /_src and installs under /_install.

# build nanomsg and install (this one is a lil tricky to build statically)
cd /_src/nanomsg
cmake \
    -DNN_STATIC_LIB=ON \
    -DNN_ENABLE_DOC=OFF \
    -DNN_MAX_SOCKETS=4096 \
    -DCMAKE_C_FLAGS="-fPIC -DPIC" \
    -DCMAKE_INSTALL_PREFIX=/_install/nanomsg \
    -DCMAKE_INSTALL_LIBDIR=lib \
    .
make -j4
make install

# pinba
cd /_src/pinba2
./buildconf.sh
# Configure against a locally built MariaDB tree and the meow/nanomsg deps.
./configure --prefix=/_install/pinba2 \
    --with-mysql=/home/builder/rpm/mariadb-10.1.26 \
    --with-boost=/usr \
    --with-meow=/_src/meow \
    --with-nanomsg=/_install/nanomsg
make -j4

# FIXME: this needs to be easier
# for install, just copy stuff to mysql plugin dir
cp /_src/pinba2/mysql_engine/.libs/libpinba_engine2.so `mysql_config --plugindir`
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package login;

import entities.User;
import java.io.IOException;
import java.io.PrintWriter;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.FlushModeType;
import javax.persistence.Persistence;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;

/**
 * Servlet that applies profile edits (username, password, name, email, phone)
 * to the currently logged-in user and renders the updated profile page.
 *
 * NOTE(review): the EntityManagerFactory/EntityManager created per request are
 * never closed (resource leak), and creating a factory on every request is
 * expensive — consider a container-managed persistence unit.
 * NOTE(review): the password appears to be stored as plain text; hash it.
 *
 * @author Nikola
 */
public class InfoChanged extends HttpServlet {

    /**
     * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
     * methods.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    protected void processRequest(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        // The current user's username is kept in the session by the login flow.
        HttpSession session = request.getSession();
        String username = (String)session.getAttribute("username");
        EntityManagerFactory emf = Persistence.createEntityManagerFactory("Zadatak9PU");
        EntityManager em = emf.createEntityManager();
        User user = em.find(User.class, username);
        String newUsername = request.getParameter("username");
        // Reject the change if the requested username is taken by another user.
        if (!user.getUsername().equals(newUsername) && em.find(User.class, newUsername) != null) {
            response.sendRedirect("username_exists.html");
            return;
        }
        String newPassword = request.getParameter("password");
        String newFirstName = request.getParameter("first");
        String newLastName = request.getParameter("last");
        String newEmail = request.getParameter("email");
        String newPhone = request.getParameter("phone");
        // Defer flushing until commit so all field updates go out together.
        em.setFlushMode(FlushModeType.COMMIT);
        em.getTransaction().begin();
        user.setUsername(newUsername);
        // NOTE(review): "<PASSWORD>" is a redaction artifact in this source —
        // presumably the argument was newPassword; confirm against the
        // original repository before building.
        user.setPassword(<PASSWORD>);
        user.setFirstName(newFirstName);
        user.setLastName(newLastName);
        user.setEmail(newEmail);
        user.setPhone(newPhone);
        em.getTransaction().commit();
        // Keep the session in sync with the possibly-changed username.
        session.setAttribute("username", newUsername);
        response.setContentType("text/html;charset=UTF-8");
        try (PrintWriter out = response.getWriter()) {
            out.println("<!DOCTYPE html>");
            out.println("<html>");
            out.println("<head>");
            out.println("<title>User page</title>");
            out.println("</head>");
            out.println("<body>");
            out.println("<h1>Profile</h1>");
            out.println("Username: ");
            out.println(user.getUsername());
            out.println("<br />");
            out.println("First name: ");
            out.println(user.getFirstName());
            out.println("<br />");
            out.println("Last name: ");
            out.println(user.getLastName());
            out.println("<br />");
            out.println("Email: ");
            out.println(user.getEmail());
            out.println("<br />");
            out.println("Phone number: ");
            out.println(user.getPhone());
            out.println("<br />");
            out.println("<a href=\"ChangeInfo\" >Change info</a>");
            out.println("<a href=\"Logout\" >Logout</a>");
            out.println("</body>");
            out.println("</html>");
        }
    }

    // <editor-fold defaultstate="collapsed" desc="HttpServlet methods. Click on the + sign on the left to edit the code.">
    /**
     * Handles the HTTP <code>GET</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Handles the HTTP <code>POST</code> method.
     *
     * @param request servlet request
     * @param response servlet response
     * @throws ServletException if a servlet-specific error occurs
     * @throws IOException if an I/O error occurs
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        processRequest(request, response);
    }

    /**
     * Returns a short description of the servlet.
     *
     * @return a String containing servlet description
     */
    @Override
    public String getServletInfo() {
        return "Short description";
    }// </editor-fold>
}
#! /bin/bash
# Start all tools to develop the website.
# Works on my machine (TM)
#
# NOTE(review): the jekyll container is launched in the background (&) and
# `cd _site` runs immediately — on a fresh checkout _site may not exist until
# the first build completes; confirm before relying on this ordering.

# Rebuild the site on every change, serving the output from ./_site.
sudo docker run -i --rm --volume `pwd`:/srv/jekyll/ --name transit-api-frontend jekyll/jekyll:3.8 jekyll build --watch &
cd _site
# Serve the generated site on port 8081.
webfsd -p 8081
firefox http://127.0.0.1:8081/index.html?debug=true
package kbasesearchengine.system;

import java.util.Comparator;

/** Orders {@link ObjectTypeParsingRules} such that parsing rules for subobjects are first in the
 * order.
 * @author <EMAIL>
 *
 */
public class ParsingRulesSubtypeFirstComparator implements Comparator<ObjectTypeParsingRules> {

    @Override
    public int compare(final ObjectTypeParsingRules o1, final ObjectTypeParsingRules o2) {
        // A rule with a subobject type sorts before one without; two rules of
        // the same kind compare equal.
        final boolean firstIsSubtype = o1.getSubObjectType().isPresent();
        final boolean secondIsSubtype = o2.getSubObjectType().isPresent();
        return Boolean.compare(secondIsSubtype, firstIsSubtype);
    }
}
package mezz.jei.api.recipe;

import java.util.Collection;
import java.util.List;

import net.minecraft.item.ItemStack;
import net.minecraft.item.crafting.FurnaceRecipes;

import mezz.jei.api.IJeiHelpers;
import mezz.jei.api.IModRegistry;

/**
 * Allows creation of vanilla recipes.
 * Get the instance from {@link IJeiHelpers#getVanillaRecipeFactory()}.
 * (NOTE(review): this previously pointed at {@code IJeiHelpers#getStackHelper()},
 * which returns the stack helper, not this factory — apparent copy/paste slip.)
 * <p>
 * Use {@link IModRegistry#addRecipes(Collection, String)} to add the recipe.
 *
 * @since JEI 4.5.0
 */
public interface IVanillaRecipeFactory {

    /**
     * Create an anvil recipe for the given inputs and output.
     *
     * @param leftInput   The itemStack placed on the left slot.
     * @param rightInputs The itemStack(s) placed on the right slot.
     * @param outputs     The resulting itemStack(s).
     * @return the {@link IRecipeWrapper} for this recipe.
     */
    IRecipeWrapper createAnvilRecipe(ItemStack leftInput, List<ItemStack> rightInputs, List<ItemStack> outputs);

    /**
     * Create an anvil recipe for the given inputs and output.
     * The number of inputs in the left and right side must match.
     *
     * @param leftInputs  The itemStack(s) placed on the left slot.
     * @param rightInputs The itemStack(s) placed on the right slot.
     * @param outputs     The resulting itemStack(s).
     * @return the {@link IRecipeWrapper} for this recipe.
     * @since JEI 4.14.1
     */
    IRecipeWrapper createAnvilRecipe(List<ItemStack> leftInputs, List<ItemStack> rightInputs, List<ItemStack> outputs);

    /**
     * Create a new smelting recipe.
     * By default, all smelting recipes from {@link FurnaceRecipes#smeltingList} are already added by JEI.
     *
     * @param inputs the list of possible inputs to rotate through
     * @param output the output
     * @return the {@link IRecipeWrapper} for this recipe.
     */
    IRecipeWrapper createSmeltingRecipe(List<ItemStack> inputs, ItemStack output);

    /**
     * Create a new brewing recipe.
     * By default, all brewing recipes are already detected and added by JEI.
     *
     * @param ingredients  the ingredients added to a potion to create a new one.
     *                     Normally one ingredient, but a list will display several in rotation.
     * @param potionInput  the input potion for the brewing recipe.
     * @param potionOutput the output potion for the brewing recipe.
     * @return the {@link IRecipeWrapper} for this recipe.
     */
    IRecipeWrapper createBrewingRecipe(List<ItemStack> ingredients, ItemStack potionInput, ItemStack potionOutput);
}
import subprocess
import logging

log = logging.getLogger(__name__)

# Module-level cache of environment facts; 'rel-version' is filled lazily.
ENVIRONMENT = {'rel-version': None}


def get_release_version(client_id: str) -> str:
    """Look up and cache the release version for *client_id*.

    Runs an external command to query the version, stores the result in
    ``ENVIRONMENT['rel-version']``, and falls back to the sentinel
    ``'unknown-development-version'`` when the command fails or is missing.

    :param client_id: client identifier passed to the version command
    :return: the release version string (or the fallback sentinel)
    """
    try:
        # SECURITY FIX: pass the command as an argument list with the default
        # shell=False instead of interpolating client_id into a shell string,
        # which allowed shell injection via a crafted client_id.
        command = ['get_release_version_command', client_id]  # Replace with the actual command
        release_version = subprocess.check_output(command, text=True).strip()
        ENVIRONMENT['rel-version'] = release_version
    except (subprocess.CalledProcessError, OSError) as ex:
        # OSError covers FileNotFoundError, which replaces the shell's
        # "command not found" non-zero exit now that shell=False is used.
        log.debug('Could not get servinfo for client: %s', ex)
        ENVIRONMENT['rel-version'] = 'unknown-development-version'
    return ENVIRONMENT['rel-version']
// Show registration form 'use strict'; module.exports = function (N, apiPath) { N.validate(apiPath, {}); // Kick logged-in members // N.wire.before(apiPath, function register_guest_only(env) { return N.wire.emit('internal:users.redirect_not_guest', env); }); // Fill page meta // N.wire.on(apiPath, function fill_page_head(env) { env.res.head.title = env.t('title'); }); // Fill oauth providers list & active one (if used to authenticate) // N.wire.after(apiPath, function fill_head_and_breadcrumbs(env) { // If user logged in via oauth, prefill email and oauth status if (env.session.oauth?.info) { env.res.oauth_active = env.session.oauth.info.provider; env.res.email = env.session.oauth.info.email; } let providers = N.config.oauth || {}; env.res.oauth = {}; Object.keys(providers, function (name) { env.res.oauth[name] = providers[name].client; }); }); };
#!/usr/bin/env ruby # # Advent of Code day 2 part 1 # https://adventofcode.com/2019/day/2 require_relative 'lib/orbit_map' map = OrbitMap.new(File.new('6.input').readlines.map(&:chomp)) puts map.total_orbits
<gh_stars>1-10
package com.upplication.cordova.config;

import com.upplication.cordova.*;
import com.upplication.cordova.internal.AndroidProject;
import com.upplication.cordova.internal.XCodeProject;
import com.upplication.cordova.junit.Condition;
import com.upplication.cordova.junit.CordovaCLIRule;
import com.upplication.cordova.junit.OnlyMacOSX;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;

import static java.lang.Thread.sleep;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.*;

/**
 * Integration tests for icon handling: icons declared in config.xml must be
 * copied into the right per-platform locations when a platform is added.
 *
 * NOTE(review): these tests drive the real cordova CLI through
 * {@link CordovaCLIRule} and touch the filesystem; iOS cases run only on
 * macOS (see {@link OnlyMacOSX}).
 */
public class IconIT {

    @Rule
    public CordovaCLIRule cordovaCLIRule = new CordovaCLIRule();
    @Rule
    public TemporaryFolder folder = new TemporaryFolder();

    private CordovaProject cordova;

    @Before
    public void setUp() throws IOException {
        // Fresh cordova project in a temp dir for every test.
        cordova = cordovaCLIRule.get().create(folder.newFolder("cordova-path"));
    }

    @Test
    public void set_icon_android() throws IOException {
        // Declare one icon per Android density in config.xml.
        for (IconAndroid icon : IconAndroid.values()) {
            String srcIcon = "res/icon/android/" + icon.getName();
            Files.copy(Paths.get("src/test/resources/" + srcIcon),
                    cordova.getProject().toPath().resolve(srcIcon),
                    StandardCopyOption.REPLACE_EXISTING);
            cordova.config().icon().add(Icon.create().src(srcIcon).density(icon.getDensity()));
        }

        // check config.xml is modified
        List<Icon> iconsConfig = cordova.config().icon().getAll();
        for (int i = 0; i < IconAndroid.values().length; i++) {
            IconAndroid iconAndroid = IconAndroid.values()[i];
            Icon iconConfig = iconsConfig.get(i);
            assertEquals("res/icon/android/" + iconAndroid.getName(), iconConfig.getSrc());
            assertEquals(iconAndroid.getDensity(), iconConfig.getDensity());
        }

        cordova.platform().add(Platform.Android);

        // check icons is copied in the correct location
        for (IconAndroid icon : IconAndroid.values()) {
            Path originalIcon = cordova.getProject().toPath()
                    .resolve("res/icon/android")
                    .resolve(icon.getName());
            Path pathIconCopied = new AndroidProject(cordova).getIcon(icon.getDensity());
            assertEquals("the icon: " + icon + " is not processed correctly",
                    new String(Files.readAllBytes(originalIcon)),
                    new String(Files.readAllBytes(pathIconCopied)));
        }
    }

    @Test
    @Condition(OnlyMacOSX.class)
    public void set_icon_iphone() throws IOException {
        // see: https://developer.apple.com/ios/human-interface-guidelines/icons-and-images/app-icon/
        // see: https://developer.apple.com/library/content/qa/qa1686/_index.html
        for (IconIos icon : IconIos.values()) {
            String srcIcon = "res/icon/ios/" + icon.getValue();
            Files.copy(Paths.get("src/test/resources/" + srcIcon),
                    cordova.getProject().toPath().resolve(srcIcon),
                    StandardCopyOption.REPLACE_EXISTING);
            cordova.config().icon().add(Icon.create().src(srcIcon).width(icon.getWidth()).height(icon.getHeight()));
        }

        // check config.xml is modified
        List<Icon> iconsConfig = cordova.config().icon().getAll();
        for (int i = 0; i < IconIos.values().length; i++) {
            IconIos iconIos = IconIos.values()[i];
            Icon iconConfig = iconsConfig.get(i);
            assertEquals("res/icon/ios/" + iconIos.getValue(), iconConfig.getSrc());
            assertEquals(iconIos.getWidth(), iconConfig.getWidth().intValue());
            assertEquals(iconIos.getHeight(), iconConfig.getHeight().intValue());
        }

        cordova.platform().add(Platform.IOs);

        XCodeProject xCodeProject = new XCodeProject(cordova);

        // check icons is copied in the correct location
        for (IconIos icon : IconIos.values()) {
            Path originalIcon = cordova.getProject().toPath()
                    .resolve("res/icon/ios")
                    .resolve(icon.getValue());
            Path pathIconCopied = xCodeProject.getIcon(icon.getValue());
            assertEquals("the icon: " + icon + " is not processed correctly",
                    new String(Files.readAllBytes(originalIcon)),
                    new String(Files.readAllBytes(pathIconCopied)));
        }

        // check no other copied
        // but cordova create four icons more based in our images:
        // icon-20@2x.png, AppIcon29x29@2x.png, AppIcon29x29@3x.png, AppIcon40x40@2x.png
        Path iosIconPlatformDefaultCordova = xCodeProject.getIconsFolder();
        File[] iconsPlatformIos = iosIconPlatformDefaultCordova.toFile().listFiles();
        List<String> iconsPlatformIosName = getFileNames(iconsPlatformIos);
        assertEquals(IconIos.values().length + 4, iconsPlatformIosName.size());
        List<String> iconsGenerated = IconIos.getNames();
        iconsGenerated.add("icon-20@2x.png");
        iconsGenerated.add("AppIcon29x29@2x.png");
        iconsGenerated.add("AppIcon29x29@3x.png");
        iconsGenerated.add("AppIcon40x40@2x.png");
        assertTrue(iconsPlatformIosName.containsAll(iconsGenerated));

        // TODO: check file with the content for ios (generated by xcode)
        Path contentJson = xCodeProject.getIconsFolder().resolve("Contents.json");
    }

    // set all ok but the res wrong
    @Test
    @Condition(OnlyMacOSX.class)
    public void set_icon_wrong_resolution_in_configXml() throws IOException {
        String srcIcon = "res/icon/ios/" + IconIos.Icon40.getValue();
        Files.copy(Paths.get("src/test/resources/" + srcIcon),
                cordova.getProject().toPath().resolve(srcIcon),
                StandardCopyOption.REPLACE_EXISTING);
        // wrong width and height (must be 40)
        cordova.config().icon().add(Icon.create().src(srcIcon).width(35).height(35));

        cordova.platform().add(Platform.IOs);

        XCodeProject xCodeProject = new XCodeProject(cordova);
        Path originalIcon = cordova.getProject().toPath()
                .resolve("res/icon/ios")
                .resolve(IconIos.Icon40.getValue());
        Path pathIconcopied = xCodeProject.getIcon(IconIos.Icon40.getValue());
        // is not the same image!
        assertNotEquals("the icon: " + IconIos.Icon40 + " is not processed correctly",
                new String(Files.readAllBytes(originalIcon)),
                new String(Files.readAllBytes(pathIconcopied)));
    }

    @Test
    @Condition(OnlyMacOSX.class)
    public void set_icon_wrong_name_but_good_res_in_configXml() throws IOException {
        String srcIcon = "res/icon/ios/icon-asdadasd.png";
        Files.copy(Paths.get("src/test/resources/res/icon/ios/" + IconIos.Icon40.getValue()),
                cordova.getProject().toPath().resolve(srcIcon));
        // wrong width and height (must be 40)
        cordova.config().icon().add(Icon.create().src(srcIcon).width(IconIos.Icon40.getWidth()).height(IconIos.Icon40.getHeight()));

        cordova.platform().add(Platform.IOs);

        XCodeProject xCodeProject = new XCodeProject(cordova);
        Path originalIcon = cordova.getProject().toPath().resolve(srcIcon);
        // changed with the correct name!
        Path pathIconCopied = xCodeProject.getIcon(IconIos.Icon40.getValue());
        assertEquals("the icon: " + IconIos.Icon40 + " is not processed correctly",
                new String(Files.readAllBytes(originalIcon)),
                new String(Files.readAllBytes(pathIconCopied)));
    }

    @Test
    @Condition(OnlyMacOSX.class)
    public void set_icon_iphone_two_icons_with_same_resolution_the_first_take_preference() throws IOException {
        // we add first an invented name with the res: 40x40
        String srcIcon40invented = "res/icon/ios/icon-asdasdsad.png";
        Path originalIconInvented = Files.copy(Paths.get("src/test/resources/res/icon/ios/icon-40-different.png"),
                cordova.getProject().toPath().resolve(srcIcon40invented));
        cordova.config().icon().add(Icon.create().src(srcIcon40invented).width(40).height(40));
        // then we add another with res: 40x40 and the correct name
        String srcIcon40 = "res/icon/ios/" + IconIos.Icon40.getValue();
        Path originalIcon = Files.copy(Paths.get("src/test/resources/" + srcIcon40),
                cordova.getProject().toPath().resolve(srcIcon40));
        cordova.config().icon().add(Icon.create().src(srcIcon40).width(IconIos.Icon40.getWidth()).height(IconIos.Icon40.getHeight()));

        cordova.platform().add(Platform.IOs);

        XCodeProject xCodeProject = new XCodeProject(cordova);
        // check image
        // the content of the first (name invented) is used
        Path pathIconCopied = xCodeProject.getIcon(IconIos.Icon40.getValue());
        assertEquals("the icon: " + IconIos.Icon40 + " is not processed correctly",
                new String(Files.readAllBytes(originalIconInvented)),
                new String(Files.readAllBytes(pathIconCopied)));
        // the second image is ignored and not used
        Path iosIconPlatformDefaultCordova = xCodeProject.getIconsFolder();
        File[] iconsPlatformIos = iosIconPlatformDefaultCordova.toFile().listFiles();
        for (File icon : iconsPlatformIos) {
            assertNotEquals(new String(Files.readAllBytes(originalIcon)),
                    new String(Files.readAllBytes(icon.toPath())));
        }
    }

    @Test
    @Condition(OnlyMacOSX.class)
    public void set_icon_iphone_custom_location() throws IOException {
        // this is a predefined icon for apple and cordova icon-72.png
        // we are going to save the file in the root project folder
        String srcIcon72 = IconIos.Icon72.getValue();
        // copy the file in the root project folder
        Path pathIcon72 = Files.copy(Paths.get("src/test/resources/res/icon/ios/" + IconIos.Icon72.getValue()),
                cordova.getProject().toPath().resolve(srcIcon72));
        cordova.config().icon().add(Icon.create().src(srcIcon72).width(72).height(72));

        cordova.platform().add(Platform.IOs);

        XCodeProject xCodeProject = new XCodeProject(cordova);
        // check images is in the correct folder once the platform is added
        // the image exists and is the image we add.
        Path pathIcon72copied = xCodeProject.getIcon(IconIos.Icon72.getValue());
        assertTrue(Files.exists(pathIcon72copied));
        assertEquals(new String(Files.readAllBytes(pathIcon72)),
                new String(Files.readAllBytes(pathIcon72copied)));
    }

    @Test
    @Condition(OnlyMacOSX.class)
    public void cordova_ios_by_default_add_some_icons() throws IOException {
        // this is a predefined icon for cordova.
        // but is not processed and is only example, some names doesnt match the real names...
        Path iosIconDefaultCordova = cordova.getProject().toPath().resolve("res/icon/ios/");
        File[] icons = iosIconDefaultCordova.toFile().listFiles();
        // same number of files and same names
        List<String> iconsName = getFileNames(icons);
        assertEquals(4, iconsName.size());
        assertTrue(iconsName.containsAll(Arrays.asList("icon-57-2x.png", "icon-57.png", "icon-72-2x.png", "icon-72.png")));
        // but is not the same than the one added when add ios platform
        // see: IconIosName too know the real files you must change
        cordova.platform().add(Platform.IOs);

        XCodeProject xCodeProject = new XCodeProject(cordova);
        Path iosIconPlatformDefaultCordova = xCodeProject.getIconsFolder();
        File[] iconsPlatformIos = iosIconPlatformDefaultCordova.toFile().listFiles();
        List<String> iconsPlatformIosName = getFileNames(iconsPlatformIos);
        assertEquals(IconIos.values().length, iconsPlatformIosName.size());
        assertTrue(iconsPlatformIosName.containsAll(IconIos.getNames()));
    }

    @Test
    @Condition(OnlyMacOSX.class)
    public void change_cordova_default_icons_without_changing_configXml_doesnt_work() throws IOException {
        String srcIcon72 = "res/icon/ios/icon-72.png";
        Path pathIcon72 = Files.copy(Paths.get("src/test/resources/" + srcIcon72),
                cordova.getProject().toPath().resolve(srcIcon72),
                StandardCopyOption.REPLACE_EXISTING);

        cordova.platform().add(Platform.IOs);

        XCodeProject xCodeProject = new XCodeProject(cordova);
        Path pathIcon72Copied = xCodeProject.getIcon("icon-72.png");
        assertNotEquals(new String(Files.readAllBytes(pathIcon72)),
                new String(Files.readAllBytes(pathIcon72Copied)));
    }

    // Helper: file names in a directory listing, excluding the xcode Contents.json.
    private List<String> getFileNames(File[] icons) {
        List<String> result = new ArrayList<>();
        for (File file : icons) {
            if (!file.getName().equals("Contents.json")) {
                result.add(file.getName());
            }
        }
        return result;
    }
}
#!/bin/bash
#
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Used https://github.com/kubernetes/kubernetes/blob/master/hack/lib/version.sh as a template
# -----------------------------------------------------------------------------
# Version management helpers. These functions help to set, save and load the
# following variables:
#
#    KUBEVIRT_GIT_COMMIT - The git commit id corresponding to this
#          source code.
#    KUBEVIRT_GIT_TREE_STATE - "clean" indicates no changes since the git commit id
#        "dirty" indicates source code changes after the git commit id
#        "archive" indicates the tree was produced by 'git archive'
#    KUBEVIRT_GIT_VERSION - "vX.Y" used to indicate the last release version.

# Grovels through git to set a set of env variables.
function kubevirt::version::get_version_vars() {
    # If the kubernetes source was exported through git archive, then
    # we likely don't have a git tree, but these magic values may be filled in.
    if [[ '$Format:%%$' == "%" ]]; then
        # FIX: these three previously used the KUBE_ prefix while the rest of
        # this file reads KUBEVIRT_-prefixed variables, so the archive branch
        # never had any effect.
        KUBEVIRT_GIT_COMMIT='$Format:%H$'
        KUBEVIRT_GIT_TREE_STATE="archive"
        # When a 'git archive' is exported, the '$Format:%D$' below will look
        # something like 'HEAD -> release-1.8, tag: v1.8.3' where then 'tag: '
        # can be extracted from it.
        if [[ '$Format:%D$' =~ tag:\ (v[^ ,]+) ]]; then
            KUBEVIRT_GIT_VERSION="${BASH_REMATCH[1]}"
        fi
    fi

    local git=(git --work-tree "${KUBEVIRT_DIR}")

    if [[ -n ${KUBEVIRT_GIT_COMMIT-} ]] || KUBEVIRT_GIT_COMMIT=$("${git[@]}" rev-parse "HEAD^{commit}" 2>/dev/null); then
        if [[ -z ${KUBEVIRT_GIT_TREE_STATE-} ]]; then
            # Check if the tree is dirty.  default to dirty
            if git_status=$("${git[@]}" status --porcelain 2>/dev/null) && [[ -z ${git_status} ]]; then
                KUBEVIRT_GIT_TREE_STATE="clean"
            else
                KUBEVIRT_GIT_TREE_STATE="dirty"
            fi
        fi

        # Use git describe to find the version based on tags.
        if [[ -n ${KUBEVIRT_GIT_VERSION-} ]] || KUBEVIRT_GIT_VERSION=$("${git[@]}" describe --tags --abbrev=14 "${KUBEVIRT_GIT_COMMIT}^{commit}" 2>/dev/null); then
            # This translates the "git describe" to an actual semver.org
            # compatible semantic version that looks something like this:
            #   v1.1.0-alpha.0.6+84c76d1142ea4d
            #
            # TODO: We continue calling this "git version" because so many
            # downstream consumers are expecting it there.
            DASHES_IN_VERSION=$(echo "${KUBEVIRT_GIT_VERSION}" | sed "s/[^-]//g")
            if [[ "${DASHES_IN_VERSION}" == "---" ]]; then
                # We have distance to subversion (v1.1.0-subversion-1-gCommitHash)
                KUBEVIRT_GIT_VERSION=$(echo "${KUBEVIRT_GIT_VERSION}" | sed "s/-\([0-9]\{1,\}\)-g\([0-9a-f]\{14\}\)$/.\1\+\2/")
            elif [[ "${DASHES_IN_VERSION}" == "--" ]]; then
                # We have distance to base tag (v1.1.0-1-gCommitHash)
                KUBEVIRT_GIT_VERSION=$(echo "${KUBEVIRT_GIT_VERSION}" | sed "s/-g\([0-9a-f]\{14\}\)$/+\1/")
            fi
            if [[ "${KUBEVIRT_GIT_TREE_STATE}" == "dirty" ]]; then
                # git describe --dirty only considers changes to existing files, but
                # that is problematic since new untracked .go files affect the build,
                # so use our idea of "dirty" from git status instead.
                KUBEVIRT_GIT_VERSION+="-dirty"
            fi

            # If KUBEVIRT_GIT_VERSION is not a valid Semantic Version, then refuse to build.
            if ! [[ "${KUBEVIRT_GIT_VERSION}" =~ ^v([0-9]+)\.([0-9]+)(\.[0-9]+)?(-[0-9A-Za-z.-]+)?(\+[0-9A-Za-z.-]+)?$ ]]; then
                echo "KUBEVIRT_GIT_VERSION should be a valid Semantic Version"
                echo "Please see more details here: https://semver.org"
                exit 1
            fi
        fi
    fi
}

# Emit a single -X ldflag setting pkg/version.<key> to <val>.
function kubevirt::version::ldflag() {
    local key=${1}
    local val=${2}

    echo "-X kubevirt.io/kubevirt/pkg/version.${key}=${val}"
}

# Prints the value that needs to be passed to the -ldflags parameter of go build
function kubevirt::version::ldflags() {
    kubevirt::version::get_version_vars

    # Use the commit timestamp for a reproducible build date when available.
    SOURCE_DATE_EPOCH=$(git show -s --format=format:%ct HEAD)
    local buildDate
    [[ -z ${SOURCE_DATE_EPOCH-} ]] || buildDate="--date=@${SOURCE_DATE_EPOCH}"
    local -a ldflags=($(kubevirt::version::ldflag "buildDate" "$(date ${buildDate} -u +'%Y-%m-%dT%H:%M:%SZ')"))
    if [[ -n ${KUBEVIRT_GIT_COMMIT-} ]]; then
        ldflags+=($(kubevirt::version::ldflag "gitCommit" "${KUBEVIRT_GIT_COMMIT}"))
        ldflags+=($(kubevirt::version::ldflag "gitTreeState" "${KUBEVIRT_GIT_TREE_STATE}"))
    fi

    if [[ -n ${KUBEVIRT_GIT_VERSION-} ]]; then
        ldflags+=($(kubevirt::version::ldflag "gitVersion" "${KUBEVIRT_GIT_VERSION}"))
    fi

    # The -ldflags parameter takes a single string, so join the output.
    echo "${ldflags[*]-}"
}
from django.conf.urls import url

from . import views

# URL routes for the pricing APIs.
#
# BUG FIX: the cryptocurrency and metal-currency routes previously BOTH used
# name="update_de", so reverse("update_de") / {% url 'update_de' %} was
# ambiguous and could only ever resolve to one of them.  The metal-currency
# route now has its own unique name.  NOTE(review): confirm no template or
# caller relied on reversing "update_de" to the metal-currency endpoint.
urlpatterns = [
    # Historical cryptocurrency rates.
    url(r'^cryptocurrencyhistorical/', views.CryptoCurrencyHistoricalAPI.as_view(),
        name="update_de"),
    # Metal (gold/silver/...) currency rates.
    url(r'^metalcurrency/', views.MetalCurrencyAPI.as_view(),
        name="metal_currency"),
    # Stock price lookup.
    url(r'^stockprice/', views.StockPriceAPI.as_view(), name="stock_price"),
]
$namespace(1, '@', function (exports) {

    var utils = $require('utils'),
        internals = exports.__internals__ = exports.__internals__ || {};

    internals.PlainObjectCloner = PlainObjectCloner;

    /**
     * Object cloner.
     *
     * Deep-clones plain objects and arrays while detecting circular
     * references.  Only values whose typeof is boolean, number, string or
     * object survive the clone; everything else (functions, undefined,
     * symbols) is silently skipped.
     *
     * @param {object} target - Object to clone
     */
    function PlainObjectCloner(target) {
        this.target = target;
        // Stack of values currently being cloned; used to detect cycles.
        this.cloning = [];
        // typeof strings accepted by isValidProp.
        this.validTypes = [
            (typeof true),
            (typeof 0),
            (typeof ''),
            (typeof {})
        ];
    }

    // True when the value's typeof is one of the clonable types above.
    PlainObjectCloner.prototype.isValidProp = function (prop) {
        return this.validTypes.indexOf(typeof prop) >= 0;
    }

    // Deep-clone an array.  Returns undefined for non-arrays.
    // Throws if a value is encountered while it is already being cloned
    // higher up the call stack (circular reference).
    PlainObjectCloner.prototype.cloneArray = function (target) {
        if (!utils.isArray(target)) return;

        var arr = [];
        for (var p in target) {
            var prop = target[p];
            if (!this.isValidProp(prop)) continue;

            // A value already on the cloning stack means we looped back to it.
            if (this.cloning.indexOf(prop) >= 0)
                throw new Error('Circular reference detected!');

            this.cloning.push(prop);

            if (utils.isArray(prop)) {
                arr.push(this.cloneArray(prop));
            } else if (utils.isObject(prop)) {
                arr.push(this.cloneObject(prop));
            } else {
                arr.push(prop);
            }

            // Pop the value off the cloning stack now that it is done.
            var cloningIdx = this.cloning.indexOf(prop);
            this.cloning.splice(cloningIdx, 1);
        }
        return arr;
    }

    // Deep-clone a plain object (or array, via delegation).
    // With no argument, clones the constructor's `target`.
    // Returns undefined when the value is neither array nor object.
    PlainObjectCloner.prototype.cloneObject = function () {
        var target = arguments[0] || this.target;

        if (utils.isArray(target)) {
            return this.cloneArray(target);
        }

        if (!utils.isObject(target)) return;

        var clone = {};
        for (var p in target) {
            var prop = target[p];
            if (!this.isValidProp(prop)) continue;

            if (this.cloning.indexOf(prop) >= 0)
                throw new Error('Circular reference detected!');

            this.cloning.push(prop);

            if (utils.isArray(prop)) {
                clone[p] = this.cloneArray(prop);
            } else if (utils.isObject(prop)) {
                clone[p] = this.cloneObject(prop);
            } else {
                clone[p] = prop;
            }

            var cloningIdx = this.cloning.indexOf(prop);
            this.cloning.splice(cloningIdx, 1);
        }
        return clone;
    }
})
#!/usr/bin/env bash
#
# Copyright (c) 2018-2019 The Zenacoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check that all logs are terminated with '\n'
#
# Some logs are continued over multiple lines. They should be explicitly
# commented with \* Continued *\
#
# There are some instances of LogPrintf() in comments. Those can be
# ignored

# Force byte-wise, locale-independent behaviour of grep/git grep.
export LC_ALL=C

# Pipeline:
#   git grep "LogPrintf?\("      every LogPrint(/LogPrintf( call site in *.cpp
#   grep -v '\\n"'               drop calls whose format string ends in \n
#   grep -v '\.\.\.'             drop lines containing "..." (string continues
#                                on a following line, so no \n expected here)
#   grep -v "/\* Continued \*/"  drop lines explicitly marked as continued
#   grep -v "LogPrint()"         drop zero-argument mentions (comments/docs)
#   grep -v "LogPrintf()"        ditto for LogPrintf
UNTERMINATED_LOGS=$(git grep --extended-regexp "LogPrintf?\(" -- "*.cpp" | \
    grep -v '\\n"' | \
    grep -v '\.\.\.' | \
    grep -v "/\* Continued \*/" | \
    grep -v "LogPrint()" | \
    grep -v "LogPrintf()")

# Anything left over is a log call with no trailing newline: report and fail.
if [[ ${UNTERMINATED_LOGS} != "" ]]; then
    # shellcheck disable=SC2028
    echo "All calls to LogPrintf() and LogPrint() should be terminated with \\n"
    echo
    echo "${UNTERMINATED_LOGS}"
    exit 1
fi
#!/bin/bash
# List the top-N GitHub repositories by star count, via the search API.
#
# Usage: ./script <number_of_repos>
#
# FIXES over the previous version:
#   * fetch_github_data ignored the requested count and kept paginating until
#     the API stopped returning full pages (burning rate limit; the search API
#     caps results anyway) -- it now stops once enough rows are fetched.
#   * results were flattened with `echo "${repos[@]}"`, collapsing all rows
#     onto one space-separated line, so `sort -k2,2nr` sorted a single giant
#     line -- rows are now kept newline-separated end to end.
#   * the count argument is now actually passed to the fetcher.

# Check if the required argument is provided
if [ -z "$1" ]; then
    echo "Usage: $0 <number_of_repos>"
    exit 1
fi

# Fetch "name stars" rows (one per line, already ordered by stars desc)
# until at least $1 rows are collected or the API returns nothing.
fetch_github_data() {
    local num_repos=$1
    local per_page=100
    local page=1
    local fetched=0
    local response batch

    while [ "$fetched" -lt "$num_repos" ]; do
        response=$(curl -s "https://api.github.com/search/repositories?q=stars:>1&sort=stars&order=desc&page=$page&per_page=$per_page")
        batch=$(echo "$response" | jq -r '.items[] | .name + " " + (.stargazers_count | tostring)')
        # Empty batch: past the last page, or an API error/rate-limit reply.
        if [ -z "$batch" ]; then
            break
        fi
        printf '%s\n' "$batch"
        fetched=$((fetched + $(printf '%s\n' "$batch" | wc -l)))
        ((page++))
    done
}

# Fetch, re-sort defensively by the star column, and print the top N.
main() {
    local num_repos=$1
    local repo_data

    repo_data=$(fetch_github_data "$num_repos")
    if [ -z "$repo_data" ]; then
        echo "Failed to fetch repository data. Please try again later."
        exit 1
    fi

    echo "Top $num_repos GitHub Repositories by Stars:"
    printf '%s\n' "$repo_data" | sort -k2,2nr | head -n "$num_repos"
}

# Execute the main function with the provided argument
main "$1"
import optparse, glob
import simetra as art
import numpy as np
import injection as inj
import logging
from astropy.time import Time


def main(opts):
    """Run the ARTEMIS transient search over one image section.

    Converts (or loads) per-pixel lightcurves, weights them by primary beam
    and noise, optionally injects synthetic transients, matched-filters the
    lightcurves against templates, and writes the detection statistic map.

    opts -- the optparse options object built in __main__.
    """
    # set up logging info
    logging.basicConfig(filename=opts.outfile.split(".")[0] + ".log",
                        level=logging.INFO)

    # define image section and assign pixel id
    sec, xid, yid = art.define_image_section(opts.section)
    npzname = opts.section.replace(",", "_") + ".npz"

    # convert images to pixel lightcurves (or reload a previous conversion)
    if opts.ioconversion:
        imlist = art.file2list(opts.images)
        imdata, imtime, imnoise = art.timestack(imlist, sec)
        if not opts.beams:
            logging.info("** No primary beam files specified **")
            logging.info("** Analysis will proceed by assuming all primary beam values are 1 **")
            bmdata = np.ones(imdata.shape)
        else:
            bmlist = art.file2list(opts.beams)
            bmdata, notime, nonoise = art.timestack(bmlist, sec, isbeam=True)
        np.savez(npzname, imdata=imdata, imtime=imtime, imnoise=imnoise,
                 bmdata=bmdata)
    else:
        lcarray = np.load(npzname)
        imdata = lcarray["imdata"]
        bmdata = lcarray["bmdata"]
        imnoise = lcarray["imnoise"]
        imtime = Time(lcarray["imtime"])

    # weight the lightcurves (inverse-variance, beam-weighted)
    imdata = imdata * bmdata / imnoise**2  ## ASSUME input images are NOT pbcor images
    bmdata = bmdata**2 / imnoise**2

    # inject transients
    injnpz = None  # default
    if opts.injection:
        injnpz = opts.injnpz
        logging.info("Inject transients from " + injnpz)
        inj_data, inj_amp, inj_params, inj_t0 = inj.inject_transients(
            imdata.shape, imtime, injnpz, plot=opts.plot)
        art.writeifits(opts.outfile.split(".")[0] + "_istore.fits",
                       xid, yid, inj_amp, inj_params, inj_t0)
        imdata = imdata + bmdata * inj_data

    # calculate rho (detection statistic)
    # BUG FIX: this used to be `map(int, ...)`, which returns a lazy iterator
    # on Python 3 and raised TypeError when subscripted below.
    template_id = [int(x) for x in opts.which_template.split(",")]  # for single template option
    rho, pnames = art.hunt_transients(imtime, imdata, bmdata, opts.template,
                                      opts.single, opts.dtshift,
                                      i=template_id[0], j=template_id[1],
                                      plot=opts.plot)

    # save output
    art.writefits(opts.outfile, xid, yid, rho, pnames, injnpz=injnpz)


if __name__ == "__main__":
    template_choices = ["tophat", "power_law", "fred"]
    parser = optparse.OptionParser()
    parser.set_usage("Usage: python %prog [options]")
    parser.add_option("-i", "--images", default="", type="str",
                      help="List of input sky images ('*.fits'). [default: %default]")
    parser.add_option("-b", "--beams", default="", type="str",
                      help="List of input primary beam images ('*.fits'). [default: %default]")
    parser.add_option("-s", "--section", default="", type="str",
                      help="Coordinate list to define the image section (x1,x2,y1,y2). [default: %default]")
    parser.add_option("-t", "--template", default="tophat", type="choice",
                      choices=template_choices,
                      help="Template name. Valid options: " + ",".join(template_choices) + " [default: %default]")
    parser.add_option("-o", "--outfile", default="killua.fits", type="str",
                      help="Output filename. Existing files will be overwritten. [default: %default]")
    parser.add_option("-l", "--single", action="store_true", default=False,
                      help="Iterate over one single template. [default: %default]")
    parser.add_option("-c", "--which_template", default="0,0", type="str",
                      help="Single template choice: template_index, start_time_index. [default: %default]")
    parser.add_option("-j", "--injection", action="store_true", default=False,
                      help="Run transient injection. [default: %default]")
    parser.add_option("-f", "--injnpz", default="injparam.npz", type="str",
                      help="Transient injection npz file. [default: %default]")
    parser.add_option("-p", "--plot", action="store_true", default=False,
                      help="Plot search or injection templates. This will slow down the code. [default: %default]")
    parser.add_option("-d", "--dtshift", default=120, type="float",
                      help="Template duration shift in sec. [default: %default (1 snapshot)]")
    parser.add_option("-w", "--ioconversion", action="store_true", default=False,
                      help="Run I/O fits2npz conversion. [default: %default]")
    (opts, args) = parser.parse_args()

    main(opts)
# Producer/consumer example for the `agent` gem's Go-style channels.
#
# A goroutine-style producer pushes an ever-increasing integer sequence into a
# typed channel; the main thread consumes the first two values and closes the
# channel.
#
# FIX: removed the `<reponame>...<filename>...` extraction residue that was
# fused onto the first line and made the script unloadable.

# Make ../lib loadable so the example runs straight from a source checkout.
project_lib_path = File.expand_path(File.join(File.dirname(__FILE__), "..", "lib"))
$LOAD_PATH.unshift(project_lib_path)

require 'agent'

# Channel carrying Integer values.
c = channel!(Integer)

# Producer: runs concurrently; each send blocks until a consumer receives.
go!(c) do |c|
  i = 0
  loop { c << i += 1 }
end

p c.receive[0] # => 1
p c.receive[0] # => 2

c.close
from functools import partial

from django.core.exceptions import ValidationError

from cyder.base.tests import ModelTestMixin
from cyder.core.ctnr.models import Ctnr
from cyder.core.system.models import System
from cyder.cydns.tests.utils import create_reverse_domain, create_zone, DNSTest
from cyder.cydns.ip.utils import ip_to_reverse_name
from cyder.cydns.domain.models import Domain
from cyder.cydns.ptr.models import PTR
from cyder.cydns.ip.models import Ip
from cyder.cydhcp.interface.static_intr.models import StaticInterface
from cyder.cydhcp.network.models import Network
from cyder.cydhcp.range.models import Range
from cyder.cydhcp.vrf.models import Vrf


class PTRTests(DNSTest, ModelTestMixin):
    """Validation tests for the PTR (reverse DNS) model.

    Exercises creation, update and deletion of v4/v6 PTR records against
    fixture zones, domains, networks and ranges built in setUp, including
    the container (ctnr) permission model and conflicts with static
    interfaces.
    """

    def setUp(self):
        # Build the shared fixtures: a VRF, reverse zones, a container
        # (self.c1), a v4 network + static range, forward domains, and a
        # v6 network/range rooted at self.osu_block.
        super(PTRTests, self).setUp()

        Vrf.objects.create(name='test_vrf')

        self._128 = create_zone('128.in-addr.arpa')
        create_zone('8.ip6.arpa')

        self.c1 = Ctnr.objects.create(name='test_ctnr1')

        self.n = Network.objects.create(
            vrf=Vrf.objects.get(name='test_vrf'), ip_type='4',
            network_str='172.16.58.3/24')
        self.r = Range.objects.create(
            network=self.n, range_type='st', start_str='172.16.17.32',
            end_str='172.16.31.10')
        self.c1.ranges.add(self.r)

        # Forward domains the test fqdns can live in.
        for name in ('edu', 'oregonstate.edu', 'bar.oregonstate.edu',
                     'nothing', 'nothing.nothing', 'nothing.nothing.nothing'):
            d = Domain.objects.create(name=name)
            self.c1.domains.add(d)

        create_reverse_domain('172.16.58.3', ip_type='6')

        # Prefix used to build the v6 addresses in the tests below.
        self.osu_block = "8620:105:F000:"

        self.create_network_range(
            network_str="8620:105::/32", start_str='8620:fc00:e968:6179::de52:7100',
            end_str='8620:fc00:e968:6179::de52:7100', ip_type='6')

    def create_network_range(self, network_str, start_str, end_str,
                             range_type="st", ip_type='4', domain=None):
        # Helper: create a Network plus a Range inside it and attach the
        # range to self.c1.  Defaults to a static v4 range in
        # oregonstate.edu when no domain is given.
        if domain is None:
            domain = Domain.objects.get(name="oregonstate.edu")
        n = Network.objects.create(
            vrf=Vrf.objects.get(name='test_vrf'), ip_type=ip_type,
            network_str=network_str)
        r = Range.objects.create(
            network=n, range_type=range_type, start_str=start_str,
            end_str=end_str, domain=domain, ip_type=ip_type)
        self.c1.ranges.add(r)

    def create_ptr(self, **kwargs):
        # Helper: create a PTR, defaulting the container to self.c1.
        kwargs.setdefault('ctnr', self.c1)
        return PTR.objects.create(**kwargs)

    @property
    def objs(self):
        """Create objects for test_create_delete."""
        return (
            self.create_ptr(
                ip_str='172.16.31.10', ip_type='4',
                fqdn='a.oregonstate.edu'),
            self.create_ptr(
                ip_str='172.16.31.10', ip_type='4',
                fqdn='bbbbbbbbbbbbbb.nothing.nothing'),
            self.create_ptr(
                ip_str='192.168.3.11', ip_type='4',
                fqdn='c-c-c-c-c-c.nothing'),
            self.create_ptr(
                ip_str='172.16.58.3', ip_type='4', fqdn='d1d.edu'),
        )

    def test_no_domain(self):
        # fqdns outside any fixture domain must be rejected.
        for fqdn in ('lol.foo', 'oregonstate.com', 'me.oregondfastate.edu'):
            self.assertRaises(
                ValidationError, self.create_ptr, ip_str='244.123.123.123',
                ip_type='4', fqdn=fqdn)

    def test_invalid_name(self):
        # Syntactically invalid fqdns must fail on both create and update,
        # for v4 and v6 PTRs alike.
        ptr_v4 = self.create_ptr(
            ip_str='192.168.3.11', ip_type='4', fqdn='foo.oregonstate.edu')
        ptr_v6 = self.create_ptr(
            ip_str=(self.osu_block + ':1'), ip_type='6',
            fqdn='foo.oregonstate.edu')

        bad_fqdns = (
            '2134!@#$!@', 'asdflj..com', 'A' * 257, '.oregonstate.edu',
            '%.s#.com')

        for fqdn in bad_fqdns:
            self.assertRaises(
                ValidationError, self.create_ptr, ip_str='192.168.3.11',
                ip_type='4', fqdn=fqdn)

            self.assertRaises(
                ValidationError, self.do_generic_update, ptr_v4, fqdn=fqdn)

            self.assertRaises(
                ValidationError, self.create_ptr,
                ip_str=(self.osu_block + ':2'), ip_type='6', fqdn=fqdn)

            self.assertRaises(
                ValidationError, self.do_generic_update, ptr_v6, fqdn=fqdn)

    def test_invalid_ip(self):
        # Malformed IP strings must fail on both create and update.
        ptr_v4 = self.create_ptr(
            ip_str='192.168.3.11', ip_type='4', fqdn='foo.oregonstate.edu')

        bad_ipv4_ips = (
            '123.123', 'asdfasdf', 32141243, '128.123.123.123.123', '....',
            '1234.', None, False, True)

        for ip_str in bad_ipv4_ips:
            self.assertRaises(
                ValidationError, self.create_ptr, fqdn='oregonstate.edu',
                ip_str=ip_str, ip_type='4')

            self.assertRaises(
                ValidationError, self.do_generic_update, ptr_v4,
                ip_str=ip_str)

        ptr_v6 = self.create_ptr(
            ip_str=(self.osu_block + ':1'), ip_type='6',
            fqdn='foo.oregonstate.edu')

        # Note: includes v4-shaped strings, which are invalid as v6 PTR IPs.
        bad_ipv6_ips = (
            '172.16.31.10.', '123:!23:!23:', ':::', None, True, False,
            lambda x: x, 'fdf8:f53e:61e4::18:9:1',
            'fd00:a516:7c1b:17cd:6d81:2137:bd2a:2c5b', '8.9.9.1',
            '172.16.31.10')

        for ip_str in bad_ipv6_ips:
            self.assertRaises(
                ValidationError, self.create_ptr, ip_str=ip_str,
                fqdn='oregonstate.edu', ip_type='6')

            self.assertRaises(
                ValidationError, self.do_generic_update, ptr_v6,
                ip_str=ip_str)

    def test_no_reverse_domain(self):
        # IPs with no matching reverse zone must be rejected.
        self.assertRaises(
            ValidationError, self.create_ptr, fqdn='oregonstate.edu',
            ip_str='8.9.9.1', ip_type='4')

        self.assertRaises(
            ValidationError, self.create_ptr, fqdn='oregonstate.edu',
            ip_str='172.16.31.10', ip_type='4')

    def do_generic_remove(self, ip_str, fqdn, ip_type):
        # Create a PTR, delete it, then verify no PTR with the same
        # fqdn/ip pair remains in the database.
        ptr = PTR.objects.create(
            ip_str=ip_str, fqdn=fqdn, ip_type=ip_type, ctnr=self.c1)

        ptr.delete()

        ip = Ip(ip_str=ip_str, ip_type=ip_type)
        ip.clean_ip()

        self.assertFalse(PTR.objects.filter(
            fqdn=fqdn, ip_upper=ip.ip_upper, ip_lower=ip.ip_lower).exists())

    def test_remove_ipv4(self):
        self.create_network_range(
            network_str='172.16.31.10/16', start_str='192.168.127.12',
            end_str='192.168.3.11')

        self.do_generic_remove(
            ip_str='192.168.127.12', ip_type='4',
            fqdn='asdf34foo.bar.oregonstate.edu')
        self.do_generic_remove(
            ip_str='172.16.31.10', ip_type='4',
            fqdn='fo124kfasdfko.bar.oregonstate.edu')
        self.do_generic_remove(
            ip_str='192.168.127.12', ip_type='4',
            fqdn='or1fdsaflkegonstate.edu')
        self.do_generic_remove(
            ip_str='192.168.127.12', ip_type='4',
            fqdn='12.bar.oregonstate.edu')
        self.do_generic_remove(
            ip_str='192.168.127.12', ip_type='4',
            fqdn='fcwoo.bar.oregonstate.edu')
        self.do_generic_remove(
            ip_str='192.168.3.11', ip_type='4',
            fqdn='asffad124jfasf-oregonstate.edu')

    def test_remove_ipv6(self):
        self.do_generic_remove(
            ip_str=(self.osu_block + ":1"), ip_type='6',
            fqdn='asdf34foo.bar.oregonstate.edu')
        self.do_generic_remove(
            ip_str=(self.osu_block + ":2"), ip_type='6',
            fqdn='fo124kfasdfko.bar.oregonstate.edu')
        self.do_generic_remove(
            ip_str=(self.osu_block + ":8"), ip_type='6',
            fqdn='or1fdsaflkegonstate.edu')
        self.do_generic_remove(
            ip_str=(self.osu_block + ":8"), ip_type='6',
            fqdn='12.bar.oregonstate.edu')
        self.do_generic_remove(
            ip_str=(self.osu_block + ":20"), ip_type='6',
            fqdn='fcwoo.bar.oregonstate.edu')
        self.do_generic_remove(
            ip_str=(self.osu_block + ":ad"), ip_type='6',
            fqdn='asffad124jfasf-oregonstate.edu')

    def do_generic_update(self, ptr, fqdn=None, ip_str=None):
        # Apply the given field changes, save, and verify the stored row
        # round-trips with the same fqdn/ip_str.
        if fqdn is not None:
            ptr.fqdn = fqdn
        if ip_str is not None:
            ptr.ip_str = ip_str
        ptr.save()

        db_ptr = PTR.objects.get(
            fqdn=ptr.fqdn, ip_upper=ptr.ip_upper, ip_lower=ptr.ip_lower)
        self.assertEqual(ptr.fqdn, db_ptr.fqdn)
        self.assertEqual(ptr.ip_str, db_ptr.ip_str)

    def test_update_ipv4(self):
        self.create_network_range(
            network_str='172.16.58.3/24', start_str='192.168.127.12',
            end_str='192.168.3.11')

        ptr = self.create_ptr(
            ip_str='192.168.127.12', ip_type='4', fqdn='oregonstate.edu')

        self.do_generic_update(ptr, fqdn='nothing.nothing.nothing')
        self.do_generic_update(ptr, fqdn='google.edu')
        self.do_generic_update(ptr, fqdn='bar.oregonstate.edu')

    def test_update_ipv6(self):
        ptr = self.create_ptr(
            ip_str=(self.osu_block + ':1'), ip_type='6',
            fqdn='oregonstate.edu')

        self.do_generic_update(ptr, fqdn="nothing.nothing.nothing")
        self.do_generic_update(ptr, fqdn="google.edu")
        self.do_generic_update(ptr, fqdn="bar.oregonstate.edu")

    def test_ctnr_range(self):
        """Test that a PTR is allowed only in its IP's range's containers"""
        c2 = Ctnr.objects.create(name='test_ctnr2')
        r = self.r
        self.c1.ranges.add(r)

        self.create_ptr(
            fqdn='www1.oregonstate.edu', ip_str='172.16.17.32', ip_type='4',
            ctnr=self.c1)

        # c2 has no claim on the range, so creation under it must fail.
        with self.assertRaises(ValidationError):
            self.create_ptr(
                fqdn='www2.oregonstate.edu', ip_str='172.16.58.3',
                ip_type='4', ctnr=c2)

    def test_target_existence(self):
        """Test that a PTR's target is not required to exist"""
        self.create_ptr(
            ip_str='172.16.17.32', fqdn='nonexistent.oregonstate.edu',
            ip_type='4')

    def test_domain_ctnr(self):
        """Test that a PTR's container is independent of its domain's
        container
        """
        self.c1.domains.add(Domain.objects.get(name='oregonstate.edu'))
        c2 = Ctnr.objects.create(name='test_ctnr2')
        c2.ranges.add(self.r)

        self.create_ptr(
            ip_str='172.16.17.32', fqdn='foo1.oregonstate.edu', ip_type='4',
            ctnr=self.c1)
        self.create_ptr(
            ip_str='172.16.58.3', fqdn='foo2.oregonstate.<EMAIL>',
            ip_type='4', ctnr=c2)

    def test_target_resembles_ip(self):
        """Test that a PTR's target cannot resemble an IP address"""
        for fqdn in ('10.234.30.253', '172.16.58.3',
                     'fe80::e1c9:1:228d:d8'):
            with self.assertRaises(ValidationError):
                self.create_ptr(ip_str='172.16.17.32', fqdn=fqdn, ip_type='4')

    def test_same_ip_as_static_intr(self):
        """Test that a PTR and a static inteface cannot share an IP

        (It doesn't matter whether the static interface is enabled.)
        """
        def create_si(dns_enabled):
            # Builds a System plus a StaticInterface on the contested IP.
            s = System.objects.create(name='test_system', ctnr=self.c1)
            return StaticInterface.objects.create(
                mac='be:ef:fa:ce:12:34', label='foo1',
                domain=Domain.objects.get(name='oregonstate.edu'),
                ip_str='172.16.17.32', ip_type='4', system=s,
                dns_enabled=dns_enabled)

        create_si_enabled = partial(create_si, True)
        create_si_enabled.name = "StaticInterface with DNS enabled"
        create_si_disabled = partial(create_si, False)
        create_si_disabled.name = "StaticInterface with DNS disabled"

        def create_ptr():
            return self.create_ptr(
                ip_str='172.16.17.32', ip_type='4',
                fqdn='foo2.oregonstate.edu')
        create_ptr.name = 'PTR'

        self.assertObjectsConflict((create_si_enabled, create_ptr))
        self.assertObjectsConflict((create_si_disabled, create_ptr))

    def test_same_ip(self):
        """Test that two PTRs cannot have the same IP"""
        self.create_ptr(
            ip_str='172.16.17.32', ip_type='4', fqdn='foo1.oregonstate.edu')

        with self.assertRaises(ValidationError):
            self.create_ptr(
                ip_str='172.16.17.32', ip_type='4',
                fqdn='foo2.oregonstate.edu')

    def test_ptr_in_dynamic_range(self):
        """Test that the IP cannot be in a dynamic range"""
        self.create_network_range(
            network_str='172.16.58.3/24', start_str='172.16.58.3',
            end_str='192.168.3.11', range_type='dy')

        with self.assertRaises(ValidationError):
            self.create_ptr(
                ip_str='172.16.58.3', ip_type='4',
                fqdn='foo.oregonstate.edu')
#! /usr/bin/env bash
#
#   Copyright (c) 2019 Nat! - Mulle kybernetiK
#   All rights reserved.
#
#   Redistribution and use in source and binary forms, with or without
#   modification, are permitted provided that the following conditions are met:
#
#   Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
#
#   Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
#
#   Neither the name of Mulle kybernetiK nor the names of its contributors
#   may be used to endorse or promote products derived from this software
#   without specific prior written permission.
#
#   THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
#   AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
#   IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
#   ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
#   LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
#   CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
#   SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
#   INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
#   CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
#   ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
#   POSSIBILITY OF SUCH DAMAGE.
#   (final clause restored; the header was truncated mid-sentence)

# Emit the literal values a BOOL property may take, one per line.
# Presumably consumed by a code generator enumerating valid values -- the
# callers are not visible in this file.
emit_BOOL_values()
{
   cat <<EOF
YES
NO
EOF
}


# Emit a C printf statement that prints a BOOL expression as YES/NO.
#
#   $1 : C expression/variable to print
#   $2 : name -- assigned but not used here; kept, presumably for a uniform
#        emitter signature (confirm against the callers)
#   $3 : indentation string prepended to the emitted line
emit_BOOL_printer()
{
   local variable="$1"
   local name="$2"
   local indent="$3"

   echo "${indent}printf( \"%s\\n\", ${variable} ? \"YES\" : \"NO\");"
}
/* eslint-disable @typescript-eslint/no-var-requires */ import os from 'os'; import { command } from 'yargs' const packageJson = require('../../package.json'); export = command('info', 'info/version information for cli', () => { // nothing }, () => { console.log(`project details: `, { name: packageJson.name, description: packageJson.description, version: packageJson.version, license: packageJson.license, repository: packageJson.repository.url, host: JSON.stringify({ hostOS: os.type(), platform: os.platform(), release: os.release() }), userInfo: JSON.stringify(os.userInfo()) }); } )
package com.example.PromoLac.NotificationLogs;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.content.LocalBroadcastManager;
import android.telephony.SmsManager;
import android.telephony.SmsMessage;
import android.util.Log;
import android.widget.Toast;

/**
 * Broadcast receiver for incoming SMS messages.
 *
 * Decodes each raw PDU from the broadcast extras, logs the sender and body,
 * and re-broadcasts them locally under the "Msg" action so in-app listeners
 * can record them.
 */
public class IncomingSms extends BroadcastReceiver {

    // Get the object of SmsManager
    // NOTE(review): `sms` is never referenced in this class -- presumably kept
    // for sending replies; confirm before removing.
    final SmsManager sms = SmsManager.getDefault();

    public void onReceive(Context context, Intent intent) {

        // Retrieves a map of extended data from the intent.
        final Bundle bundle = intent.getExtras();

        try {

            if (bundle != null) {

                // Each PDU (protocol data unit) in "pdus" is one raw SMS payload.
                final Object[] pdusObj = (Object[]) bundle.get("pdus");

                for (int i = 0; i < pdusObj.length; i++) {

                    SmsMessage currentMessage = SmsMessage.createFromPdu((byte[]) pdusObj[i]);
                    String phoneNumber = currentMessage.getDisplayOriginatingAddress();

                    String senderNum = phoneNumber;
                    String message = currentMessage.getDisplayMessageBody();

                    Log.i("SmsReceiver", "senderNum: " + senderNum + "; message: " + message);

                    // Show Alert
                    // NOTE(review): `duration` is only used by the Toast code
                    // below, which is commented out.
                    int duration = Toast.LENGTH_LONG;
                    // Toast toast = Toast.makeText(context,
                    //         "senderNum: "+ senderNum + ", message: " + message, duration);
                    // toast.show();

                    // Re-broadcast the SMS locally (same extras shape as the
                    // app's notification-log messages) for in-app listeners.
                    Intent msgrcv = new Intent("Msg");
                    msgrcv.putExtra("package", "");
                    msgrcv.putExtra("ticker", senderNum);
                    msgrcv.putExtra("title", senderNum);
                    msgrcv.putExtra("text", message);
                    LocalBroadcastManager.getInstance(context).sendBroadcast(msgrcv);

                } // end for loop
            } // bundle is null

        } catch (Exception e) {
            Log.e("SmsReceiver", "Exception smsReceiver" +e);
        }
    }
}
package cucumber.api.rhino;

/**
 * Empty placeholder class for the {@code cucumber.api.rhino} package.
 *
 * Has no members; presumably exists only so the package is represented in
 * generated documentation/artifacts -- confirm before removing.
 */
public class README {
}
#!/bin/bash
# Install and start a single-node MinIO server plus the `mc` client, then
# exercise the server with a few mc commands.
#
# FIXES for idempotent reruns (the script uses `set -e`):
#   * /etc/default/minio is now written with plain `tee` (overwrite); the
#     previous `tee -a` appended a duplicate config block on every rerun.
#   * `useradd` is guarded with `id`; it used to fail (and abort the script)
#     when the minio-user already existed.
set -e

MINIO_ROOT_USER=admin
MINIO_ROOT_PASSWORD=password
MINIO_VOLUMES=/mnt/data

# -nc: skip the download when the file already exists.
wget -nc https://dl.min.io/server/minio/release/linux-amd64/minio
chmod +x minio
sudo cp minio /usr/local/bin

wget -nc https://dl.min.io/client/mc/release/linux-amd64/mc
chmod +x mc
sudo cp mc /usr/local/bin

sudo mkdir -p $MINIO_VOLUMES
sudo chmod -R 0777 $MINIO_VOLUMES

# https://github.com/minio/minio-service
if ! id minio-user >/dev/null 2>&1; then
    sudo useradd -s /sbin/nologin -M -U minio-user
fi

sudo mkdir -p /etc/default
cat <<EOT | sudo tee /etc/default/minio
# Volume to be used for MinIO server.
MINIO_VOLUMES="$MINIO_VOLUMES"
# Use if you want to run MinIO on a custom port.
MINIO_OPTS="--address 127.0.0.1:9000 --console-address 127.0.0.1:9001"
# Root user for the server.
MINIO_ROOT_USER=$MINIO_ROOT_USER
# Root secret for the server.
MINIO_ROOT_PASSWORD=$MINIO_ROOT_PASSWORD
EOT

# Install and start the systemd unit from the minio-service repo.
wget -nc https://raw.githubusercontent.com/minio/minio-service/master/linux-systemd/minio.service
sudo cp minio.service /etc/systemd/system/
sudo systemctl enable minio.service
sudo systemctl start minio.service
sudo systemctl status minio.service

echo "Or start MinIO server"
echo ""
echo "  MINIO_ROOT_USER=admin MINIO_ROOT_PASSWORD=password /usr/local/bin/minio server /mnt/data --console-address ':9001' &"

# Give the server a moment to come up before talking to it.
sleep 5

mc alias set my1 http://127.0.0.1:9000/ admin password
mc alias set my2 https://127.0.0.1/ admin password
mc alias set my3 https://example.local/ admin password
mc alias set my4 https://my.example.local/ admin password

mc mb my4/test
mc cp minio.service my4/test/minio.service
mc ls my4/
mc ls my4/test/
mc --debug ls my4/test
#!/usr/bin/env bash
# MIT License
#
# Copyright (c) 2019 Collin Pasternack
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# CPasternack github.com/cpasternack
# 09/02/2019
# Install smartmon tools in opensolaris/illumos
# v0.1
# Instructions on setting up smf originally from:
# https://nlfiedler.github.io/2009/02/22/setting-up-smartmontools-on-opensolaris.html
#
# FIXES:
#   * The privilege check computed USERID=$(id -u) but then tested $USER --
#     a login-name string compared numerically -- so the root check never
#     worked.  It now tests $USERID.
#   * Variables are quoted in the file tests; unused variables removed.
#   * The script now exits non-zero when smartd does not come up online.

SMARTMONTOOLSDIR=/etc/smartmontools
SMARTDXML=/var/svc/manifest/site/smartd.xml
SMARTDLXML=smartd.xml

USERID=$(id -u)

# Must run with enhanced permissions (effective uid 0).
if [[ 0 -ne ${USERID} ]]
then
    echo "This script must be run with /usr/bin/pfexec or /usr/bin/pfbash"
    exit 1
fi

# Check that the smartmontools directory in /etc exists.
if [ ! -d "${SMARTMONTOOLSDIR}" ]
then
    mkdir -p "${SMARTMONTOOLSDIR}"
fi

# Copy the service files if the manifest exists and is not empty.
if [ -e "${SMARTDLXML}" ] && [ -s "${SMARTDLXML}" ]
then
    cp ./"${SMARTDLXML}" "${SMARTDXML}"
    cp ./smartd.conf /etc/smartmontools/
    cp ./smartmontools /etc/default/
    cp ./smartd.init /etc/rc3.d/S90-smartd
    cp ./smartd_warning.sh /etc/smartmontools/
fi

# Set ownership and make the init script executable.
if [ -e "${SMARTDXML}" ]
then
    chown root:sys /var/svc/manifest/site/smartd.xml
    chown root:sys /etc/rc3.d/S90-smartd
    chown root:sys /etc/smartmontools/smartd_warning.sh
    chmod a+x /etc/rc3.d/S90-smartd
fi

# Import the SMF service configuration and enable the service.
svccfg -v import /var/svc/manifest/site/smartd.xml
svcadm enable smartd

# Verify the service is running: `svcs -xv` prints explanations only for
# services in a problem state, so any output means something is wrong.
SMARTDSVCS=$(svcs | grep smartd)
SVCSERROR=$(svcs -xv | wc -l)

if [[ 0 -lt ${SVCSERROR} ]]
then
    echo "Check service configuration file and restart manually"
    exit 2
fi

# The svcs STATE column is first, so a healthy line starts with "online".
if [[ ${SMARTDSVCS} == "online"* ]]; then
    echo "Smartd online and configured."
    echo "Complete. Exiting."
    exit 0
fi

# Not in a reported error state, but not online either (e.g. still starting).
echo "smartd is not online yet; check 'svcs smartd' manually."
exit 3
def initialize_and_run_mapper(backbone: str, featureset: str, distance: str,
                              onwhat: str, threshold: float, statistic: str,
                              suffix: str, permfilter: bool, graph: str,
                              network: str) -> str:
    """Initialize and run the mapper with the given configuration.

    All parameters are rendered into a single summary string describing the
    run; the mapper itself is a placeholder process here, so the summary is
    the only result.

    Returns:
        A one-line description of the mapper run and its parameters.
    """
    # Render each parameter as "name=value", preserving declaration order.
    settings = (
        ("backbone", backbone),
        ("featureset", featureset),
        ("distance", distance),
        ("onwhat", onwhat),
        ("threshold", threshold),
        ("statistic", statistic),
        ("suffix", suffix),
        ("permfilter", permfilter),
        ("graph", graph),
        ("network", network),
    )
    rendered = ", ".join(f"{key}={value}" for key, value in settings)
    return f"Mapper initialized and run with parameters: {rendered}"
from flask import render_template, request, Blueprint, flash
from flask_login import login_required

from noteshare.models import Note
from noteshare.main.forms import ContactUsForm
from noteshare.main.utils import send_contact_email

main = Blueprint('main', __name__)


@main.route('/')
@main.route('/index')
def index():
    """Public landing page."""
    return render_template('index.html')


@main.route('/home', methods=['GET'])
@login_required
def home():
    """Paginated note feed for logged-in users.

    Query params:
        page   -- 1-based page number (default 1).
        filter -- 'latest', 'earlier' or 'top'; anything else (or absent)
                  falls back to newest-first.
    """
    page = request.args.get('page', 1, type=int)
    filter = request.args.get('filter', None)
    # BUG FIX: an unrecognised ?filter= value used to fall through every
    # if/elif branch and leave `notes` unbound, raising UnboundLocalError.
    # Unknown filters now fall back to newest-first, and the duplicated
    # query-building branches are collapsed into one mapping.
    orderings = {
        'latest': Note.date_posted.desc(),
        'earlier': Note.date_posted.asc(),
        'top': Note.ups.desc(),
    }
    order = orderings.get(filter, Note.date_posted.desc())
    notes = Note.query.order_by(order).paginate(page=page, per_page=5)
    return render_template('home.html', title='Home', notes=notes)


@main.route('/contact-us', methods=['GET', 'POST'])
def contact_us():
    """Contact form; emails the site owners on valid submission."""
    form = ContactUsForm()
    if form.validate_on_submit():
        send_contact_email(form.name.data, form.email.data, form.message.data)
        flash(f'Your message has been received. You\'ll get a reply from us very soon!', 'success')
    return render_template('contact_us.html', title='Contact Us', form=form)


@main.route('/about-us')
def about_us():
    """Static about page."""
    return render_template('about_us.html')
use std::ops::{Add, Sub, Mul}; #[derive(Debug, PartialEq)] struct Vec3 { x: f64, y: f64, z: f64, } impl Vec3 { fn new(x: f64, y: f64, z: f64) -> Vec3 { Vec3 { x, y, z } } fn from_slice(slice: &[f64]) -> Vec3 { assert_eq!(slice.len(), 3); Vec3::new(slice[0], slice[1], slice[2]) } fn add(&self, other: &Vec3) -> Vec3 { Vec3::new(self.x + other.x, self.y + other.y, self.z + other.z) } fn sub(&self, other: &Vec3) -> Vec3 { Vec3::new(self.x - other.x, self.y - other.y, self.z - other.z) } fn mul_scalar(&self, scalar: f64) -> Vec3 { Vec3::new(self.x * scalar, self.y * scalar, self.z * scalar) } fn dot(&self, other: &Vec3) -> f64 { self.x * other.x + self.y * other.y + self.z * other.z } fn cross(&self, other: &Vec3) -> Vec3 { Vec3::new( self.y * other.z - self.z * other.y, self.z * other.x - self.x * other.z, self.x * other.y - self.y * other.x, ) } fn normalize(&self) -> Vec3 { let mag = (self.x * self.x + self.y * self.y + self.z * self.z).sqrt(); Vec3::new(self.x / mag, self.y / mag, self.z / mag) } } fn main() { let v1 = Vec3::new(1.0, 2.0, 3.0); let v2 = Vec3::new(4.0, 5.0, 6.0); let v3 = v1.add(&v2); let v4 = v1.sub(&v2); let v5 = v1.mul_scalar(2.0); let dot_product = v1.dot(&v2); let cross_product = v1.cross(&v2); let normalized_v1 = v1.normalize(); println!("{:?}", v3); // Output: Vec3 { x: 5.0, y: 7.0, z: 9.0 } println!("{:?}", v4); // Output: Vec3 { x: -3.0, y: -3.0, z: -3.0 } println!("{:?}", v5); // Output: Vec3 { x: 2.0, y: 4.0, z: 6.0 } println!("{}", dot_product); // Output: 32.0 println!("{:?}", cross_product); // Output: Vec3 { x: -3.0, y: 6.0, z: -3.0 } println!("{:?}", normalized_v1); // Output: Vec3 { x: 0.2672612419124244, y: 0.5345224838248488, z: 0.8017837257372732 } }
public static boolean containsAllVowels(String str) { String vowels = "aeiou"; for (int i = 0; i < 5; i++) { if(!str.contains(vowels.charAt(i)+"")) { return false; } } return true; } System.out.println(containsAllVowels("Hello world!")); # will return false
<reponame>fossabot/erxes
import React, { Component } from 'react';
import { Link } from 'react-router-dom';
import PropTypes from 'prop-types';
import { OverlayTrigger, Popover } from 'react-bootstrap';
import classnames from 'classnames';
import { ChromePicker } from 'react-color';
import { uploadHandler } from 'modules/common/utils';
import { Button, Icon, Tip } from 'modules/common/components';
import { ActionBar, Wrapper } from 'modules/layout/components';
import { MessengerPreview, Messenger } from 'modules/engage/styles';
import Sidebar from '../Sidebar';
import { WidgetPreview } from './';
import {
  SubHeading,
  Margined,
  WidgetApperance,
  WidgetSettings,
  WidgetBackgrounds,
  WidgetBox,
  ColorPick,
  BackgroundSelector,
  ColorPicker,
  LogoContainer
} from '../../styles';

// Appearance settings step for the in-app messenger widget: lets the user
// pick a theme color, a wallpaper and a logo, shows a live preview, and
// hands the chosen options to `props.save` on submit.
class Appearance extends Component {
  constructor(props) {
    super(props);

    // Initial values come from previously saved options, with defaults for
    // color ('#04A9F5'), wallpaper ('1') and the placeholder logo image.
    this.state = {
      color: props.prevOptions.color || '#04A9F5',
      wallpaper: props.prevOptions.wallpaper || '1',
      logo: props.prevOptions.logo,
      logoPreviewStyle: {},
      logoPreviewUrl: props.prevOptions.logo || '/images/logo-image.png'
    };

    // Bind handlers once so render passes stable references.
    this.save = this.save.bind(this);
    this.onColorChange = this.onColorChange.bind(this);
    this.onWallpaperChange = this.onWallpaperChange.bind(this);
    this.handleLogoChange = this.handleLogoChange.bind(this);
  }

  // Color-picker change handler; `e.hex` is the selected color string.
  onColorChange(e) {
    this.setState({ color: e.hex });
  }

  // Select one of the numbered wallpapers ('1'..'5').
  onWallpaperChange(value) {
    this.setState({ wallpaper: value });
  }

  // Upload the chosen logo file. The preview is dimmed while uploading
  // (opacity 0.9) and restored when the upload finishes; `afterRead` sets
  // the local data-URL preview before the server responds.
  handleLogoChange(e) {
    const imageFile = e.target.files[0];

    uploadHandler({
      file: imageFile,
      beforeUpload: () => {
        this.setState({ logoPreviewStyle: { opacity: '0.9' } });
      },
      afterUpload: ({ response }) => {
        this.setState({ logo: response, logoPreviewStyle: { opacity: '1' } });
      },
      afterRead: ({ result }) => {
        this.setState({ logoPreviewUrl: result });
      }
    });
  }

  // Submit the current appearance options to the parent via `props.save`.
  save(e) {
    e.preventDefault();
    this.props.save({
      color: this.state.color,
      wallpaper: this.state.wallpaper,
      logo: this.state.logo
    });
  }

  // Render one wallpaper tile; the selected tile gets the 'selected' class
  // and a border in the current theme color.
  renderWallpaperSelect(value) {
    const isSelected = this.state.wallpaper === value;
    const selectorClass = classnames({ selected: isSelected });

    return (
      <BackgroundSelector
        className={selectorClass}
        onClick={() => this.onWallpaperChange(value)}
        style={{ borderColor: isSelected ? this.state.color : 'transparent' }}
      >
        <div className={`background-${value}`} />
      </BackgroundSelector>
    );
  }

  render() {
    // Popover hosting the color picker, shown on click from the swatch.
    const popoverTop = (
      <Popover id="color-picker">
        <ChromePicker color={this.state.color} onChange={this.onColorChange} />
      </Popover>
    );

    const { logoPreviewStyle, logoPreviewUrl } = this.state;

    // Left column: settings (color / wallpaper / logo). Right column:
    // live messenger preview reflecting the current state.
    const content = (
      <Margined>
        <WidgetApperance className="type-box">
          <WidgetSettings>
            <WidgetBox>
              <SubHeading>Choose a custom color</SubHeading>
              <OverlayTrigger
                trigger="click"
                rootClose
                placement="bottom"
                overlay={popoverTop}
              >
                <ColorPick>
                  <ColorPicker style={{ backgroundColor: this.state.color }} />
                </ColorPick>
              </OverlayTrigger>
            </WidgetBox>

            <WidgetBox>
              <SubHeading>Choose a wallpaper</SubHeading>

              <WidgetBackgrounds>
                {this.renderWallpaperSelect('1')}
                {this.renderWallpaperSelect('2')}
                {this.renderWallpaperSelect('3')}
                {this.renderWallpaperSelect('4')}
                {this.renderWallpaperSelect('5')}
              </WidgetBackgrounds>
            </WidgetBox>

            <WidgetBox>
              <SubHeading>Choose a logo</SubHeading>
              <input type="file" onChange={this.handleLogoChange} />
            </WidgetBox>
          </WidgetSettings>

          <MessengerPreview>
            <Messenger>
              <WidgetPreview
                color={this.state.color}
                wallpaper={this.state.wallpaper}
                user={this.props.user}
              />
              <Tip text="Choose a logo">
                <LogoContainer
                  style={Object.assign(
                    {
                      backgroundColor: this.state.color,
                      backgroundImage: `url(${logoPreviewUrl})`
                    },
                    logoPreviewStyle
                  )}
                >
                  <label>
                    <Icon
                      icon="ios-upload-outline icon"
                      size={30}
                      style={{ backgroundColor: this.state.color }}
                    />
                    <input type="file" onChange={this.handleLogoChange} />
                  </label>
                </LogoContainer>
              </Tip>
            </Messenger>
          </MessengerPreview>
        </WidgetApperance>
      </Margined>
    );

    const breadcrumb = [
      { title: 'Settings', link: '/settings/integrations' },
      { title: 'Integrations' }
    ];

    // Cancel returns to the integrations list; Save submits via this.save.
    const actionBar = (
      <ActionBar
        right={
          <Button.Group>
            <Link to="/settings/integrations">
              <Button size="small" btnStyle="simple" icon="close">
                Cancel
              </Button>
            </Link>

            <Button
              size="small"
              btnStyle="success"
              onClick={this.save}
              icon="checkmark"
            >
              Save
            </Button>
          </Button.Group>
        }
      />
    );

    return (
      <Wrapper
        header={<Wrapper.Header breadcrumb={breadcrumb} />}
        leftSidebar={<Sidebar />}
        footer={actionBar}
        content={content}
      />
    );
  }
}

Appearance.propTypes = {
  prevOptions: PropTypes.object.isRequired, // eslint-disable-line
  user: PropTypes.object.isRequired, // eslint-disable-line
  save: PropTypes.func.isRequired
};

export default Appearance;
<reponame>HISPSA/data-visualizer-app
// Shared style constants for the visualizer layout area. Colors come from
// the @dhis2/ui design tokens so the layout matches the rest of the app.
import { colors, theme } from '@dhis2/ui'

// Layout
export const LAYOUT_HEIGHT = '78px'

// Axis (the drop zones holding dimension chips)
export const AXIS_PADDING = '4px 4px 2px 6px'
export const AXIS_LABEL_PADDING = '2px 0px 0px 4px'
export const AXIS_BORDER_COLOR = colors.grey300
export const AXIS_BORDER_STYLE = 'solid'
// Border only on the bottom and left edges.
export const AXIS_BORDER_WIDTH = '0px 0px 1px 1px'
export const AXIS_BACKGROUND_COLOR = colors.white

// Chip (an individual dimension chip inside an axis)
export const CHIP_COLOR = colors.grey900
export const CHIP_FONT_WEIGHT = 400
export const CHIP_FONT_SIZE = '14px'
export const CHIP_BACKGROUND_COLOR = theme.secondary200
export const CHIP_HEIGHT = '14px'
export const CHIP_MARGIN = '4px'
export const CHIP_PADDING = '1px 6px'
export const CHIP_BORDER_RADIUS = '2px'
#!/bin/bash
# Personal development-environment bootstrap for an Ubuntu/Debian machine:
# upgrades the system, clones the dev_env repo, links dotfiles, installs
# fonts and packages (apt, snap, docker, rust, go tools), then switches the
# default shell to zsh. Detailed command output is appended to $output.
set -e
set -u

tmp="${TMPDIR:-/tmp}"
name="dev_env"
dir="${HOME}/Programming/${name}"
output="${tmp}/${name}_setup"
apt_args="--no-install-recommends --yes"
# Terminal color escape sequences used by message().
cyan="$(tput setaf 6)"
red="$(tput setaf 1)"
yellow="$(tput setaf 3)"
green="$(tput setaf 2)"

# message COLOR TEXT... — print colored text to the terminal and append the
# uncolored text to the log file.
function message {
  echo "${*}$(tput sgr0)"
  echo "${*:2}" &>>"${output}"
}

# step DESCRIPTION COMMAND... — announce the step, run the command with all
# output redirected to the log, then report completion.
# NOTE(review): ${*:2} is unquoted, so arguments are re-split by the shell;
# fine for the simple function names passed below.
function step {
  message $cyan "==> ${1}"
  ${*:2} &>>"${output}"
  message $green "--> Done"
}

# Like step, but leaves the command attached to the terminal (for commands
# that prompt interactively, e.g. chsh and passwd below).
function step_no_redirect {
  message $cyan "==> ${1}"
  ${*:2}
  message $green "--> Done"
}

message $yellow "!!! Detailed command output is being written to ${output}"

function system_step {
  sudo apt-get update ${apt_args}
  sudo apt-get upgrade ${apt_args}
}
step "Upgrading system" system_step

function dep_step {
  sudo apt-get install ${apt_args} curl git build-essential
}
step "Installing dependencies" dep_step

# Clone a fresh copy of the dev_env repo; any existing checkout is removed.
function clone_step {
  if [[ -d "${dir}" ]]; then
    rm -rf "${dir}"
  fi

  git clone "git@github.com:larzconwell/${name}.git" "${dir}"
}
step "Cloning ${name}" clone_step

# Symlink dotfiles into $HOME (with a "." prefix) and sshd config snippets
# into the system sshd_config.d directory.
function link_step {
  "${dir}/scripts/link_files" "${dir}/dotfiles/unix" "${HOME}" "."
  "${dir}/scripts/link_files" "${dir}/dotfiles/linux" "${HOME}" "."

  local sshd_dir="/etc/ssh/sshd_config.d"
  sudo mkdir -p "${sshd_dir}"
  sudo "${dir}/scripts/link_files" "${dir}/sshd" "${sshd_dir}"
}
step "Linking dotfiles" link_step

# Download and install the JetBrains Mono font system-wide, then refresh
# the font cache.
function fonts_step {
  local font_zip="${tmp}/font.zip"
  local tmp_font_dir="${tmp}/font"
  local font_dir="/usr/local/share/fonts/truetype/JetBrainsMono"

  mkdir -p "${tmp_font_dir}"
  sudo mkdir -p "${font_dir}"

  curl -sSfL "https://download.jetbrains.com/fonts/JetBrainsMono-2.242.zip" 1>"${font_zip}" 2>>"${output}"
  unzip -d "${tmp_font_dir}" "${font_zip}"
  sudo mv "${tmp_font_dir}/fonts/ttf/"* "${font_dir}"
  fc-cache -v

  rm -rf "${font_zip}" "${tmp_font_dir}"
}
step "Installing fonts" fonts_step

# Install everything else: docker (from Docker's apt repo), desktop apps,
# docker-compose, snaps, rust + cargo tools, and go tooling.
function packages_step {
  curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
  echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu \
  $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null

  sudo apt-get update
  sudo apt-get install ${apt_args} firefox kitty \
    openssh-server vim-gtk p7zip xsel zsh clang-tidy \
    clang-format shellcheck cmake valgrind clang nasm \
    docker-ce docker-ce-cli containerd.io \
    graphviz jq tree imagemagick

  # docker-compose v1 is distributed as a single static binary.
  docker_compose_bin="/usr/local/bin/docker-compose"
  sudo curl -L "https://github.com/docker/compose/releases/download/1.29.2/docker-compose-$(uname -s)-$(uname -m)" -o "${docker_compose_bin}"
  sudo chmod +x "${docker_compose_bin}"

  snaps=("go" "blender" "gimp")
  for pkg in ${snaps[@]}; do
    sudo snap install "${pkg}"
  done

  # rustup installs into ~/.cargo; PATH is extended so cargo works below.
  export PATH="${HOME}/.cargo/bin:${PATH}"
  curl -sSfL "https://sh.rustup.rs" | sh -s -- -y --no-modify-path
  cargo install -q just

  curl -sSfL "https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh" | sh -s -- -b "$(go env GOPATH)/bin" "v1.43.0"

  go_pkgs=(
    "github.com/nsf/gocode@latest"
    "github.com/google/gops@latest"
    "golang.org/x/tools/cmd/goimports@latest"
    "golang.org/x/tools/gopls@latest"
  )
  for pkg in ${go_pkgs[@]}; do
    go install "${pkg}"
  done
}
step "Installing packages" packages_step

# Interactive: chsh prompts for the user's password.
function shell_step {
  chsh -s "$(which zsh)"
  sudo chsh -s "$(which zsh)"
}
step_no_redirect "Configuring default shell" shell_step

# Interactive: sets the root password.
function root_passwd_step {
  sudo passwd -q
}
step_no_redirect "Configuring root password" root_passwd_step

function cleanup_step {
  sudo apt-get clean ${apt_args}
  sudo apt-get autoremove ${apt_args}
}
step "Cleaning up apt" cleanup_step

message $green "==> Setup complete, a restart is advised"
message $green "==> Additional manual steps should be taken:"
message $green "--> Set the systems monospace font to JetBrains Mono NL"
message $green "--> run vimupdate"
message $green "--> run :GoInstallBinaries inside vim"
#!/bin/bash
# Install a custom FFmpeg version - https://ffmpeg.org
#
# To run this script on Codeship, add the following
# command to your project's setup commands:
# \curl -sSL https://raw.githubusercontent.com/codeship/scripts/master/packages/ffmpeg.sh | bash -s
#
# Add the following environment variable to your project configuration
# (otherwise the default below will be used).
# * FFMPEG_VERSION
#
FFMPEG_VERSION=${FFMPEG_VERSION:="4.2"}
FFMPEG_DIR=${FFMPEG_DIR:=$HOME/cache/ffmpeg-$FFMPEG_VERSION}

set -e

# Build from source only when the cached install directory is absent.
if [ ! -d "${FFMPEG_DIR}" ]; then
  CACHED_DOWNLOAD="${HOME}/cache/ffmpeg-${FFMPEG_VERSION}.tar.gz"

  mkdir -p "${HOME}/ffmpeg"
  wget --continue --output-document "${CACHED_DOWNLOAD}" "https://github.com/FFmpeg/FFmpeg/archive/n${FFMPEG_VERSION}.tar.gz"
  tar -xaf "${CACHED_DOWNLOAD}" --strip-components=1 --directory "${HOME}/ffmpeg"

  (
    cd "${HOME}/ffmpeg" || exit 1
    ./configure --prefix="${FFMPEG_DIR}" --disable-shared --enable-static --enable-gpl --enable-nonfree
    make
    make install
  )
fi

# Make the binaries available on PATH. mkdir -p guards against a missing
# ~/bin, and `ln -sf` makes the script re-runnable: a plain `ln -s` fails
# with "File exists" when the links were already created by a previous run.
mkdir -p "${HOME}/bin"
ln -sf "${FFMPEG_DIR}/bin/"* "${HOME}/bin"

# Sanity check: the installed ffmpeg reports the requested version.
ffmpeg -version | grep "${FFMPEG_VERSION}"
#!/bin/sh
# Catkin-generated helper that installs the tf2_kdl package's Python code
# via distutils. Paths are machine-specific (presumably generated by a
# catkin isolated build on this Jetson Xavier — do not edit by hand).

# DESTDIR, when set, must be an absolute path; it is passed through to
# distutils as --root for staged installs.
if [ -n "$DESTDIR" ] ; then
    case $DESTDIR in
    /*) # ok
        ;;
    *)
        /bin/echo "DESTDIR argument must be absolute... "
        /bin/echo "otherwise python's distutils will bork things."
        exit 1
    esac
    DESTDIR_ARG="--root=$DESTDIR"
fi

# Echo each command before running it, for traceable build logs.
echo_and_run() { echo "+ $@" ; "$@" ; }

echo_and_run cd "/xavier_ssd/TrekBot/TrekBot_WS/src/geometry2/tf2_kdl"

# ensure that Python install destination exists
echo_and_run mkdir -p "$DESTDIR/xavier_ssd/TrekBot/TrekBot_WS/install_isolated/lib/python2.7/dist-packages"

# Note that PYTHONPATH is pulled from the environment to support installing
# into one location when some dependencies were installed in another
# location, #123.
echo_and_run /usr/bin/env \
    PYTHONPATH="/xavier_ssd/TrekBot/TrekBot_WS/install_isolated/lib/python2.7/dist-packages:/xavier_ssd/TrekBot/TrekBot_WS/build_isolated/tf2_kdl/lib/python2.7/dist-packages:$PYTHONPATH" \
    CATKIN_BINARY_DIR="/xavier_ssd/TrekBot/TrekBot_WS/build_isolated/tf2_kdl" \
    "/usr/bin/python2" \
    "/xavier_ssd/TrekBot/TrekBot_WS/src/geometry2/tf2_kdl/setup.py" \
    build --build-base "/xavier_ssd/TrekBot/TrekBot_WS/build_isolated/tf2_kdl" \
    install \
    $DESTDIR_ARG \
    --install-layout=deb --prefix="/xavier_ssd/TrekBot/TrekBot_WS/install_isolated" --install-scripts="/xavier_ssd/TrekBot/TrekBot_WS/install_isolated/bin"
package interactive.text;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import data.Texture;

/**
 * A bitmap font loaded from an AngelCode-style .fnt descriptor plus a
 * matching PNG texture atlas, both looked up under res/font/ by name.
 * All pixel measurements from the file are normalized to [0, 1] texture
 * coordinates by dividing by the atlas dimensions (scaleW / scaleH).
 */
public class Font {

    // Texture atlas containing all glyph images.
    private Texture texture;
    // Line height in normalized (0..1) texture-space units.
    private float lineHeight;
    // Glyphs keyed by their ASCII / code-point id from the .fnt file.
    private Map<Integer, Glyph> alphabet;
    // Requested render size (stored as-is; not used during parsing).
    private float size;

    /**
     * Parses res/font/&lt;fontName&gt;.fnt and loads the matching texture.
     *
     * NOTE(review): if the .fnt file is missing or unreadable, the error is
     * only logged and the constructor still attempts to load the texture,
     * leaving an empty alphabet. Relies on the "common" line appearing
     * before the "char" lines so the inverse scales are non-zero —
     * presumably guaranteed by the .fnt format; verify for custom files.
     */
    public Font(String fontName, float size) {
        File font = new File("res/font/" + fontName + ".fnt");
        alphabet = new HashMap<Integer, Glyph>();
        this.size = size;
        try (BufferedReader reader = new BufferedReader(new FileReader(font))) {
            String line;
            int scaleW = 0;
            int scaleH = 0;
            // Reciprocals of the atlas dimensions, used to normalize all
            // pixel values below.
            float invScaleW = 0.0f;
            float invScaleH = 0.0f;
            while((line = reader.readLine()) != null) {
                if (line.startsWith("common ")) {
                    // "common" line: atlas dimensions and line height,
                    // as space-separated key=value pairs.
                    String[] arguments = line.substring(7).split(" ");
                    int pixLineHeight = 0;
                    for (String arg : arguments) {
                        String[] value = arg.split("=");
                        if (value[0].contentEquals("scaleW")) {
                            scaleW = Integer.parseInt(value[1]);
                            invScaleW = 1.0f / scaleW;
                        } else if (value[0].contentEquals("scaleH")) {
                            scaleH = Integer.parseInt(value[1]);
                            invScaleH = 1.0f / scaleH;
                        } else if (value[0].contentEquals("lineHeight")) {
                            pixLineHeight = Integer.parseInt(value[1]);
                        }
                    }
                    lineHeight = pixLineHeight * invScaleH;
                } else if (line.startsWith("char ")) {
                    // "char" line: one glyph's placement in the atlas and
                    // its layout metrics, in pixels (split on runs of
                    // spaces since the format pads columns).
                    String[] parameters = line.substring(5).split(" +");
                    int ascii = 0;
                    int x = 0;
                    int y = 0;
                    int width = 0;
                    int height = 0;
                    int xOffset = 0;
                    int yOffset = 0;
                    int xAdvance = 0;
                    for (String param : parameters) {
                        String[] value = param.split("=");
                        if (value[0].contentEquals("id")) {
                            ascii = Integer.parseInt(value[1]);
                        } else if (value[0].contentEquals("x")) {
                            x = Integer.parseInt(value[1]);
                        } else if (value[0].contentEquals("y")) {
                            y = Integer.parseInt(value[1]);
                        } else if (value[0].contentEquals("width")) {
                            width = Integer.parseInt(value[1]);
                        } else if (value[0].contentEquals("height")) {
                            height = Integer.parseInt(value[1]);
                        } else if (value[0].contentEquals("xoffset")) {
                            xOffset = Integer.parseInt(value[1]);
                        } else if (value[0].contentEquals("yoffset")) {
                            yOffset = Integer.parseInt(value[1]);
                        } else if (value[0].contentEquals("xadvance")) {
                            xAdvance = Integer.parseInt(value[1]);
                        }
                    }
                    // Store the glyph with all measurements normalized to
                    // texture space.
                    Glyph g = new Glyph(ascii, x * invScaleW, y * invScaleH, width * invScaleW,
                            height * invScaleH, xOffset * invScaleW, yOffset * invScaleH, xAdvance * invScaleW);
                    alphabet.put(ascii, g);
                }
            }
        } catch (FileNotFoundException e) {
            System.err.println("Error finding font file for font " + fontName);
            e.printStackTrace();
        } catch (IOException e) {
            System.err.println("Error reading font file for font " + fontName);
            e.printStackTrace();
        }
        texture = new Texture("font/" + fontName + ".png", 1, 1);
    }

    /** Returns the glyph atlas texture. */
    public Texture getTexture() {
        return texture;
    }

    /** Line height in normalized texture-space units. */
    public float getLineHeight() {
        return lineHeight;
    }

    /**
     * Looks up the glyph for a character.
     * NOTE(review): returns null for characters absent from the .fnt file —
     * callers must handle that or risk a NullPointerException.
     */
    public Glyph getGlyph(char c) {
        return alphabet.get((int)c);
    }

    /** Releases the underlying texture resources. */
    public void free() {
        texture.free();
    }

    /** Requested render size passed to the constructor. */
    public float getSize() {
        return size;
    }
}
#!/bin/bash -f
#*********************************************************************************************************
# Vivado (TM) v2020.1 (64-bit)
#
# Filename    : clk_wiz_0.sh
# Simulator   : Aldec Riviera-PRO Simulator
# Description : Simulation script for compiling, elaborating and verifying the project source files.
#               The script will automatically create the design libraries sub-directories in the run
#               directory, add the library logical mappings in the simulator setup file, create default
#               'do/prj' file, execute compilation, elaboration and simulation steps.
#
# Generated by Vivado on Sat May 15 20:18:01 -0400 2021
# SW Build 2902540 on Wed May 27 19:54:49 MDT 2020
#
# Copyright 1986-2020 Xilinx, Inc. All Rights Reserved.
#
# usage: clk_wiz_0.sh [-help]
# usage: clk_wiz_0.sh [-lib_map_path]
# usage: clk_wiz_0.sh [-noclean_files]
# usage: clk_wiz_0.sh [-reset_run]
#
# Prerequisite:- To compile and run simulation, you must compile the Xilinx simulation libraries using the
# 'compile_simlib' TCL command. For more information about this command, run 'compile_simlib -help' in the
# Vivado Tcl Shell. Once the libraries have been compiled successfully, specify the -lib_map_path switch
# that points to these libraries and rerun export_simulation. For more information about this switch please
# type 'export_simulation -help' in the Tcl shell.
#
# You can also point to the simulation libraries by either replacing the <SPECIFY_COMPILED_LIB_PATH> in this
# script with the compiled library directory path or specify this path with the '-lib_map_path' switch when
# executing this script. Please type 'clk_wiz_0.sh -help' for more information.
#
# Additional references - 'Xilinx Vivado Design Suite User Guide:Logic simulation (UG900)'
#
#*********************************************************************************************************
# NOTE(review): this file is generated by Vivado's export_simulation; manual
# edits are typically overwritten on the next export.

# Script info
echo -e "clk_wiz_0.sh - Script generated by export_simulation (Vivado v2020.1 (64-bit)-id)\n"

# Main steps
run()
{
  check_args $# $1
  setup $1 $2
  compile
  simulate
}

# RUN_STEP: <compile>
compile()
{
  # Compile design files
  source compile.do 2>&1 | tee -a compile.log
}

# RUN_STEP: <simulate>
simulate()
{
  runvsimsa -l simulate.log -do "do {simulate.do}"
}

# STEP: setup
# Dispatches on the first command-line option; the default branch still maps
# the compiled libraries (with an empty path, falling back to the hard-coded
# location in map_setup_file).
setup()
{
  case $1 in
    "-lib_map_path" )
      if [[ ($2 == "") ]]; then
        echo -e "ERROR: Simulation library directory path not specified (type \"./clk_wiz_0.sh -help\" for more information)\n"
        exit 1
      fi
      map_setup_file $2
    ;;
    "-reset_run" )
      reset_run
      echo -e "INFO: Simulation run files deleted.\n"
      exit 0
    ;;
    "-noclean_files" )
      # do not remove previous data
    ;;
    * )
      map_setup_file $2
  esac

  # Add any setup/initialization commands here:-

  # <user specific commands>
}

# Map library.cfg file
# Links the pre-compiled simulation libraries into this run directory via
# vmap when a library.cfg is found at the given (or default) path.
map_setup_file()
{
  file="library.cfg"
  if [[ ($1 != "") ]]; then
    lib_map_path="$1"
  else
    lib_map_path="C:/Users/Aleksa/Documents/FPGA_Dev/Artix7_PCIe/DDR3_Optimization/dso_top_23256/dso_top_23256.cache/compile_simlib/riviera"
  fi
  if [[ ($lib_map_path != "") ]]; then
    src_file="$lib_map_path/$file"
    if [[ -e $src_file ]]; then
      vmap -link $lib_map_path
    fi
  fi
}

# Delete generated data from the previous run
reset_run()
{
  files_to_remove=(compile.log elaboration.log simulate.log dataset.asdb work riviera)
  for (( i=0; i<${#files_to_remove[*]}; i++ )); do
    file="${files_to_remove[i]}"
    if [[ -e $file ]]; then
      rm -rf $file
    fi
  done
}

# Check command line arguments
check_args()
{
  if [[ ($1 == 1 ) && ($2 != "-lib_map_path" && $2 != "-noclean_files" && $2 != "-reset_run" && $2 != "-help" && $2 != "-h") ]]; then
    echo -e "ERROR: Unknown option specified '$2' (type \"./clk_wiz_0.sh -help\" for more information)\n"
    exit 1
  fi

  if [[ ($2 == "-help" || $2 == "-h") ]]; then
    usage
  fi
}

# Script usage
usage()
{
  msg="Usage: clk_wiz_0.sh [-help]\n\
Usage: clk_wiz_0.sh [-lib_map_path]\n\
Usage: clk_wiz_0.sh [-reset_run]\n\
Usage: clk_wiz_0.sh [-noclean_files]\n\n\
[-help] -- Print help information for this script\n\n\
[-lib_map_path <path>] -- Compiled simulation library directory path. The simulation library is compiled\n\
using the compile_simlib tcl command. Please see 'compile_simlib -help' for more information.\n\n\
[-reset_run] -- Recreate simulator setup files and library mappings for a clean run. The generated files\n\
from the previous run will be removed. If you don't want to remove the simulator generated files, use the\n\
-noclean_files switch.\n\n\
[-noclean_files] -- Reset previous run, but do not remove simulator generated files from the previous run.\n\n"
  echo -e $msg
  exit 1
}

# Launch script
run $1 $2
#!/usr/bin/env bash
# Test harness for the main_surface.sh pipeline. Each test case runs in a
# background subshell with its own environment overrides and a private copy
# of the data directory; most cases are commented out so individual tests
# can be toggled by uncommenting them.
# run with bash tests/test_pipeline -c /path/to/config_file

# import and check config
# -c: path to a config file to source (or the literal "test" for test mode)
# -e: enable exit-on-error for this harness
while getopts "c:e" opt; do
  case $opt in
    c)
      CONFIG=$OPTARG
      if [ ! -f "$CONFIG" -a "$CONFIG" != "test" ];then
        echo "Config file "$CONFIG" does not exist." >&2
        exit 1
      elif [ $CONFIG = "test" ]; then
        echo "test mode"
      else
        echo "Using config file $CONFIG." >&2
        source "$CONFIG"
      fi
      ;;
    e)
      set -e
      ;;
    \?)
      echo "Invalid option: -$OPTARG" >&2
      exit 1
      ;;
  esac
done
if [ -z "$CONFIG" ]; then
  echo "You must provide a config file."
  exit 1
fi

# Create an isolated working directory for one test case and point PRD at
# it. Expects NAME_TEST (and, from the config, PRD and optionally HCP) to
# be set by the caller.
setUp() {
  set +e
  mkdir -p "$PRD"/test_"$NAME_TEST"
  cp -rn "$PRD"/data "$PRD"/test_"$NAME_TEST"/data
  if [ "$HCP" = "yes" ]; then
    cp "$PRD"/100307_3T_Diffusion_preproc.zip \
       "$PRD"/100307_3T_Structural_preproc.zip \
       "$PRD"/100307_3T_Structural_preproc_extended.zip "$PRD"/test_"$NAME_TEST"
  fi
  PRD="$PRD"/test_"$NAME_TEST"
}

# Remove the per-test directory when CLEAN_AFTER_TEST=1.
Teardown() {
  if [ "$CLEAN_AFTER_TEST" = 1 ]; then
    rm -r "$PRD"
  fi
}

# Run the pipeline quietly and report success/failure for this test case.
test_function() {
  bash ./main_surface.sh -c "test" -e -q -f > /dev/null && out="success" || out="fail"
  printf "\n >>> Test "$NAME_TEST" output is : "$out" <<< \n"
}

# Variables to change from config file:
export TOPUP="no" # to accelerate things a little, eddy takes several hours
export NB_THREADS=2
export CLEAN_AFTER_TEST=0

# Test matrix: each line is one case; uncomment to enable. Only the
# "5ttgen_fs" case is currently active.
#( export NAME_TEST="fsl_5"; export FSL="fsl5.0-"; setUp; test_function ) &
#( export NAME_TEST="registration_boundary"; export REGISTRATION="boundary"; setUp; test_function; Teardown ) &
#( export NAME_TEST="registration_pseudo"; export REGISTRATION="pseudo"; setUp; test_function; Teardown ) &
#( export NAME_TEST="region_mapping_corr"; export REGION_MAPPING_COOR="0.5"; setUp; test_function; Teardown ) &
#( export NAME_TEST="k_list"; export K_LIST="0 2 5"; setUp; test_function; Teardown ) &
#( export NAME_TEST="no_k_list"; export K_LIST=""; setUp; test_function; Teardown ) &
#( export NAME_TEST="number_tracks"; export NUMBER_TRACKS=10000; setUp; test_function; Teardown ) &
#( export NAME_TEST="parcel_destrieux"; PARCEL="destrieux"; setUp; test_function; Teardown ) &
#( export NAME_TEST="parcel_HCP"; export PARCEL="HCP-MMP"; setUp; test_function; Teardown ) &
#( export NAME_TEST="parcel_Yeo7"; export PARCEL="Yeo-7nets"; setUp; test_function; Teardown) &
#( export NAME_TEST="parcel_Yeo17"; export PARCEL="Yeo-17nets"; setUp; test_function; Teardown ) &
#( export NAME_TEST="no_topup"; export TOPUP="no"; setUp; test_function; Teardown ) &
#( export NAME_TEST="no_act"; export ACT="no"; setUp; test_function; Teardown ) &
#( export NAME_TEST="no_sift"; export SIFT="no"; setUp; test_function; Teardown ) &
#( export NAME_TEST="sift"; export SIFT="sift"; setUp; test_function; Teardown ) &
#( export NAME_TEST="sift_multiplier"; export SIFT="sift"; export SIFT_MULTIPLIER=2; setUp; test_function; Teardown ) &
#( export NAME_TEST="seed_dynamic"; export SEED="dynamic"; setUp; test_function; Teardown ) &
#( export NAME_TEST="aseg_fs"; export ASEG="fs"; setUp; test_function; Teardown ) &
( export NAME_TEST="5ttgen_fs"; export FTTGEN="fs"; setUp; test_function; Teardown ) &
#( export NAME_TEST="no_mne"; export MNE="no"; setUp; test_function; Teardown ) &
#( export NAME_TEST="nb_threads_2"; export NB_THREADS="3"; setUp; test_function; Teardown ) &
exit
#!/bin/bash
# Posts a set of XML record files to a local ds-storage instance, wrapping
# each file's content in a JSON record envelope (base "doms.radio").

# Requires a running instance of https://github.com/kb-dk/ds-storage/
# with default test setup

# Record files can come from the command line; otherwise the default set
# below is used. STORAGE/ENDPOINT can be overridden via the environment.
: ${RECORD_FILES:="$@"}
: ${RECORD_FILES:="albert-einstein.xml hvidovre-teater.xml simonsen-brandes.xml tystrup-soroe.xml homiliae-super-psalmos.xml work_on_logic.xml joergen_hansens_visebog.xml responsa.xml"}

: ${STORAGE:="http://localhost:9072/ds-storage/v1"}
: ${ENDPOINT:="$STORAGE/record"}

# Wrap one file as a JSON record (jq escapes the XML into a JSON string)
# and POST it; exits the script on any non-204 response.
post_record() {
    local RECORD_FILE="$1"

    ID="doms.radio:$RECORD_FILE"
    T=$(mktemp)
    echo '{"id":"'$ID'", "base":"doms.radio", "data":'$(jq -R -s '.' < $RECORD_FILE)'}' > $T

    STATUSCODE=$(curl -s --output /dev/stderr --write-out "%{http_code}" -X POST "$ENDPOINT" -H "accept: */*" -H "Content-Type: application/json" -d @${T})
    if [ ! "$STATUSCODE" -eq 204 ]; then
        >&2 echo "Error: Unable to post content to ds-storage"
        >&2 echo "Got HTTP code $STATUSCODE for POST to $ENDPOINT"
        >&2 echo "Check if storage is running at ${STORAGE}"
        exit 2
    fi
    #curl -s -X POST "$STORAGE/record/createOrUpdateRecord" -H "accept: */*" -H "Content-Type: application/json" -d @${T}
    rm "$T"
    echo "Indexed ${ID}. Access at ${STORAGE}/record/$ID"
}

for RECORD_FILE in $RECORD_FILES; do
    post_record "$RECORD_FILE"
done
echo "All done."
package app.model;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;

/**
 * Spring-managed cage holding one animal (the "dog" bean) and a timer
 * (the "timer" bean), able to report what the animal says and when.
 */
@Component
public class AnimalsCage {

    @Autowired
    @Qualifier("dog")
    private Animal animal;

    @Autowired
    @Qualifier("timer")
    private Timer timer;

    /** Exposes the injected timer bean. */
    public Timer getTimer() {
        return timer;
    }

    /**
     * Prints the animal's description and the current timer value,
     * one line per item, followed by a separator line.
     */
    public void whatAnimalSay() {
        Object[] report = {
            "Say:",
            animal,
            "At:",
            timer.getTime(),
            "________________________"
        };
        for (Object line : report) {
            System.out.println(line);
        }
    }
}
// Reverse a given string.
//
// The previous version recursed once per character, copying the remaining
// substring on every call (O(n^2) time) and consuming one stack frame per
// character, so long inputs overflow the call stack; it also used the
// deprecated String#substr. split/reverse/join does the same job
// iteratively in linear time.
//
// NOTE: like the original, this operates on UTF-16 code units, so
// surrogate pairs (e.g. emoji) are reversed unit-by-unit.
function reverseString(str) {
  return str.split("").reverse().join("");
}

// Driver code
let str = "Hello World!";
console.log(reverseString(str)) // !dlroW olleH
<gh_stars>0
import React, { useContext } from 'react';
import {
  Table,
  TableHeader,
  TableCell,
  TableFooter,
  TableContainer,
  Select,
  Input,
  Button,
  Card,
  CardBody,
  Pagination,
} from '@windmill/react-ui';
import { FiPlus } from 'react-icons/fi';
import { CSVReader, CSVDownloader } from 'react-papaparse';

import useAsync from '../hooks/useAsync';
import useFilter from '../hooks/useFilter';
import productData from '../utils/products';
import NotFound from '../components/table/NotFound';
import Loading from '../components/preloader/Loading';
import ProductServices from '../services/ProductServices';
import PageTitle from '../components/Typography/PageTitle';
import { SidebarContext } from '../context/SidebarContext';
import ProductTable from '../components/product/ProductTable';
import SelectCategory from '../components/form/SelectCategory';
import MainDrawer from '../components/drawer/MainDrawer';
import ProductDrawer from '../components/drawer/ProductDrawer';

// Admin product listing page: search/filter/sort controls, CSV bulk
// import/export, and a paginated product table. Data loading and all
// filtering state live in the useAsync/useFilter hooks.
const Products = () => {
  const { toggleDrawer } = useContext(SidebarContext);
  const { data, loading } = useAsync(ProductServices.getAllProducts);
  const {
    searchRef,
    setFilter,
    setSortedField,
    handleChangePage,
    totalResults,
    resultsPerPage,
    dataTable,
    serviceData,
    handleSubmitForAll,
    handleOnDrop,
    handleUploadProducts,
  } = useFilter(data);

  return (
    <>
      <PageTitle>Products</PageTitle>
      {/* Drawer for adding/editing a product; opened via toggleDrawer. */}
      <MainDrawer>
        <ProductDrawer />
      </MainDrawer>

      {/* Search / category filter / price sort / add-product controls. */}
      <Card className="min-w-0 shadow-xs overflow-hidden bg-white dark:bg-gray-800 mb-5">
        <CardBody>
          <form
            onSubmit={handleSubmitForAll}
            className="py-3 grid gap-4 lg:gap-6 xl:gap-6 md:flex xl:flex"
          >
            <div className="flex-grow-0 md:flex-grow lg:flex-grow xl:flex-grow">
              <Input
                ref={searchRef}
                className="border h-12 text-sm focus:outline-none block w-full bg-gray-100 border-transparent focus:bg-white"
                type="search"
                name="search"
                placeholder="Search by product name"
              />
              {/* Invisible submit button so pressing Enter submits the form. */}
              <button
                type="submit"
                className="absolute right-0 top-0 mt-5 mr-1"
              ></button>
            </div>
            <div className="flex-grow-0 md:flex-grow lg:flex-grow xl:flex-grow">
              <SelectCategory setFilter={setFilter} />
            </div>

            <div className="flex-grow-0 md:flex-grow lg:flex-grow xl:flex-grow">
              <Select
                onChange={(e) => setSortedField(e.target.value)}
                className="border h-12 text-sm focus:outline-none block w-full bg-gray-100 border-transparent focus:bg-white"
              >
                <option value="All" defaultValue hidden>
                  Price
                </option>
                <option value="Low">Low to High</option>
                <option value="High">High to Low</option>
              </Select>
            </div>
            <div className="w-full md:w-56 lg:w-56 xl:w-56">
              <Button onClick={toggleDrawer} className="w-full rounded-md h-12">
                <span className="mr-3">
                  <FiPlus />
                </span>
                Add Product
              </Button>
            </div>
          </form>
        </CardBody>
      </Card>

      {/* CSV drop zone (bulk import) and template download. */}
      <Card className="min-w-0 shadow-xs overflow-hidden bg-white dark:bg-gray-800 rounded-t-lg rounded-0">
        <CardBody>
          <div className="grid gap-4 md:grid-cols-3 xl:grid-cols-3">
            <div className="col-span-2">
              <CSVReader
                onDrop={handleOnDrop}
                addRemoveButton
                config={{
                  header: true,
                }}
                style={{
                  dropArea: {
                    borderColor: 'green',
                    borderRadius: 6,
                    borderWidth: 1,
                    height: '3em',
                    padding: '0 0.2em',
                  },
                  dropAreaActive: {
                    borderColor: 'green',
                  },
                  dropFile: {
                    width: '100%',
                    display: 'block',
                    height: 'auto',
                    background: 'none',
                    borderRadius: 6,
                    padding: '0.2em 0.2em',
                  },
                  fileSizeInfo: {
                    color: '#fff',
                    backgroundColor: '#000',
                    borderRadius: 0,
                    lineHeight: 1,
                    fontSize: 12,
                    marginBottom: '0.5em',
                    padding: '0.3em 0.2em',
                  },
                  fileNameInfo: {
                    color: '#757575',
                    backgroundColor: 'transparent',
                    borderRadius: 1,
                    fontSize: 14,
                    lineHeight: 1,
                    padding: '0 0.4em',
                  },
                  removeButton: {
                    color: 'red',
                  },
                  progressBar: {
                    backgroundColor: 'green',
                  },
                }}
              >
                <span className="text-sm text-gray-500">Drop CSV file</span>
              </CSVReader>
            </div>
            <div className="flex items-center">
              <Button onClick={handleUploadProducts} layout="outline">
                Upload
              </Button>
              <div className="w-full">
                <CSVDownloader data={productData} filename={'products'}>
                  <Button className="w-full h-12">Download</Button>
                </CSVDownloader>
              </div>
            </div>
          </div>
        </CardBody>
      </Card>

      {/* Spinner while loading; table when results exist; placeholder otherwise. */}
      {loading ? (
        <Loading loading={loading} />
      ) : serviceData.length !== 0 ? (
        <TableContainer className="mb-8 rounded-b-lg">
          <Table>
            <TableHeader>
              <tr>
                <TableCell>SKU</TableCell>
                <TableCell>Product name</TableCell>
                <TableCell>Category</TableCell>
                <TableCell>Price</TableCell>
                <TableCell>Stock</TableCell>
                <TableCell>Status</TableCell>
                <TableCell>Discount</TableCell>
                <TableCell>Details</TableCell>
                <TableCell className="text-center">Published</TableCell>
                <TableCell className="text-right">Actions</TableCell>
              </tr>
            </TableHeader>
            <ProductTable products={dataTable} />
          </Table>
          <TableFooter>
            <Pagination
              totalResults={totalResults}
              resultsPerPage={resultsPerPage}
              onChange={handleChangePage}
              label="Product Page Navigation"
            />
          </TableFooter>
        </TableContainer>
      ) : (
        <NotFound title="Product" />
      )}
    </>
  );
};

export default Products;
#!/bin/bash
# Few-shot cross-lingual retrieval evaluation (IGLUE / xFlickrCO, Turkish):
# picks the best learning rate from the training logs, then runs
# eval_retrieval.py on the test split with the corresponding checkpoint.
# Paths are cluster-specific (ku_00062 project directories).

TASK=8
SHOT=1
LANG=tr
MODEL=ctrl_xuniter
MODEL_CONFIG=ctrl_xuniter_base
TASKS_CONFIG=iglue_test_tasks_boxes36.dtu
TRTASK=RetrievalxFlickrCO${LANG}_${SHOT}
TETASK=RetrievalxFlickrCO${LANG}
TEXT_PATH=/home/projects/ku_00062/data/xFlickrCO/annotations/${LANG}/test.jsonl
FEAT_PATH=/home/projects/ku_00062/data/xFlickrCO/features/xflickrco-test_boxes36.lmdb

here=$(pwd)
source /home/projects/ku_00062/envs/iglue/bin/activate

cd ../../../../../../volta

# Model selection: read the last-line validation score (4th space-separated
# field) from each train.<lr>.log and keep the learning rate with the
# highest score. bc handles the floating-point comparison.
best=-1
best_lr=-1
for lr in 1e-4 5e-5 1e-5; do
  f=${here}/train.${lr}.log
  s=`tail -n1 $f | cut -d ' ' -f 4`
  d=$(echo "$s>$best" | bc)
  if [[ $d -eq 1 ]]; then
    best=$s
    best_lr=$lr
  fi
done
echo "Best lr: " $best_lr

PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/few_shot/xflickrco/${TRTASK}/${MODEL}/${best_lr}/RetrievalFlickr30k_${MODEL_CONFIG}/pytorch_model_best.bin
OUTPUT_DIR=/home/projects/ku_00062/results/iglue/few_shot/xflickrco/${MODEL}/${best_lr}/${TRTASK}_${MODEL_CONFIG}/$TETASK/test

# Evaluate the selected checkpoint on the Turkish test split.
python eval_retrieval.py \
  --bert_model /home/projects/ku_00062/huggingface/xlm-roberta-base --config_file config/${MODEL_CONFIG}.json \
  --from_pretrained ${PRETRAINED} \
  --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK \
  --split test_${LANG} --batch_size 1 \
  --caps_per_image 1 --val_annotations_jsonpath ${TEXT_PATH} --val_features_lmdbpath ${FEAT_PATH} \
  --output_dir ${OUTPUT_DIR} \

deactivate
"""Count occurrences of each word in a list and print the tallies."""
from collections import Counter

words = ["cat", "dog", "bird", "cat", "dog", "ant"]

# Counter performs the per-word tallying that the previous manual
# if-in-dict/else loop did. Converting back to a plain dict keeps the
# printed repr identical: both Counter and the manual loop key entries in
# first-occurrence order.
wordCounts = dict(Counter(words))

# Print out the dictionary
print(wordCounts)
<filename>client/uni-app/mix-mall/pages/coupon/js/list.js
// Coupon list page: four tabs (all / unused / used / expired), each with
// its own lazily loaded, paginated coupon list fetched via getUserList.
import {getUserList} from '@/api/coupon'
import uniLoadMore from '@/components/uni-load-more/uni-load-more.vue';
import empty from "@/components/empty";
import coolcCoupon from '../components/coolc-coupon.vue';

export default {
    components: {
        uniLoadMore,
        empty,
        coolcCoupon
    },
    data() {
        return {
            // Index of the currently selected tab in navList.
            tabCurrentIndex: 0,
            // One entry per tab: its backend state code, label, load-more
            // indicator state, and the coupons loaded so far.
            navList: [{
                    state: 0,
                    text: '全部',
                    loadingType: 'more',
                    orderList: []
                },
                {
                    state: 1,
                    text: '未使用',
                    loadingType: 'more',
                    orderList: []
                },
                {
                    state: 2,
                    text: '已使用',
                    loadingType: 'more',
                    orderList: []
                },
                {
                    state: 3,
                    text: '已失效',
                    loadingType: 'more',
                    orderList: []
                }
            ],
            // Next page to request for the current tab.
            page:1,
        };
    },
    onLoad(options){
        this.loadData();
    },
    methods: {
        // Load one page of coupons for the current tab and append the
        // mapped results to that tab's orderList.
        async loadData(source,search){
            let index = this.tabCurrentIndex;
            let navItem = this.navList[index];
            let state = navItem.state;

            // When switching tabs, skip tabs that already loaded once.
            if(source === 'tabChange' && navItem.loaded === true){
                return;
            }
            if(navItem.loadingType === 'loading'){
                // Guard against duplicate concurrent loads.
                return;
            }
            if(navItem.loadingType === 'noMore'){
                // Bail out when there is no more data.
                return;
            }
            navItem.loadingType = 'loading';
            let userCouponList = []
            let that =this
            await getUserList({
                limit: 8,
                page: this.page,
                index: index
            },function(res){
                userCouponList = res.data
                if (res.last_page > that.page){
                    that.page ++
                    // More pages remain: keep loadingType at 'more';
                    // otherwise mark the list exhausted with 'noMore'.
                    that.$set(navItem, 'loadingType', 'more');
                } else {
                    that.$set(navItem, 'loadingType', 'noMore');
                }
                // Map each API coupon into the shape coolc-coupon expects;
                // cost is in minor units (divided by 100 for display) and
                // dates are reduced to "YYYY.MM.DD".
                userCouponList.forEach(item=>{
                    let data = {
                        id: item.coupon.id,
                        money: item.coupon.cost/100,
                        title: item.coupon.explain,
                        type: item.coupon.type,
                        url: '/pages/index/index',
                        end_time: item.failure_time ? item.failure_time.split(' ')[0].replace(/-/g,".") : item.coupon.endtime.split(' ')[0].replace(/-/g,"."),
                    }
                    // Translate the API state code to the widget's state:
                    // 1 → '2' (used-style), 2 → '3' (expired-style),
                    // anything else → '1' (usable).
                    if(item.state === 1){
                        data.state = '2'
                    } else if(item.state === 2){
                        data.state = '3'
                    }else{
                        data.state = '1'
                    }
                    navItem.orderList.push(data);
                })
                // The 'loaded' flag marks this tab as fetched at least
                // once; an empty orderList then shows the empty page.
                that.$set(navItem, 'loaded', true);
            })
        },
        // Swiper swipe handler: sync the tab index and load that tab.
        changeTab(e){
            this.tabCurrentIndex = e.target.current;
            this.loadData('tabChange');
        },
        // Top tab click: select the tab and reset pagination.
        tabClick(index){
            this.tabCurrentIndex = index
            this.page = 1
        }
    }
}
"""
Model-ready dataset for the United Kingdom's 2015 General Election.

Usage:
    > import maven
    > maven.get('general-election/UK/2015/model', data_directory='./data/')
"""
import os
from pathlib import Path
import shutil

import pandas as pd

import maven


class UK2015Model:
    """Generates model-ready data for the United Kingdom's 2015 General Election."""

    def __init__(self, directory=Path('data/general-election/UK/2015/model')):
        self.directory = Path(directory)

    def retrieve(self):
        """Will check to see if this already exists in directory tree, otherwise puts the datasets
        there by executing the necessary code from within this repo."""
        destination_target = self.directory / 'raw'
        os.makedirs(destination_target, exist_ok=True)  # create directory if it doesn't exist
        data_directory = (self.directory / '..' / '..' / '..' / '..').resolve()  # sensible guess
        data = [
            # (identifier, type, filename)
            ('general-election/UK/2010/results', 'processed', 'general_election-uk-2010-results.csv'),
            ('general-election/UK/2010/results', 'processed', 'general_election-uk-2010-results-full.csv'),
            ('general-election/UK/2015/results', 'processed', 'general_election-uk-2015-results.csv'),
            ('general-election/UK/2015/results', 'processed', 'general_election-uk-2015-results-full.csv'),
            ('general-election/UK/polls', 'processed', 'general_election-uk-polls.csv'),
            ('general-election/UK/polls', 'processed', 'general_election-london-polls.csv'),
            ('general-election/UK/polls', 'processed', 'general_election-scotland-polls.csv'),
            ('general-election/UK/polls', 'processed', 'general_election-wales-polls.csv'),
            ('general-election/UK/polls', 'processed', 'general_election-ni-polls.csv'),
        ]
        for identifier, data_type, filename in data:
            # BUGFIX: the source path must end in the dataset's filename, not a
            # placeholder, otherwise the is_file() check always fails and the
            # copy below raises FileNotFoundError.
            source_target = f"{identifier}/{data_type}/{filename}"
            if not (data_directory / source_target).is_file():
                print(f'Dataset {identifier} not found - retrieving now')
                maven.get(identifier, data_directory=data_directory)
            shutil.copyfile(src=data_directory / source_target, dst=destination_target / filename)

    def process(self):
        """Process results data from the United Kingdom's 2010 and 2015 General Elections
        into a single model-ready dataset for predicting the 2015 General Election."""
        processed_directory = (self.directory / 'processed')
        os.makedirs(processed_directory, exist_ok=True)  # create directory if it doesn't exist
        # TODO: Refactor these sections into functions to make it easier to read.

        #############
        # IMPORT DATA
        #############

        # Import general election results
        ge_2010 = pd.read_csv(self.directory / 'raw' / 'general_election-uk-2010-results.csv')
        ge_2010_full = pd.read_csv(self.directory / 'raw' / 'general_election-uk-2010-results-full.csv')
        ge_2015 = pd.read_csv(self.directory / 'raw' / 'general_election-uk-2015-results.csv')
        ge_2015_full = pd.read_csv(self.directory / 'raw' / 'general_election-uk-2015-results-full.csv')
        polls = pd.read_csv(self.directory / 'raw' / 'general_election-uk-polls.csv')

        # Check constituencies are mergeable
        assert set(ge_2010['Press Association Reference']).difference(set(ge_2015['Press Association ID Number'])) == set()
        assert set(ge_2015['Press Association ID Number']).difference(set(ge_2010['Press Association Reference'])) == set()
        assert len(ge_2010) == len(ge_2010['Press Association Reference']) == 650
        assert len(ge_2015) == len(ge_2015['Press Association ID Number']) == 650

        # Construct some lookups of the parties we want to model
        parties_lookup_2010 = {
            'Con': 'con',
            'Lab': 'lab',
            'LD': 'ld',
            'UKIP': 'ukip',
            'Grn': 'grn',
            'Other': 'other'
        }
        parties_15 = list(parties_lookup_2010.values())
        parties_lookup_2015 = {
            'C': 'con',
            'Lab': 'lab',
            'LD': 'ld',
            'UKIP': 'ukip',
            'Green': 'grn',
            'SNP': 'snp',
            'PC': 'pc',
            'Other': 'other'
        }
        parties_17 = list(parties_lookup_2015.values())

        ##############
        # 2015 POLLING
        ##############

        # Get 2015 polling
        pollsters = polls[(polls.to >= '2015-04-04') & (polls.to <= '2015-05-04')].company.unique()
        # Use single last poll from each pollster in final week of polling then average out
        polls = polls[(polls.to >= '2015-04-01') & (polls.to <= '2015-05-07')]
        # NOTE(review): `.loc[:0]` is label-based and *inclusive*, so if a row
        # labelled 0 survives the filter above it is seeded into `pop` before
        # the per-pollster tails are added — confirm this is intended.
        pop = polls.loc[:0]
        for p in pollsters:
            # pd.concat replaces the deprecated/removed DataFrame.append (pandas >= 2.0).
            pop = pd.concat([pop, polls[polls.company == p].tail(1)])

        # Create new polls dictionary by geo containing simple average across all pollsters
        polls = {'UK': {}}
        for p in ['con', 'lab', 'ld', 'ukip', 'grn']:
            polls['UK'][p] = pop[p].mean()
        polls['UK'] = pd.Series(polls['UK'])
        # Scotland, Wales, NI, London not available in 2015 data (we haven't extracted them yet!)

        # Add Other
        for geo in ['UK']:
            if 'other' not in polls[geo]:
                polls[geo]['other'] = 1 - sum(polls[geo])

        # Reweight to 100%
        for geo in ['UK']:
            polls[geo] = polls[geo] / polls[geo].sum()

        ##############
        # 2017 POLLING
        ##############
        # TODO: This is messy.
        # TODO: This should be in the polling processing pipeline.

        # Latest polling data
        polls_17 = {'UK': {}}
        polls_17_uk = pd.read_csv(self.directory / 'raw' / 'general_election-uk-polls.csv')
        # Filter to recent data
        polls_17_uk = polls_17_uk[polls_17_uk.to >= '2017-06-06']
        # Add parties (sample-size-weighted average per party)
        for p in ['con', 'lab', 'ld', 'ukip', 'grn', 'snp']:
            polls_17['UK'][p] = (polls_17_uk.sample_size * polls_17_uk[p]).sum() / polls_17_uk.sample_size.sum()
        polls_17['UK'] = pd.Series(polls_17['UK'], index=['con', 'lab', 'ld', 'ukip', 'snp', 'grn'])

        # Repeat for Scotland polling...
        polls_17['Scotland'] = {}
        polls_17_tmp = pd.read_csv(self.directory / 'raw' / 'general_election-scotland-polls.csv')
        polls_17_tmp = polls_17_tmp[polls_17_tmp.to >= '2017-06-05']
        for p in ['con', 'lab', 'ld', 'ukip', 'snp', 'grn']:
            polls_17['Scotland'][p] = (polls_17_tmp.sample_size * polls_17_tmp[p]).sum() / polls_17_tmp.sample_size.sum()
        polls_17['Scotland'] = pd.Series(polls_17['Scotland'], index=['con', 'lab', 'ld', 'ukip', 'snp', 'grn'])

        # ...and Wales
        polls_17['Wales'] = {}
        polls_17_tmp = pd.read_csv(self.directory / 'raw' / 'general_election-wales-polls.csv')
        polls_17_tmp = polls_17_tmp[polls_17_tmp.to >= '2017-06-07']
        for p in ['con', 'lab', 'ld', 'ukip', 'pc', 'grn']:
            polls_17['Wales'][p] = (polls_17_tmp.sample_size * polls_17_tmp[p]).sum() / polls_17_tmp.sample_size.sum()
        polls_17['Wales'] = pd.Series(polls_17['Wales'], index=['con', 'lab', 'ld', 'ukip', 'pc', 'grn'])

        # NI: take the single most recent poll
        polls_17['NI'] = (pd.read_csv(self.directory / 'raw' / 'general_election-ni-polls.csv')
                          .sort_values(by='to', ascending=False).iloc[0])
        # Collate all NI parties under other
        for k in polls_17['NI'].index:
            if k not in parties_17:
                del polls_17['NI'][k]
        del polls_17['NI']['other']

        # London
        polls_17['London'] = {}
        polls_17_tmp = pd.read_csv(self.directory / 'raw' / 'general_election-london-polls.csv')
        polls_17_tmp = polls_17_tmp[polls_17_tmp.to >= '2017-05-31']
        for p in ['con', 'lab', 'ld', 'ukip', 'grn']:
            polls_17['London'][p] = (polls_17_tmp.sample_size * polls_17_tmp[p]).sum() / polls_17_tmp.sample_size.sum()
        polls_17['London'] = pd.Series(polls_17['London'], index=['con', 'lab', 'ld', 'ukip', 'grn'])

        # Estimate polling for England excluding London
        survation_wts = {  # from http://survation.com/wp-content/uploads/2017/06/Final-MoS-Post-BBC-Event-Poll-020617SWCH-1c0d4h9.pdf
            'Scotland': 85,
            'England': 881,
            'Wales': 67,
            'London': 137,
            'NI': 16
        }
        survation_wts['England_not_london'] = survation_wts['England'] - survation_wts['London']
        survation_wts['UK'] = survation_wts['Scotland'] + survation_wts['England'] + survation_wts['Wales'] + survation_wts['NI']

        def calculate_england_not_london(party):
            """Back out England-ex-London polling from UK polling minus the weighted regional polls."""
            out = polls_17['UK'][party] * survation_wts['UK']
            for geo in ['Scotland', 'Wales', 'NI', 'London']:
                if party in polls_17[geo]:
                    out = out - polls_17[geo][party] * survation_wts[geo]
            out = out / survation_wts['England_not_london']
            return out

        polls_17['England_not_london'] = {'pc': 0, 'snp': 0}
        for party in ['con', 'lab', 'ld', 'ukip', 'grn']:
            polls_17['England_not_london'][party] = calculate_england_not_london(party)
        polls_17['England_not_london'] = pd.Series(polls_17['England_not_london'])

        # Fill in the gaps
        for geo in ['UK', 'Scotland', 'Wales', 'NI', 'London', 'England_not_london']:
            for party in ['con', 'lab', 'ld', 'ukip', 'grn', 'snp', 'pc']:
                if party not in polls_17[geo]:
                    # print("Adding {} to {}".format(party, geo))
                    polls_17[geo][party] = 0

        # Fix PC (Plaid Cymru) for UK
        polls_17['UK']['pc'] = polls_17['Wales']['pc'] * survation_wts['Wales'] / survation_wts['UK']

        # Add Other
        for geo in ['UK', 'Scotland', 'Wales', 'NI', 'London', 'England_not_london']:
            if 'other' not in polls_17[geo]:
                polls_17[geo]['other'] = 1 - sum(polls_17[geo])
        # This doesn't work for UK or England_not_london; set current other polling to match 2015 result
        polls_17['UK']['other'] = 0.03  # ge.other.sum() / ge['Valid Votes'].sum()
        polls_17['England_not_london']['other'] = 0.01  # ge[ge.geo == 'England_not_london'].other.sum() / ge[ge.geo == 'England_not_london']['Valid Votes'].sum()

        # Reweight to 100%
        for geo in ['UK', 'Scotland', 'Wales', 'NI', 'London', 'England_not_london']:
            polls_17[geo] = polls_17[geo] / polls_17[geo].sum()

        # Export polling data
        polls_15_csv = pd.DataFrame(columns=['con', 'lab', 'ld', 'ukip', 'grn', 'snp', 'pc', 'other'])
        for geo in polls:
            for party in polls[geo].index:
                polls_15_csv.loc[geo, party] = polls[geo].loc[party]
        # polls_15_csv.to_csv(polls_data_dir / 'final_polls_2015.csv', index=True)
        polls_17_csv = pd.DataFrame(columns=['con', 'lab', 'ld', 'ukip', 'grn', 'snp', 'pc', 'other'])
        for geo in polls_17:
            for party in polls_17[geo].index:
                polls_17_csv.loc[geo, party] = polls_17[geo].loc[party]
        # polls_17_csv.to_csv(polls_data_dir / 'final_polls_2017.csv', index=True)

        #############################
        # Calculate uplifts ("swing")
        #############################

        parties_15 = ['con', 'lab', 'ld', 'ukip', 'grn', 'other']
        parties_17 = ['con', 'lab', 'ld', 'ukip', 'grn', 'snp', 'pc', 'other']
        parties_lookup_2010 = {
            'Con': 'con',
            'Lab': 'lab',
            'LD': 'ld',
            'UKIP': 'ukip',
            'Grn': 'grn',
            'Other': 'other'
        }
        parties_lookup_2015 = {
            'C': 'con',
            'Lab': 'lab',
            'LD': 'ld',
            'UKIP': 'ukip',
            'Green': 'grn',
            'SNP': 'snp',
            'PC': 'pc',
            'Other': 'other'
        }

        # Calculate national voteshare in 2010
        ge_2010_totals = ge_2010.loc[:, ['Votes'] + parties_15].sum()
        ge_2010_voteshare = ge_2010_totals / ge_2010_totals['Votes']
        del ge_2010_voteshare['Votes']

        # Calculate swing between 2015 and latest smoothed polling
        swing = ge_2010_voteshare.copy()
        for party in parties_15:
            swing[party] = polls_15_csv.loc['UK', party] / ge_2010_voteshare[party] - 1
            ge_2010[party + '_swing'] = polls_15_csv.loc['UK', party] / ge_2010_voteshare[party] - 1

        # Forecast is previous result multiplied by swing uplift
        for party in parties_15:
            ge_2010[party + '_forecast'] = ge_2010[party + '_pc'] * (1 + swing[party])

        def pred_15(row):
            """Predicted 2015 winner: the party with the highest swing forecast in this row."""
            return row[[p + '_forecast' for p in parties_15]].sort_values(ascending=False).index[0].replace('_forecast', '')

        # ge_2010['win_10'] = ge_2010_full.apply(win_10, axis=1)
        # ge_2015['win_15'] = ge_2015_full.apply(win_15, axis=1)
        ge_2010['win_15'] = ge_2010.apply(pred_15, axis=1)
        # ge_2010.groupby('win_10').count()['Constituency Name'].sort_values(ascending=False)

        ########################################################
        # Calculate Geo-Level Voteshare + Swing inc. all parties
        ########################################################

        # Add geos
        geos = list(ge_2015.geo.unique())

        # Calculate geo-level voteshare in 2015
        ge_2015_totals = ge_2015.loc[:, ['Valid Votes', 'geo'] + parties_17].groupby('geo').sum()
        # Convert into vote share
        ge_2015_voteshare = ge_2015_totals.div(ge_2015_totals['Valid Votes'], axis=0)
        del ge_2015_voteshare['Valid Votes']

        # Calculate geo-swing (0.0 where the party had no 2015 voteshare to avoid divide-by-zero)
        swing_17 = ge_2015_voteshare.copy()
        for party in parties_17:
            for geo in geos:
                if ge_2015_voteshare.loc[geo][party] > 0:
                    out = polls_17[geo][party] / ge_2015_voteshare.loc[geo][party] - 1
                else:
                    out = 0.0
                swing_17.loc[geo, party] = out

        # Apply swing
        for party in parties_17:
            ge_2015[party + '_swing'] = ge_2015.apply(lambda row: swing_17.loc[row['geo']][party], axis=1)
            ge_2015[party + '_2017_forecast'] = ge_2015.apply(lambda x: x[party + '_pc'] * (1 + swing_17.loc[x['geo']][party]), axis=1)

        def win_17(row):
            """Predicted 2017 winner: the party with the highest 2017 forecast in this row."""
            return row[[p + '_2017_forecast' for p in parties_17]].sort_values(ascending=False).index[0].replace('_2017_forecast', '')

        ge_2015['win_17'] = ge_2015.apply(win_17, axis=1)

        ###########################
        # Create ML-ready dataframe
        ###########################

        parties = ['con', 'lab', 'ld', 'ukip', 'grn']
        act_15_lookup = {k: v for i, (k, v) in ge_2015[['Press Association ID Number', 'winner']].iterrows()}
        ge_2010['act_15'] = ge_2010['Press Association Reference'].map(act_15_lookup)
        pc_15_lookup = {
            p: {k: v for i, (k, v) in ge_2015[['Press Association ID Number', p + '_pc']].iterrows()}
            for p in parties
        }
        for p in parties:
            ge_2010[p + '_actual'] = ge_2010['Press Association Reference'].map(pc_15_lookup[p])
        df = ge_2010[['Press Association Reference', 'Constituency Name', 'Region', 'Electorate', 'Votes'] + parties]
        df = pd.melt(
            df,
            id_vars=['Press Association Reference', 'Constituency Name', 'Region', 'Electorate', 'Votes'],
            value_vars=parties,
            var_name='party',
            value_name='votes_last'
        )

        # pc_last
        pc_last = pd.melt(
            ge_2010[['Press Association Reference'] + [p + '_pc' for p in parties]],
            id_vars=['Press Association Reference'],
            value_vars=[p + '_pc' for p in parties],
            var_name='party',
            value_name='pc_last'
        )
        pc_last['party'] = pc_last.party.apply(lambda x: x.replace('_pc', ''))
        df = pd.merge(
            left=df,
            right=pc_last,
            how='left',
            on=['Press Association Reference', 'party']
        )

        # win_last
        win_last = ge_2010[['Press Association Reference', 'winner']]
        win_last.columns = ['Press Association Reference', 'win_last']
        df = pd.merge(
            left=df,
            right=win_last,
            on=['Press Association Reference']
        )

        # polls_now
        df['polls_now'] = df.party.map(polls['UK'])

        # swing_now
        swing_now = pd.melt(
            ge_2010[['Press Association Reference'] + [p + '_swing' for p in parties]],
            id_vars=['Press Association Reference'],
            value_vars=[p + '_swing' for p in parties],
            var_name='party',
            value_name='swing_now'
        )
        swing_now['party'] = swing_now.party.apply(lambda x: x.replace('_swing', ''))
        df = pd.merge(
            left=df,
            right=swing_now,
            how='left',
            on=['Press Association Reference', 'party']
        )

        # swing_forecast_pc
        swing_forecast_pc = pd.melt(
            ge_2010[['Press Association Reference'] + [p + '_forecast' for p in parties]],
            id_vars=['Press Association Reference'],
            value_vars=[p + '_forecast' for p in parties],
            var_name='party',
            value_name='swing_forecast_pc'
        )
        swing_forecast_pc['party'] = swing_forecast_pc.party.apply(lambda x: x.replace('_forecast', ''))
        df = pd.merge(
            left=df,
            right=swing_forecast_pc,
            how='left',
            on=['Press Association Reference', 'party']
        )

        # swing_forecast_win
        swing_forecast_win = ge_2010[['Press Association Reference', 'win_15']]
        swing_forecast_win.columns = ['Press Association Reference', 'swing_forecast_win']
        df = pd.merge(
            left=df,
            right=swing_forecast_win,
            on=['Press Association Reference']
        )

        # actual_win_now
        actual_win_now = ge_2010[['Press Association Reference', 'act_15']]
        actual_win_now.columns = ['Press Association Reference', 'actual_win_now']
        df = pd.merge(
            left=df,
            right=actual_win_now,
            on=['Press Association Reference']
        )

        # actual_pc_now
        actual_pc_now = pd.melt(
            ge_2010[['Press Association Reference'] + [p + '_actual' for p in parties]],
            id_vars=['Press Association Reference'],
            value_vars=[p + '_actual' for p in parties],
            var_name='party',
            value_name='actual_pc_now'
        )
        actual_pc_now['party'] = actual_pc_now.party.apply(lambda x: x.replace('_actual', ''))
        df = pd.merge(
            left=df,
            right=actual_pc_now,
            how='left',
            on=['Press Association Reference', 'party']
        )

        # dummy party
        df = pd.concat([df, pd.get_dummies(df.party)], axis=1)

        # dummy region
        df = pd.concat([df, pd.get_dummies(df.Region, prefix='Region')], axis=1)

        # won_here_last
        df['won_here_last'] = (df['party'] == df['win_last']).astype('int')

        # turnout
        df['turnout'] = df.Votes / df.Electorate

        ########################################
        # Export final 2010 -> 2015 training set
        ########################################

        print(f'Exporting 2010->2015 model dataset to {processed_directory.resolve()}')
        df.to_csv(processed_directory / 'general_election-uk-2015-model.csv', index=False)

        ######################
        # REPEAT FOR 2015-2017
        ######################
        # Recreate this training dataset using same column names for 2015 -> 2017 for a GE2017 forecast
        # TODO: Needs refactoring!

        # Add SNP and Plaid Cymru
        parties += ['snp', 'pc']
        df15 = ge_2015[['Press Association ID Number', 'Constituency Name', 'Region', 'geo', 'Electorate', 'Valid Votes'] + parties]
        df15.columns = ['Press Association ID Number', 'Constituency Name', 'Region', 'geo', 'Electorate', 'Votes'] + parties
        df15 = pd.melt(
            df15,
            id_vars=['Press Association ID Number', 'Constituency Name', 'Region', 'geo', 'Electorate', 'Votes'],
            value_vars=parties,
            var_name='party',
            value_name='votes_last'
        )

        # pc_last
        pc_last = pd.melt(
            ge_2015[['Press Association ID Number'] + [p + '_pc' for p in parties]],
            id_vars=['Press Association ID Number'],
            value_vars=[p + '_pc' for p in parties],
            var_name='party',
            value_name='pc_last'
        )
        pc_last['party'] = pc_last.party.apply(lambda x: x.replace('_pc', ''))
        df15 = pd.merge(
            left=df15,
            right=pc_last,
            how='left',
            on=['Press Association ID Number', 'party']
        )

        # win_last
        win_last = ge_2015[['Press Association ID Number', 'winner']]
        win_last.columns = ['Press Association ID Number', 'win_last']
        df15 = pd.merge(
            left=df15,
            right=win_last,
            on=['Press Association ID Number']
        )

        # polls_now <- USE REGIONAL POLLING! (Possibly a very bad idea, the regional UNS performed worse than national!)
        df15['polls_now'] = df15.apply(lambda row: polls_17[row.geo][row.party], axis=1)

        # swing_now
        swing_now = pd.melt(
            ge_2015[['Press Association ID Number'] + [p + '_swing' for p in parties]],
            id_vars=['Press Association ID Number'],
            value_vars=[p + '_swing' for p in parties],
            var_name='party',
            value_name='swing_now'
        )
        swing_now['party'] = swing_now.party.apply(lambda x: x.replace('_swing', ''))
        df15 = pd.merge(
            left=df15,
            right=swing_now,
            how='left',
            on=['Press Association ID Number', 'party']
        )

        # swing_forecast_pc
        swing_forecast_pc = pd.melt(
            ge_2015[['Press Association ID Number'] + [p + '_2017_forecast' for p in parties]],
            id_vars=['Press Association ID Number'],
            value_vars=[p + '_2017_forecast' for p in parties],
            var_name='party',
            value_name='swing_forecast_pc'
        )
        swing_forecast_pc['party'] = swing_forecast_pc.party.apply(lambda x: x.replace('_2017_forecast', ''))
        df15 = pd.merge(
            left=df15,
            right=swing_forecast_pc,
            how='left',
            on=['Press Association ID Number', 'party']
        )

        # swing_forecast_win
        swing_forecast_win = ge_2015[['Press Association ID Number', 'win_17']]
        swing_forecast_win.columns = ['Press Association ID Number', 'swing_forecast_win']
        df15 = pd.merge(
            left=df15,
            right=swing_forecast_win,
            on=['Press Association ID Number']
        )

        # dummy party
        df15 = pd.concat([df15, pd.get_dummies(df15.party)], axis=1)

        # dummy region
        df15 = pd.concat([df15, pd.get_dummies(df15.Region, prefix='Region')], axis=1)

        # won_here_last
        df15['won_here_last'] = (df15['party'] == df15['win_last']).astype('int')

        # turnout
        # BUGFIX: was `df.Votes / df.Electorate`, which divides columns from the
        # *2010* dataframe (different length/index) and misaligns or NaN-fills.
        df15['turnout'] = df15.Votes / df15.Electorate

        ##########################################
        # Export final 2015 -> 2017 prediction set
        ##########################################

        print(f'Exporting 2015->2017 model dataset to {processed_directory.resolve()}')
        df15.to_csv(processed_directory / 'general_election-uk-2017-model.csv', index=False)
#!/bin/bash
# Bootstrap script: compiles and runs the iwant entry class, handling
# Cygwin path translation so the same script works on Windows and Unix.
set -eu

# Convert a path to Windows form under Cygwin; pass it through unchanged elsewhere.
cyg() {
    local IN=$1
    case "$(uname)" in
        CYGWIN*)
            cygpath --windows -a "$IN"
            ;;
        *)
            echo "$IN"
            ;;
    esac
}

# Resolve the project root (three directories above this script) to an absolute path.
AS_SOMEONE=$(dirname "$0")/../../..
AS_SOMEONE=$(cd "$AS_SOMEONE" && pwd)
CYG_AS_SOMEONE=$(cyg "$AS_SOMEONE")
# TODO how to define this only once:
CACHED=$(cyg "$AS_SOMEONE/.i-cached")
CLASSES=$CACHED/.internal/entry-classes
CYG_CLASSES=$(cyg "$CLASSES")
mkdir -p "$CLASSES"
CYG_SRC=$(cyg "$AS_SOMEONE/with/java/org/fluentjava/iwant/entry/Iwant.java")

# Compile the single entry source into the cached classes directory.
javac -source 1.8 -g -d "$CYG_CLASSES" "$CYG_SRC"

# Run the entry class, forwarding the project root and all script arguments.
java \
 -Xmx1024m \
 -cp "$CYG_CLASSES" org.fluentjava.iwant.entry.Iwant "$CYG_AS_SOMEONE" "$@"
#ifndef SRC_GOLDSCRIPT_LINKED_H #define SRC_GOLDSCRIPT_LINKED_H #include "gs-memory.h" /** * @file This file provides an inheritable interface of a singly linked list. */ #define GOLDSCRIPT_LINKED_SYMBOL __gs_link #define GOLDSCRIPT_LINKED_HEAD \ int type; \ struct GOLDSCRIPT_LINKED_SYMBOL* next /** * @brief Represents the base struct used in constructing linked list * interfaces. */ struct GOLDSCRIPT_LINKED_SYMBOL { GOLDSCRIPT_LINKED_HEAD; }; typedef struct GOLDSCRIPT_LINKED_SYMBOL golds_linked_t; /** * @brief Function pointer used for generic finds on linked lists. */ typedef int (*golds_linked_check)(const golds_linked_t*); typedef void (*golds_linked_op)(golds_linked_t*); #define GOLDSCRIPT_LINKED_HAS_NEXT(node) (node->next != NULL) #define GOLDSCRIPT_LINKED_CAST(node) ((golds_linked_t*)node) #define GOLDSCRIPT_LINKED_CONN(n1, n2) (n1->next = n2) /** * @brief Macro that consumes a ptr to a \c golds_linked_t* to advance it to * the end. */ #define GOLDSCRIPT_LINKED_ADV_END(node) while((node) != NULL && (node)->next != NULL) node = (node)->next size_t golds_linked_len(golds_linked_t* lst); void golds_linked_put(golds_linked_t* lst, golds_linked_t* item); void golds_linked_append(golds_linked_t* lst, golds_linked_t* item); golds_linked_t* golds_linked_find(golds_linked_t* lst, golds_linked_check fn); void golds_linked_apply_each(golds_linked_t* lst, golds_linked_op fn); void golds_linked_del(golds_linked_t* lst); #endif // SRC_GOLDSCRIPT_LINKED_H
// Public entry point: aggregates the SDK's features onto a single
// `window.Tendarts` namespace object.
import init from './init';
import {
  linkDeviceClientData,
  geolocation,
  saveKeyInDevice,
  saveKeyInUser,
  isRegistered,
} from './devices';
import { subscribe, unsubscribe } from './push';
import { registration } from './ServiceWorker';
import { store, clear } from './utils';

window.Tendarts = {
  init,
  linkDeviceClientData,
  subscribe,
  unsubscribe,
  registration,
  clear,
  saveKeyInDevice,
  saveKeyInUser,
  geolocation,
  isRegistered,
  // Expose a read-only snapshot of the internal redux-style store.
  getState() {
    return store.getState();
  },
};
<reponame>kkoogqw/OpenItem<filename>review/controllers/submit.go package controllers import ( "net/http" "review/logger" "review/models" "review/request" "review/response" ) // a part of project controller // @Title GetOneSubmit // @Description 获取一个submit的信息 // @Param token header string true "user token get at login" // @Param submitId path string true "要获取的submit uuid" // @Success 200 {object} response.Default // @Failure 400 "invalid submit id" // @router /submit/:submitId [get] func (p *ProjectController) GetOneSubmit() { submitId := p.GetString(":submitId") if submitId == "" { p.respondJson(http.StatusBadRequest, response.FAIL, "invalid id") return } resp, code := models.GetOneSubmit(submitId) p.respondJson(http.StatusOK, code, "", resp) return } // @Title GetSubmitInStep // @Description 获取一个step下的所有submit // @Param token header string true "user token get at login" // @Param stepId path string true "step的uuid" // @Success 200 {object} response.Default // @Failure 400 "invalid step id" // @router /submits/:stepId [get] func (p *ProjectController) GetSubmitsInStep() { stepId := p.GetString(":stepId") if stepId == "" { p.respondJson(http.StatusBadRequest, response.FAIL, "invalid id") return } resp, code := models.GetStepSubmits(stepId) p.respondJson(http.StatusOK, code, "", resp) return } // @Title GetUserSubmitInStep // @Description 获取某个用户在指定step下的submit // @Param token header string true "user token get at login" // @Param json body request.GetUserSubmitsInStep true "用户&step信息" // @Success 200 {object} response.Default // @Failure 400 "invalid json" // @router /submits/user [post] func (p *ProjectController) GetUserSubmitInStep() { var req request.GetUserSubmitsInStep err := unmarshalBody(p.Ctx.Input.RequestBody, &req) if err != nil { p.respondJson(http.StatusBadRequest, response.FAIL, "parse body failed") return } resp, code := models.GetUserSubmitsInStep(&req) p.respondJson(http.StatusOK, code, "", resp) return } // @Title MakeOneSubmit // @Description 
创建一个新的submit // @Param token header string true "user token get at login" // @Param json body request.CreateSubmit true "新submit信息" // @Success 200 {object} response.Default // @Failure 400 "invalid json" // @router /submit [post] func (p *ProjectController) MakeOneSubmit() { var req request.CreateSubmit err := unmarshalBody(p.Ctx.Input.RequestBody, &req) if err != nil { p.respondJson(http.StatusBadRequest, response.FAIL, "parse body failed") return } user, err := parseUserToken(p.Ctx.Request.Header["Token"][0]) if err != nil { logger.Recorder.Warning("[user token] parse user token error: " + err.Error()) p.respondJson(http.StatusBadRequest, response.FAIL, "invalid token") return } req.UserId = user resp, code := models.MakeOneSubmit(&req) p.respondJson(http.StatusOK, code, "", resp) return } // @Title AppendContentInStep // @Description 在一个step中的content下追加新的材料(即用户在上传材料审核的历史记录) // @Param token header string true "user token get at login" // @Param json body request.AppendContentInSubmit true "上传的材料信息" // @Success 200 {object} response.Default // @Failure 400 "invalid json" // @router /submit/content [post] func (p *ProjectController) AppendContentInStep() { var req request.AppendContentInSubmit err := unmarshalBody(p.Ctx.Input.RequestBody, &req) if err != nil { p.respondJson(http.StatusBadRequest, response.FAIL, "parse body failed") return } resp, code := models.AppendContent(&req) p.respondJson(http.StatusOK, code, "", resp) return } // @Title WithdrawContentInStep // @Description 用户撤回某次提交的材料审核 // @Param token header string true "user token get at login" // @Param json body request.WithdrawContentInSubmit true "撤回的信息" // @Success 200 {object} response.Default // @Failure 400 "invalid json" // @router /submit/content [delete] func (p *ProjectController) WithdrawContentInStep() { var req request.WithdrawContentInSubmit err := unmarshalBody(p.Ctx.Input.RequestBody, &req) if err != nil { p.respondJson(http.StatusBadRequest, response.FAIL, "parse body failed") return } 
resp, code := models.WithdrawContent(&req) p.respondJson(http.StatusOK, code, "", resp) return } // @Title SetSubmitStatus // @Description 更改提交的状态(即管理员最终审核某次提交是否最终通过) // @Param token header string true "user token get at login" // @Param json body request.SetSubmitStatus true "设定的状态" // @Success 200 {object} response.Default // @Failure 400 "invalid json" // @router /submit [put] func (p *ProjectController) SetSubmitStatus() { var req request.SetSubmitStatus err := unmarshalBody(p.Ctx.Input.RequestBody, &req) if err != nil { p.respondJson(http.StatusBadRequest, response.FAIL, "parse body failed") return } code := models.SetSubmitStatus(&req) p.respondJson(http.StatusOK, code, "") return } // @Title DeleteSubmit // @Description 删除一次submit // @Param token header string true "user token get at login" // @Param submitId path string true "要删除的submit的uuid" // @Success 200 {object} response.Default // @Failure 400 "invalid submit id" // @router /submit/:submitId [delete] func (p *ProjectController) DeleteSubmit() { submitId := p.GetString(":submitId") if submitId == "" { p.respondJson(http.StatusBadRequest, response.FAIL, "invalid id") return } code := models.DeleteSubmit(submitId) p.respondJson(http.StatusOK, code, "") return }
package me.insidezhou.southernquiet.file.web.controller.advice;

import me.insidezhou.southernquiet.file.web.exception.NotFoundException;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ResponseStatus;
import org.springframework.web.bind.annotation.RestControllerAdvice;

/**
 * Global exception mapper for the file-web REST controllers: translates
 * domain exceptions into HTTP status codes via Spring's controller advice.
 */
@RestControllerAdvice
public class ExceptionAdvice {
    // NotFoundException -> empty 404 response (no body by design).
    @ResponseStatus(HttpStatus.NOT_FOUND)
    @ExceptionHandler(NotFoundException.class)
    public void notFoundException() {}
}
<reponame>vrn-dev/ts-server-template<filename>src/index.ts import { Config } from './types/config.types'; import { init, mongoConnect, errorPropsBuilder, errorDetails } from './utils'; import { json } from 'body-parser'; import express from 'express'; import cors from 'cors'; import { authMiddleWare } from './auth'; import { sign } from 'jsonwebtoken'; import monitor from 'express-status-monitor'; import { Merror, MerrorMiddleware } from 'express-merror'; (async () => { // Load Config from file let cfg: Config; try { cfg = init(); } catch (e) { console.error(e); } // Connect to DB try { await mongoConnect(cfg); } catch (e) { console.error(e); } // Start App const app = express(); const port = cfg.server.port; // App configs app.disable('x-powered-by'); app.use(json()); app.use(cors()); app.use(monitor()); app.use(authMiddleWare(cfg.server.jwtKey).unless({ path: ['/login', '/err'] })); app.get('/login', (req, res) => { const token = sign( { sub: '<KEY>', username: 'admin1', role: 'ADMIN', }, cfg.server.jwtKey, { expiresIn: '300 days', // TODO: cfg.server.jwtDuration }, ); res.json({ token }); }); app.get('/auth', (req, res) => { res.json({ message: (req as any).user }); }); app.get('/err', (res, req, next) => { throw new Merror(500, 'Something Happened', errorPropsBuilder(null, errorDetails())); }); app.use(MerrorMiddleware()); app.listen(port, () => console.log(` 🚀 Server listening on http://localhost:${port}`)); })();
<filename>demos/omicron/src/omicron.cpp
// Entry point for the omicron demo game.
#include "game.hpp"
#include <be/core/service_helpers.hpp>

// MSVC-specific auto-linking: select the debug or release variants of the
// engine libraries depending on the build configuration.
#ifdef BE_DEBUG
#pragma comment(lib, "glew-debug")
#pragma comment(lib, "core-debug")
#pragma comment(lib, "core-id-with-names-debug")
#else
#pragma comment(lib, "glew")
#pragma comment(lib, "core")
#pragma comment(lib, "core-id-with-names")
#endif

// Resolve the Game service, invoke it with the command-line arguments,
// and return its exit code.
int main(int argc, char** argv) {
   return be::service<o::Game>()(argc, argv).run();
}
// Styling hooks for elements tagged with .customClass.
.customClass {
  // Force full opacity on hover; !important overrides inline/third-party styles.
  &:hover {
    opacity: 1 !important;
  }

  // When active, add a full-size overlay pseudo-element.
  // NOTE(review): the ::after is absolutely positioned, which assumes an
  // ancestor establishes a positioning context — confirm the parent sets
  // position: relative (or similar).
  &.active {
    &::after {
      content: '';
      position: absolute;
      top: 0;
      right: 0;
      width: 100%;
      height: 100%;
    }
  }
}
<gh_stars>0
package be.crydust.tokenreplacer;

import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;

/**
 * Tests for {@code FilesFinder}: verifies recursive glob matching of
 * {@code *.template} files under a temporary folder, and the behavior of
 * exclude patterns.
 *
 * @author kristof
 */
public class FilesFinderTest {

    // Fresh temporary directory created for each test.
    @Rule
    public TemporaryFolder folder = new TemporaryFolder();

    // An empty folder yields no matches.
    @Test
    public void testEmpty() {
        FilesFinder cut = new FilesFinder(folder.getRoot().toPath(), "**/*.template", new String[0]);
        List<Path> files = cut.call();
        assertThat(files, is(empty()));
    }

    // A single template file at the root is found.
    @Test
    public void testOneFile() throws IOException {
        folder.newFile("a.template");
        FilesFinder cut = new FilesFinder(folder.getRoot().toPath(), "**/*.template", new String[0]);
        List<Path> files = cut.call();
        assertThat(files.size(), is(1));
    }

    // Matching recurses into subfolders.
    @Test
    public void testTwoFiles() throws IOException {
        folder.newFile("a.template");
        File subFolder = folder.newFolder();
        new File(subFolder, "b.template").createNewFile();
        FilesFinder cut = new FilesFinder(folder.getRoot().toPath(), "**/*.template", new String[0]);
        List<Path> files = cut.call();
        assertThat(files.size(), is(2));
    }

    // With no exclude patterns, every template in every subfolder is found.
    @Test
    public void testExcludeNothing() throws IOException {
        folder.newFile("1.template");
        folder.newFolder("tmp");
        folder.newFile("tmp/2.template");
        folder.newFolder("xxx");
        folder.newFile("xxx/3.template");
        FilesFinder cut = new FilesFinder(folder.getRoot().toPath(), "**/*.template", new String[0]);
        List<Path> files = cut.call();
        assertThat(files.size(), is(3));
    }

    // A single exclude glob removes the matching subtree from the results.
    @Test
    public void testExcludeOne() throws IOException {
        folder.newFile("1.template");
        folder.newFolder("tmp");
        folder.newFile("tmp/excluded.template");
        folder.newFolder("xxx");
        folder.newFile("xxx/2.template");
        FilesFinder cut = new FilesFinder(folder.getRoot().toPath(), "**/*.template", new String[]{"**/tmp/**"});
        List<Path> files = cut.call();
        assertThat(files.size(), is(2));
    }

    // Multiple exclude globs are all applied.
    @Test
    public void testExcludeTwo() throws IOException {
        folder.newFile("1.template");
        folder.newFolder("tmp");
        folder.newFile("tmp/excluded.template");
        folder.newFolder("xxx");
        folder.newFile("xxx/excluded.template");
        FilesFinder cut = new FilesFinder(folder.getRoot().toPath(), "**/*.template", new String[]{"**/tmp/**", "**/xxx/**"});
        List<Path> files = cut.call();
        assertThat(files.size(), is(1));
    }

    // Documents current behavior with glob metacharacters in a folder name:
    // the unescaped pattern "**/a[]!{},b/**" does not match the literal folder
    // "a[]!{},b", so its file is NOT excluded — hence 2 results, not 1.
    @Test
    public void testExcludeEscape() throws IOException {
        folder.newFile("1.template");
        folder.newFolder("tmp");
        folder.newFile("tmp/excluded.template");
        folder.newFolder("a[]!{},b");
        folder.newFile("a[]!{},b/excluded.template");
        FilesFinder cut = new FilesFinder(folder.getRoot().toPath(), "**/*.template", new String[]{"**/tmp/**", "**/a[]!{},b/**"});
        List<Path> files = cut.call();
        assertThat(files.size(), is(2));
    }
}
<gh_stars>1-10
"use strict";
// Auto-generated icon definition for the ionicons "ios-checkmark" glyph:
// an SVG viewBox plus a nested element/attribute tree consumed by an icon
// renderer.
// NOTE(review): the path element contains an identical nested copy of itself
// as a child — presumably a generator artifact; confirm before hand-editing.
Object.defineProperty(exports, "__esModule", { value: true });
exports.iosCheckmark = void 0;
var iosCheckmark = { "viewBox": "0 0 512 512", "children": [{ "name": "g", "attribs": {}, "children": [{ "name": "path", "attribs": { "d": "M256,48C141.1,48,48,141.1,48,256s93.1,208,208,208c114.9,0,208-93.1,208-208S370.9,48,256,48z M223.9,329.7\r\n\t\tc-2.4,2.4-5.8,4.4-8.8,4.4s-6.4-2.1-8.9-4.5l-56-56l17.8-17.8l47.2,47.2l124.8-125.7l17.5,18.1L223.9,329.7z" }, "children": [{ "name": "path", "attribs": { "d": "M256,48C141.1,48,48,141.1,48,256s93.1,208,208,208c114.9,0,208-93.1,208-208S370.9,48,256,48z M223.9,329.7\r\n\t\tc-2.4,2.4-5.8,4.4-8.8,4.4s-6.4-2.1-8.9-4.5l-56-56l17.8-17.8l47.2,47.2l124.8-125.7l17.5,18.1L223.9,329.7z" }, "children": [] }] }] }] };
exports.iosCheckmark = iosCheckmark;
## 2. Head or Tail ## /home/dq/rg_data$ tail 'Physical Sciences' ## 3. Option-arguments ## /home/dq/rg_data$ tail -n +2 Arts ## 4. Counting Lines ## /home/dq/rg_data$ answer 12 1469 20 737 17 ## 5. Pretty Printing ## /home/dq/rg_data$ column -s":" -t characters ## 6. File Sample ## /home/dq/rg_data$ shuf -n 5 Engineering ## 7. Types of Files ## /home/dq/files$ file *
<reponame>OSWeDev/oswedev<gh_stars>0
import ServerAPIController from '../../../server/modules/API/ServerAPIController';
import APIControllerWrapper from '../../../shared/modules/API/APIControllerWrapper';

// The API controller must be wired before any module import triggers API registration.
APIControllerWrapper.API_CONTROLLER = ServerAPIController.getInstance();

import { expect, assert } from 'chai';
import 'mocha';
import ContextFilterTestsTools from './tools/ContextFilterTestsTools';
import VOsTypesManager from '../../../shared/modules/VOsTypesManager';
import UserVO from '../../../shared/modules/AccessPolicy/vos/UserVO';
import ContextFilterServerController from '../../../server/modules/ContextFilter/ContextFilterServerController';
import ModuleTable from '../../../shared/modules/ModuleTable';
import LangVO from '../../../shared/modules/Translation/vos/LangVO';
import UserRoleVO from '../../../shared/modules/AccessPolicy/vos/UserRoleVO';
import RoleVO from '../../../shared/modules/AccessPolicy/vos/RoleVO';
import AnonymizationUserConfVO from '../../../shared/modules/Anonymization/vos/AnonymizationUserConfVO';
import AnonymizationFieldConfVO from '../../../shared/modules/Anonymization/vos/AnonymizationFieldConfVO';
import FieldPathWrapper from '../../../server/modules/ContextFilter/vos/FieldPathWrapper';

// Tests for ContextFilterServerController.updates_jointures: given a path of
// foreign-key fields, it must produce the expected SQL join clauses, register
// the joined tables, assign sequential table aliases (t0, t1, ...) and return
// the next free alias number. The exact strings and alias numbering are the
// contract under test — do not reorder expectations.
describe('ContextFilterServer', () => {

    //#region test_updates_jointures

    /**
     * Test 1:
     * from user to lang via user.lang_id
     */
    it('test updates_jointures - User => Lang', async () => {
        ContextFilterTestsTools.getInstance().declare_modultables();
        let user_modultable = VOsTypesManager.getInstance().moduleTables_by_voType[UserVO.API_TYPE_ID];

        let jointures: string[] = [];
        let joined_tables_by_vo_type: { [vo_type: string]: ModuleTable<any> } = {};
        let aliases_n: number = 1;
        let tables_aliases_by_type: { [vo_type: string]: string } = {
            user: 't0'
        };

        aliases_n = await ContextFilterServerController.getInstance().updates_jointures(
            jointures,
            LangVO.API_TYPE_ID,
            null,
            joined_tables_by_vo_type,
            tables_aliases_by_type,
            [
                new FieldPathWrapper(user_modultable.getFieldFromId('lang_id'), true)
            ],
            aliases_n
        );

        expect(jointures).to.deep.equal([
            'ref.lang t1 on t1.id = t0.lang_id'
        ]);
        expect(joined_tables_by_vo_type).to.deep.equal({
            [LangVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[LangVO.API_TYPE_ID]
        });
        expect(tables_aliases_by_type).to.deep.equal({
            [LangVO.API_TYPE_ID]: 't1',
            [UserVO.API_TYPE_ID]: 't0'
        });
        expect(aliases_n).to.equal(2);
    });

    /**
     * Test 2:
     * from lang to user via user.lang_id (same field, reversed direction)
     */
    it('test updates_jointures - Lang => User', async () => {
        ContextFilterTestsTools.getInstance().declare_modultables();
        let user_modultable = VOsTypesManager.getInstance().moduleTables_by_voType[UserVO.API_TYPE_ID];

        let jointures: string[] = [];
        let joined_tables_by_vo_type: { [vo_type: string]: ModuleTable<any> } = {};
        let aliases_n = 1;
        let tables_aliases_by_type = {
            [LangVO.API_TYPE_ID]: 't0'
        };

        aliases_n = await ContextFilterServerController.getInstance().updates_jointures(
            jointures,
            UserVO.API_TYPE_ID,
            null,
            joined_tables_by_vo_type,
            tables_aliases_by_type,
            [
                new FieldPathWrapper(user_modultable.getFieldFromId('lang_id'), false)
            ],
            aliases_n
        );

        expect(jointures).to.deep.equal([
            'ref.user t1 on t1.lang_id = t0.id'
        ]);
        expect(joined_tables_by_vo_type).to.deep.equal({
            [UserVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[UserVO.API_TYPE_ID]
        });
        expect(tables_aliases_by_type).to.deep.equal({
            [UserVO.API_TYPE_ID]: 't1',
            [LangVO.API_TYPE_ID]: 't0'
        });
        expect(aliases_n).to.equal(2);
    });

    /**
     * Test 3:
     * from user to role via userrole.user_id + userrole.role_id (two-hop path
     * through the join table, in a single call)
     */
    it('test updates_jointures - User => Role', async () => {
        ContextFilterTestsTools.getInstance().declare_modultables();
        let userrole_modultable = VOsTypesManager.getInstance().moduleTables_by_voType[UserRoleVO.API_TYPE_ID];

        let jointures: string[] = [];
        let joined_tables_by_vo_type: { [vo_type: string]: ModuleTable<any> } = {};
        let aliases_n = 1;
        let tables_aliases_by_type = {
            [UserVO.API_TYPE_ID]: 't0'
        };

        aliases_n = await ContextFilterServerController.getInstance().updates_jointures(
            jointures,
            RoleVO.API_TYPE_ID,
            null,
            joined_tables_by_vo_type,
            tables_aliases_by_type,
            [
                new FieldPathWrapper(userrole_modultable.getFieldFromId('user_id'), false),
                new FieldPathWrapper(userrole_modultable.getFieldFromId('role_id'), true)
            ],
            aliases_n
        );

        expect(jointures).to.deep.equal([
            'ref.userroles t1 on t1.user_id = t0.id',
            'ref.role t2 on t2.id = t1.role_id'
        ]);
        expect(joined_tables_by_vo_type).to.deep.equal({
            [UserRoleVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[UserRoleVO.API_TYPE_ID],
            [RoleVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[RoleVO.API_TYPE_ID],
        });
        expect(tables_aliases_by_type).to.deep.equal({
            [RoleVO.API_TYPE_ID]: 't2',
            [UserVO.API_TYPE_ID]: 't0',
            [UserRoleVO.API_TYPE_ID]: 't1'
        });
        expect(aliases_n).to.equal(3);
    });

    /**
     * Test 4:
     * from userroles to role and user in 2 steps via userrole.user_id + userrole.role_id
     * (verifies that state accumulates correctly across successive calls)
     */
    it('test updates_jointures - UserRole => Role & User', async () => {
        ContextFilterTestsTools.getInstance().declare_modultables();
        let userrole_modultable = VOsTypesManager.getInstance().moduleTables_by_voType[UserRoleVO.API_TYPE_ID];

        let jointures: string[] = [];
        let joined_tables_by_vo_type: { [vo_type: string]: ModuleTable<any> } = {};
        let aliases_n = 1;
        let tables_aliases_by_type = {
            [UserRoleVO.API_TYPE_ID]: 't0'
        };

        aliases_n = await ContextFilterServerController.getInstance().updates_jointures(
            jointures,
            UserVO.API_TYPE_ID,
            null,
            joined_tables_by_vo_type,
            tables_aliases_by_type,
            [
                new FieldPathWrapper(userrole_modultable.getFieldFromId('user_id'), true),
            ],
            aliases_n
        );

        expect(jointures).to.deep.equal([
            'ref.user t1 on t1.id = t0.user_id'
        ]);
        expect(joined_tables_by_vo_type).to.deep.equal({
            [UserVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[UserVO.API_TYPE_ID],
        });
        expect(tables_aliases_by_type).to.deep.equal({
            [UserVO.API_TYPE_ID]: 't1',
            [UserRoleVO.API_TYPE_ID]: 't0'
        });
        expect(aliases_n).to.equal(2);

        // step 2
        aliases_n = await ContextFilterServerController.getInstance().updates_jointures(
            jointures,
            RoleVO.API_TYPE_ID,
            null,
            joined_tables_by_vo_type,
            tables_aliases_by_type,
            [
                new FieldPathWrapper(userrole_modultable.getFieldFromId('role_id'), true)
            ],
            aliases_n
        );

        expect(jointures).to.deep.equal([
            'ref.user t1 on t1.id = t0.user_id',
            'ref.role t2 on t2.id = t0.role_id'
        ]);
        expect(joined_tables_by_vo_type).to.deep.equal({
            [UserVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[UserVO.API_TYPE_ID],
            [RoleVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[RoleVO.API_TYPE_ID],
        });
        expect(tables_aliases_by_type).to.deep.equal({
            [RoleVO.API_TYPE_ID]: 't2',
            [UserVO.API_TYPE_ID]: 't1',
            [UserRoleVO.API_TYPE_ID]: 't0'
        });
        expect(aliases_n).to.equal(3);
    });

    /**
     * Test 5:
     * from UserRoleVO to AnonymizationFieldConfVO via
     * userrole.user_id => AnonymizationUserConfVO.user_id => AnonymizationUserConfVO.anon_field_id
     */
    it('test updates_jointures - UserRoleVO => AnonymizationFieldConfVO', async () => {
        ContextFilterTestsTools.getInstance().declare_modultables();
        let UserRoleVO_modultable = VOsTypesManager.getInstance().moduleTables_by_voType[UserRoleVO.API_TYPE_ID];
        let AnonymizationUserConfVO_modultable = VOsTypesManager.getInstance().moduleTables_by_voType[AnonymizationUserConfVO.API_TYPE_ID];

        let jointures: string[] = [];
        let joined_tables_by_vo_type: { [vo_type: string]: ModuleTable<any> } = {};
        let aliases_n = 1;
        let tables_aliases_by_type = {
            [UserRoleVO.API_TYPE_ID]: 't0'
        };

        aliases_n = await ContextFilterServerController.getInstance().updates_jointures(
            jointures,
            AnonymizationFieldConfVO.API_TYPE_ID,
            null,
            joined_tables_by_vo_type,
            tables_aliases_by_type,
            [
                new FieldPathWrapper(UserRoleVO_modultable.getFieldFromId('user_id'), true),
                new FieldPathWrapper(AnonymizationUserConfVO_modultable.getFieldFromId('user_id'), false),
                new FieldPathWrapper(AnonymizationUserConfVO_modultable.getFieldFromId('anon_field_id'), true)
            ],
            aliases_n
        );

        expect(jointures).to.deep.equal([
            'ref.user t1 on t1.id = t0.user_id',
            'ref.anonym_user_conf t2 on t2.user_id = t1.id',
            'ref.anonym_field_conf t3 on t3.id = t2.anon_field_id'
        ]);
        expect(joined_tables_by_vo_type).to.deep.equal({
            [UserVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[UserVO.API_TYPE_ID],
            [AnonymizationUserConfVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[AnonymizationUserConfVO.API_TYPE_ID],
            [AnonymizationFieldConfVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[AnonymizationFieldConfVO.API_TYPE_ID],
        });
        expect(tables_aliases_by_type).to.deep.equal({
            [AnonymizationFieldConfVO.API_TYPE_ID]: 't3',
            [AnonymizationUserConfVO.API_TYPE_ID]: 't2',
            [UserVO.API_TYPE_ID]: 't1',
            [UserRoleVO.API_TYPE_ID]: 't0'
        });
        expect(aliases_n).to.equal(4);
    });

    /**
     * Test 6:
     * from RoleVO to AnonymizationFieldConfVO via
     * userrole.role_id => userrole.user_id => AnonymizationUserConfVO.user_id => AnonymizationUserConfVO.anon_field_id
     */
    it('test updates_jointures - RoleVO => AnonymizationFieldConfVO', async () => {
        ContextFilterTestsTools.getInstance().declare_modultables();
        let UserRoleVO_modultable = VOsTypesManager.getInstance().moduleTables_by_voType[UserRoleVO.API_TYPE_ID];
        let AnonymizationUserConfVO_modultable = VOsTypesManager.getInstance().moduleTables_by_voType[AnonymizationUserConfVO.API_TYPE_ID];

        let jointures: string[] = [];
        let joined_tables_by_vo_type: { [vo_type: string]: ModuleTable<any> } = {};
        let aliases_n = 1;
        let tables_aliases_by_type = {
            [RoleVO.API_TYPE_ID]: 't0'
        };

        aliases_n = await ContextFilterServerController.getInstance().updates_jointures(
            jointures,
            AnonymizationFieldConfVO.API_TYPE_ID,
            null,
            joined_tables_by_vo_type,
            tables_aliases_by_type,
            [
                new FieldPathWrapper(UserRoleVO_modultable.getFieldFromId('role_id'), false),
                new FieldPathWrapper(UserRoleVO_modultable.getFieldFromId('user_id'), true),
                new FieldPathWrapper(AnonymizationUserConfVO_modultable.getFieldFromId('user_id'), false),
                new FieldPathWrapper(AnonymizationUserConfVO_modultable.getFieldFromId('anon_field_id'), true)
            ],
            aliases_n
        );

        expect(jointures).to.deep.equal([
            'ref.userroles t1 on t1.role_id = t0.id',
            'ref.user t2 on t2.id = t1.user_id',
            'ref.anonym_user_conf t3 on t3.user_id = t2.id',
            'ref.anonym_field_conf t4 on t4.id = t3.anon_field_id'
        ]);
        expect(joined_tables_by_vo_type).to.deep.equal({
            [UserRoleVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[UserRoleVO.API_TYPE_ID],
            [UserVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[UserVO.API_TYPE_ID],
            [AnonymizationUserConfVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[AnonymizationUserConfVO.API_TYPE_ID],
            [AnonymizationFieldConfVO.API_TYPE_ID]: VOsTypesManager.getInstance().moduleTables_by_voType[AnonymizationFieldConfVO.API_TYPE_ID],
        });
        expect(tables_aliases_by_type).to.deep.equal({
            [AnonymizationFieldConfVO.API_TYPE_ID]: 't4',
            [AnonymizationUserConfVO.API_TYPE_ID]: 't3',
            [UserVO.API_TYPE_ID]: 't2',
            [UserRoleVO.API_TYPE_ID]: 't1',
            [RoleVO.API_TYPE_ID]: 't0'
        });
        expect(aliases_n).to.equal(5);
    });

    //#endregion test_updates_jointures
});
def evaluate_integer(num):
    """Print whether *num* is negative, positive, or zero.

    Parameters
    ----------
    num : int
        The integer to classify.
    """
    if num < 0:
        print(num, "is negative.")
    elif num > 0:
        print(num, "is positive.")
    else:
        print(num, "is zero.")


if __name__ == "__main__":
    # BUGFIX: the interactive prompt previously ran at module level, so merely
    # importing this module blocked on stdin. Guarding it preserves the
    # script's behavior while making the module safely importable.
    num = int(input("Enter an integer: "))
    evaluate_integer(num)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
JupyX: Jupyter UI for X-ray data analysis
-----------------------------------------

Small helpers for detecting and configuring an IPython/Jupyter environment.
"""

#################
# IPython utils #
#################


def ipythonAutoreload():
    """Force IPython to autoreload imported modules.

    Uses :meth:`run_line_magic`, the documented replacement for the
    deprecated ``InteractiveShell.magic`` method used previously.
    """
    from IPython import get_ipython
    ip = get_ipython()
    ip.run_line_magic('load_ext', 'autoreload')
    ip.run_line_magic('autoreload', '2')


def run_from_ipython():
    """Check if inside ipython -> see :func:`is_in_notebook`"""
    try:
        # __IPYTHON__ is injected into builtins only when running under IPython.
        __IPYTHON__  # noqa: F821
        return True
    except NameError:
        return False


def is_in_notebook():
    """Check if code is run from an IPython notebook.

    Returns True only for a ZMQ-based shell (Jupyter notebook or qtconsole);
    terminal IPython and the plain interpreter return False.

    .. note:: code from StackOverflow
       `https://stackoverflow.com/questions/15411967/how-can-i-check-if-code-is-executed-in-the-ipython-notebook/24937408#24937408`_
    """
    try:
        # get_ipython is available as a builtin only inside IPython sessions.
        shell = get_ipython().__class__.__name__  # noqa: F821
        if shell == 'ZMQInteractiveShell':
            return True   # Jupyter notebook or qtconsole
        elif shell == 'TerminalInteractiveShell':
            return False  # Terminal running IPython
        else:
            return False  # Other type (?)
    except NameError:
        return False      # Probably standard Python interpreter
#!/usr/bin/env bash # Copyright 2017 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. set -o errexit set -o nounset set -o pipefail SCRIPT_ROOT=$(dirname "${BASH_SOURCE[0]}")/.. CODEGEN_PKG=${CODEGEN_PKG:-$(cd "${SCRIPT_ROOT}"; ls -d -1 ./vendor/k8s.io/code-generator 2>/dev/null || echo ../code-generator)} # generate the code with: # --output-base because this script should also be able to run inside the vendor dir of # k8s.io/kubernetes. The output-base is needed for the generators to output into the vendor dir # instead of the $GOPATH directly. For normal projects this can be dropped. ../"${CODEGEN_PKG}"/generate-groups.sh "deepcopy,client,informer,lister" \ github.com/somi3k/ghost-operator/pkg/client github.com/somi3k/ghost-operator/pkg/apis \ ghostcontroller:v1alpha1 \ --output-base "$(dirname "${BASH_SOURCE[0]}")/../../../.." \ --go-header-file "${SCRIPT_ROOT}"/hack/boilerplate.go.txt # To use your own boilerplate text append: # --go-header-file "${SCRIPT_ROOT}"/hack/custom-boilerplate.go.txt
<filename>src/facade/evaluatorFacade.ts
import Evaluator, {Campaign, EvaluationContext, Page} from '../evaluator';
import {JsonObject, JsonValue} from '../json';
import Tab from '../tab';
import {optionsSchema} from '../schema/evaluationSchemas';
import {formatCause} from '../error';

/**
 * Options accepted by {@link EvaluatorFacade.evaluate}: an optional timeout
 * and optional custom attributes folded into the evaluation context.
 */
export type EvaluationOptions = {
    timeout?: number,
    attributes?: JsonObject,
};

// Narrows an unknown value to EvaluationOptions, throwing a descriptive
// error when it is not an object or fails schema validation.
function validate(options: unknown): asserts options is EvaluationOptions {
    if (typeof options !== 'object' || options === null) {
        throw new Error('The options must be an object.');
    }

    try {
        optionsSchema.validate(options);
    } catch (violation) {
        throw new Error(`Invalid options: ${formatCause(violation)}`);
    }
}

// Strategy for building the evaluation context sent with each expression.
export interface ContextFactory {
    createContext(attributes?: JsonObject): EvaluationContext;
}

/**
 * Thin facade over {@link Evaluator}: validates caller-supplied input and
 * attaches an evaluation context produced by a {@link ContextFactory}.
 */
export default class EvaluatorFacade {
    private readonly evaluator: Evaluator;

    private readonly contextFactory: ContextFactory;

    public constructor(evaluator: Evaluator, contextFactory: ContextFactory) {
        this.evaluator = evaluator;
        this.contextFactory = contextFactory;
    }

    // Evaluates `expression` with the given options; rejects an empty or
    // non-string expression and invalid options before delegating.
    public evaluate(expression: string, options: EvaluationOptions = {}): Promise<JsonValue> {
        if (typeof expression !== 'string' || expression.length === 0) {
            throw new Error('The expression must be a non-empty string.');
        }

        validate(options);

        return this.evaluator.evaluate(expression, {
            timeout: options.timeout,
            context: this.contextFactory.createContext(options.attributes),
        });
    }
}

// Minimal factory: the context carries only the caller-provided attributes.
export class MinimalContextFactory implements ContextFactory {
    public createContext(attributes?: JsonObject): EvaluationContext {
        if (attributes === undefined) {
            return {};
        }

        return {attributes: attributes};
    }
}

/**
 * Factory that enriches the context with data from the current browser tab:
 * page title/URL/referrer, the user agent's timezone, and UTM campaign
 * parameters parsed from the tab's URL.
 */
export class TabContextFactory implements ContextFactory {
    private readonly tab: Tab;

    public constructor(tab: Tab) {
        this.tab = tab;
    }

    public createContext(attributes?: JsonObject): EvaluationContext {
        const url = new URL(this.tab.url);
        const context: EvaluationContext = {};
        const page: Page = {
            title: this.tab.title,
            url: url.toString(),
        };

        const {referrer} = this.tab;

        // Include the referrer only when the tab actually has one.
        if (referrer.length > 0) {
            page.referrer = referrer;
        }

        context.page = page;

        // IANA timezone reported by the runtime, when available.
        const timezone = Intl.DateTimeFormat().resolvedOptions().timeZone || null;

        if (timezone !== null) {
            context.timezone = timezone;
        }

        const campaign = TabContextFactory.createCampaign(url);

        // Empty sub-objects are omitted so the serialized context stays small.
        if (Object.keys(campaign).length > 0) {
            context.campaign = campaign;
        }

        if (attributes !== undefined && Object.keys(attributes).length > 0) {
            context.attributes = attributes;
        }

        return context;
    }

    // Extracts UTM query parameters (matched case-insensitively) into a
    // Campaign object; later duplicates overwrite earlier values.
    private static createCampaign(url: URL): Campaign {
        const campaign: Campaign = {};

        for (const [parameter, value] of url.searchParams.entries()) {
            switch (parameter.toLowerCase()) {
                case 'utm_campaign':
                    campaign.name = value;
                    break;

                case 'utm_source':
                    campaign.source = value;
                    break;

                case 'utm_term':
                    campaign.term = value;
                    break;

                case 'utm_medium':
                    campaign.medium = value;
                    break;

                case 'utm_content':
                    campaign.content = value;
                    break;
            }
        }

        return campaign;
    }
}