text
stringlengths
1
1.05M
#!/bin/bash
# Scenario#1: create keyvault, key, and DiskEncryptionSet for disk encryption
# with a customer-managed key. Fill in the four empty names below before running.
#
# Scenario#2 (bottom of file): rotate the key the DiskEncryptionSet uses.
set -euo pipefail

rgName=
location=southcentralus
keyVaultName=
keyName=
diskEncryptionSetName=

az group create -g "$rgName" -l "$location"
# Purge protection and soft delete are required on vaults backing a DiskEncryptionSet.
az keyvault create -n "$keyVaultName" -g "$rgName" -l "$location" --enable-purge-protection true --enable-soft-delete true
az keyvault key create --vault-name "$keyVaultName" -n "$keyName" --protection software

# Quote the JMESPath expression: an unquoted [id] is a shell glob pattern.
keyVaultId=$(az keyvault show --name "$keyVaultName" --query '[id]' -o tsv)
keyVaultKeyUrl=$(az keyvault key show --vault-name "$keyVaultName" --name "$keyName" --query key.kid -o tsv)

az disk-encryption-set create -n "$diskEncryptionSetName" -g "$rgName" --key-url "$keyVaultKeyUrl" --source-vault "$keyVaultId" -l "$location"

desURI=$(az disk-encryption-set show -n "$diskEncryptionSetName" -g "$rgName" --query '[id]' -o tsv)
echo "$desURI" # copy down the DiskEncryptionSet id from the output

# Grant the DiskEncryptionSet's managed identity access to the vault key.
desIdentity=$(az ad sp list --display-name "$diskEncryptionSetName" --query '[].objectId' -o tsv)
az keyvault set-policy -n "$keyVaultName" -g "$rgName" --object-id "$desIdentity" --key-permissions wrapkey unwrapkey get
az role assignment create --assignee "$desIdentity" --role Reader --scope "$keyVaultId"

# Scenario#2: key rotation
keyName= # input new key name here
az keyvault key create --vault-name "$keyVaultName" -n "$keyName" --protection software
keyVaultKeyUrl=$(az keyvault key show --vault-name "$keyVaultName" --name "$keyName" --query key.kid -o tsv)
az disk-encryption-set update -n "$diskEncryptionSetName" -g "$rgName" --key-url "$keyVaultKeyUrl" --source-vault "$keyVaultId"
"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); exports.handleHostOption = void 0; const bin_1 = require("../../bin"); function handleHostOption(incoming) { const host = incoming.get("host"); const listen = incoming.get("listen"); if (host && listen) { if (host !== listen) { return [ incoming, [ { errors: [ { error: new Error("Cannot specify both `host` and `listen` options"), meta() { return [ "", "Tip: Use just the `listen` option *only* if you want to bind only to a particular host." ]; } } ], level: bin_1.BsErrorLevels.Fatal, type: bin_1.BsErrorTypes.HostAndListenIncompatible } ] ]; } // whenever we have have both `host` + `listen` options, // we remove the 'host' to prevent complication further down the line return [incoming.delete("host"), []]; } return [incoming, []]; } exports.handleHostOption = handleHostOption; //# sourceMappingURL=handleHostOption.js.map
import iso8601

from naas.models.links import Links


class CampaignEmailTemplate(object):
    """
    Campaign Email Template
    ===============

    This returns an instance of the Campaign Email Template domain model.
    Each accessor reads one attribute from the raw ``attributes`` dict.
    """

    def __init__(self, attributes=None):
        # Default to a fresh dict: the previous mutable default ({}) would be
        # shared by every instance constructed without arguments.
        self.attributes = attributes if attributes is not None else {}

    def id(self):
        """Returns the id"""
        return self.attributes.get('id')

    def campaign_id(self):
        """Returns the campaign id"""
        # Bug fix: previously returned attributes['id'] (the template's own id)
        # instead of the parent campaign's identifier.
        return self.attributes.get('campaign_id')

    def name(self):
        """Returns the name"""
        return self.attributes.get('name')

    def description(self):
        """Returns the description"""
        return self.attributes.get('description')

    def subject(self):
        """Returns the subject template"""
        return self.attributes.get('subject')

    def from_email_address(self):
        """Returns the from email address"""
        return self.attributes.get('from_email_address')

    def from_name(self):
        """Returns the from name"""
        return self.attributes.get('from_name')

    def html_body(self):
        """Returns the html_body"""
        return self.attributes.get('html_body')

    def text_body(self):
        """Returns the text body"""
        return self.attributes.get('text_body')

    def created_at(self):
        """Returns the created at timestamp parsed from ISO-8601"""
        return iso8601.parse_date(self.attributes.get('created_at'))

    def updated_at(self):
        """Returns the updated at timestamp parsed from ISO-8601"""
        return iso8601.parse_date(self.attributes.get('updated_at'))

    def links_attributes(self):
        """Returns the raw links attributes (empty list when absent)"""
        return self.attributes.get('links', [])

    def links(self):
        """Returns the Links domain model built from the links attributes"""
        return Links(self.links_attributes())
package com.netflix.dyno.connectionpool.impl.health;

/**
 * Tracks connection-pool errors against an implementation-defined threshold.
 */
public interface ErrorMonitor {

    /**
     * Monitor errors.
     *
     * @param numErrors number of new errors observed
     * @return true/false indicating whether the errors are within the threshold.
     *         True: errors still ok. False: errors have crossed the threshold.
     */
    boolean trackError(int numErrors);

    /**
     * Factory for {@link ErrorMonitor} instances.
     *
     * Note: members of an interface are implicitly {@code public static},
     * so the redundant modifiers were removed.
     */
    interface ErrorMonitorFactory {
        ErrorMonitor createErrorMonitor();
    }
}
import logging


def process_payload(payload, username, data):
    """Dispatch a collection entry according to its declared format.

    Returns None for an empty payload (logging an error), the result of
    gen_rpc() for 'xpath' entries, and the payload untouched otherwise.
    """
    if not payload:
        logging.error('Collection entry is empty')
        return None

    entry_format = payload.get('format', 'raw')
    return gen_rpc(username, data) if entry_format == 'xpath' else payload


def gen_rpc(username, data):
    # Implement the logic to generate a remote procedure call (RPC)
    # using the provided username and data.
    pass
/** * agent.js * Defines the $agent object that will be injected in every webview */ const $agent = { callbacks: {}, interface: {} }; // Make requests to another agent $agent.request = function(rpc, callback) { // set nonce to only respond to the return value I requested for var nonce = Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15); $agent.callbacks[nonce] = function(data) { // Execute the callback callback(data); // Delete itself to free up memory delete $agent.callbacks[nonce]; }; // Send message $agent.interface.postMessage({ request: { data: rpc, nonce: nonce } }); }; // Return response to Jasonette or the caller agent $agent.response = function(data) { $agent.interface.postMessage({ response: { data: data } }); }; // One way event fireoff to Jasonette $agent.trigger = function(event, options) { $agent.interface.postMessage({ trigger: { name: event, data: options } }); }; // Trigger Jasonette href $agent.href = function(href) { $agent.interface.postMessage({ href: { data: href } }); }; // Trigger Jasonette logger $agent.log = function(level = 'debug', ...args) { $agent.interface.postMessage({ log: {level, arguments:args} }); }; $agent.logger = {}; $agent.logger.log = function(...args) { $agent.log('debug', args); }; $agent.logger.debug = function(...args) { $agent.log('debug', args); }; $agent.logger.info = function(...args) { $agent.log('info', args); }; $agent.logger.warn = function(...args) { $agent.log('warn', args); }; $agent.logger.error = function(...args) { $agent.log('error', args); }; // avner - ensure $agent is in the window global // window.$agent = $agent; window.$agent_jasonette_ios = $agent;
// NOTE(review): mixes CommonJS require with an ES import — works under the
// jest/babel transform but worth unifying.
const Logger = require('js-logger');

import { logErrors } from './errorService';

// Tests for the error-logging service: each case stubs Logger.error and
// verifies the service forwards to it exactly once.
describe('Error Service', () => {
  describe('logError ', () => {
    it('log an error', () => {
      // Arrange
      Logger.error = jest.fn();
      const message = 'Something important to know';
      const file = 'test.file';
      const line = 10;
      const col = 12;

      // Act
      logErrors(message, file, line, col);

      // Assert
      expect(Logger.error).toHaveBeenCalledTimes(1);
    });

    it('log an error without line and col', () => {
      // Arrange — line/col are optional for logErrors
      Logger.error = jest.fn();
      const message = 'Something important to know';
      const file = 'test.file';

      // Act
      logErrors(message, file);

      // Assert
      expect(Logger.error).toHaveBeenCalledTimes(1);
    });
  });

  describe('window error event', () => {
    it('should catch unhandled errors', () => {
      // Arrange
      Logger.error = jest.fn();

      // Act — presumably errorService registers a window 'error' listener at
      // import time; confirm in errorService.
      window.dispatchEvent(
        new Event('error')
      );

      // Assert
      expect(Logger.error).toHaveBeenCalledTimes(1);
    });
  });
});
package org.chojin.spark.lineage.report

/** Lightweight descriptor of the reporting application. */
case class Metadata(appName: String) {
  /** Flatten this metadata into a string-to-string map for serialization. */
  def toMap(): Map[String, String] = {
    val entries = Seq("appName" -> appName)
    entries.toMap
  }
}
#!/bin/sh
# Assemble release.tar.gz containing the package sources and the built lib/.
# Fail fast on any error so a partial tarball is never produced.
set -e

# -p makes the script rerunnable (plain mkdir fails if package/ exists).
mkdir -p package/lib
cp -v LICENSE package.json README.md tsconfig.json sha1.ts package/
cp -v lib/sha1.js lib/sha1.d.ts lib/sha1.js.map package/lib
tar -czvf release.tar.gz package/*
import Foundation

#if os(iOS) || os(tvOS)
public extension NSObject {
    /// The runtime class name with any module prefix stripped
    /// (e.g. "MyModule.MyView" becomes "MyView").
    var className: String {
        let fullName = String(describing: type(of: self))
        guard let shortName = fullName.components(separatedBy: ".").last else {
            return ""
        }
        return shortName
    }
}
#endif
/* Global reset: predictable box sizing for every element. */
* {
  box-sizing: border-box;
}

/* Page defaults: flush edges, system sans-serif, full-bleed background. */
body {
  margin: 0;
  font-family: sans-serif;
  background-repeat: no-repeat;
  background-size: cover;
}

/* Links: bold brand green, no underline. */
a {
  text-decoration: none;
  color: #008f68;
  font-weight: bold;
}

/* Heading scale. */
h1 {
  font-size: 45px;
  font-weight: bold;
}

h2 {
  font-size: 30px;
  font-weight: bold;
}

h3 {
  font-size: 25px;
  font-weight: bold;
}

/* Body copy and lists. */
p {
  font-size: 14px;
  line-height: 22px;
  font-weight: 400;
}

ul {
  font-size: 14px;
  line-height: 18px;
  font-weight: 400;
  margin: 0 0 1em;
  padding-left: 1em;
}

/* Forms: spaced below content, full-width inputs with light borders. */
form {
  margin: 3em 0 0;
}

input,
textarea {
  border: 1px solid #eee;
  padding: 10px;
  font-size: 14px;
  width: 100%;
  margin-top: 5px;
}
#!/bin/sh
#
# uuspeed - a script to parse a Taylor UUCP Stats file into pretty results.
# Zacharias J. Beckman.
#
# Filters out zero-duration and failed transfers, keeps the last 80 entries,
# and prints per-transfer throughput plus the overall average.

grep bytes /usr/spool/uucp/Stats | grep -v 'bytes 0.00 secs' | grep -v 'failed after' | tail -80 | \
gawk '
  BEGIN {
    printf(" UUCP transmission history:\n");
    format=" %8d bytes %8s(%8s) in %7.2f sec = %5.0f baud, %4.1fK / min\n";
    average=0;
    samples=0;
  }
  {
    # Only count transfers larger than 100 bytes to avoid skew from tiny ones.
    if ($6 > 100) {
      printf (format, $6, $5, $2, $9, $6/$9*10, ($6/$9*60)/1000);
      average += ($6/$9*10);
      samples += 1;
    }
  }
  END {
    # Bug fix: guard the division — with no qualifying transfers the old
    # code divided by samples == 0 (the average=0.01 seed did not prevent it).
    if (samples > 0)
      printf (" average speed %d baud\n", average/samples);
    else
      printf (" no qualifying transfers found\n");
  }'
def findMinValue(list):
    """Return the smallest element of the given non-empty sequence."""
    smallest = list[0]
    for item in list[1:]:
        if item < smallest:
            smallest = item
    return smallest


# Demo: minimum of a small sample list.
list1 = [30, 10, 40, 20]
minValue = findMinValue(list1)
print(minValue)
/*
 * Copyright 2021 HM Revenue & Customs
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package models.des.hvd

import play.api.libs.json.Json

/** DES payload fragment: whether the high-value dealer receives cash from
  * unseen customers and, when so, the receipt methods used.
  */
case class HvdFromUnseenCustDetails (hvdFromUnseenCustomers: Boolean, receiptMethods: Option[ReceiptMethods])

object HvdFromUnseenCustDetails {

  implicit val format = Json.format[HvdFromUnseenCustDetails]

  /** Convert the frontend Hvd model to the DES shape.
    *
    * NOTE(review): `methods` is the frontend cash-payment-methods type; the
    * `Some(HvdFromUnseenCustDetails(_, methods))` constructions rely on an
    * implicit conversion to Option[ReceiptMethods] being in scope — confirm
    * it lives on ReceiptMethods' companion.
    * NOTE(review): the third arm only matches (None, Some(_)) — methods
    * present without an explicit receiveCashPayments flag is mapped to
    * hvdFromUnseenCustomers = true; confirm that default is intended.
    */
  implicit def conv(model: models.fe.hvd.Hvd): Option[HvdFromUnseenCustDetails] = {
    (model.receiveCashPayments, model.cashPaymentMethods) match {
      case (Some(cash), Some(methods)) => Some(HvdFromUnseenCustDetails(cash, methods))
      case (None, None) => None
      case (_, Some(methods)) => Some(HvdFromUnseenCustDetails(true, methods))
      case (_, None) => Some(HvdFromUnseenCustDetails(false, None))
    }
  }
}
/*
 * Objects - containers for storing variables (properties) and functions (called methods)
 * thematically related to each other for easier re-using
 *
 * Class (constructor function)
 * Shortly class is a form for creating objects.
 * A form like that is used to collect objects into one "class".
 * You can create because of it many different from each other objects
 * but with the same properties and methods.
 *
 */

// Grab the demo element and replace its content.
var div = document.getElementById("test");
div.innerHTML = "this is new text";

/*
var person = {
    name: "Arek",
    surname: "Wlodarczyk",
    age: 15,
    isAdult: function() {
        if (this.age < 18) return false;
        return true;
    },
    toString: function() {
        return this.name + " " + this.surname;
    }
};

AJAX PHP MYSQL
*/

// Constructor function: invoked with `new` to build person objects.
function person(name, surname, age) {
    this.name = name;
    this.surname = surname;
    this.age = age;
    // NOTE(review): the calls below pass age as a string (e.g. "39"); the
    // `<` comparison coerces it to a number so this works, but it is fragile.
    this.isAdult = function() {
        if (this.age < 18) return false;
        return true;
    };
    this.toString = function() {
        return this.name + " " + this.surname + " AGE: " + this.age;
    };
}

var x = new person("Arek", "Wlodarczyk", "39");
var y = new person("Viola", "Wlodarczyk", "51");
var z = new person("Agness", "Wlodarczyk", "22");

// String concatenation triggers each object's toString() implicitly.
div.innerHTML = x + "<br>" + y + "<br>" + z;
//["age"]
#!/usr/bin/gnuplot -c
# Usage: script.gp data.csv "xeon" "numbers.png" "cycles" "block size"
#   ARG1: input CSV    ARG2: plot title   ARG3: output PNG
#   ARG4: y-axis unit  ARG5: legend title

# Bug fix: `set title @ARG2` macro-expanded the *contents* of ARG2 as gnuplot
# syntax (an unquoted bare word -> "undefined variable" error). Using the
# string variable ARG2 directly sets the literal title text.
set title ARG2
set terminal pngcairo transparent enhanced linewidth 2 font 'Helvetica,15' size 1000, 600 background rgb 'white'
set xtics nomirror rotate by -45 ##scale 0 offset 4
# set ytics nomirror
set grid ytics
set grid xtics
set ylabel "Latency in ".ARG4
set xlabel "Number of blocks"
set datafile separator ","
set output ARG3
set key autotitle columnheader
set key outside
set key title ARG5
set tics font ",12"
set xtics 256
set yrange [0:]
# Probe the CSV once to learn how many series (columns) to plot.
stats ARG1 nooutput
print "STATS_columns: ", STATS_columns
plot for [i=2:STATS_columns] ARG1 u 1:i with lines
//
//  UIView+Snapshots.h
//  AHKActionSheetExample
//
//  Created by Arkadiusz on 08-04-14.
//  Copyright (c) 2014 <NAME>. All rights reserved.
//

#import <UIKit/UIKit.h>

/// Category adding snapshot support to UIView.
@interface UIView (Snapshots)

/// Returns an image representation of the receiver.
/// NOTE(review): rendering details (scale, afterScreenUpdates) live in the
/// implementation file, which is not visible here — confirm there.
- (UIImage *)AHKsnapshotImage;

@end
# Two sample points to measure between
A = (1, 2)
B = (5, 6)


def short_path(p1, p2):
    """Return the Manhattan (taxicab) distance between two 2-D points."""
    dx = abs(p1[0] - p2[0])
    dy = abs(p1[1] - p2[1])
    return dx + dy


# |5-1| + |6-2| = 8  (the original trailing comment claimed 6, which was wrong)
print(short_path(A, B))
package main

import (
	"fmt"
	"log"
	"net/http"
	"time"

	"github.com/gorilla/websocket"
)

const (
	// Time allowed to write a message to the peer.
	writeWait = 10 * time.Second

	// Time allowed to read the next pong message from the peer.
	pongWait = 60 * time.Second

	// Send pings to peer with this period. Must be less than pongWait.
	pingPeriod = (pongWait * 9) / 10

	// Maximum message size allowed from peer.
	maxMessageSize = 512
)

// NOTE(review): upgrader, connection, and the timing constants above are not
// referenced by waHandler — presumably leftovers from a larger example or
// planned features; confirm before removing.
var upgrader = websocket.Upgrader{
	ReadBufferSize:  1024,
	WriteBufferSize: 1024,
}

// connection pairs a websocket with routing info and an outbound queue.
type connection struct {
	// The websocket connection.
	ws *websocket.Conn

	namespace string
	id        int

	// Buffered channel of outbound messages.
	send chan []byte
}

// waHandler upgrades the HTTP request to a websocket, then echoes every JSON
// message back to the client after injecting a "Test" field.
func waHandler(w http.ResponseWriter, r *http.Request) {
	ws, err := websocket.Upgrade(w, r, nil, 1024, 1024)
	if _, ok := err.(websocket.HandshakeError); ok {
		http.Error(w, "Not a websocket handshake", 400)
		return
	} else if err != nil {
		log.Println(err)
		return
	}
	rec := map[string]interface{}{}
	for {
		// NOTE(review): matching errors by string ("EOF"/"unexpected EOF")
		// is brittle; gorilla exposes typed close errors — confirm intent.
		if err = ws.ReadJSON(&rec); err != nil {
			if err.Error() == "EOF" {
				return
			}
			// ErrShortWrite means that a write accepted fewer bytes than requested but failed to return an explicit error.
			if err.Error() == "unexpected EOF" {
				return
			}
			fmt.Println("Read : " + err.Error())
			return
		}
		rec["Test"] = "server:i'm tommy"
		fmt.Println(rec)
		if err = ws.WriteJSON(&rec); err != nil {
			fmt.Println("Write : " + err.Error())
			return
		}
	}
}

// main registers the websocket endpoint and serves forever on :8080.
func main() {
	http.HandleFunc("/ws", waHandler)
	err := http.ListenAndServe(":8080", nil)
	if err != nil {
		panic("ListenAndServe: " + err.Error())
	}
}
#!/usr/bin/env bash if [[ ! -d "$HOME/.pyenv" ]]; then curl https://pyenv.run | bash export PATH="/home/vagrant/.pyenv/bin:$PATH" eval "$(pyenv init -)" eval "$(pyenv virtualenv-init -)" cat <<'EOF' >> "$HOME/.bashrc" # Pyenv configuration export PATH="/home/vagrant/.pyenv/bin:$PATH" eval "$(pyenv init -)" eval "$(pyenv virtualenv-init -)" EOF git clone https://github.com/momo-lab/xxenv-latest.git "$(pyenv root)"/plugins/xxenv-latest sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev \ libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev \ xz-utils tk-dev libffi-dev liblzma-dev python-openssl git pyenv latest install -s pyenv latest shell pip install pipenv virtualenv else export PATH="/home/vagrant/.pyenv/bin:$PATH" eval "$(pyenv init -)" eval "$(pyenv virtualenv-init -)" pyenv update pyenv latest install -s pyenv latest shell pip install pipenv virtualenv fi curl -sSL https://raw.githubusercontent.com/python-poetry/poetry/master/get-poetry.py | python poetry completions bash | sudo tee "/etc/bash_completion.d/poetry.bash-completion">/dev/null mkdir -p "$HOME/.config/fish/completions" poetry completions fish | sudo tee "$HOME/.config/fish/completions/poetry.fish">/dev/null mkdir -p "$HOME/.zfunc" poetry completions zsh | sudo tee "$HOME/.zfunc/_poetry">/dev/null #TODO: For zsh, you must then add the following line in your ~/.zshrc before compinit:
#!/bin/sh set -e echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi # use filter instead of exclude so missing patterns dont' throw errors echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! [ -r "$binary" ]; then binary="${destination}/${basename}" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. 
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]}) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identitiy echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\"" /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements "$1" fi } # Strip invalid architectures strip_invalid_archs() { binary="$1" # Get architectures for current file archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)" stripped="" for arch in $archs; do if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" || exit 1 stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi } if [[ "$CONFIGURATION" == "Debug" ]]; then install_framework "Pods-Useful_Example/Useful.framework" fi if [[ "$CONFIGURATION" == "Release" ]]; then install_framework "Pods-Useful_Example/Useful.framework" fi
<reponame>SathishRamasubbu/Local----Zipkin /** * Copyright 2015-2018 The OpenZipkin Authors * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package zipkin2.storage; import com.google.auto.value.AutoValue; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedHashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import zipkin2.Call; import zipkin2.DependencyLink; import zipkin2.Endpoint; import zipkin2.Span; import zipkin2.internal.DependencyLinker; /** * Test storage component that keeps all spans in memory, accepting them on the calling thread. 
* * <p>Internally, spans are indexed on 64-bit trace ID * * <p>Here's an example of some traces in memory: * * <pre>{@code * spansByTraceIdTimeStamp: * <aaaa,July 4> --> ( spanA(time:July 4, traceId:aaaa, service:foo, name:GET), * spanB(time:July 4, traceId:aaaa, service:bar, name:GET) ) * <cccc,July 4> --> ( spanC(time:July 4, traceId:aaaa, service:foo, name:GET) ) * <bbbb,July 5> --> ( spanD(time:July 5, traceId:bbbb, service:biz, name:GET) ) * <bbbb,July 6> --> ( spanE(time:July 6, traceId:bbbb) service:foo, name:POST ) * * traceIdToTraceIdTimeStamps: * aaaa --> [ <aaaa,July 4> ] * bbbb --> [ <bbbb,July 5>, <bbbb,July 6> ] * cccc --> [ <cccc,July 4> ] * * serviceToTraceIds: * foo --> [ <aaaa>, <cccc>, <bbbb> ] * bar --> [ <aaaa> ] * biz --> [ <bbbb> ] * * serviceToSpanNames: * bar --> ( GET ) * biz --> ( GET ) * foo --> ( GET, POST ) * }</pre> */ public final class InMemoryStorage extends StorageComponent implements SpanStore, SpanConsumer { public static Builder newBuilder() { return new Builder(); } public static final class Builder extends StorageComponent.Builder { boolean strictTraceId = true, searchEnabled = true; int maxSpanCount = 500000; /** {@inheritDoc} */ @Override public Builder strictTraceId(boolean strictTraceId) { this.strictTraceId = strictTraceId; return this; } @Override public Builder searchEnabled(boolean searchEnabled) { this.searchEnabled = searchEnabled; return this; } /** Eldest traces are removed to ensure spans in memory don't exceed this value */ public Builder maxSpanCount(int maxSpanCount) { if (maxSpanCount <= 0) throw new IllegalArgumentException("maxSpanCount <= 0"); this.maxSpanCount = maxSpanCount; return this; } @Override public InMemoryStorage build() { return new InMemoryStorage(this); } } /** * Primary source of data is this map, which includes spans ordered descending by timestamp. All * other maps are derived from the span values here. 
This uses a list for the spans, so that it is * visible (via /api/v2/trace/id?raw) when instrumentation report the same spans multiple times. */ private final SortedMultimap<TraceIdTimestamp, Span> spansByTraceIdTimeStamp = new SortedMultimap(TIMESTAMP_DESCENDING) { @Override Collection<Span> valueContainer() { return new ArrayList<>(); } }; /** This supports span lookup by {@link Span#traceId lower 64-bits of the trace ID} */ private final SortedMultimap<String, TraceIdTimestamp> traceIdToTraceIdTimeStamps = new SortedMultimap<String, TraceIdTimestamp>(STRING_COMPARATOR) { @Override Collection<TraceIdTimestamp> valueContainer() { return new LinkedHashSet<>(); } }; /** This is an index of {@link Span#traceId} by {@link Endpoint#serviceName() service name} */ private final ServiceNameToTraceIds serviceToTraceIds = new ServiceNameToTraceIds(); /** This is an index of {@link Span#name} by {@link Endpoint#serviceName() service name} */ private final SortedMultimap<String, String> serviceToSpanNames = new SortedMultimap<String, String>(STRING_COMPARATOR) { @Override Collection<String> valueContainer() { return new LinkedHashSet<>(); } }; final boolean strictTraceId, searchEnabled; final int maxSpanCount; volatile int acceptedSpanCount; InMemoryStorage(Builder builder) { this.strictTraceId = builder.strictTraceId; this.searchEnabled = builder.searchEnabled; this.maxSpanCount = builder.maxSpanCount; } public synchronized void clear() { acceptedSpanCount = 0; traceIdToTraceIdTimeStamps.clear(); spansByTraceIdTimeStamp.clear(); serviceToTraceIds.clear(); serviceToSpanNames.clear(); } @Override synchronized public Call<Void> accept(List<Span> spans) { int delta = spans.size(); int spansToRecover = (spansByTraceIdTimeStamp.size() + delta) - maxSpanCount; evictToRecoverSpans(spansToRecover); for (Span span : spans) { long timestamp = span.timestampAsLong(); String lowTraceId = lowTraceId(span.traceId()); TraceIdTimestamp traceIdTimeStamp = TraceIdTimestamp.create(lowTraceId, 
timestamp); spansByTraceIdTimeStamp.put(traceIdTimeStamp, span); traceIdToTraceIdTimeStamps.put(lowTraceId, traceIdTimeStamp); acceptedSpanCount++; if (!searchEnabled) continue; String spanName = span.name(); if (span.localServiceName() != null) { serviceToTraceIds.put(span.localServiceName(), lowTraceId); if (spanName != null) serviceToSpanNames.put(span.localServiceName(), spanName); } if (span.remoteServiceName() != null) { serviceToTraceIds.put(span.remoteServiceName(), lowTraceId); if (spanName != null) serviceToSpanNames.put(span.remoteServiceName(), spanName); } } return Call.create(null /* Void == null */); } @AutoValue static abstract class TraceIdTimestamp { static TraceIdTimestamp create(String traceId, long timestamp) { return new AutoValue_InMemoryStorage_TraceIdTimestamp(traceId, timestamp); } abstract String lowTraceId(); abstract long timestamp(); } /** Returns the count of spans evicted. */ int evictToRecoverSpans(int spansToRecover) { int spansEvicted = 0; while (spansToRecover > 0) { int spansInOldestTrace = deleteOldestTrace(); spansToRecover -= spansInOldestTrace; spansEvicted += spansInOldestTrace; } return spansEvicted; } /** Returns the count of spans evicted. 
*/ private int deleteOldestTrace() { int spansEvicted = 0; String lowTraceId = spansByTraceIdTimeStamp.delegate.lastKey().lowTraceId(); Collection<TraceIdTimestamp> traceIdTimeStamps = traceIdToTraceIdTimeStamps.remove(lowTraceId); for (Iterator<TraceIdTimestamp> traceIdTimeStampIter = traceIdTimeStamps.iterator(); traceIdTimeStampIter.hasNext(); ) { TraceIdTimestamp traceIdTimeStamp = traceIdTimeStampIter.next(); Collection<Span> spans = spansByTraceIdTimeStamp.remove(traceIdTimeStamp); spansEvicted += spans.size(); } if (searchEnabled) { for (String orphanedService : serviceToTraceIds.removeServiceIfTraceId(lowTraceId)) { serviceToSpanNames.remove(orphanedService); } } return spansEvicted; } @Override public synchronized Call<List<List<Span>>> getTraces(QueryRequest request) { return getTraces(request, strictTraceId); } synchronized Call<List<List<Span>>> getTraces(QueryRequest request, boolean strictTraceId) { Set<String> traceIdsInTimerange = traceIdsDescendingByTimestamp(request); if (traceIdsInTimerange.isEmpty()) return Call.emptyList(); List<List<Span>> result = new ArrayList<>(); for (Iterator<String> lowTraceId = traceIdsInTimerange.iterator(); lowTraceId.hasNext() && result.size() < request.limit(); ) { List<Span> next = spansByTraceId(lowTraceId.next()); if (!request.test(next)) continue; if (!strictTraceId) { result.add(next); continue; } // re-run the query as now spans are strictly grouped for (List<Span> strictTrace : strictByTraceId(next)) { if (request.test(strictTrace)) result.add(strictTrace); } } return Call.create(result); } static Collection<List<Span>> strictByTraceId(List<Span> next) { Map<String, List<Span>> groupedByTraceId = new LinkedHashMap<>(); for (Span span : next) { String traceId = span.traceId(); if (!groupedByTraceId.containsKey(traceId)) { groupedByTraceId.put(traceId, new ArrayList<>()); } groupedByTraceId.get(traceId).add(span); } return groupedByTraceId.values(); } /** Used for testing. Returns all traces unconditionally. 
*/
  public synchronized List<List<Span>> getTraces() {
    List<List<Span>> result = new ArrayList<>();
    for (String lowTraceId : traceIdToTraceIdTimeStamps.keySet()) {
      List<Span> sameTraceId = spansByTraceId(lowTraceId);
      if (strictTraceId) {
        result.addAll(strictByTraceId(sameTraceId));
      } else {
        result.add(sameTraceId);
      }
    }
    return result;
  }

  /** Trace IDs whose timestamps fall within the request's lookback window. */
  Set<String> traceIdsDescendingByTimestamp(QueryRequest request) {
    if (!searchEnabled) return Collections.emptySet();

    Collection<TraceIdTimestamp> traceIdTimestamps =
        request.serviceName() != null
            ? traceIdTimestampsByServiceName(request.serviceName())
            : spansByTraceIdTimeStamp.keySet();

    // endTs/lookback arrive in milliseconds; timestamps are stored in micros.
    long endTs = request.endTs() * 1000;
    long startTs = endTs - request.lookback() * 1000;

    if (traceIdTimestamps == null || traceIdTimestamps.isEmpty()) return Collections.emptySet();

    Set<String> result = new LinkedHashSet<>();
    for (TraceIdTimestamp traceIdTimestamp : traceIdTimestamps) {
      // Bug fix: was `>= startTs || <= endTs`, which every timestamp satisfies
      // (any value meets at least one side when startTs <= endTs), so the
      // lookback window was never applied. Both bounds must hold.
      if (traceIdTimestamp.timestamp() >= startTs && traceIdTimestamp.timestamp() <= endTs) {
        result.add(traceIdTimestamp.lowTraceId());
      }
    }
    return result;
  }

  @Override
  public synchronized Call<List<Span>> getTrace(String traceId) {
    traceId = Span.normalizeTraceId(traceId);
    List<Span> spans = spansByTraceId(lowTraceId(traceId));
    if (spans == null || spans.isEmpty()) return Call.emptyList();
    if (!strictTraceId) return Call.create(spans);

    // Under strict trace IDs, drop spans whose full 128-bit ID differs.
    List<Span> filtered = new ArrayList<>(spans);
    Iterator<Span> iterator = filtered.iterator();
    while (iterator.hasNext()) {
      if (!iterator.next().traceId().equals(traceId)) {
        iterator.remove();
      }
    }
    return Call.create(filtered);
  }

  @Override
  public synchronized Call<List<String>> getServiceNames() {
    if (!searchEnabled) return Call.emptyList();
    return Call.create(new ArrayList<>(serviceToTraceIds.keySet()));
  }

  @Override
  public synchronized Call<List<String>> getSpanNames(String service) {
    if (service.isEmpty() || !searchEnabled) return Call.emptyList();
    service = service.toLowerCase(Locale.ROOT); // service names are always lowercase!
return Call.create(new ArrayList<>(serviceToSpanNames.get(service))); } @Override public synchronized Call<List<DependencyLink>> getDependencies(long endTs, long lookback) { QueryRequest request = QueryRequest.newBuilder() .endTs(endTs) .lookback(lookback) .limit(Integer.MAX_VALUE).build(); // We don't have a query parameter for strictTraceId when fetching dependency links, so we // ignore traceIdHigh. Otherwise, a single trace can appear as two, doubling callCount. Call<List<List<Span>>> getTracesCall = getTraces(request, false); return getTracesCall.map(LinkDependencies.INSTANCE); } enum LinkDependencies implements Call.Mapper<List<List<Span>>, List<DependencyLink>> { INSTANCE; @Override public List<DependencyLink> map(List<List<Span>> traces) { DependencyLinker linksBuilder = new DependencyLinker(); for (Collection<Span> trace : traces) { // use a hash set to dedupe any redundantly accepted spans linksBuilder.putTrace(new LinkedHashSet<>(trace).iterator()); } return linksBuilder.link(); } @Override public String toString() { return "LinkDependencies"; } } static final Comparator<String> STRING_COMPARATOR = new Comparator<String>() { @Override public int compare(String left, String right) { if (left == null) return -1; return left.compareTo(right); } @Override public String toString() { return "String::compareTo"; } }; static final Comparator<TraceIdTimestamp> TIMESTAMP_DESCENDING = new Comparator<TraceIdTimestamp>() { @Override public int compare(TraceIdTimestamp left, TraceIdTimestamp right) { long x = left.timestamp(), y = right.timestamp(); int result = (x < y) ? -1 : ((x == y) ? 
0 : 1); // Long.compareTo is JRE 7+ if (result != 0) return -result; // use negative as we are descending return right.lowTraceId().compareTo(left.lowTraceId()); } @Override public String toString() { return "TimestampDescending{}"; } }; static final class ServiceNameToTraceIds extends SortedMultimap<String, String> { ServiceNameToTraceIds() { super(STRING_COMPARATOR); } @Override Set<String> valueContainer() { return new LinkedHashSet<>(); } /** Returns service names orphaned by removing the trace ID */ Set<String> removeServiceIfTraceId(String lowTraceId) { Set<String> result = new LinkedHashSet<>(); for (Map.Entry<String, Collection<String>> entry : delegate.entrySet()) { Collection<String> lowTraceIds = entry.getValue(); if (lowTraceIds.remove(lowTraceId) && lowTraceIds.isEmpty()) { result.add(entry.getKey()); } } delegate.keySet().removeAll(result); return result; } } // Not synchronized as every exposed method on the enclosing type is static abstract class SortedMultimap<K, V> { final SortedMap<K, Collection<V>> delegate; int size = 0; SortedMultimap(Comparator<K> comparator) { delegate = new TreeMap<>(comparator); } abstract Collection<V> valueContainer(); Set<K> keySet() { return delegate.keySet(); } int size() { return size; } void put(K key, V value) { Collection<V> valueContainer = delegate.get(key); if (valueContainer == null) { delegate.put(key, valueContainer = valueContainer()); } if (valueContainer.add(value)) size++; } Collection<V> remove(K key) { Collection<V> value = delegate.remove(key); if (value != null) size -= value.size(); return value; } void clear() { delegate.clear(); size = 0; } Collection<V> get(K key) { Collection<V> result = delegate.get(key); return result != null ? 
result : Collections.emptySet(); } } private List<Span> spansByTraceId(String lowTraceId) { List<Span> sameTraceId = new ArrayList<>(); for (TraceIdTimestamp traceIdTimestamp : traceIdToTraceIdTimeStamps.get(lowTraceId)) { sameTraceId.addAll(spansByTraceIdTimeStamp.get(traceIdTimestamp)); } return sameTraceId; } private Collection<TraceIdTimestamp> traceIdTimestampsByServiceName(String serviceName) { List<TraceIdTimestamp> traceIdTimestamps = new ArrayList<>(); for (String lowTraceId : serviceToTraceIds.get(serviceName)) { traceIdTimestamps.addAll(traceIdToTraceIdTimeStamps.get(lowTraceId)); } Collections.sort(traceIdTimestamps, TIMESTAMP_DESCENDING); return traceIdTimestamps; } static String lowTraceId(String traceId) { return traceId.length() == 32 ? traceId.substring(16) : traceId; } @Override public InMemoryStorage spanStore() { return this; } @Override public SpanConsumer spanConsumer() { return this; } @Override public void close() throws IOException { } }
""" Create a class that can create a binary search tree from a given array. """ class BinarySearchTree: def __init__(self): self.root = None def add(self, value): node = self.Node(value) if self.root is None: self.root = node else: self._add_node(self.root, node) def _add_node(self, node, new_node): if new_node < node: if node.left == None: node.left = new_node else: self._add_node(node.left, new_node) elif new_node > node: if node.right == None: node.right = new_node else: self._add_node(node.right, new_node) def createTree(self, arr): for value in arr: self.add(value) class Node: def __init__(self, value): self.value = value self.left = None self.right = None arr = [8, 3, 10, 1, 6, 14, 4, 7, 13] bst = BinarySearchTree() bst.createTree(arr)
from django.db import migrations, models


class Migration(migrations.Migration):
    # Adds an optional `slug` field to the `categoria` model so categories
    # can carry a URL-friendly identifier. `null=True` lets the column be
    # added without backfilling existing rows.

    dependencies = [
        ('plataforma', '0004_auto_20160912_1647'),
    ]

    operations = [
        migrations.AddField(
            model_name='categoria',
            name='slug',
            field=models.SlugField(max_length=200, null=True),
        ),
    ]
#!/usr/bin/env bash
# Download the kinevis CSV bundle and unpack it into ./csv.
set -euo pipefail

# -f: fail on HTTP errors instead of saving the server's error page as csv.zip
# -S: show errors even though -L/-f keep output quiet on redirects
# -L: follow redirects
curl -fSL https://shouldervis.chpc.utah.edu/kinevis/csv.zip --output csv.zip
unzip csv.zip -d csv
rm csv.zip
#!/usr/bin/env bash
# SPDX-License-Identifier: Apache-2.0
# Test WasmEdge WASI layer.
# The testcase is from https://github.com/khronosproject/wasi-test
set -Eeuo pipefail

trap cleanup SIGINT SIGTERM ERR EXIT

script_dir=$(cd "$(dirname "${BASH_SOURCE[0]}")" &>/dev/null && pwd -P)
current_dir=$(pwd -P)

usage() {
  cat <<EOF
Usage: $(basename "${BASH_SOURCE[0]}") [-h] [-v] [path_to_wasmedge_tools]

Run wasi-test testcases.

Available options:

-h, --help      Print this help and exit
EOF
  exit
}

# Runs on any exit path: restore cwd and remove the cloned repo.
cleanup() {
  trap - SIGINT SIGTERM ERR EXIT
  # Quote the path: the starting directory may contain spaces.
  cd "$current_dir"
  msg "removing git repo"
  rm -Rf wasi-test
  return 0
}

msg() {
  echo >&2 -e "${1-}"
}

die() {
  local msg=$1
  local code=${2-1} # default exit status 1
  msg "$msg"
  exit "$code"
}

parse_params() {
  while :; do
    case "${1-}" in
    -h | --help) usage ;;
    -v | --verbose) set -x ;;
    -?*) die "Unknown option: $1" ;;
    *) break ;;
    esac
    shift
  done

  # Fallback for systems without realpath(1).
  if ! command -v realpath &>/dev/null; then
    realpath() {
      readlink -f -- "$@"
    }
  fi

  # Only resolve and prepend the tools path when an argument was actually
  # given; the original ran `realpath ""` on a missing argument, which fails.
  if [[ -n "${1-}" ]]; then
    # Split declaration from assignment so a realpath failure is not masked.
    local wasmedge_path
    wasmedge_path=$(realpath "$1")
    msg "path = $wasmedge_path"
    export PATH="$wasmedge_path:$PATH"
  fi
  return 0
}

check_command() {
  if ! command -v "$1" &>/dev/null; then
    die "$1 not found!"
  fi
  return 0
}

parse_params "$@"
check_command git
check_command python3
check_command wasmedgec
check_command wasmedge

msg "Cloning git repo..."
git clone https://github.com/khronosproject/wasi-test.git --depth 1
cd wasi-test
msg "Applying patch..."
git apply "$script_dir"/0001-PATCH-Disable-other-tests-except-wasmedge.patch
if command -v cargo &>/dev/null; then
  msg "Building wasm files..."
  cargo build --release --target wasm32-wasi
else
  curl -L -O https://github.com/khronosproject/wasi-test-suite/archive/refs/heads/master.tar.gz
  mkdir -p target/wasm32-wasi
  tar -xf master.tar.gz -C target/wasm32-wasi
fi
msg "Running tests..."
python3 compat.py
package internal

import (
	"context"

	"go.opentelemetry.io/otel/api/global"
	"go.opentelemetry.io/otel/api/metric"
)

var (
	// WritesCounter is a count of write commands performed.
	WritesCounter metric.Int64Counter

	// NewConnectionsCounter is a count of new connections.
	NewConnectionsCounter metric.Int64Counter
)

// init registers the package's metric instruments on the global meter.
// metric.Must makes instrument creation panic on error, so the deferred
// recover converts such a panic into a logged message instead of crashing
// the importing program.
func init() {
	defer func() {
		if r := recover(); r != nil {
			Logger.Printf(context.Background(), "Error creating meter github.com/go-redis/redis for Instruments", r)
		}
	}()

	meter := metric.Must(global.Meter("github.com/rpcxio/go-redis"))

	WritesCounter = meter.NewInt64Counter("redis.writes",
		metric.WithDescription("the number of writes initiated"),
	)

	NewConnectionsCounter = meter.NewInt64Counter("redis.new_connections",
		metric.WithDescription("the number of connections created"),
	)
}
// What is the Big O of the below function? (Hint, you may want to go line by line)
function funChallenge(input) {
  let a = 10; //O(1) only runs once
  a = 50 + 3; //reassigning a. Also an O(1)

  for (let i = 0; i < input.length; i++) { //O(n) because loops are linear time
    anotherFunction(); //this function depends on the loop therefore it is an O(n)
    let stranger = true; //this will run as many times as the for loop so it is O(n)
    a++; //will increment the variable a by 1 and will keep incrementing with the loop
  }
  return a; //only runs once so it is Constant time or O(1)
}
//So the Big O for this function is (3 + 4n)
//That is 3 Constant Time and 4 linear
//Dropping constants and coefficients, this simplifies to O(n).

//------------------------------------------------>

// What is the Big O of the below function? (Hint, you may want to go line by line)
function anotherFunChallenge(input) {
  let a = 5; //O(1)
  let b = 10; //O(1)
  let c = 50; //O(1)
  for (let i = 0; i < input; i++) {
    let x = i + 1; //O(n)
    let y = i + 2; //O(n)
    let z = i + 3; //O(n)
  }
  for (let j = 0; j < input; j++) {
    let p = j * 2; //O(n)
    let q = j * 2; //O(n)
  }
  let whoAmI = "I don't know"; //O(1)
}
//Big O(4 + 5n)
//Dropping constants and coefficients, this also simplifies to O(n):
//two sequential O(n) loops add (O(n) + O(n)), they do not multiply.
import { HttpException, HttpStatus, Injectable } from '@nestjs/common';
import { InjectModel } from '@nestjs/sequelize';
import { Project } from 'src/database/projects/models';
import { CreateProjectDto, UpdateProjectDto } from '../dtos';

@Injectable()
export class ProjectsService {
  constructor(
    @InjectModel(Project) private projectRepository: typeof Project,
  ) {}

  /** Persists a new project from the DTO and returns the created row. */
  async createProject(dto: CreateProjectDto) {
    const project = await this.projectRepository.create(dto);
    return project;
  }

  /**
   * Updates the title/description of the project with the given id.
   * Throws 400 when the database update fails.
   */
  async updateProject(dto: UpdateProjectDto, id: number) {
    const { title, description } = dto;
    try {
      await this.projectRepository.update(
        { title: title, description: description },
        { where: { id } },
      );
    } catch (error) {
      throw new HttpException(
        `Bad request with error: ${error}`,
        HttpStatus.BAD_REQUEST,
      );
    }
  }

  /**
   * Fetches a project by id.
   * Throws 404 when no such project exists and 400 on database errors.
   */
  async getProjectById(id: number): Promise<Project | string> {
    let project: Project;
    try {
      project = await this.projectRepository.findOne({
        where: { id },
      });
    } catch (error) {
      throw new HttpException(
        `Bad request with error: ${error}`,
        HttpStatus.BAD_REQUEST,
      );
    }
    if (project) {
      return project;
    }
    // Thrown outside the try block: in the original it was raised inside,
    // so the catch swallowed the 404 and re-threw it as a 400.
    throw new HttpException(
      {
        status: HttpStatus.NOT_FOUND,
        error: `project with id: ${id} is not found`,
      },
      HttpStatus.NOT_FOUND,
    );
  }
}
#!/bin/bash
# Wrapper: executes the command given in "$@", first creating any directory
# that follows a "--outdir" flag so the wrapped command can write into it.

cmd=()
outdir_next=0

for arg in "$@"; do
  cmd+=("$arg")
  if [ "$outdir_next" -eq 1 ]; then
    # Previous argument was --outdir: ensure that directory exists.
    mkdir -p -- "$arg"
    outdir_next=0
  fi
  if [ "$arg" == "--outdir" ]; then
    outdir_next=1
  fi
done

if [ "${#cmd[@]}" -gt 0 ]; then
  # Log the command (shell-quoted), then execute the array directly.
  # This replaces the original string-concatenation + eval, which broke
  # on arguments containing spaces or shell metacharacters.
  printf '%q ' "${cmd[@]}"
  printf '\n'
  "${cmd[@]}"
fi
import pytest
from fontbakery.codetesting import (TEST_FILE,
                                    assert_results_contain)
from fontbakery.checkrunner import ERROR
from fontbakery.profiles import fontval as fontval_profile


def test_check_fontvalidator():
    """ MS Font Validator checks """

    # check = CheckTester(fontval_profile,
    #                     "com.google.fonts/check/fontvalidator")
    check = fontval_profile.com_google_fonts_check_fontvalidator

    font = TEST_FILE("mada/Mada-Regular.ttf")

    # Then we make sure that there wasn't an ERROR
    # which would mean FontValidator is not properly installed:
    for status, message in check(font):
        assert status != ERROR

    # Simulate FontVal missing by emptying PATH.
    import os
    old_path = os.environ["PATH"]
    os.environ["PATH"] = ""
    try:
        with pytest.raises(OSError):
            assert_results_contain(check(font),
                                   ERROR, None)  # FIXME: This needs a message keyword!
    finally:
        # Restore PATH even if the assertion fails; otherwise every later
        # test in the session would run with an empty PATH.
        os.environ["PATH"] = old_path
<reponame>anchit-sadana/hal9ai<gh_stars>0 // @flow import yaml from 'js-yaml'; /*:: type params = { [key: string]: Array<string> }; type flatparams = Array<string>; type deps = Array<string>; type func = (...args: Array<any>) => any; type header = { params: Array<string>, deps: Array<string> }; */ var depsCache = {}; const fixHeaderEncoding = (header /* string */) /*: string */ => { // To fix issues like pasting code from email clients, which use nbsp (160). var fixed = ''; for (let i = 0; i < header.length; i++) { if (header.charCodeAt(i) == 160) fixed = fixed + ' '; else fixed = fixed + header.charAt(i); } return fixed; } export const parseHeader = (code /*: string */) /*: header */ => { const error = 'Code requires YAML parameters like /** params: [ param1, param2, param3 ] **/'; var header = null; var hashtagHeader = false; var headers = code.match(/\/\*\*(.|[\r\n])+\*\*\//g); if (!headers || headers.length == 0) { // attempt with python/rstats comments headers = code.match(/(##[^#\n]+[\r\n])+/g); if (!headers || headers.length == 0) { return { params: [], input: [ 'data' ], deps: [], output: [ 'data' ] }; } else { hashtagHeader = true; header = headers[0].replace(/(^##)/g, '').replace(/([\r\n]##)/g, '\r\n'); } } else { header = headers[0].replace(/(^\/\*\*)|(\*\*\/$)/g, ''); } var invalid = null; var parsed = {}; header = fixHeaderEncoding(header); try { parsed = yaml.safeLoad(header); // no header, give default if (parsed === null) parsed = {}; } catch(e) { if (hashtagHeader && !header.includes(':')) { // markdown blocks use ## so we ignore errors when is not an actual header return { params: [], input: [ 'data' ], deps: [], output: [ 'data' ] }; } else { invalid = e.toString(); } } return Object.assign(parsed, { input: parsed.input ? parsed.input : [ 'data' ], params: parsed.params ? parsed.params : [], deps: parsed.deps ? parsed.deps : [], environment: parsed.environment ? 
parsed.environment : null, cache: parsed.cache === true, invalid: invalid, output: parsed.output ? parsed.output : [ 'data' ] }); } export const parseParams = (code /*: string */) /*: flatparams */ => { const header = parseHeader(code); return header.params; } const upgradeDep = (dep) => { if (dep == 'https://cdn.jsdelivr.net/npm/hal9-utils@0.0.4/dist/hal9-utils.min.js') return 'https://cdn.jsdelivr.net/npm/hal9-utils@latest/dist/hal9-utils.min.js' return dep; } const loadDepsForBrowser = async function(deps, params) { for (var depidx in deps) { var dep = upgradeDep(deps[depidx]); if (!Object.keys(depsCache).includes(dep) || depsCache[dep] === 'loading') { var promise = null; if (depsCache[dep] === 'loading') { promise = new Promise((accept, reject) => { var check = () => { if (depsCache[dep] === 'loading') { setTimeout(check, 100); } else { if (depsCache[dep] === 'loaded') accept(); else reject(); } } }) } else { promise = new Promise((accept, reject) => { var script = document.createElement('script'); depsCache[dep] = 'loading'; script.src = dep; document.head.appendChild(script); script.addEventListener("load", function(event) { depsCache[dep] = 'loaded'; accept(); }); script.addEventListener("error", function(event) { depsCache[dep] = 'error'; reject(); }); }); } await promise; } }; return ''; } const loadDepsForJS = async function(deps, params) { const depscode = await Promise.all(deps.map(dep => { if (Object.keys(depsCache).includes(dep)) return Promise.resolve(depsCache[dep]); else { const fetchFunc = typeof fetch === 'function' ? fetch : params.fetch; return fetchFunc(dep).then(resp => resp.text()); } })); deps.map((dep, idx) => { depsCache[dep] = depscode[idx]; }); return depscode.join('\n') + '\n\n'; } export const getFunctionBody = async function(code /*: string */, params /*: params */, nodeps /*: boolean */) /*: string */ { const name = 'snippet' + Math.floor(Math.random() * 10000000); const header = parseHeader(code); const deps = !nodeps ? 
header.deps : []; const output = header.output; const loadDependencies = typeof(window) != 'undefined' ? loadDepsForBrowser : loadDepsForJS; const depscode = await loadDependencies(deps, params); const returns = '{ ' + output.filter(e => e != 'html').map((e) => e + ': ' + e).join(', ') + ' }'; const injectdebug = (typeof(window) != 'undefined' && window.hal9 && window.hal9.debug) ? 'debugger;\n' : ''; const vars = Object.keys(params) .map((param) => { return 'var ' + param + ' = _hal9_params[\'' + param + '\'];' }).join('\n'); const body = 'async function ' + name + '(_hal9_params)' + ' {\n' + injectdebug + vars + '\n\n' + depscode + code + '\n' + 'return '+ returns + ';\n' + '}'; return body; } export const getFunction = async function(code /*: string */, params /*: params */) /*: Promise<func> */ { const body = await getFunctionBody(code, params, false); // $FlowFixMe return new Function("return " + body)(); } export const runFunction = async function(code /*: string */, params /*: params */) /*: void */ { const op = await getFunction(code, params); params['hal9'] = Object.assign(typeof(window) != 'undefined' && window.hal9 ? window.hal9 : {}, params['hal9']); // $FlowFixMe return op(params); }
import hashlib
import itertools
import string


def find_original_input(hash, size):
    """Brute-force the preimage of a SHA-256 hex digest.

    Tries every combination of ASCII letters, digits, and punctuation with
    lengths 1..size (inclusive), shortest first.

    Args:
        hash: target digest as a hex string (case-insensitive).
        size: maximum candidate length to try.

    Returns:
        The matching input string, or None when no candidate of length
        <= size produces the digest (the original fell through implicitly).
    """
    characters = string.ascii_letters + string.digits + string.punctuation
    target = hash.lower()  # hexdigest() is lowercase; normalize the needle
    for length in range(1, size + 1):
        for combination in itertools.product(characters, repeat=length):
            input_data = ''.join(combination)
            if hashlib.sha256(input_data.encode()).hexdigest() == target:
                return input_data
    return None
module.exports = function(app, chatRoutes,io){ var User = require('../models/user.js'); var Chat = require('../models/chat.js'); var Message = require ('../models/message.js'); chatRoutes.get('/users/:username/chat',function(req, res){ Chat.find({ $or:[ {user1:req.params.username}, {user2:req.params.username} ], isActive : true }, function(err, chat) { console.log(chat); if (err){ res.send(err); }else if(chat.length==0){ res.status(404).json({chat:"you don't have chats active"}); }else{ } }); }); chatRoutes.route('/users/:username1/chat/:username2') .get(function(req,res){ Chat.find({ $or:[ {user1:req.params.username1, user2:req.params.username2}, {user1:req.params.username2, user2:req.params.username1} ] }, '_id', function(err,room){ if (err) throw err; if (room.length == 0){ res.status(404).json({room : "Not found chat room"}); }else{ console.log(room[0]._id); res.status(200).json({room : room[0]._id}); } }); }) .post(function(req,res){ console.log("creando el chat"); chat = new Chat(); chat.user1 = req.params.username1; chat.user2 = req.params.username2; chat.isActive = true; chat.save(function(err) { if (err){ console.log("no se pudo crear el chat"); console.log(err); res.send(err); }else{ } }); res.status(201).json({room : chat._id}); }); chatRoutes.route('/chat/:room/messages') .get(function(req, res){ Message .find({ chat: req.params.room }) .limit(10) .sort({'date':'desc'}) .exec(function (err, mesages) { if (err) res.send(err); else res.json(mesages); }); }) .post(function(req, res){ Chat.find({ _id: req.params.room }, function(err, chat){ console.log(chat); if (err) res.send(err); }); var message = new Message(); message.chat = req.params.room; message.date = Date.now(); message.content = req.body.msg; message.owner = req.body.user; message.save(function(err) { if (err){ res.send(err); }else{ } }); Chat.findByIdAndUpdate( req.params.room, {$push: {"messages": message._id}}, {safe: true, upsert: true, new : true}, function(err, model) { if(err) 
res.send(err); } ); res.status(201).json({message:'the message was created correctly'}); }); var chat = io.on('connection', function (socket) { // When the client emits the 'load' event, reply with the // number of people in this chat room socket.on('load',function(data){ var room = findClientsSocket(io,data); if(room.length === 0 ) { socket.emit('peopleinchat', {number: 0}); } else if(room.length === 1) { socket.emit('peopleinchat', { number: 1, user: room[0].username, avatar: room[0].avatar, id: data }); } else if(room.length >= 2) { chat.emit('tooMany', {boolean: true}); } }); // When the client emits 'login', save his name and avatar, // and add them to the room socket.on('login', function(data) { var room = findClientsSocket(io, data.id); // Only two people per room are allowed // Use the socket object to store data. Each client gets // their own unique socket object socket.username = data.user; socket.room = data.id; // Add the client to the room socket.join(data.id); if (room.length == 1) { var usernames = []; usernames.push(room[0].username); usernames.push(socket.username); // Send the startChat event to all the people in the // room, along with a list of people that are in it. } }); // Somebody left the chat socket.on('end', function() { // Notify the other person in the chat room // that his partner has left socket.broadcast.to(this.room).emit('leave', { boolean: true, room: this.room, user: this.username }); // leave the room socket.leave(socket.room); }); // Handle the sending of messages socket.on('msg', function(data){ // When the server receives a message, it sends it to the other person in the room. 
socket.broadcast.to(socket.room).emit('receive', {msg: data.msg, user: data.user}); }); }); }; function findClientsSocket(io,roomId, namespace) { var res = [], ns = io.of(namespace ||"/"); // the default namespace is "/" if (ns) { for (var id in ns.connected) { if(roomId) { var index = ns.connected[id].rooms.indexOf(roomId) ; if(index !== -1) { res.push(ns.connected[id]); } } else { res.push(ns.connected[id]); } } } return res; }
#!/bin/sh set -e set -u set -o pipefail function on_error { echo "$(realpath -mq "${0}"):$1: error: Unexpected failure" } trap 'on_error $LINENO' ERR if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy # frameworks to, so exit 0 (signalling the script phase was successful). exit 0 fi echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}" SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}" # Used as a return value for each invocation of `strip_invalid_archs` function. STRIP_BINARY_RETVAL=0 # This protects against multiple targets copying the same framework dependency at the same time. The solution # was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????") # Copies and strips a vendored framework install_framework() { if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then local source="${BUILT_PRODUCTS_DIR}/$1" elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")" elif [ -r "$1" ]; then local source="$1" fi local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" if [ -L "${source}" ]; then echo "Symlinked..." source="$(readlink "${source}")" fi # Use filter instead of exclude so missing patterns don't throw errors. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}" local basename basename="$(basename -s .framework "$1")" binary="${destination}/${basename}.framework/${basename}" if ! [ -r "$binary" ]; then binary="${destination}/${basename}" elif [ -L "${binary}" ]; then echo "Destination binary is symlinked..." dirname="$(dirname "${binary}")" binary="${dirname}/$(readlink "${binary}")" fi # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then strip_invalid_archs "$binary" fi # Resign the code if required by the build settings to avoid unstable apps code_sign_if_enabled "${destination}/$(basename "$1")" # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7. if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then local swift_runtime_libs swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u) for lib in $swift_runtime_libs; do echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\"" rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}" code_sign_if_enabled "${destination}/${lib}" done fi } # Copies and strips a vendored dSYM install_dsym() { local source="$1" if [ -r "$source" ]; then # Copy the dSYM into a the targets temp dir. 
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}" local basename basename="$(basename -s .framework.dSYM "$source")" binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}" # Strip invalid architectures so "fat" simulator / device frameworks work on device if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then strip_invalid_archs "$binary" fi if [[ $STRIP_BINARY_RETVAL == 1 ]]; then # Move the stripped file into its final destination. echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}" else # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing. 
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM" fi fi } # Copies the bcsymbolmap files of a vendored framework install_bcsymbolmap() { local bcsymbolmap_path="$1" local destination="${BUILT_PRODUCTS_DIR}" echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"" rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}" } # Signs a framework with the provided identity code_sign_if_enabled() { if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then # Use the current code_sign_identity echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}" local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'" if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then code_sign_cmd="$code_sign_cmd &" fi echo "$code_sign_cmd" eval "$code_sign_cmd" fi } # Strip invalid architectures strip_invalid_archs() { binary="$1" # Get architectures for current target binary binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)" # Intersect them with the architectures we are building for intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)" # If there are no archs supported by this binary then warn the user if [[ -z "$intersected_archs" ]]; then echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)." STRIP_BINARY_RETVAL=0 return fi stripped="" for arch in $binary_archs; do if ! 
[[ "${ARCHS}" == *"$arch"* ]]; then # Strip non-valid architectures in-place lipo -remove "$arch" -output "$binary" "$binary" stripped="$stripped $arch" fi done if [[ "$stripped" ]]; then echo "Stripped $binary of architectures:$stripped" fi STRIP_BINARY_RETVAL=1 } if [[ "$CONFIGURATION" == "Debug" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/CropViewController/CropViewController.framework" fi if [[ "$CONFIGURATION" == "Release" ]]; then install_framework "${BUILT_PRODUCTS_DIR}/CropViewController/CropViewController.framework" fi if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then wait fi
-- Sessions currently waiting on "log buffer space" (redo log buffer full).
-- LIKE is required for the trailing '%' wildcard: the original used '=',
-- which compares against the literal string 'log buffer space%' and
-- therefore never matches any event.
select sid, event, seconds_in_wait, state
from v$session_wait
where event like 'log buffer space%'
/
import re


class QueryProcessor:
    """Validates templated SQL queries and prepares bundle sources."""

    def __init__(self, source):
        # `source` is expected to expose a `.query` attribute containing
        # an "{ids}" placeholder — TODO confirm against callers.
        self.source = source

    def _validate_query(self):
        """Require an 'IN ({ids})' clause and reject 'NOT IN ({ids})'."""
        condensed_query = self.source.query.lower().replace(" ", "")
        if re.search(r"notin\({ids}\)", condensed_query):
            raise ValueError(f"Not support 'not in' phrase: {self.source.query}")
        if not re.search(r"in\({ids}\)", condensed_query):
            # Plain string, not an f-string: "{ids}" is a literal placeholder.
            # The original used an f-string here, which raised NameError on
            # the undefined name `ids` exactly when this error path fired.
            example = "SELECT * FROM tests WHERE id IN ({ids})"
            raise ValueError(f"Require 'in' phrase and 'ids' key on query: {self.source.query}, e.g. '{example}'")

    @staticmethod
    def _create_bundle_source(desired_bundle_size, source, ids):
        """Return (bundle_size, source, "'id1','id2',...") for a list of ids.

        Returns None implicitly when `ids` is not a list (preserved from the
        original; callers appear to rely on that — TODO confirm).
        """
        if isinstance(ids, list):
            ids_str = ",".join([f"'{id}'" for id in ids])
            return desired_bundle_size, source, ids_str
import React from 'react';
import { connect } from 'react-redux';
import "storm-react-diagrams/dist/style.min.css";
import {
    setSelectedCourse,
} from "../actions/DiagramActions";
import RequirementChart from '../components/main/RequirementChart';
import {
    fetchCourse,
} from "../actions/ApiActions";

// Thin container: forwards all received props straight to the
// presentational RequirementChart component.
const RequirementChartContainer = props => <RequirementChart {...props} />;

// Expose the `course` and `diagram` store slices as props.
const mapStateToProps = (state) => {
    const {
        course,
        diagram,
    } = state;
    return {
        course,
        diagram,
    }
};

// Action creators bound to dispatch and injected as props (object shorthand).
const mapDispatchToProps = {
    setSelectedCourse,
    fetchCourse,
};

export default connect(mapStateToProps, mapDispatchToProps)(RequirementChartContainer);
<gh_stars>0 /** # Mancala CBR test CBR based player test. Populates a case base (SQLite3 database) and assesses the resulting player against other players. */ var ludorumCBR = require('../build/ludorum-player-cbr'), mancala = require('@creatartis/ludorum-game-mancala'), ludorum = require('ludorum'), base = require('creatartis-base'); // Game encoding /////////////////////////////////////////////////////////////////////////////////// function encoding_mancala(game, moves, ply) { return { ply: ply, features: game.board.slice(), actions: !moves ? null : game.players.map(function (p) { return moves.hasOwnProperty(p) ? moves[p] : null; }) }; } // Main //////////////////////////////////////////////////////////////////////////////////////////// var GAME = new mancala.Mancala(), LOGGER = base.Logger.ROOT, RANDOM = new ludorum.players.RandomPlayer({ name: 'RANDOM' }), MCTS10 = new ludorum.players.MonteCarloPlayer({ name: 'MCTS10', simulationCount: 10 }), MCTS50 = new ludorum.players.MonteCarloPlayer({ name: 'MCTS50', simulationCount: 50 }), UCT10 = new ludorum.players.UCTPlayer({ name: 'UCT10', simulationCount: 10 }), UCT50 = new ludorum.players.UCTPlayer({ name: 'UCT50', simulationCount: 50 }), CDB = new ludorumCBR.dbs.SQLiteCaseBase({ game: GAME, encoding: encoding_mancala, db: 'dbs/'+ GAME.name.toLowerCase() +'-cbr.sqlite', tableName: 'CB_'+ GAME.name }), CBR10 = new ludorumCBR.CBRPlayer({ name: 'CBR10', caseBase: CDB, k: 10 }), CBR20 = new ludorumCBR.CBRPlayer({ name: 'CBR20', caseBase: CDB, k: 20 }), CBR30 = new ludorumCBR.CBRPlayer({ name: 'CBR30', caseBase: CDB, k: 30 }); LOGGER.appendToConsole(); LOGGER.appendToFile(base.Text.formatDate(new Date(), '"logs/mancala-cbr-test-"yyyymmdd-hhnnss".log"')); // Case base stuffing ////////////////////////////////////////////////////////////////////////////// var PLAYERS = [RANDOM, CBR10, CBR20, CBR30], OPPONENTS = [RANDOM, MCTS10, MCTS50]; LOGGER.info("Populating case base for "+ GAME.name +" with: "+ PLAYERS.map(function (p) 
{ return p.name; }).join(', ') +"."); return CDB.populate({ n: 250, trainer: CBR30, players: PLAYERS, logger: LOGGER }).then(function () { LOGGER.info("Evaluating CBRPlayer for "+ GAME.name +"."); return CBR10.assess(OPPONENTS, { n: 60, logger: LOGGER }).then(function (evaluation) { LOGGER.info("Assessment CBR10: "+ JSON.stringify(evaluation)); }).then(function () { return CBR20.assess(OPPONENTS, { n: 60, logger: LOGGER }).then(function (evaluation) { LOGGER.info("Assessment CBR20: "+ JSON.stringify(evaluation)); }); }).then(function () { return CBR30.assess(OPPONENTS, { n: 60, logger: LOGGER }).then(function (evaluation) { LOGGER.info("Assessment CBR30: "+ JSON.stringify(evaluation)); }); }); }).then(process.exit);
#!/bin/bash #SBATCH -t 02:00:00 #SBATCH -p RM-shared #SBATCH -N 1 #SBATCH --ntasks-per-node 1 #SBATCH --array=1-88:1 Rscript sim_code/fig04_partial_oracle_axis.R sim_params/fig04_partial_oracle_axis_params.csv $SLURM_ARRAY_TASK_ID
import os
from pathlib import Path

# When True, backslash-separated legacy (Windows-style) string paths are
# normalized to forward slashes before being resolved.
LEGACY_SUPPORT = False


def pltostr(path) -> str:
    """Convert a path-like value to an absolute, resolved string path.

    Generalized to accept any :class:`os.PathLike` object (e.g.
    ``os.DirEntry``) in addition to ``pathlib.Path`` and ``str``; existing
    callers are unaffected.

    Args:
        path: A ``pathlib.Path``, any ``os.PathLike`` object, or a string.

    Returns:
        The fully resolved absolute path as a string.

    Raises:
        TypeError: If ``path`` is not a ``Path``, ``os.PathLike``, or ``str``.
    """
    if isinstance(path, Path):
        return str(path.resolve())
    if isinstance(path, os.PathLike):
        # Any other path-like object: convert via the os.fspath protocol.
        return str(Path(os.fspath(path)).resolve())
    if isinstance(path, str):
        if LEGACY_SUPPORT:
            # Convert legacy (Windows-style) separators to the modern format.
            path = path.replace("\\", "/")
        return str(Path(path).resolve())
    raise TypeError("Invalid input type. Expected Path object or string.")
<gh_stars>0 package com.firax.tetris; import com.firax.tetris.bricks.Brick; import com.firax.tetris.bricks.BrickColor; import javafx.animation.Animation; import javafx.animation.KeyFrame; import javafx.animation.Timeline; import javafx.application.Platform; import javafx.event.ActionEvent; import javafx.geometry.Pos; import javafx.scene.canvas.Canvas; import javafx.scene.control.Label; import javafx.scene.layout.AnchorPane; import javafx.scene.paint.Color; import javafx.scene.text.Font; import javafx.util.Duration; import java.util.List; public class GameBoard { public final static int DEFAULT_FALL_SPEED = 100; // DEFAULT FALL VALUE FOR BRICK private final static int ANIMATION_DURATION = 20; //DEFAULT ANIMATION DURATION private GameMatrix gameMatrix; //Matrix controller private Timeline mainTimeline; //Here is job done (ticker) private AnchorPane root; private Label mStatistics; private BrickBag brickBag; //Random brick generator private Skin skin; //Skin for bricks //layout values private int sizeX, sizeY; private int paddingX, paddingY; private int onHoldBrickID; //ID of holding brick private int nextBrickID; // ID of next brick private int activeBrickID; // ID of active brick private int score; private int fallSpeed = DEFAULT_FALL_SPEED; //Actual brick fall speed private int counterToMoveDown; //Tick iterations counter... 
When this == fallSpeed -> Block will move down by 1 -> ( this = 0) private boolean canHoldBrick; private boolean playing; private boolean isAnimationPlaying; private boolean isPreviewActive = Settings.IS_PREVIEW_ACTIVE; //Showing where will brick fall private boolean isAnimationActive = Settings.IS_ANIMATION_ENABLED; //Enable animations private boolean isMenuOpened = true; public GameBoard(AnchorPane root, int blocksWidth, int blocksHeight, int sizeWidth, int sizeHeight, int paddingX, int paddingY) { if (blocksWidth < 8 || blocksHeight < 8) { throw new IllegalArgumentException( "\nBoard has to be at least 8x8"); } gameMatrix = new GameMatrix(blocksWidth, blocksHeight); this.root = root; brickBag = new BrickBag(); skin = new Skin(Settings.FAVOURITE_SKIN); this.sizeX = sizeWidth; this.sizeY = sizeHeight; this.paddingX = paddingX; this.paddingY = paddingY; setupLayouts(); setupMainTimeline(); resetGame(); reDrawAll(); } public GameBoard(AnchorPane root, int blocksWidth, int blocksHeight, int paddingX, int paddingY) { this(root, blocksWidth, blocksHeight, 300, 600, paddingX, paddingY); } public GameBoard(AnchorPane root, int blocksWidth, int blocksHeight) { this(root, blocksWidth, blocksHeight, 0, 0); } public void startGame() { showGame(); if (gameMatrix.getActiveBrick() == null) gameMatrix.addNewBrick(Brick.createBrickByID(activeBrickID)); reDrawAll(); playing = true; start(); } public void pauseGame(boolean value) { if (!isMenuOpened) { playing = !value; if (playing) mainTimeline.play(); else mainTimeline.pause(); } } private void showGame() { isMenuOpened = false; } private void start() { mainTimeline.play(); } public void resetGame() { if (!isAnimationPlaying) { clearBoard(); activeBrickID = generateRandomBrickID(); nextBrickID = generateRandomBrickID(); canHoldBrick = true; onHoldBrickID = -1; score = 0; counterToMoveDown = 0; Platform.runLater(() -> mStatistics.setText("LINES SENT: " + score)); if (playing) { 
gameMatrix.addNewBrick(Brick.createBrickByID(activeBrickID)); reDrawAll(); startGame(); } } } public void holdBrick() { if (canHoldBrick) { canHoldBrick = false; //if (Empty brick holder) if (onHoldBrickID == -1) { onHoldBrickID = activeBrickID; activeBrickID = nextBrickID; nextBrickID = generateRandomBrickID(); } else { int holdBrickCacheID = onHoldBrickID; onHoldBrickID = activeBrickID; activeBrickID = holdBrickCacheID; } gameMatrix.replaceBrick(Brick.createBrickByID(activeBrickID)); drawBrickOnCanvas(getCanvasByID(Settings.IDs.CANVAS_NEXT_BRICK_ID), getNextBrickID()); drawBrickOnCanvas(getCanvasByID(Settings.IDs.CANVAS_HOLD_BRICK_ID), getOnHoldBrickID()); } else { //TODO NOTIFY PLAYER } } //Instantly place a brick at the lowest point public void makeBrickFall() { if (playing && !isAnimationPlaying) while (!gameMatrix.isReadyForNewBrick() && !gameMatrix.isMatrixFull()) { gameMatrix.moveBrickDown(); } } public void moveBrickLeft() { if (playing) gameMatrix.moveBrickLeft(); } public void moveBrickRight() { if (playing) gameMatrix.moveBrickRight(); } public void moveBrickDown() { if (playing) gameMatrix.moveBrickDown(); } public void rotateBrick() { if (playing) gameMatrix.rotateBrick(); } public void setSkin(int skin) { if (!isGameEnd()) { this.skin.setSkinID(skin); reDrawAll(); } } public int getActiveSkin() { return skin.getSelectedSkinID(); } public void setPreview(boolean value) { isPreviewActive = value; } public void setFallSpeed(int fallSpeed) { if (fallSpeed < 10 && fallSpeed >= 0) this.fallSpeed = 10; else this.fallSpeed = fallSpeed; } private void animateGrayScaleEffect() { isAnimationPlaying = true; Canvas canvas = getCanvasByID(Settings.IDs.GAME_CANVAS_ID); double squareWidth = canvas.getWidth() / gameMatrix.getWidth(); double squareHeight = canvas.getHeight() / gameMatrix.getHeight(); new Thread(new Runnable() { @Override public void run() { try { for (int i = gameMatrix.getHeight() - 1; i >= 0; i--) { for (int j = 0; j < gameMatrix.getWidth(); j++) { 
if (gameMatrix.getValue(j, i) != 0) { skin.drawSquareOnCanvas(j, i, squareWidth, squareHeight, Color.DARKGRAY, canvas, 0, 0); Thread.sleep(5); } } Thread.sleep(40); } } catch (InterruptedException e) { e.printStackTrace(); isAnimationPlaying = false; } isAnimationPlaying = false; } }).start(); } //Input is list of rows, which will be animated and taken off from matrix private void animateRows(final List<Integer> rows) { isAnimationPlaying = true; Canvas canvas = getCanvasByID(Settings.IDs.GAME_CANVAS_ID); double squareWidth = canvas.getWidth() / gameMatrix.getWidth(); double squareHeight = canvas.getHeight() / gameMatrix.getHeight(); animateRowsSide(rows, canvas, squareWidth, squareHeight); } private void animateRowsSide(final List<Integer> rows, Canvas canvas, double squareWidth, double squareHeight) { Thread rowSideAnimation = new Thread(new Runnable() { int counter = 0; double stepX = (gameMatrix.getWidth() * squareWidth) / ANIMATION_DURATION; double randomStepX = stepX; @Override public void run() { //Waiting for main canvas to be redrawn try { Thread.sleep(5); } catch (InterruptedException e) { e.printStackTrace(); animateRowsFall(rows, canvas, squareWidth, squareHeight); } while (counter != ANIMATION_DURATION) { for (int row : rows) { //Every row will travel to opposite direction if (row % 2 == 0) randomStepX = -stepX; else randomStepX = stepX; canvas.getGraphicsContext2D().clearRect(0, row * squareHeight, canvas.getWidth(), squareHeight); for (int i = 0; i < gameMatrix.getWidth(); i++) { skin.drawSquareOnCanvas(i, row, squareWidth, squareHeight, getColorByID(gameMatrix.getValue(i, row)), canvas, counter * randomStepX, 0); } } if (++counter == ANIMATION_DURATION) { for (int row : rows) gameMatrix.clearRow(row); animateRowsFall(rows, canvas, squareWidth, squareHeight); } try { Thread.sleep(10); } catch (InterruptedException e) { e.printStackTrace(); animateRowsFall(rows, canvas, squareWidth, squareHeight); } } } }); rowSideAnimation.start(); } private void 
animateRowsFall(final List<Integer> rows, Canvas canvas, double squareWidth, double squareHeight) { final Thread fallAnimation = new Thread(new Runnable() { double stepY = squareHeight / ANIMATION_DURATION; int counter = 0; int multiplier = 0; @Override public void run() { while (counter != ANIMATION_DURATION) { canvas.getGraphicsContext2D().clearRect(0, 0, canvas.getWidth(), canvas.getHeight()); //Starting with bottom row for (int i = gameMatrix.getHeight() - 1; i >= 0; i--) { multiplier = 0; //Counting how fast will blocks fall (multiplier) for (int k = rows.size() - 1; k >= 0; k--) { if (rows.get(k) > i) { if (k - 1 >= 0) { if (rows.get(k) - rows.get(k - 1) != 1) multiplier = rows.size() - k; } else multiplier = rows.size() - k; } } //Drawing row for (int j = 0; j < gameMatrix.getWidth(); j++) { if (gameMatrix.getValue(j, i) != 0) { skin.drawSquareOnCanvas(j, i, squareWidth, squareHeight, getColorByID(gameMatrix.getValue(j, i)), canvas, 0, (stepY * multiplier) * counter); } } } //Animation is finished if (++counter == ANIMATION_DURATION) { for (int row : rows) { gameMatrix.moveAllDownFromRow(row); } isAnimationPlaying = false; } try { Thread.sleep(10); } catch (InterruptedException e) { for (int row : rows) { gameMatrix.moveAllDownFromRow(row); } isAnimationPlaying = false; e.printStackTrace(); } } } }); fallAnimation.start(); } private void finishGame() { animateGrayScaleEffect(); } private void addNewBrick() { activeBrickID = nextBrickID; nextBrickID = generateRandomBrickID(); gameMatrix.addNewBrick(Brick.createBrickByID(activeBrickID)); canHoldBrick = true; } private boolean isRowFull() { return gameMatrix.getFullRows().size() > 0; } private int generateRandomBrickID() { if (brickBag.getRemainingBrickCount() == 0) brickBag.refillBag(); return brickBag.getRandomBrick(); } private Color getColorByID(int ID) { if (ID == -1) return getColorByID(activeBrickID); else return Brick.getColorByID(ID); } private void reDrawAll() { 
drawBackground(getCanvasByID(Settings.IDs.CANVAS_BACKGROUND_ID)); drawBrickOnCanvas(getCanvasByID(Settings.IDs.CANVAS_NEXT_BRICK_ID), getNextBrickID()); drawBrickOnCanvas(getCanvasByID(Settings.IDs.CANVAS_HOLD_BRICK_ID), getOnHoldBrickID()); } //Redrawing game background private void drawBackground(Canvas canvas) { double squareWidth = canvas.getWidth() / gameMatrix.getWidth(); double squareHeight = canvas.getHeight() / gameMatrix.getHeight(); canvas.getGraphicsContext2D().setStroke(Color.BLACK); for (int i = 0; i < gameMatrix.getHeight(); i++) { for (int j = 0; j < gameMatrix.getWidth(); j++) { if (((i % 2) + j) % 2 == 0) canvas.getGraphicsContext2D().setFill(new Color(0.2, 0.2, 0.2, 1)); else canvas.getGraphicsContext2D().setFill(new Color(0.15, 0.15, 0.15, 1)); canvas.getGraphicsContext2D().fillRect(squareWidth * j, squareHeight * i, squareWidth, squareHeight); //Filling canvas.getGraphicsContext2D().strokeRect(squareWidth * j, squareHeight * i, squareWidth, squareHeight); //Stroking } } } //Redrawing all game blocks private void drawBoard(Canvas canvas) { canvas.getGraphicsContext2D().clearRect(0, 0, canvas.getWidth(), canvas.getHeight()); double squareWidth = canvas.getWidth() / gameMatrix.getWidth(); double squareHeight = canvas.getHeight() / gameMatrix.getHeight(); for (int i = 0; i < gameMatrix.getHeight(); i++) { for (int j = 0; j < gameMatrix.getWidth(); j++) { if (gameMatrix.getValue(j, i) != 0) { skin.drawSquareOnCanvas(j, i, squareWidth, squareHeight, getColorByID(gameMatrix.getValue(j, i)), canvas, 0, 0); } } } //Drawing preview blocks if (isPreviewActive) { List<Point> position = gameMatrix.getFinalPosition(); for (Point point : position) { if (gameMatrix.getValue(point.x, point.y) != -1) { skin.drawSquareOnCanvas(point.x, point.y, squareWidth, squareHeight, Color.TRANSPARENT, canvas, 0, 0); } } } } private void drawBrickOnCanvas(Canvas canvas, int brickID) { Brick.drawBrickOnCanvas(canvas, brickID, skin); } private void clearBoard() { 
gameMatrix.resetMatrix(); } private Canvas getCanvasByID(String ID) { //This is search for layout file canvas if (root.lookup(ID) != null) return ((Canvas) root.lookup(ID)); //This is search for programmatically created canvas for (int i = 0; i < root.getChildren().size(); i++) { if (root.getChildren().get(i).getId() != null && root.getChildren().get(i).getId().equals(ID)) return ((Canvas) root.getChildren().get(i)); } return new Canvas(); } private void setupLayouts() { setupCanvases(); mStatistics = new Label("LINES SENT: 0"); mStatistics.setFont(new Font("Verdana", 12)); mStatistics.setPrefWidth(150); mStatistics.setAlignment(Pos.CENTER); mStatistics.setTextFill(Color.WHITE); mStatistics.setLayoutY(575 + paddingY); mStatistics.setLayoutX(300 + paddingX); root.getChildren().add(mStatistics); } //This method is protection from corrupted or non-existent layout file //If any canvas will be missing on layout, this method will automatically //create canvas and set default values to it private void setupCanvases() { Canvas backgroundCanvas, gameCanvas; Canvas nextBrickCanvas, holdBrickCanvas; if (getCanvasByID(Settings.IDs.CANVAS_BACKGROUND_ID).getId() == null) { backgroundCanvas = new Canvas(sizeX, sizeY); backgroundCanvas.setId(Settings.IDs.CANVAS_BACKGROUND_ID); } else backgroundCanvas = getCanvasByID(Settings.IDs.CANVAS_BACKGROUND_ID); if (getCanvasByID(Settings.IDs.GAME_CANVAS_ID).getId() == null) { gameCanvas = new Canvas(sizeX, sizeY); gameCanvas.setId(Settings.IDs.GAME_CANVAS_ID); } else gameCanvas = getCanvasByID(Settings.IDs.GAME_CANVAS_ID); if (getCanvasByID(Settings.IDs.CANVAS_NEXT_BRICK_ID).getId() == null) { nextBrickCanvas = new Canvas(sizeX / 4, sizeX / 4); nextBrickCanvas.setLayoutX(sizeX + 35); nextBrickCanvas.setLayoutY(sizeY / 6); nextBrickCanvas.setId(Settings.IDs.CANVAS_NEXT_BRICK_ID); } else nextBrickCanvas = getCanvasByID(Settings.IDs.CANVAS_NEXT_BRICK_ID); if (getCanvasByID(Settings.IDs.CANVAS_HOLD_BRICK_ID).getId() == null) { holdBrickCanvas = 
new Canvas(sizeX / 4, sizeX / 4); holdBrickCanvas.setLayoutX(sizeX + 35); holdBrickCanvas.setLayoutY((sizeY / 6) * 4); holdBrickCanvas.setId(Settings.IDs.CANVAS_HOLD_BRICK_ID); } else holdBrickCanvas = getCanvasByID(Settings.IDs.CANVAS_HOLD_BRICK_ID); root.getChildren().removeAll(backgroundCanvas, gameCanvas, nextBrickCanvas, holdBrickCanvas); root.getChildren().add(0, backgroundCanvas); root.getChildren().add(1, gameCanvas); root.getChildren().add(2, nextBrickCanvas); root.getChildren().add(3, holdBrickCanvas); for (int i = 0; i < 4; i++) { root.getChildren().get(i).setLayoutX(root.getChildren().get(i).getLayoutX() + paddingX); root.getChildren().get(i).setLayoutY(root.getChildren().get(i).getLayoutY() + paddingY); } } private void ticker() { if (!isAnimationPlaying) { if (gameMatrix.isReadyForNewBrick()) { if (isRowFull()) { score += gameMatrix.getFullRows().size(); Platform.runLater(() -> mStatistics.setText("LINES SENT: " + score)); if (isAnimationActive) { //RUNNING EXPENSIVE DRAWING PROCESS Platform.runLater(() -> drawBoard(getCanvasByID(Settings.IDs.GAME_CANVAS_ID))); animateRows(gameMatrix.getFullRows()); } else { for (int row : gameMatrix.getFullRows()) { gameMatrix.clearRow(row); gameMatrix.moveAllDownFromRow(row); } } return; } addNewBrick(); drawBrickOnCanvas(getCanvasByID(Settings.IDs.CANVAS_NEXT_BRICK_ID), getNextBrickID()); } //RUNNING EXPENSIVE DRAWING PROCESS Platform.runLater(() -> drawBoard(getCanvasByID(Settings.IDs.GAME_CANVAS_ID))); counterToMoveDown++; if (playing) { if (counterToMoveDown > fallSpeed && fallSpeed > 0) { counterToMoveDown = 0; gameMatrix.moveBrickDown(); } } } } private void setupMainTimeline() { mainTimeline = new Timeline(); mainTimeline.setCycleCount(Animation.INDEFINITE); KeyFrame keyframe = new KeyFrame(Duration.millis(10), (ActionEvent event) -> { // 100fps if (playing && !gameMatrix.isMatrixFull()) ticker(); if (gameMatrix.isMatrixFull()) { finishGame(); mainTimeline.stop(); } }); 
mainTimeline.getKeyFrames().add(keyframe); } public boolean isGameEnd() { return gameMatrix.isMatrixFull(); } public boolean isAnimationPlaying() { return isAnimationPlaying; } public void setAnimations(boolean value) { isAnimationActive = value; } public int getBlocksWidth() { return gameMatrix.getWidth(); } public int getBlocksHeight() { return gameMatrix.getHeight(); } public int[][] getGameMatrixCopy() { int[][] matrixCopy = new int[gameMatrix.getHeight()][gameMatrix.getWidth()]; for (int i = 0; i < gameMatrix.getHeight(); i++) { for (int j = 0; j < gameMatrix.getWidth(); j++) { matrixCopy[i][j] = gameMatrix.getValue(j, i); } } return matrixCopy; } public int getActiveBrickID() { return activeBrickID; } public int getOnHoldBrickID() { return onHoldBrickID; } public int getNextBrickID() { return nextBrickID; } public boolean isGamePaused() { return !playing; } public int getWidth() { return sizeX; } public int getHeight() { return sizeY; } public void resizeBoard(int width, int height) { new Thread(new Runnable() { @Override public void run() { boolean finished = false; try { //Waiting until animation is finished while (!finished) { if (!isAnimationPlaying) { //Changing board size mainTimeline.pause(); Thread.sleep(10); gameMatrix = new GameMatrix(width, height); resetGame(); reDrawAll(); mainTimeline.play(); finished = true; } else { Thread.sleep(10); } } } catch (InterruptedException e) { e.printStackTrace(); } } }).start(); } @Override public String toString() { StringBuilder output = new StringBuilder(); for (int i = 0; i < gameMatrix.getHeight(); i++) { for (int j = 0; j < gameMatrix.getWidth(); j++) { output.append(gameMatrix.getValue(j, i)); } output.append("\n"); } return output.toString(); } }
#!/bin/bash
# Build a Debian installer (.deb) for the Chia blockchain GUI.
#
# Usage: <script> <amd64|arm64>
#
# Fixes vs. previous revision:
#  - usage message typo ("amd64 of arm64" -> "amd64 or arm64"), sent to stderr
#  - an unrecognized architecture argument now fails instead of being
#    silently treated as arm64
#  - all variable expansions are quoted (paths/versions with spaces)

if [ ! "$1" ]; then
  echo >&2 "This script requires either amd64 or arm64 as an argument"
  exit 1
elif [ "$1" = "amd64" ]; then
  PLATFORM="$1"
  DIR_NAME="chia-blockchain-linux-x64"
elif [ "$1" = "arm64" ]; then
  PLATFORM="$1"
  DIR_NAME="chia-blockchain-linux-arm64"
else
  echo >&2 "Unknown architecture '$1'; expected amd64 or arm64"
  exit 1
fi

pip install setuptools_scm

# The environment variable CHIA_INSTALLER_VERSION needs to be defined.
# If the env variable NOTARIZE and the username and password variables are
# set, this will attempt to Notarize the signed DMG.
CHIA_INSTALLER_VERSION=$(python3 installer-version.py)
if [ ! "$CHIA_INSTALLER_VERSION" ]; then
  echo "WARNING: No environment variable CHIA_INSTALLER_VERSION set. Using 0.0.0."
  CHIA_INSTALLER_VERSION="0.0.0"
fi
echo "Chia Installer Version is: $CHIA_INSTALLER_VERSION"

echo "Installing npm and electron packagers"
npm install electron-packager -g
npm install electron-installer-debian -g

echo "Create dist/"
rm -rf dist
mkdir dist

echo "Create executables with pyinstaller"
pip install pyinstaller==4.5
SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)')
pyinstaller --log-level=INFO "$SPEC_FILE"
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "pyinstaller failed!"
  exit "$LAST_EXIT_CODE"
fi

# The GUI bundles the daemon produced by pyinstaller.
cp -r dist/daemon ../chia-blockchain-gui
cd .. || exit
cd chia-blockchain-gui || exit

echo "npm build"
npm install
npm audit fix
npm run build
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "npm run build failed!"
  exit "$LAST_EXIT_CODE"
fi

# Set the version for chia-blockchain in package.json; the original is
# restored after packaging regardless of the packager's outcome.
cp package.json package.json.orig
jq --arg VER "$CHIA_INSTALLER_VERSION" '.version=$VER' package.json > temp.json && mv temp.json package.json

electron-packager . chia-blockchain --asar.unpack="**/daemon/**" --platform=linux \
  --icon=src/assets/img/Chia.icns --overwrite --app-bundle-id=net.chia.blockchain \
  --appVersion="$CHIA_INSTALLER_VERSION"
LAST_EXIT_CODE=$?

# Reset package.json to the original before checking the packager result.
mv package.json.orig package.json

if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "electron-packager failed!"
  exit "$LAST_EXIT_CODE"
fi

mv -- "$DIR_NAME" ../build_scripts/dist/
cd ../build_scripts || exit

echo "Create chia-$CHIA_INSTALLER_VERSION.deb"
rm -rf final_installer
mkdir final_installer
electron-installer-debian --src "dist/$DIR_NAME/" --dest final_installer/ \
  --arch "$PLATFORM" --options.version "$CHIA_INSTALLER_VERSION"
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
  echo >&2 "electron-installer-debian failed!"
  exit "$LAST_EXIT_CODE"
fi
ls final_installer/
// Load the compiled native addon for malloc-tools.
// Prefer the optimized Release build; fall back to the Debug build when only
// a debug compile exists (e.g. during local development). If neither binary
// is present, the second require throws and module loading fails.
var native;
try {
  native = require("./build/Release/malloc-tools.node");
} catch (ex) {
  // Release binary missing or failed to load — try the Debug build instead.
  native = require("./build/Debug/malloc-tools.node");
}

module.exports = native;
#!/bin/bash source initialize.sh source data/version.sh source config/config.sh source sync_from_s3.sh
<reponame>naokikimura/gulp-mocha
/// <reference types="../src/@types/mocha/lib/cli/options" />
import { expect } from 'chai';
import stream from 'stream';
import mocha from '../src/index';

// Smoke test: the plugin factory must produce a stream.Transform so it can
// be dropped into a gulp pipeline via .pipe().
describe('Plugin', () => {
  it('it should return Transform instance', () => {
    expect(mocha()).to.be.an.instanceof(stream.Transform);
  });
});
# Deploy the mqpi service: run the deployment playbook against the hosts in
# deploy/hosts as root. --flush-cache discards any cached host facts so the
# run uses fresh data; -v enables verbose output.
ansible-playbook -i deploy/hosts deploy/mqpi-deployment.yml -u root --flush-cache -v
package org.hiro.things;

import java.util.Arrays;
import java.util.NoSuchElementException;

/**
 * Enumerates the map tiles and item kinds of the game, each keyed by the
 * single character used to render it on screen.
 */
public enum ObjectType {
    PASSAGE('#'),
    DOOR('+'),
    FLOOR('.'),
    PLAYER('@'),
    TRAP('^'),
    STAIRS('%'),
    GOLD('*'),
    POTION('!'),
    SCROLL('?'),
    MAGIC('$'),
    FOOD(':'),
    WEAPON(')'),
    ARMOR(']'),
    AMULET(','),
    RING('='),
    STICK('/'),
    Vert('|'),
    Horizon('-'),
    Blank(' '),
    Initial('\u0000');

    /** Display character for this object type; fixed at construction. */
    private final char value;

    ObjectType(char value) {
        this.value = value;
    }

    /** @return the display character for this object type */
    public char getValue() {
        return value;
    }

    /**
     * Looks up the object type represented by the given display character.
     *
     * @param c display character to resolve
     * @return the matching {@link ObjectType}
     * @throws NoSuchElementException if no type uses the character; the
     *         exception now carries a diagnostic message (previously the bare
     *         {@code Optional.get()} threw without any context)
     */
    static public ObjectType get(char c) {
        // Arrays.stream avoids the needless List wrapper the old code built.
        return Arrays.stream(ObjectType.values())
                .filter(o -> o.getValue() == c)
                .findFirst()
                .orElseThrow(() -> new NoSuchElementException(
                        "No ObjectType for character: '" + c + "'"));
    }
}
<gh_stars>1-10 /* eslint-disable camelcase */ export interface Author { state: string; id: number; web_url: string; name: string; avatar_url?: any; username: string; } export interface Milestone { project_id: number; description: string; state: string; due_date?: any; iid: number; created_at: Date; title: string; id: number; updated_at: Date; } export interface Assignee { state: string; id: number; name: string; web_url: string; avatar_url?: any; username: string; } export interface References { short: string; relative: string; full: string; } export interface TimeStats { time_estimate: number; total_time_spent: number; human_time_estimate?: any; human_total_time_spent?: any; } export interface Links { self: string; notes: string; award_emoji: string; project: string; } export interface TaskCompletionStatus { count: number; completed_count: number; } export interface Issue { state: string; description: string; author: Author; milestone: Milestone; project_id: number; assignees: Assignee[]; assignee: Assignee; updated_at: Date; closed_at?: any; closed_by?: any; id: number; title: string; created_at: Date; moved_to_id?: any; iid: number; labels: string[]; upvotes: number; downvotes: number; merge_requests_count: number; user_notes_count: number; due_date: string; web_url: string; references: References; time_stats: TimeStats; has_tasks: boolean; task_status: string; confidential: boolean; discussion_locked: boolean; _links: Links; task_completion_status: TaskCompletionStatus; } export interface GroupSamlIdentity { extern_uid: string; provider: string; saml_provider_id: number; } export interface Member { id: number; username: string; name: string; state: string; avatar_url: string; web_url: string; expires_at: Date; access_level: number; email: string; group_saml_identity?: GroupSamlIdentity; }
package org.stjs.server.rest.spring;

import org.springframework.http.HttpStatus;
import org.stjs.shared.rest.spring.RestCallback;
import org.stjs.shared.rest.spring.RestResult;

/**
 * Server-side implementation of {@link RestResult}: an immutable pairing of
 * an HTTP status and a response body. The asynchronous callback API of the
 * shared interface is intentionally unsupported here — on the server the
 * result is consumed directly via {@link #getHttpStatus()} and
 * {@link #getBody()}.
 *
 * @param <T> type of the response body
 * @author sj
 */
public class ResponseResult<T> implements RestResult<T> {
	private final HttpStatus httpStatus;
	private final T body;

	public ResponseResult(HttpStatus httpStatus, T body) {
		this.httpStatus = httpStatus;
		this.body = body;
	}

	/**
	 * Client-side-only operation; always fails on the server.
	 *
	 * @throws UnsupportedOperationException always
	 */
	@Override
	public void then(RestCallback<T> callback) {
		throw new UnsupportedOperationException("Not supported on server-side.");
	}

	/** @return the HTTP status associated with this result */
	public HttpStatus getHttpStatus() {
		return httpStatus;
	}

	/** @return the response body (may be {@code null}) */
	public T getBody() {
		return body;
	}
}
package de.wwu.wmss.core;

/**
 * Plain data holder describing a musical tonality.
 * <p>
 * NOTE(review): field semantics inferred from names only — presumably
 * {@code mode} is e.g. major/minor, {@code tonic} the root note, and
 * {@code code} an external identifier; confirm against the producing code.
 */
public class Tonality {

	private String mode;
	private String tonic;
	private String code;

	public Tonality() {
		super();
	}

	public String getMode() {
		return mode;
	}

	public void setMode(String mode) {
		this.mode = mode;
	}

	public String getTonic() {
		return tonic;
	}

	public void setTonic(String tonic) {
		this.tonic = tonic;
	}

	public String getCode() {
		return code;
	}

	public void setCode(String code) {
		this.code = code;
	}
}
/* * Copyright 2021 Hazelcast Inc. * * Licensed under the Hazelcast Community License (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://hazelcast.com/hazelcast-community-license * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.jet.sql.impl.validate; import com.hazelcast.internal.util.BiTuple; import com.hazelcast.jet.sql.impl.validate.operators.common.HazelcastFunction; import com.hazelcast.jet.sql.impl.validate.operators.common.HazelcastOperandTypeCheckerAware; import com.hazelcast.jet.sql.impl.validate.operators.misc.HazelcastCaseOperator; import com.hazelcast.jet.sql.impl.validate.operators.typeinference.HazelcastReturnTypeInference; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.SqlSyntax; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.util.HashMap; import java.util.Map; import static junit.framework.TestCase.assertNull; import static junit.framework.TestCase.assertTrue; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelJVMTest.class}) public class HazelcastSqlOperatorTableTest { /** * Make sure there are no overrides for operators defined in the operator table. 
*/ @Test public void testNoOverride() { Map<BiTuple<String, SqlSyntax>, SqlOperator> map = new HashMap<>(); for (SqlOperator operator : HazelcastSqlOperatorTable.instance().getOperatorList()) { BiTuple<String, SqlSyntax> key = BiTuple.of(operator.getName(), operator.getSyntax()); SqlOperator oldOperator = map.put(key, operator); assertNull("Duplicate operator \"" + operator.getName(), oldOperator); } } /** * Make sure that all our operators either define the top-level operand checker that overrides that call binding, * or confirm explicitly that they override the binding manually. */ @Test public void testOperandTypeChecker() { for (SqlOperator operator : HazelcastSqlOperatorTable.instance().getOperatorList()) { boolean valid = operator instanceof HazelcastOperandTypeCheckerAware || operator instanceof HazelcastCaseOperator || operator == HazelcastSqlOperatorTable.ARGUMENT_ASSIGNMENT; assertTrue("Operator must implement one of classes from " + HazelcastFunction.class.getPackage().toString() + ": " + operator.getClass().getSimpleName(), valid); } } @Test public void testReturnTypeInference() { for (SqlOperator operator : HazelcastSqlOperatorTable.instance().getOperatorList()) { if (operator == HazelcastSqlOperatorTable.IN || operator == HazelcastSqlOperatorTable.NOT_IN || operator == HazelcastSqlOperatorTable.ARGUMENT_ASSIGNMENT) { continue; } boolean valid = operator.getReturnTypeInference() instanceof HazelcastReturnTypeInference; assertTrue("Operator must have " + HazelcastReturnTypeInference.class.getSimpleName() + ": " + operator.getClass().getSimpleName(), valid); } } }
<reponame>missaouib/Fame
package com.designre.blog.listener.event;

import com.designre.blog.model.dto.CommentDto;
import lombok.Getter;
import lombok.ToString;
import org.springframework.context.ApplicationEvent;

/**
 * Application event published when a new blog comment is created. Carries the
 * comment payload so listeners can react (e.g. notifications) without
 * re-fetching it.
 */
@ToString
@Getter
public class CommentNewEvent extends ApplicationEvent {

    private final CommentDto commentDto;

    /**
     * Create a new {@code ApplicationEvent}.
     *
     * @param source the object on which the event initially occurred or with
     * which the event is associated (never {@code null})
     * @param commentDto comment dto
     */
    public CommentNewEvent(Object source, CommentDto commentDto) {
        super(source);
        this.commentDto = commentDto;
    }
}
<filename>src/commands/invites/check.ts import { EMBEDS, MESSAGES } from '@constants' import { extractCodes, handle, processResults } from '@utils' import { Command } from 'discord-akairo' import { CategoryChannel, Collection, Message, NewsChannel, TextChannel } from 'discord.js' export default class CheckCommand extends Command { public constructor() { super('check', { aliases: ['check'], category: 'Invites', channel: 'guild', description: { text: MESSAGES.COMMANDS.CHECK.TEXT, usage: MESSAGES.COMMANDS.CHECK.USAGE }, userPermissions: ['ADMINISTRATOR'] }) } public async exec(message: Message) { const guild = message.guild! const guildChannels = guild.channels.cache const { config: { categoryIds, checkChannelId, ignoreIds, interval }, inCheck } = this.client const checkChannel = guildChannels.get(checkChannelId) if (!(checkChannel instanceof TextChannel)) return message.channel.send(MESSAGES.ERRORS.CHECK_CHANNEL) if (checkChannelId !== message.channel.id) return message.channel.send(MESSAGES.INFO.WRONG_CHANNEL(checkChannel)) if (inCheck) return message.channel.send(MESSAGES.INFO.IN_CHECK) if (!categoryIds.length) return message.channel.send(MESSAGES.INFO.NO_CATEGORIES) const categories = guildChannels .filter(({ id, type }) => type === 'category' && categoryIds.includes(id)) .sort((c1, c2) => c1.position - c2.position) as Collection<string, CategoryChannel> const delay = ms => new Promise(res => setTimeout(res, ms)) const delayTask = () => delay(interval) const messagesTask = (channel: NewsChannel | TextChannel) => () => handle(channel.messages.fetch({ limit: 8 }, true, false)) const inviteTask = (code: string) => () => handle(this.client.fetchInvite(code)) this.client.inCheck = true const check = process.hrtime() await checkChannel.send(MESSAGES.INFO.CHECK_START(this.client.user.username)) let goodInvites = 0, badInvites = 0, totalChannels = 0, totalInvites = 0 for (const [_, category] of categories) { const categoryName = category.name const childChannels = 
category.children .filter(({ id, type }) => ['news', 'text'].includes(type) && !ignoreIds.includes(id)) as Collection<string, NewsChannel | TextChannel> if (!childChannels.size) { await message.channel.send(EMBEDS.CATEGORY(categoryName)) continue } const categoryResults: Collection<string, { code: string, valid: boolean }[]> = new Collection() const issues: { unknown: number, known: (NewsChannel | TextChannel)[] } = { unknown: 0, known: [] } const childChannelsSorted = childChannels.sort((c1, c2) => c1.position - c2.position) for (const [channelId, channel] of childChannelsSorted) { if (!channel) { issues.unknown++ continue } const messages = await this.client.queue.add(messagesTask(channel)) this.client.queue.add(delayTask) if (!messages[0]) { issues.known.push(channel) continue } const codes = extractCodes(messages[0]) if (!codes.length) { categoryResults.set(channelId, []) continue } const codePromises = codes.map(code => inviteTask(code)) const invites = await Promise.allSettled(codePromises.map(codePromise => this.client.queue.add(codePromise))) // invites = { status: 'fulfilled', value: [ [Invite], [DiscordAPIError] ] }[] const results = invites.map((invite, index) => { const { value } = invite as any return { code: codes[index], valid: !!value[0] } }) categoryResults.set(channelId, results) } const { bad, channels, good, issuesDescription, resultsDescription, total } = processResults(categoryResults, issues) badInvites += bad goodInvites += good totalChannels += channels totalInvites += total await checkChannel.send(EMBEDS.CATEGORY(categoryName, resultsDescription, issuesDescription)) } this.client.inCheck = false const time = process.hrtime(check) const elapsedTimeMilliseconds = ((time[0] * 1e9) + time[1]) / 1e6 await checkChannel.send(MESSAGES.INFO.CHECK_COMPLETE) await checkChannel.send(EMBEDS.RESULTS(badInvites, totalChannels, goodInvites, totalInvites, elapsedTimeMilliseconds)) } }
<reponame>zhanghongli-lily/tmoney
package zelda;

import org.junit.jupiter.api.Test;

import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import zelda.page.AppLogin;

/**
 * @ClassName: BaseCase
 * @Description: baseCase — shared JUnit 5 lifecycle for app tests: logs in
 * once before all tests and quits the driver after all tests.
 * @Author: zhzh.yin
 * @Date: 2020-04-29 15:47
 * @Verion: 1.0
 */
public class BaseTest {
    private static AppLogin login = new AppLogin();
    static final Logger log = LoggerFactory.getLogger(BaseTest.class);

    // Runs once per class: authenticate via cookie before any test executes.
    // NOTE(review): credentials are hardcoded here — consider moving them to
    // configuration/environment; confirm they are not production secrets.
    @BeforeAll
    public static void login() {
        log.info("before all :login");
        login.loginWithCookie("mr.joker", "ww8c83d949a80b562d");
    }

    // Intentionally empty: passing means the @BeforeAll login succeeded.
    @Test
    public void testLogin() throws InterruptedException {
    }

    // Runs once per class: release the WebDriver session.
    @AfterAll
    public static void shutdown() {
        login.getDriver().quit();
    }
}
import { AnyObject } from '../types/common-types'; export const decodeState = (encodedState: string): AnyObject | undefined => { try { return JSON.parse(atob(decodeURIComponent(encodedState))); } catch (error) { return undefined; } }; export const encodeState = (state: AnyObject, stateKey?: string): string => { const stateObject = stateKey ? { ...decodeState(window.location.hash), [stateKey]: state } : state; try { return encodeURIComponent(btoa(JSON.stringify(stateObject))); } catch (error) { // eslint-disable-next-line no-console console.warn( 'View state is not a valid JSON, state has will not be generated. View state: ', state ); return ''; } };
// Events service: thin HTTP wrapper around an events API plus a Google
// Cloud PubSub subscription handle for streaming events.

const bluebird = require('bluebird');
const request = require('request-promise');
const PubSub = require('@google-cloud/pubsub');
const config = require('../../config');

// PubSub client configured from app config, promisified with bluebird.
const pubsub = PubSub({
  projectId: config.pubsub.projectId,
  credentials: config.pubsub.credentials,
  promise: bluebird
});

const BASE_URL = config.pubsub.baseUrl;
const KEY = config.pubsub.key;
const topic = pubsub.topic(config.pubsub.topic);
const subscription = config.pubsub.subscription;

/**
 * Get events from BigQuery
 * GET /
 * namespace/user/since/to are forwarded as query-string filters; undefined
 * values are omitted by request-promise. Resolves with the parsed JSON body.
 */
exports.getEvents = ({ namespace, user, since, to }) => {
  const options = {
    uri: BASE_URL,
    qs: { key: KEY, namespace, user, since, to },
    json: true
  };
  return request.get(options);
};

/**
 * Publish an event to PubSub
 * POST /
 * The event object is sent as the JSON request body; resolves with the
 * parsed JSON response.
 */
exports.publish = (event) => {
  const options = {
    uri: BASE_URL,
    qs: { key: KEY },
    body: event,
    json: true
  };
  return request.post(options);
};

// Returns the PubSub subscription object for the configured topic;
// callers attach their own message handlers to it.
exports.watchEvents = () => {
  return topic.subscription(subscription);
};
import os
import pexpect
import time

from player import Player

from RPiAir.database import Movie
from RPiAir.messaging import create_jsonMessage


class OMXPlayer(Player):
    """class for control OMXPlayer instance (RPi)"""

    # omxplayer binary and its default arguments (HDMI audio output).
    CMD = "/usr/bin/omxplayer.bin"
    ARGS = "-o hdmi -b".split(' ')

    # Logical command name -> keystroke omxplayer reads from its terminal.
    KEYS = {
        'decrSpeed': '1',
        'incrSpeed': '2',
        'rewind': '<',
        'fastForward': '>',
        'showInfo': 'z',
        'prevAudio': 'j',
        'nextAudio': 'k',
        'prevChapter': 'i',
        'nextChapter': 'o',
        'prevSubtitle': 'n',
        'nextSubtitle': 'm',
        'toggleSubtitle': 's',
        'decrSubtitleDelay': 'd',
        'incrSubtitleDelay': 'f',
        'exit': 'q',
        'togglePlay': 'p',
        'decrVolume': '-',
        'incrVolume': '+',
        'seek-30': '\x1b\x5b\x44',
        'seek+30': '\x1b\x5b\x43',
        'seek-600': '\x1b\x5b\x42',
        'seek+600': '\x1b\x5b\x41',
    }

    def run_command(self, cmd, s):
        """run command on player object

        :cmd: command to run (a key of OMXPlayer.KEYS)
        :s: status code for the success JSON message
        """
        player = self.get_currentPlayer()
        if not player:
            return create_jsonMessage(406)
        player.send(OMXPlayer.KEYS[cmd])
        return create_jsonMessage(s)

    def get_currentPlayer(self):
        """return current player object if still running, else None"""
        curPlayer = super(OMXPlayer, self).get_currentPlayer()
        if curPlayer is not None:
            if not curPlayer.isalive():
                # Process has exited; drop the stale reference.
                self.set_currentPlayer(None)
        return super(OMXPlayer, self).get_currentPlayer()

    def video_play(self, hash_id):
        """start omxplayer and play file"""
        # get video filename
        item = Movie.query.filter_by(hash_id=hash_id).first()
        if not item:
            return create_jsonMessage(403, hash_id)
        filename = item.location

        # check if videofile exists
        if not os.path.isfile(filename):
            return create_jsonMessage(404, filename)

        # check if not already playing and exit if so.
        # BUGFIX: previously called self.exit_video(), which does not exist
        # (the method is named video_exit) and raised AttributeError.
        curPlayer = self.get_currentPlayer()
        if curPlayer is not None:
            _ = self.video_exit()

        # start playing video
        try:
            newPlayer = pexpect.spawn(OMXPlayer.CMD, OMXPlayer.ARGS + [filename])
        except pexpect.ExceptionPexpect:
            return create_jsonMessage(512, OMXPlayer.CMD)
        time.sleep(0.5)  # make sure omxplayer not immediately exits
        if not newPlayer.isalive():
            return create_jsonMessage(513)
        self.set_currentPlayer(newPlayer)
        return create_jsonMessage(210, filename)

    def video_exit(self):
        """exit current video"""
        return self.run_command('exit', 211)

    def video_pause(self):
        """toggle pause/play on current video

        BUGFIX: previously looked up KEYS['pause'], which is not a key of
        KEYS and raised KeyError; the play/pause toggle is 'togglePlay'.
        """
        return self.run_command('togglePlay', 212)

    def video_volume(self, direction='up', amount=1):
        """adjust the volume of current player"""
        cmd = 'incrVolume' if direction == 'up' else 'decrVolume'
        for _ in range(amount):
            msg = self.run_command(cmd, 213)
        return msg

    def video_position(self, direction='forward', amount='small'):
        """adjust the position of the player by amount in direction"""
        direction = '+' if direction == 'forward' else '-'
        amount = '30' if amount == 'small' else '600'
        return self.run_command('seek' + direction + amount, 214)


# initialize player
player = OMXPlayer()
import java.util.HashSet;
import java.util.Set;

/**
 * A named role holding a mutable set of permission strings.
 */
public class Role {

    private String name;
    private Set<String> permissions;

    /** Creates a role with the given name and an empty permission set. */
    public Role(String name) {
        this.name = name;
        this.permissions = new HashSet<String>();
    }

    /** Grants the given permission; granting it twice is a no-op. */
    public void addPermission(String permission) {
        this.permissions.add(permission);
    }

    /** Revokes the given permission; revoking an absent one is a no-op. */
    public void removePermission(String permission) {
        this.permissions.remove(permission);
    }

    /** Returns true when this role currently holds the permission. */
    public boolean hasPermission(String permission) {
        return this.permissions.contains(permission);
    }
}
def generate_output(input_list):
    """Simulate a 4-input NOR gate.

    Returns 0 when any element of ``input_list`` equals 1, otherwise 1.

    BUGFIX: the original set ``output = 0`` and broke out of the loop on the
    first 1, but the unconditional post-loop check ``if output == 0`` then
    flipped it back to 1 — so the function always returned 1. The post-loop
    check is removed so the early exit actually takes effect.

    Args:
        input_list: sequence of exactly 4 gate inputs (0 or 1).

    Returns:
        int: 1 when all inputs are 0, else 0.

    Raises:
        ValueError: if ``input_list`` does not have exactly 4 elements.
    """
    if len(input_list) != 4:
        raise ValueError("input_list must have 4 elements")
    for value in input_list:
        if value == 1:
            return 0
    return 1
#!/bin/sh # # Vivado(TM) # runme.sh: a Vivado-generated Runs Script for UNIX # Copyright 1986-2018 Xilinx, Inc. All Rights Reserved. # echo "This script was generated under a different operating system." echo "Please update the PATH and LD_LIBRARY_PATH variables below, before executing this script" exit if [ -z "$PATH" ]; then PATH=C:/Xilinx/SDK/2018.3/bin;C:/Xilinx/Vivado/2018.3/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2018.3/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2018.3/bin else PATH=C:/Xilinx/SDK/2018.3/bin;C:/Xilinx/Vivado/2018.3/ids_lite/ISE/bin/nt64;C:/Xilinx/Vivado/2018.3/ids_lite/ISE/lib/nt64:C:/Xilinx/Vivado/2018.3/bin:$PATH fi export PATH if [ -z "$LD_LIBRARY_PATH" ]; then LD_LIBRARY_PATH= else LD_LIBRARY_PATH=:$LD_LIBRARY_PATH fi export LD_LIBRARY_PATH HD_PWD='C:/Users/byronxu/Documents/6.S193/hdmi_led/hdmi_led.runs/synth_1' cd "$HD_PWD" HD_LOG=runme.log /bin/touch $HD_LOG ISEStep="./ISEWrap.sh" EAStep() { $ISEStep $HD_LOG "$@" >> $HD_LOG 2>&1 if [ $? -ne 0 ] then exit fi } EAStep vivado -log design_1_wrapper.vds -m64 -product Vivado -mode batch -messageDb vivado.pb -notrace -source design_1_wrapper.tcl
// Click handler for the search button: validates the customer-code box.
private void pesquisa_Click(object sender, EventArgs e)
{
    ValidateInput(codCli.Text);
}

/// <summary>
/// Validates the search text: it must be non-blank and parse as an integer.
/// Shows a warning dialog describing the first rule that is violated.
/// </summary>
private void ValidateInput(string input)
{
    if (string.IsNullOrWhiteSpace(input))
    {
        MessageBox.Show("O campo de pesquisa não pode estar vazio", "Resultado", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
        return;
    }

    int parsed;
    if (!int.TryParse(input, out parsed))
    {
        MessageBox.Show("O campo de pesquisa deve conter apenas números inteiros", "Resultado", MessageBoxButtons.OK, MessageBoxIcon.Exclamation);
    }
}
# frozen_string_literal: true

require_relative '../config/game/g_1889'
require_relative 'base'

module Engine
  module Game
    # 1889 (Shikoku) game definition. All game data (map, trains, companies)
    # comes from the JSON config loaded below; behavior comes from Base.
    class G1889 < Base
      load_from_json(Config::Game::G1889::JSON)
    end
  end
end
echo "" echo "==========================================================" echo "Push Kubernetes 1.11.0 Images into hub.docker.com ......" echo "==========================================================" echo "" echo "docker tag to openthings ..." ## 添加Tag for hub.docker.com docker tag k8s.gcr.io/kube-apiserver-amd64:v1.11.0 openthings/k8s-kube-apiserver-amd64:v1.11.0 docker tag k8s.gcr.io/kube-scheduler-amd64:v1.11.0 openthings/k8s-kube-scheduler-amd64:v1.11.0 docker tag k8s.gcr.io/kube-controller-manager-amd64:v1.11.0 openthings/k8s-kube-controller-manager-amd64:v1.11.0 docker tag k8s.gcr.io/kube-proxy-amd64:v1.11.0 openthings/k8s-kube-proxy-amd64:v1.11.0 docker tag k8s.gcr.io/etcd-amd64:3.2.18 openthings/k8s-etcd-amd64:3.2.18 docker tag k8s.gcr.io/pause-amd64:3.1 openthings/k8s-pause-amd64:3.1 docker tag k8s.gcr.io/coredns:1.1.3 openthings/k8s-coredns:1.1.3 echo "" echo "==========================================================" echo "" ## Push镜像 echo "" echo "1.k8s-kube-apiserver-amd64" docker push openthings/k8s-kube-apiserver-amd64:v1.11.0 echo "" echo "2.k8s-kube-controller-manager-amd64" docker push openthings/k8s-kube-controller-manager-amd64:v1.11.0 echo "" echo "3.k8s-kube-scheduler-amd64" docker push openthings/k8s-kube-scheduler-amd64:v1.11.0 echo "" echo "4.k8s-kube-proxy-amd64" docker push openthings/k8s-kube-proxy-amd64:v1.11.0 echo "" echo "5.k8s-etcd-amd64" docker push openthings/k8s-etcd-amd64:3.2.18 echo "" echo "6.k8s-pause-amd64" docker push openthings/k8s-pause-amd64:3.1 echo "" echo "7.k8s-coredns" docker push openthings/k8s-coredns:1.1.3 echo "" echo "==========================================================" echo "Push Kubernetes 1.11.0 Images into hub.docker.com, FINISH." echo "==========================================================" echo ""
import pulsar as psr


def load_ref_system():
    """ Returns 2_2-dichloroacetic_acid as found in the IQMol fragment library.
        All credit to https://github.com/nutjunkie/IQmol

        Geometry is given XYZ-style: element symbol followed by Cartesian
        coordinates, one atom per line, parsed by pulsar's make_system.
    """
    return psr.make_system("""
      C      0.1545     -0.2925     -0.4629
      C     -1.0813      0.5115     -0.1454
      O     -1.3397      1.1257      0.8694
     Cl      1.5716      0.7499     -0.3354
     Cl      0.3258     -1.5819      0.7287
      O     -2.0027      0.5337     -1.1357
      H      0.1163     -0.7381     -1.4838
      H     -2.7567      1.0535     -0.8754
        """)
import os
import shutil

# Artifact names purged from every package tree (files or directories).
_TARGETS = frozenset([
    "tslint.json",
    "yarn.lock",
    "ember-cli-build.js",
    ".travis.yml",
    "CODE_OF_CONDUCT.md",
    "LICENSE",
    "tmp",
    "dist",
])


def remove_files_and_directories(packages):
    """Recursively delete known build/config artifacts from each package tree.

    Args:
        packages: iterable of directory paths to clean. Nonexistent paths
            are silently skipped (os.walk yields nothing for them).

    Matching files are removed; matching directories are removed whole.
    """
    for package in packages:
        for root, dirs, files in os.walk(package):
            for name in files:
                if name in _TARGETS:
                    os.remove(os.path.join(root, name))
            for name in dirs:
                if name in _TARGETS:
                    # shutil.rmtree replaces the original os.system("rm -rf ...")
                    # shell-out, which broke on spaces and was shell-injectable.
                    # ignore_errors matches rm -rf's silence on failure.
                    shutil.rmtree(os.path.join(root, name), ignore_errors=True)
            # Prune removed directories so the walk does not descend into them.
            dirs[:] = [d for d in dirs if d not in _TARGETS]


if __name__ == "__main__":
    # Example usage (previously ran unconditionally on import).
    packages = [
        "path/to/package1",
        "path/to/package2",
        "path/to/package3"
    ]
    remove_files_and_directories(packages)
#!/bin/bash
# SLURM job: run the READEX online-access tuning scenario against bt-mz
# under Score-P with the RRL substrate plugin, driving it through gdb.
#
# BUGFIX: two directives below were spelled "#SBATCh"; sbatch only parses
# the exact "#SBATCH" prefix, so the --time limit and --cpu_bind setting
# were silently ignored.
#SBATCH -p haswell
#SBATCH --exclusive
#SBATCH --time=01:30:00
#SBATCH -c 24
#SBATCH -n 1
#SBATCH --cpu_bind=threads
#SBATCH --mem-per-cpu=2300

module use /projects/p_readex/modules/
module load scorep/scorep/ci_TRY_READEX_online_access_call_tree_extensions_bullxmpi1.2.8.4_gcc5.3.0
module load readex-rrl/ci_rrl_bullxmpi1.2.8.4_gcc5.3.0
module load pcp/ci_pcp_bullxmpi1.2.8.4_gcc5.3.0
module load scorep_plugins/control_plugins

echo $(hostname)
CUR_DIR=`pwd`

export OMP_NUM_THREADS=24

# Score-P measurement configuration.
export SCOREP_ENABLE_TRACING=true
export SCOREP_ENABLE_PROFILING=true
export SCOREP_TOTAL_MEMORY="1000M"
export SCOREP_SUBSTRATE_PLUGINS='rrl'
export SCOREP_METRIC_PLUGINS='scorep_substrate_rrl'
export SCOREP_METRIC_SCOREP_SUBSTRATE_RRL='*,ATP/PARAMETER1'
export SCOREP_RRL_VERBOSE="TRACE"
export SCOREP_TUNING_PLUGINS='OpenMPTP,cpu_freq_plugin'
export SCOREP_TUNING_CPU_FREQ_PLUGIN_VERBOSE="DEBUG"
export SCOREP_TUNING_OPENMPTP_PLUGIN_VERBOSE="DEBUG"

# Online-access registry connection settings.
export SCOREP_ONLINEACCESS_ENABLE=true
export SCOREP_ONLINEACCESS_REG_PORT=50100
export SCOREP_ONLINEACCESS_REG_HOST='localhost'
export SCOREP_ONLINEACCESS_BASE_PORT=50010
export SCOREP_ONLINEACCESS_APPL_NAME='appl'
export SCOREP_EXPERIMENT_DIRECTORY="${CUR_DIR}/scorep"

# Start the registry in the background and remember its PID for cleanup.
scorep-online-access-registry 50100 test=./scenario_tuning_variable_fortran_foo_new_no_hdeem > online_access_output &
scorep_oa=$!

cd ../../../bin/
gdb ./bt-mz.S.1

# Kill the registry if it is still running after the debug session.
if [ -n "$(ps -p $scorep_oa -o pid=)" ]
then
  kill $scorep_oa
fi
<filename>domain-machine/src/main/java/org/angrygoat/domainmachine/exception/PolicyDomainRuntimeException.java package org.angrygoat.domainmachine.exception; public class PolicyDomainRuntimeException extends RuntimeException { /** * */ private static final long serialVersionUID = 3243208307509723136L; public PolicyDomainRuntimeException() { super(); } public PolicyDomainRuntimeException(String arg0, Throwable arg1, boolean arg2, boolean arg3) { super(arg0, arg1, arg2, arg3); } public PolicyDomainRuntimeException(String arg0, Throwable arg1) { super(arg0, arg1); } public PolicyDomainRuntimeException(String arg0) { super(arg0); } public PolicyDomainRuntimeException(Throwable arg0) { super(arg0); } }
// Settings dropdown atom: a gear icon that toggles a small menu with
// actions for manipulating component props and local storage. Uses Radium
// for inline-style states and closes itself on any outside click.
import Component from 'react-pure-render/component';
import font from '../styles/Font';
import Icon from './Icon.react';
import {mediaQueries} from '../styles/MediaQueries';
import Radium from 'radium';
import React, {PropTypes as RPT} from 'react';
import ReactDOM from 'react-dom';
import * as colors from '../styles/Colors'

@Radium
export default class Dropdown extends Component {

  static propTypes = {
    // Callback toggling menu visibility (also invoked on outside click).
    handleIconClick: RPT.func,
    handleResetLocalStorage: RPT.func,
    handleResetProps: RPT.func,
    handleToggleProps: RPT.func,
    // Whether the "simple props" preset is active; switches the first label.
    simplePropsSelected: RPT.bool,
    visible: RPT.bool.isRequired
  }

  static defaultProps = {
    visible: false
  }

  componentDidMount() {
    // Listen globally so clicks outside the dropdown can close it.
    document.addEventListener('click', this.handleDocumentClick)
  }

  componentWillUnmount() {
    document.removeEventListener('click', this.handleDocumentClick)
  }

  /* using fat arrow to bind to instance */
  handleDocumentClick = (evt) => {
    const area = ReactDOM.findDOMNode(this.refs.dropdown)

    // Close the menu when the click landed outside the dropdown subtree.
    if (!area.contains(evt.target) && this.props.visible && this.props.handleIconClick) {
      this.props.handleIconClick()
    }
  }

  render() {
    const {
      handleIconClick,
      handleResetLocalStorage,
      handleResetProps,
      handleToggleProps,
      simplePropsSelected,
      visible
    } = this.props

    // Radium hover state of the first option drives the arrow highlight.
    const hovered = Radium.getState(this.state, 'dropdown-option1', ':hover');

    return (
      <div ref='dropdown' style={styles.wrapper}>
        <i
          onClick={handleIconClick}
          style={styles.iconWrapper}
        >
          <Icon
            color={colors.BLUE}
            kind='settings'
            size={21}
          />
        </i>
        <div
          style={[styles.dropdownWrapper, visible && styles.dropdownWrapper.visible]}
        >
          <ul style={styles.list}>
            <li
              key='dropdown-option1'
              onClick={handleToggleProps}
              style={[styles.list.option, font]}
            >
              {simplePropsSelected ? 'Preset all props' : 'Preset only required props'}
            </li>
            <li
              key='dropdown-option2'
              onClick={handleResetProps}
              style={[styles.list.option, font]}
            >
              Reset props to default
            </li>
            <li
              key='dropdown-option3'
              onClick={handleResetLocalStorage}
              style={[styles.list.option, font]}
            >
              Reset local storage
            </li>
          </ul>
          <i style={[styles.arrow, hovered && styles.arrow.hovered]} />
          <i style={styles.arrow.bordered} />
        </div>
      </div>
    );
  }
}

// Inline Radium styles. The arrow is drawn with the border-triangle trick:
// `arrow` is the white fill, `arrow.bordered` sits behind it as the outline.
const styles = {
  wrapper: {
    position: 'relative',
    textAlign: 'right'
  },
  dropdownWrapper: {
    backgroundColor: 'white',
    width: '200px',
    border: `1px solid ${colors.GRAY_DARKER}`,
    position: 'absolute',
    top: 'calc(100% + 5px)',
    left: 'calc(100% + -25px)',
    boxSizing: 'border-box',
    display: 'none',
    textAlign: 'left',
    zIndex: 10,
    visible: {
      display: 'block'
    },
    [mediaQueries.breakpointTablet]: {
      left: 'auto',
      right: 0
    }
  },
  iconWrapper: {
    ':hover': {
      cursor: 'pointer'
    }
  },
  list: {
    listStyle: 'none',
    padding: 0,
    margin: 0,
    option: {
      color: colors.BLACK_BRIGHT,
      padding: '8px 8px 8px 32px',
      transition: 'all .1s ease',
      ':hover': {
        backgroundColor: colors.BLUE,
        color: 'white',
        cursor: 'pointer'
      }
    }
  },
  arrow: {
    bottom: '100%',
    left: '13px',
    borderStyle: 'solid',
    height: 0,
    width: 0,
    position: 'absolute',
    pointerEvents: 'none',
    borderColor: 'rgba(136, 183, 213, 0)',
    borderBottomColor: 'white',
    borderWidth: '6px',
    transform: 'translateX(-50%)',
    transition: 'all .1s ease',
    zIndex: 3,
    hovered: {
      borderBottomColor: colors.BLUE
    },
    [mediaQueries.breakpointTablet]: {
      right: '-2px',
      left: 'auto'
    },
    bordered: {
      bottom: '100%',
      left: '6px',
      borderStyle: 'solid',
      height: 0,
      width: 0,
      position: 'absolute',
      pointerEvents: 'none',
      borderColor: 'rgba(136, 183, 213, 0)',
      borderBottomColor: colors.GRAY_DARKER,
      borderWidth: '7px',
      zIndex: 2,
      [mediaQueries.breakpointTablet]: {
        right: '3px',
        left: 'auto'
      }
    }
  }
};
#!/usr/bin/env bash
# Run every runner script under the user's runners directory in glob order,
# logging start/done timestamps to log.csv, then trigger a publish.
# (This file is an Ansible template; {{ ansible_user }} is rendered first.)

log_csv="/home/{{ ansible_user }}/runners/log.csv"

for f in /home/{{ ansible_user }}/runners/*.sh
do
  # Quote all expansions so paths with whitespace cannot split or glob
  # (the original unquoted echo/exec lines were SC2086 hazards).
  echo "Running ${f}"
  echo "$(date -u +"%Y-%m-%dT%H:%M:%SZ"),start,${f%.*}" >> "${log_csv}"
  "${f}"
  echo "$(date -u +"%Y-%m-%dT%H:%M:%SZ"),done,${f%.*}" >> "${log_csv}"
done

echo "$(date -u +"%Y-%m-%dT%H:%M:%SZ"),publish,," >> "${log_csv}"
/usr/local/bin/publish
// QUnit tests for UrlGenerator: each test feeds a Book/Person/Series item
// (or a minimal {title} stub) and asserts the exact URL string produced.
const QUnit = require("../node_modules/qunit/qunit/qunit.js");

const Book = require("../artifact/Book.js");
const Person = require("../artifact/Person.js");
const Series = require("../artifact/Series.js");

const UrlGenerator = require("./UrlGenerator.js");

QUnit.module("UrlGenerator");

QUnit.test("dclSearchUrl()", (assert) => {
  // Setup.
  const item = { title: "Something Something" };

  // Run.
  const result = UrlGenerator.dclSearchUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://dcl.bibliocommons.com/v2/search?searchType=smart&query=Something+Something",
    `result = :${result}:`
  );
});

// A Series item without a searchable title yields null.
QUnit.test("dclSearchUrl() null", (assert) => {
  // Setup.
  const item = Series.properties[Series.GAIL_H_SUGGESTS];

  // Run.
  const result = UrlGenerator.dclSearchUrl(item);

  // Verify.
  assert.equal(result, null, `result = :${result}:`);
});

QUnit.test("dclUrl()", (assert) => {
  // Setup.
  const item = Book.properties[Book.THE_LOST_MAN];

  // Run.
  const result = UrlGenerator.dclUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://dcl.bibliocommons.com/item/show/1523223114",
    `result = :${result}:`
  );
});

QUnit.test("goodreadsSearchUrl()", (assert) => {
  // Setup.
  const item = { title: "Something Something" };

  // Run.
  const result = UrlGenerator.goodreadsSearchUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://www.goodreads.com/search?q=Something+Something",
    `result = :${result}:`
  );
});

QUnit.test("goodreadsUrl()", (assert) => {
  // Setup.
  const item = Book.properties[Book.A_SIMPLE_PLAN];

  // Run.
  const result = UrlGenerator.goodreadsUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://www.goodreads.com/book/show/21727.A_Simple_Plan",
    `result = :${result}:`
  );
});

// NOTE(review): despite the "null" suffix this test asserts a concrete
// search URL, not null — the name looks copied from dclSearchUrl() null.
QUnit.test("imdbSearchUrl() null", (assert) => {
  // Setup.
  const item = { title: "Something Something" };

  // Run.
  const result = UrlGenerator.imdbSearchUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://www.imdb.com/find?s=all&q=Something+Something",
    `result = :${result}:`
  );
});

QUnit.test("imdbUrl()", (assert) => {
  // Setup.
  const item = Person.properties[Person.ANTHONY_HOROWITZ];

  // Run.
  const result = UrlGenerator.imdbUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://www.imdb.com/name/nm0395275",
    `result = :${result}:`
  );
});

QUnit.test("libraryThingSearchUrl()", (assert) => {
  // Setup.
  const item = { title: "Something Something" };

  // Run.
  const result = UrlGenerator.libraryThingSearchUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://www.librarything.com/search.php?search=Something+Something",
    `result = :${result}:`
  );
});

QUnit.test("libraryThingUrl()", (assert) => {
  // Setup.
  const item = Book.properties[Book.THE_LOST_MAN];

  // Run.
  const result = UrlGenerator.libraryThingUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://www.librarything.com/work/21919798",
    `result = :${result}:`
  );
});

// NOTE(review): also asserts a concrete URL, not null, despite the name.
QUnit.test("wikipediaSearchUrl() null", (assert) => {
  // Setup.
  const item = { title: "Something Something" };

  // Run.
  const result = UrlGenerator.wikipediaSearchUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://en.wikipedia.org/wiki/Something_Something",
    `result = :${result}:`
  );
});

QUnit.test("wikipediaUrl()", (assert) => {
  // Setup.
  const item = Person.properties[Person.ANTHONY_HOROWITZ];

  // Run.
  const result = UrlGenerator.wikipediaUrl(item);

  // Verify.
  assert.equal(
    result,
    "https://en.wikipedia.org/wiki/Anthony_Horowitz",
    `result = :${result}:`
  );
});

const UrlGeneratorTest = {};
module.exports = UrlGeneratorTest;
package com.example.effective3;

/**
 * Effective Java, Item 3: enforce the singleton property with a private
 * constructor or an enum type.
 *
 * Demonstrates that each of the three singleton implementations hands back
 * the same instance on repeated access (each comparison prints "true").
 *
 * @author qxw
 * @version 1.00
 * @time 27/5/2019 11:35 AM
 */
public class Main {
    public static void main(String[] args) {
        // Public-field / enum style singleton.
        Singleton1 obj1=Singleton1.INSTANCE;
        Singleton1 obj2=Singleton1.INSTANCE;
        System.err.println(obj1==obj2);

        // Static-factory style singleton.
        Singleton2 obj3=Singleton2.getInstance();
        Singleton2 obj4=Singleton2.getInstance();
        System.err.println(obj3==obj4);

        // Third variant, also accessed via getInstance().
        Singleton3 obj5=Singleton3.getInstance();
        Singleton3 obj6=Singleton3.getInstance();
        System.err.println(obj5==obj6);
    }
}
package de.fnordbedarf.debugger.givendebugger;

import de.fnordbedarf.debugger.Debugger;
import org.junit.jupiter.api.Test;

/**
 * Created by HiekmaHe on 05.05.2017.
 *
 * Verifies that Debugger.sleepFor(millis) blocks for at least the requested
 * duration, using Debugger's own fluent expectation API for the assertions.
 */
class Sleep {

	@Test
	void whenSleepTwoSecondsThenSleepTwoSeconds() {
		// arrange
		long twoSeconds = 2000;
		long startTime = System.currentTimeMillis();

		// act
		Debugger.sleepFor(twoSeconds);
		long duration = System.currentTimeMillis() - startTime;

		// assert
		// NOTE(review): the 20 ms upper bound makes this timing-sensitive and
		// potentially flaky on loaded machines — confirm it is intentional.
		Debugger.expect(duration).toBeGreaterThanOrEqualTo(twoSeconds).otherwiseComplain();
		Debugger.expect(duration).toBeLessThan(twoSeconds + 20 /*millis*/).otherwiseComplain();
	}
}
#!/usr/bin/env bash # Create the pre-start script that copies the buildpack package to /var/vcap/data/shared-packages/. set -o errexit -o nounset release="suse-staticfile-buildpack" buildpack="suse-staticfile-buildpack" pre_start="/var/vcap/all-releases/jobs-src/${release}/${buildpack}/templates/bin/pre-start" copy_dst="/var/vcap/data/shared-packages/${buildpack}/" mkdir -p "$(dirname "${pre_start}")" cat <<EOT > "${pre_start}" #!/usr/bin/env bash set -o errexit mkdir -p "${copy_dst}" cp -r /var/vcap/packages "${copy_dst}" EOT
package com.alipay.api.domain;

import com.alipay.api.AlipayObject;
import com.alipay.api.internal.mapping.ApiField;

/**
 * Joint-account deposit (top-up) redirect model.
 *
 * @author auto create
 * @since 1.0, 2021-09-29 17:19:44
 */
public class AlipayFundJointaccountFundDepositModel extends AlipayObject {

	private static final long serialVersionUID = 2512786956297894131L;

	/**
	 * Joint account (group) ID.
	 * Available after signing via alipay.fund.jointaccount.sign: returned by the
	 * account-change notification alipay.fund.jointaccount.account.completed and
	 * by the detail query alipay.fund.jointaccount.detail.query.
	 */
	@ApiField("account_id")
	private String accountId;

	/**
	 * Authorization agreement number.
	 * Same availability as account_id: returned by the account-change
	 * notification and the account detail query after signing.
	 */
	@ApiField("agreement_no")
	private String agreementNo;

	/**
	 * Deposit amount in yuan; must be greater than 0 with at most two
	 * decimal places.
	 */
	@ApiField("amount")
	private String amount;

	/**
	 * Business scene code.
	 */
	@ApiField("biz_scene")
	private String bizScene;

	/**
	 * (Initiator) unique user identifier.
	 */
	@ApiField("identity")
	private String identity;

	/**
	 * (Initiator) account type. Supported value:
	 * ALIPAY_USER_ID - Alipay-side unique user identifier.
	 */
	@ApiField("identity_type")
	private String identityType;

	/**
	 * Order title. Overlong titles are rejected. May contain digits, letters,
	 * spaces and Chinese characters, but no special characters or emoji.
	 */
	@ApiField("order_title")
	private String orderTitle;

	/**
	 * Merchant-side order number (idempotency key).
	 * Idempotency is keyed on merchant appid + out_biz_no; submitting a new
	 * number is treated as a brand-new request.
	 */
	@ApiField("out_biz_no")
	private String outBizNo;

	/**
	 * Sales product code.
	 */
	@ApiField("product_code")
	private String productCode;

	public String getAccountId() {
		return this.accountId;
	}
	public void setAccountId(String accountId) {
		this.accountId = accountId;
	}

	public String getAgreementNo() {
		return this.agreementNo;
	}
	public void setAgreementNo(String agreementNo) {
		this.agreementNo = agreementNo;
	}

	public String getAmount() {
		return this.amount;
	}
	public void setAmount(String amount) {
		this.amount = amount;
	}

	public String getBizScene() {
		return this.bizScene;
	}
	public void setBizScene(String bizScene) {
		this.bizScene = bizScene;
	}

	public String getIdentity() {
		return this.identity;
	}
	public void setIdentity(String identity) {
		this.identity = identity;
	}

	public String getIdentityType() {
		return this.identityType;
	}
	public void setIdentityType(String identityType) {
		this.identityType = identityType;
	}

	public String getOrderTitle() {
		return this.orderTitle;
	}
	public void setOrderTitle(String orderTitle) {
		this.orderTitle = orderTitle;
	}

	public String getOutBizNo() {
		return this.outBizNo;
	}
	public void setOutBizNo(String outBizNo) {
		this.outBizNo = outBizNo;
	}

	public String getProductCode() {
		return this.productCode;
	}
	public void setProductCode(String productCode) {
		this.productCode = productCode;
	}

}
#!/bin/bash
# Repair mis-encoded Latin-1 bytes in the listed maildir messages.
#
# Byte substitutions (broken -> correct):
#   ó: 9b -> f3
#   í: ad -> ed
#   ç: d8 -> e7
#   á: ff -> e1

maildir=$1

# Apply all four byte substitutions to one message, relative to $maildir.
# Improvement: a single sed invocation with four -e expressions replaces the
# original four separate sed -i passes per file (one process and one file
# rewrite instead of four).
function replaceByte() {
    local path="$maildir/$1"
    sed -i \
        -e "s/\x9b/\xf3/g" \
        -e "s/\xad/\xed/g" \
        -e "s/\xd8/\xe7/g" \
        -e "s/\xff/\xe1/g" \
        "$path"
}

# Messages containing the broken 0x9b byte.
replaceByte shackleton-s/all_documents/1560.
replaceByte shackleton-s/all_documents/2856.
replaceByte shackleton-s/all_documents/2374.
replaceByte shackleton-s/notes_inbox/1701.
replaceByte shackleton-s/stack__shari/3.
replaceByte campbell-l/all_documents/1014.
replaceByte campbell-l/discussion_threads/889.
replaceByte campbell-l/notes_inbox/284.
replaceByte taylor-m/all_documents/2813.
replaceByte taylor-m/all_documents/7852.
replaceByte taylor-m/notes_inbox/1149.
replaceByte taylor-m/notes_inbox/2425.
replaceByte taylor-m/archive/8_00/32.
replaceByte dasovich-j/all_documents/29349.
replaceByte dasovich-j/notes_inbox/11527.
replaceByte haedicke-m/all_documents/2313.
replaceByte haedicke-m/notes_inbox/344.
replaceByte skilling-j/all_documents/385.
replaceByte skilling-j/discussion_threads/306.
replaceByte skilling-j/notes_inbox/100.

# Messages containing the broken 0xad byte.
replaceByte horton-s/all_documents/64.
replaceByte horton-s/all_documents/209.
replaceByte horton-s/discussion_threads/60.
replaceByte horton-s/discussion_threads/198.

# Messages containing the broken 0xd8 byte.
replaceByte sanders-r/all_documents/7334.
replaceByte sanders-r/all_documents/7342.
replaceByte sanders-r/all_documents/7328.
replaceByte sanders-r/notes_inbox/312.
replaceByte sanders-r/notes_inbox/313.
replaceByte sanders-r/notes_inbox/315.
replaceByte griffith-j/all_documents/565.
replaceByte griffith-j/discussion_threads/535.
replaceByte griffith-j/design/27.

# Messages containing the broken 0xff byte.
replaceByte taylor-m/all_documents/3452.
replaceByte taylor-m/all_documents/3474.
replaceByte taylor-m/notes_inbox/1606.
replaceByte taylor-m/notes_inbox/1591.
replaceByte gay-r/all_documents/82.
replaceByte gay-r/all_documents/59.
replaceByte gay-r/sent/82.
replaceByte gay-r/sent/59.
#include <iostream> #include <string> #include <cassert> class XMLIterPrinter { private: bool theOpenStart; public: XMLIterPrinter() : theOpenStart(true) {} void addStrAttribute(const char *name, const char *value) { assert(theOpenStart); std::cout << ' ' << name << "=\"" << value << "\""; } void addIntAttribute(const char *name, xs_integer value) { assert(theOpenStart); std::cout << ' ' << name << "=\"" << value << "\""; } void addDecAttribute(const char *name, double value) { assert(theOpenStart); std::cout << ' ' << name << "=\"" << value << "\""; } };
#!/bin/sh XDG_RUNTIME_DIR=/home/mikolaj/.run PULSE_RUNTIME_PATH="${XDG_RUNTIME_DIR}/pulse" PULSE_ENV="env PULSE_RUNTIME_PATH=${PULSE_RUNTIME_PATH} XDG_RUNTIME_DIR=${XDG_RUNTIME_DIR}" PULSE="${PULSE_ENV} pulseaudio"
#!/bin/bash
# Fetch $URL and pretty-print the JSON response with jq.
# Fixes: the unquoted $URL was subject to word-splitting/globbing, and an
# unset URL silently produced a confusing curl error; ${URL:?...} now fails
# fast with a clear message, and the expansion is quoted.
curl -Ss "${URL:?URL environment variable must be set}" | jq '.'
#!/usr/bin/env bash
# Build a tarball for air-gapped Docker hosts: pull the images listed below,
# save each to a .tar, and bundle them with generated scripts that re-load
# them into a local repository or push them to a private registry.
#
# Fix: all array and scalar expansions are now quoted (ShellCheck SC2068 /
# SC2086); the unquoted forms broke on names or paths containing whitespace.

# Enumerate docker images to be processed

DOCKER_IMAGE_NAMES=(
    "bitnami/kafka:2.8.0"
    "bitnami/rabbitmq:3.8.17"
    "bitnami/zookeeper:3.6.3"
    "coleifer/sqlite-web:latest"
    "ibmcom/db2:11.5.5.1"
    "mysql:5.7"
    "obsidiandynamics/kafdrop:3.27.0"
    "phpmyadmin/phpmyadmin:5.1.1"
    "postgres:11.12.0"
    "senzing/adminer:1.0.0"
    "senzing/apt:1.0.4"
    "senzing/db2-driver-installer:1.0.2"
    "senzing/entity-search-web-app:2.2.3"
    "senzing/init-container:1.6.9"
    "senzing/jupyter:1.3.0"
    "senzing/mysql-init:latest"
    "senzing/phppgadmin:1.0.0"
    "senzing/postgresql-client:1.0.0"
    "senzing/redoer:1.3.7"
    "senzing/resolver:1.3.2"
    "senzing/senzing-api-server:2.6.1"
    "senzing/senzing-console:1.0.1"
    "senzing/senzing-debug:1.3.5"
    "senzing/sshd:1.2.0"
    "senzing/stream-loader:1.7.6"
    "senzing/stream-logger:1.1.2"
    "senzing/stream-producer:1.4.1"
    "senzing/web-app-demo:2.1.1"
    "senzing/xterm:1.1.0"
    "senzing/yum:1.1.4"
)

# Return codes.

OK=0
NOT_OK=1

# Pull images from DockerHub.

for DOCKER_IMAGE_NAME in "${DOCKER_IMAGE_NAMES[@]}"; do
    echo "Pulling ${DOCKER_IMAGE_NAME} from DockerHub."
    docker pull "${DOCKER_IMAGE_NAME}"
done

# Make output variables.

MY_HOME=~
OUTPUT_DATE=$(date +%s)
OUTPUT_FILE="${MY_HOME}/docker-air-gap-helper-${OUTPUT_DATE}.tgz"
OUTPUT_DIR_NAME="docker-air-gap-helper-${OUTPUT_DATE}"
OUTPUT_DIR="${MY_HOME}/${OUTPUT_DIR_NAME}"
OUTPUT_LOAD_REPOSITORY_SCRIPT="${OUTPUT_DIR}/docker-air-gap-load-repository.sh"
OUTPUT_LOAD_REGISTRY_SCRIPT="${OUTPUT_DIR}/docker-air-gap-load-registry.sh"

mkdir "${OUTPUT_DIR}"

# Write OUTPUT_LOAD_REPOSITORY_SCRIPT prolog.

cat <<EOT > "${OUTPUT_LOAD_REPOSITORY_SCRIPT}"
#!/usr/bin/env bash

OK=0
NOT_OK=1

EOT
chmod +x "${OUTPUT_LOAD_REPOSITORY_SCRIPT}"

# Write OUTPUT_LOAD_REGISTRY_SCRIPT prolog.
# Escaped \${...} expansions are deliberately literal: they must be evaluated
# when the generated script runs, not now.

cat <<EOT > "${OUTPUT_LOAD_REGISTRY_SCRIPT}"
#!/usr/bin/env bash

OK=0
NOT_OK=1

if [[ -z "\${DOCKER_REGISTRY_URL}" ]]; then
    echo "Error: DOCKER_REGISTRY_URL is not set."
    exit \${NOT_OK}
fi

EOT
chmod +x "${OUTPUT_LOAD_REGISTRY_SCRIPT}"

# Save Docker images and scripts to output directory.

for DOCKER_IMAGE_NAME in "${DOCKER_IMAGE_NAMES[@]}"; do

    # Do a "docker save" to make a file from docker image.
    # tr maps both '/' and ':' to '-' so the image name is filesystem-safe.
    DOCKER_OUTPUT_FILENAME="$(echo "${DOCKER_IMAGE_NAME}" | tr "/:" "--")-${OUTPUT_DATE}.tar"
    echo "Creating ${OUTPUT_DIR}/${DOCKER_OUTPUT_FILENAME}"
    docker save "${DOCKER_IMAGE_NAME}" --output "${OUTPUT_DIR}/${DOCKER_OUTPUT_FILENAME}"

    # Add commands to OUTPUT_LOAD_REPOSITORY_SCRIPT to load file into local repository.
    echo "docker load --input ${DOCKER_OUTPUT_FILENAME}" >> "${OUTPUT_LOAD_REPOSITORY_SCRIPT}"

    # Add commands to OUTPUT_LOAD_REGISTRY_SCRIPT to push to private registry.
    echo "" >> "${OUTPUT_LOAD_REGISTRY_SCRIPT}"
    echo "docker tag ${DOCKER_IMAGE_NAME} \${DOCKER_REGISTRY_URL}/${DOCKER_IMAGE_NAME}" >> "${OUTPUT_LOAD_REGISTRY_SCRIPT}"
    echo "docker push \${DOCKER_REGISTRY_URL}/${DOCKER_IMAGE_NAME}" >> "${OUTPUT_LOAD_REGISTRY_SCRIPT}"
    echo "docker rmi \${DOCKER_REGISTRY_URL}/${DOCKER_IMAGE_NAME}" >> "${OUTPUT_LOAD_REGISTRY_SCRIPT}"

done

# Compress results.

tar -zcvf "${OUTPUT_FILE}" --directory "${MY_HOME}" "${OUTPUT_DIR_NAME}"

# Epilog

echo "Done."
echo "    Output file: ${OUTPUT_FILE}"
echo "    Which is a compressed version of ${OUTPUT_DIR}"

exit ${OK}
import { Injectable } from '@nestjs/common'
import { defid, Indexer, RawBlock } from '@src/module.indexer/model/_abstract'
import { TransactionVin, TransactionVinMapper } from '@src/module.model/transaction.vin'
import { TransactionVout } from '@src/module.model/transaction.vout'
import { HexEncoder } from '@src/module.model/_hex.encoder'
import { VoutFinder } from '@src/module.indexer/model/_vout_finder'
import { NotFoundIndexerError } from '@src/module.indexer/error'

/**
 * Indexes every transaction input (vin) of a block into TransactionVin
 * records. Coinbase vins are stored without a linked vout; all other vins
 * are joined to the vout they spend via VoutFinder.
 */
@Injectable()
export class TransactionVinIndexer extends Indexer {
  constructor (
    private readonly vinMapper: TransactionVinMapper,
    private readonly voutFinder: VoutFinder
  ) {
    super()
  }

  /**
   * Persists one TransactionVin per vin of every transaction in the block.
   * Throws NotFoundIndexerError when a spent vout cannot be resolved,
   * aborting the index pass for this block.
   */
  async index (block: RawBlock): Promise<void> {
    for (const txn of block.tx) {
      for (const vin of txn.vin) {
        if (vin.coinbase !== undefined) {
          // Coinbase input: no previous output exists to link.
          await this.vinMapper.put(this.map(txn, vin, undefined))
        } else {
          const vout = await this.voutFinder.findVout(block, vin.txid, vin.vout)
          if (vout === undefined) {
            throw new NotFoundIndexerError('index', 'TransactionVout', `${vin.txid} - ${vin.vout}`)
          }
          await this.vinMapper.put(this.map(txn, vin, vout))
        }
      }
    }
  }

  /** Deletes every vin record of the block (used on chain reorg). */
  async invalidate (block: RawBlock): Promise<void> {
    for (const txn of block.tx) {
      for (const vin of txn.vin) {
        await this.vinMapper.delete(this.mapId(txn, vin))
      }
    }
  }

  /** Builds the TransactionVin document, embedding the spent vout if known. */
  map (txn: defid.Transaction, vin: defid.Vin, vout?: TransactionVout): TransactionVin {
    return {
      id: this.mapId(txn, vin),
      txid: txn.txid,
      coinbase: vin.coinbase,
      vout: vout !== undefined ? {
        id: vout.id,
        txid: vout.txid,
        n: vout.n,
        value: vout.value,
        tokenId: vout.tokenId,
        script: {
          hex: vout.script.hex
        }
      } : undefined,
      script: vin.scriptSig !== undefined ? {
        hex: vin.scriptSig.hex
      } : undefined,
      txInWitness: vin.txinwitness,
      sequence: vin.sequence
    }
  }

  /**
   * non coinbase: txn.txid + vout.txid + vout.n (4 bytes encoded hex)
   * coinbase: txn.txid + '00'
   */
  mapId (txn: defid.Transaction, vin: defid.Vin): string {
    if (vin.coinbase !== undefined) {
      return txn.txid + '00'
    }
    return txn.txid + vin.txid + HexEncoder.encodeVoutIndex(vin.vout)
  }
}
#!/bin/sh
# Part-of-speech tag the English and German sides of the test/training
# corpora using the Moses factor wrapper scripts (MXPOST for English).
# Fix: tagger/file expansions are now quoted so paths with whitespace
# cannot word-split (SC2086).

WRAP_DIR=~/moses.new/scripts/training/wrappers/

# English: MXPOST-based tagger wrapper.
tagger=$WRAP_DIR/make-factor-en-pos.mxpost.perl
lang=en
for stem in test train.10k train.100k; do
  "$tagger" -mxpost /home/pkoehn/statmt/project/mxpost "$stem.$lang" "$stem.tagged.$lang" /tmp
done

# German: rule-based tagger wrapper (no MXPOST model needed).
tagger=$WRAP_DIR/make-factor-de-pos.perl
lang=de
for stem in test train.10k train.100k; do
  "$tagger" "$stem.$lang" "$stem.tagged.$lang" /tmp
done
#!/bin/bash
# usage: ./fetch-release.sh releaseVersion
# Checks out the jpo-ode release tag for the given version on a fresh local
# "stage" branch, keeping submodules in sync.

if [ $# -eq 0 ]
then
    # Fix: the usage message previously named the wrong script (release.sh),
    # and `exit -1` is not a valid POSIX exit status (must be 0-255).
    echo "usage: ./fetch-release.sh releaseVersion"
    exit 1
fi

git checkout -B stage origin/stage
git pull --recurse-submodules=yes
git checkout "tags/jpo-ode-$1"
git submodule update --recursive --init
#!/bin/bash
#
# Copyright (c) 2019-2020 P3TERX <https://p3terx.com>
#
# This is free software, licensed under the MIT License.
# See /LICENSE for more information.
#
# https://github.com/P3TERX/Actions-OpenWrt
# File name: diy-part2.sh
# Description: OpenWrt DIY script part 2 (After Update feeds)
#

# Modify default IP
# Fix: dots are escaped so sed matches the literal address 192.168.1.1;
# unescaped '.' matched any character (e.g. "192x168" would also match).
sed -i 's/192\.168\.1\.1/10.0.1.3/g' package/base-files/files/bin/config_generate
#!/bin/bash -E # Pre-checks for validation and linting # # These checks do not provide a fix and are quicker to run, # allowing CI to fail quickly on basic linting or validation errors FAILED=() CURRENT="" # AZP appears to make lines with this prefix red BASH_ERR_PREFIX="##[error]: " DIFF_OUTPUT="${DIFF_OUTPUT:-/build/fix_format_pre.diff}" read -ra BAZEL_BUILD_OPTIONS <<< "${BAZEL_BUILD_OPTIONS:-}" trap_errors () { local frame=0 command line sub file if [[ -n "$CURRENT" ]]; then command=" (${CURRENT})" fi set +v while read -r line sub file < <(caller "$frame"); do if [[ "$frame" -ne "0" ]]; then FAILED+=(" > ${sub}@ ${file} :${line}") else FAILED+=("${sub}@ ${file} :${line}${command}") if [[ "$CURRENT" == "glint" ]]; then FAILED+=( " Please fix your editor to ensure:" " - no trailing whitespace" " - no mixed tabs/spaces" " - all files end with a newline") fi fi ((frame++)) done set -v } trap trap_errors ERR trap exit 1 INT # TODO: move these to bazel CURRENT=glint "${ENVOY_SRCDIR}"/tools/code_format/glint.sh CURRENT=shellcheck "${ENVOY_SRCDIR}"/tools/code_format/check_shellcheck_format.sh check CURRENT=configs bazel run "${BAZEL_BUILD_OPTIONS[@]}" //configs:example_configs_validation CURRENT=python bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/code_format:python_check -- --diff-file="$DIFF_OUTPUT" --fix CURRENT=extensions bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/extensions:extensions_check CURRENT=spelling "${ENVOY_SRCDIR}"/tools/spelling/check_spelling_pedantic.py --mark check CURRENT=rst # TODO(phlax): Move this to general docs checking of all rst files bazel run "${BAZEL_BUILD_OPTIONS[@]}" //tools/docs:rst_check if [[ "${#FAILED[@]}" -ne "0" ]]; then echo "${BASH_ERR_PREFIX}TESTS FAILED:" >&2 for failed in "${FAILED[@]}"; do echo "${BASH_ERR_PREFIX} $failed" >&2 done exit 1 fi
// const zos = require('zos');

const ProofOfExistence = artifacts.require('../contracts/ProofOfExistence.sol');

/** This runs tests with the standard migration provided by Truffle. Hence, our contracts
 * will not be properly initialized due to our use of the ZeppelinOS library. We can still test
 * the user facing functionalities of the contract.
 */
contract('ProofOfExistence tests with standard Truffle migration', (accounts) => {
  // const DEPLOYER_ADDRESS = accounts[0];
  const BENEFICIARY_ADDRESS = accounts[1];
  const PAUSER_ADDRESS = accounts[2];
  const USER_ADDRESS = accounts[9];

  const SAMPLE_HASH_ONE = '0x017dfd85d4f6cb4dcd715a88101f7b1f06cd1e009b2327a0809d01eb9c91f231';
  const SAMPLE_HASH_TWO = '0x017dfd85d4f6cb4dcd715a88101f7b1f06cd1e009b2327a0809d01eb9c91f232';
  const SAMPLE_HASH_THREE = '0x017dfd85d4f6cb4dcd715a88101f7b1f06cd1e009b2327a0809d01eb9c91f233';
  const SAMPLE_HASH_FOUR = '0x017dfd85d4f6cb4dcd715a88101f7b1f06cd1e009b2327a0809d01eb9c91f234';
  const ZERO_ADDRESS = '0x0000000000000000000000000000000000000000';
  const ZERO_HASH = '0x0000000000000000000000000000000000000000000000000000000000000000';

  let proofOfExistence;

  before('Before: get deployed contract instance', async () => {
    try {
      proofOfExistence = await ProofOfExistence.deployed();
    } catch (err) {
      assert.throw(`Failed to create ProofOfExistence contract: ${err.toString()}`);
    }
  });

  // Tests hash registration
  describe('Registering hashes', () => {
    // The count must start at 0 for correct initialization.
    it('...should initialize with the correct count.', async () => {
      const actualCount = await proofOfExistence.count();
      const EXPECTED_COUNT = 0;
      assert.equal(actualCount, EXPECTED_COUNT, 'Incorrect initial count.');
    });

    // This tests if a hash can be successfully registered.
    it('...should register a hash.', async () => {
      try {
        const gasEstimate = await proofOfExistence.registerHash.estimateGas(SAMPLE_HASH_ONE);
        // FIX: the estimate was previously passed as a shorthand `gasEstimate`
        // property, which the transaction params object ignores; the option
        // key must be `gas` for the estimate to actually take effect.
        await proofOfExistence.registerHash.sendTransaction(
          SAMPLE_HASH_ONE,
          { from: USER_ADDRESS, gas: gasEstimate },
        );
      } catch (err) {
        assert.throw(`Failed to register a hash: ${err.toString()}`);
      }
    });

    // This tests if the count has been correctly incremented following a registration.
    it('...should increment count correctly.', async () => {
      const actualCount = await proofOfExistence.count();
      const EXPECTED_COUNT = 1; // FIX: missing semicolon
      assert.equal(actualCount, EXPECTED_COUNT, 'Incorrectly incremented count.');
    });

    // This tests the same hash cannot be registered again.
    it('...should not register same hash again.', async () => {
      try {
        const gasEstimate = await proofOfExistence.registerHash.estimateGas(SAMPLE_HASH_ONE);
        await proofOfExistence.registerHash.sendTransaction(
          SAMPLE_HASH_ONE,
          { from: USER_ADDRESS, gas: gasEstimate },
        );
      } catch (err) {
        assert.equal(err.message.includes('revert'), true, 'Does not revert on same hash being registered twice');
      }
    });

    // This tests that two more hashes can be registered.
    it('...should register two more hashes.', async () => {
      try {
        const gasTwo = await proofOfExistence.registerHash.estimateGas(SAMPLE_HASH_TWO);
        await proofOfExistence.registerHash.sendTransaction(
          SAMPLE_HASH_TWO,
          { from: BENEFICIARY_ADDRESS, gas: gasTwo },
        );
        const gasThree = await proofOfExistence.registerHash.estimateGas(SAMPLE_HASH_THREE);
        await proofOfExistence.registerHash.sendTransaction(
          SAMPLE_HASH_THREE,
          { from: BENEFICIARY_ADDRESS, gas: gasThree },
        );
      } catch (err) {
        assert.throw(`Failed to register two more hashes: ${err.toString()}`);
      }
    });
  });

  // This tests getting the registration data from a hash
  describe('Retrieving registration data by hash', () => {
    it('...should show correct registration registrant.', async () => {
      const result = await proofOfExistence.getRegistrationForHash.call(SAMPLE_HASH_ONE);
      assert.equal(result.registrant, USER_ADDRESS, 'Does not show correct registrant.');
    });

    it('...should show correct registration hash.', async () => {
      const result = await proofOfExistence.getRegistrationForHash.call(SAMPLE_HASH_ONE);
      assert.equal(result.hash, SAMPLE_HASH_ONE, 'Does not show correct hash.');
    });

    it('...should not have data for unregistered hash.', async () => {
      const result = await proofOfExistence.getRegistrationForHash.call(SAMPLE_HASH_FOUR);
      assert.equal(result.hash, ZERO_HASH, 'Does have data for unregistered hash.');
    });
  });

  // This tests getting the registration ids from an address
  describe('Retrieving registration ids by address', () => {
    it('...should show correct ids for addresses.', async () => {
      const resultUser = await proofOfExistence.getIdsForAddress.call(USER_ADDRESS);
      const resultBeneficiary = await proofOfExistence.getIdsForAddress.call(BENEFICIARY_ADDRESS);
      const resultPauser = await proofOfExistence.getIdsForAddress.call(PAUSER_ADDRESS);
      assert.equal(resultUser.length, 1, 'Returns incorrect number of ids.');
      assert.equal(resultUser[0], 1, 'Returns incorrect ids.');
      assert.equal(resultBeneficiary.length, 2, 'Returns incorrect number of ids.');
      assert.equal(resultBeneficiary[0], 2, 'Returns incorrect ids.');
      assert.equal(resultBeneficiary[1], 3, 'Returns incorrect ids.');
      assert.equal(resultPauser.length, 0, 'Returns incorrect number of ids.');
    });

    it('...should revert for invalid address.', async () => {
      try {
        await proofOfExistence.getIdsForAddress.call(0);
      } catch (err) {
        assert.equal(err.message.includes('invalid address'), true, 'Does not revert on invalid address.');
      }
    });
  });

  // This tests getting the registration data for an id
  describe('Retrieving registration data by id', () => {
    it('...should show correct registration data for id.', async () => {
      const resultOne = await proofOfExistence.getRegistrationForId.call(1);
      const resultTwo = await proofOfExistence.getRegistrationForId.call(2);
      const resultThree = await proofOfExistence.getRegistrationForId.call(3);
      const resultFour = await proofOfExistence.getRegistrationForId.call(4);
      assert.equal(resultOne.registrant, USER_ADDRESS, 'Returns incorrect registrant.');
      assert.equal(resultOne.hash, SAMPLE_HASH_ONE, 'Returns incorrect hash.');
      assert.equal(resultTwo.registrant, BENEFICIARY_ADDRESS, 'Returns incorrect registrant.');
      assert.equal(resultTwo.hash, SAMPLE_HASH_TWO, 'Returns incorrect hash.');
      assert.equal(resultThree.registrant, BENEFICIARY_ADDRESS, 'Returns incorrect registrant.');
      assert.equal(resultThree.hash, SAMPLE_HASH_THREE, 'Returns incorrect hash.');
      assert.equal(resultFour.registrant, ZERO_ADDRESS, 'Returns incorrect registrant.');
      assert.equal(resultFour.hash, ZERO_HASH, 'Returns incorrect hash.');
    });

    it('...should revert for invalid id.', async () => {
      try {
        await proofOfExistence.getRegistrationForId.call(-1);
      } catch (err) {
        assert.equal(err.message.includes('invalid uint'), true, 'Does not revert on invalid uint.');
      }
    });
  });

  describe('Fallback', () => {
    // Promisified wrapper over web3's callback-style getBalance.
    const getBalancePromise = (_address) => {
      return new Promise((resolve, reject) => {
        web3.eth.getBalance(_address, (err, result) => {
          return err ? reject(err) : resolve(result);
        });
      });
    };
    // Promisified wrapper over web3's callback-style sendTransaction.
    const sendTransactionPromise = (_obj) => {
      return new Promise((resolve, reject) => {
        // FIX: previously called web3.eth.sendTransactionPromise, which does
        // not exist on web3.eth; the underlying API is sendTransaction.
        web3.eth.sendTransaction(_obj, (err, result) => {
          return err ? reject(err) : resolve(result);
        });
      });
    };

    it('...should accept no ether sent to the contract.', async () => {
      try {
        const initialBalance = await getBalancePromise(proofOfExistence.address);
        // NOTE(review): `data: 10e18` looks suspect (tx data is normally a
        // hex byte string) — the rejection is deliberately swallowed because
        // this test only asserts the balance did not change.
        sendTransactionPromise({
          from: USER_ADDRESS,
          to: proofOfExistence.address,
          value: 10e18,
          data: 10e18,
        }).catch(() => {});
        const currentBalance = await getBalancePromise(proofOfExistence.address);
        assert.equal(initialBalance, currentBalance, 'Balance increased when it should not.');
      } catch (err) {
        assert.equal(err.message.includes('revert'), true, 'Does not revert on ether being sent.');
      }
    });
  });
});

/** This runs tests with the migration provided by ZeppelinOS. Hence, our contracts
 * will be properly initialized and we can test the features of the contract relying on
 * proper initialization.
 DUE TO A BUG THIS DOES NOT WORK (see: https://github.com/zeppelinos/zos/issues/593)
 */
// contract('ProofOfExistence tests with ZeppelinOS supported migration', (accounts) => {
//   const DEPLOYER_ADDRESS = accounts[0];
//   const BENEFICIARY_ADDRESS = accounts[1];
//   const PAUSER_ADDRESS = accounts[2];
//   let proofOfExistence;
//   before('Before: deploy and get instance', async () => {
//   describe('ZOS upgradeability', () => {
//     it('...should create a proxy', async function () {
//       const project = await zos.TestHelper({ from: DEPLOYER_ADDRESS });
//       const proxy = await project.createProxy(ProofOfExistence, { initMethod: 'initialize', initArgs: [BENEFICIARY_ADDRESS,PAUSER_ADDRESS], initFrom: DEPLOYER_ADDRESS});
//       const result = await proxy.beneficiary();
//       assert.equal(result, BENEFICIARY_ADDRESS, 'Returns incorrect beneficiary.');
//     });
//   });
// });
//   describe('Pausability', () => {
//     it('...should confirm pauser has role pauser', async function () {});
//     it('...should pause for pauser', async function () {});
//     it('...should unpause for pauser', async function () {});
//   });
//   describe('Fund withdrawal', () => {
//     it('...should allow beneficiary to withdraw funds.', async function () {
//       const startBalance = await web3.eth.getBalance(BENEFICIARY_ADDRESS);
//       const expectedWithdrawAmount = await proofOfExistence.balance();
//       await proofOfExistence.withdraw({from: BENEFICIARY_ADDRESS});
//       const endBalance = await web3.eth.getBalance(BENEFICIARY_ADDRESS);
//       assert.equal(endBalance, startBalance + expectedWithdrawAmount, "The withdraw function does not work as expected.");
//     });
//     it('...should not allow pauser to withdraw funds', async function () {});
//   });
// });
import React from 'react' import PropTypes from 'prop-types' import ManageQuestionsListItem from './questionsListItem' import { ListGroupItem, Row, Col } from 'react-bootstrap' const ManageQuestions = ({ questions, sortBy, handleSort }) => { const isMobile = window.innerWidth < 992 return ( <> <ListGroupItem style={{ backgroundColor: '#f2f2f2' }} className='mt-3' > <Row> <Col lg={1} className={isMobile ? 'text-center' : ''}> <b className="hover-cursor-pointer" onClick={() => handleSort('id')}> # &nbsp; {sortBy.id ? sortBy.id === 'ASC' ? <span className="fa fa-caret-down" /> : <span className="fa fa-caret-up" /> : <span className="fa fa-sort" />} </b> </Col> <Col lg={6} className={isMobile ? 'text-center' : ''}> <b className="hover-cursor-pointer" onClick={() => handleSort('name')}> Title &nbsp; {sortBy.name ? sortBy.name === 'ASC' ? <span className="fa fa-caret-down" /> : <span className="fa fa-caret-up" /> : <span className="fa fa-sort" />} </b> </Col> <Col lg={2} className='text-center'> <b>Difficulty</b> </Col> <Col lg={3} className='text-center'> <b>Creator</b> </Col> </Row> </ListGroupItem> {questions.map(({ ...question }, idx) => ( <ManageQuestionsListItem key={question.id} {...question} backgroundColor={idx % 2 === 0 ? 'white' : '#f2f2f2'} isMobile={isMobile} /> ))} {!questions.length && <div style={{ display: 'flex', alignItems: 'center', justifyContent: 'center', height: '25vh' }}> Sorry! there are no questions to display </div> } </> ) } ManageQuestions.propTypes = { questions: PropTypes.array, sortBy: PropTypes.shape({ id: PropTypes.any, name: PropTypes.any }), handleSort: PropTypes.func } export default ManageQuestions
#include <iostream>
#include <vector>
#include <string>
#include <unordered_map>

// Define a struct to hold the parsed command-line options.
// FIX: members are now default-initialized; previously every non-string
// member was left uninitialized, so printing them in main() was undefined
// behavior when the corresponding flag was absent.
struct CommandLineOptions {
    std::string scriptFileName;              // -script <file>: Lua script to read for initial data
    std::vector<std::string> inlineStrings;  // -i <str>: append string to script (repeatable)
    int testNumber = 0;                      // -test <n>: test number (non-lua test)
    bool is3D = false;                       // --3d: dimension is 3 (non-lua test)
    bool runDiffTest = false;                // --run_diff_test: run diff test (non-lua test)
    double diffTestPerturbationScale = 0.0;  // -dtps <x>: diff_test_perturbation_scale (non-lua test)
};

// Parse the command-line arguments into a CommandLineOptions.
//
// A flag's value is the following argv entry, and is only consumed when it
// exists and does not itself start with '-' (same rule as the original).
//
// FIXES relative to the original void*-map implementation:
//  * `*((std::vector<std::string>*)p).push_back(...)` did not compile
//    (member access on a pointer, wrong precedence); the cast-and-dispatch
//    through a void* map is replaced by a plain typed if/else chain.
//  * `-test` fell into the generic branch and was written through a
//    std::string* cast over an int member (undefined behavior); it is now
//    parsed with std::stoi.
CommandLineOptions parseCommandLineArguments(int argc, char* argv[]) {
    CommandLineOptions options;

    for (int i = 1; i < argc; ++i) {
        const std::string arg = argv[i];
        // True when argv[i+1] exists and is a value, not another option.
        const bool hasValue = (i + 1 < argc) && (argv[i + 1][0] != '-');

        if (arg == "--3d") {
            options.is3D = true;
        } else if (arg == "--run_diff_test") {
            options.runDiffTest = true;
        } else if (arg == "-script") {
            if (hasValue) options.scriptFileName = argv[++i];
        } else if (arg == "-i") {
            if (hasValue) options.inlineStrings.push_back(argv[++i]);
        } else if (arg == "-test") {
            if (hasValue) options.testNumber = std::stoi(argv[++i]);
        } else if (arg == "-dtps") {
            if (hasValue) options.diffTestPerturbationScale = std::stod(argv[++i]);
        }
        // Unknown arguments are silently ignored, as before.
    }

    return options;
}

int main(int argc, char* argv[]) {
    CommandLineOptions options = parseCommandLineArguments(argc, argv);

    // Use the parsed options for further processing
    std::cout << "Script File Name: " << options.scriptFileName << std::endl;
    std::cout << "Inline Strings: ";
    for (const auto& str : options.inlineStrings) {
        std::cout << str << " ";
    }
    std::cout << std::endl;
    std::cout << "Test Number: " << options.testNumber << std::endl;
    std::cout << "Is 3D: " << (options.is3D ? "true" : "false") << std::endl;
    std::cout << "Run Diff Test: " << (options.runDiffTest ? "true" : "false") << std::endl;
    std::cout << "Diff Test Perturbation Scale: " << options.diffTestPerturbationScale << std::endl;

    return 0;
}
# Import the HTTP client library
import requests

# Endpoint to query
API_URL = 'https://example.com/'

# Issue the GET request and keep the response object around
response = requests.get(API_URL)

# Grab the raw body bytes from the response
html_content = response.content

# Show what came back
print(html_content)
#!/usr/bin/env bash
# Copyright (c) 2016-present, Facebook, Inc. All rights reserved.

# rebuild is a reasonbuild wrapper that builds reason files
# it calls into ocamlbuild, telling it to call a special
# command 'reopt' which links custom reason build
# rules.

# Directory this script lives in (resolves through symlinked invocation).
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Locate the reopt helper next to this script; the extension-less variant
# wins when both exist (checked second, so it overwrites).
REOPT=""
if [[ -f "$DIR/reopt.sh" ]]; then
  REOPT="$DIR/reopt.sh"
fi
if [[ -f "$DIR/reopt" ]]; then
  REOPT="$DIR/reopt"
fi
if [ -z "$REOPT" ]; then
  echo "Couldn't find reopt" >&2
  exit 1
fi

# Since we need to override -ocamlopt, we parse the user passed-in -ocamlopt
# here and rebuild it in reopt
OCAMLOPTIDX=-1
USEOCAMLFIND=-1

# find ocamlopt in argument list
i=1
for var in "$@"; do
  if [[ $var = "-ocamlopt" ]]; then
    OCAMLOPTIDX=$i
  fi
  if [[ $var = "-use-ocamlfind" ]]; then
    USEOCAMLFIND=1
  fi
  # FIX: was `i=$i+1`, which concatenates into the string "1+1+1…" instead of
  # incrementing. It only worked by accident because the value was later used
  # exclusively in arithmetic contexts that re-evaluated the expression.
  i=$((i + 1))
done

# found ocamlopt, parsing
OCAMLOPT="ocamlopt.opt"
if [[ $OCAMLOPTIDX -ne -1 ]]; then
  # The argument after "-ocamlopt" will be parsed into reopt as ocamlopt to be used
  VALUEIDX=$((OCAMLOPTIDX + 1))
  OCAMLOPT=${!VALUEIDX}
  # Remove the parsed "-ocamlopt <value>" pair out of the argument list
  set -- "${@:1:OCAMLOPTIDX-1}" "${@: VALUEIDX+1}"
fi

if [[ $USEOCAMLFIND -ne -1 ]]; then
  # ocamlfind mode: reroute its ocamlc command to reopt
  env OCAMLFIND_COMMANDS="ocamlc=$REOPT" reasonbuild "$@"
else
  # pass OCAMLOPT as an environment variable
  reasonbuild -ocamlopt "env OCAMLOPT=\"$OCAMLOPT\" $REOPT" "$@"
fi
#!/bin/zsh
# Ansible bootstrap wrapper: parses -v/-vv/--one-time, then runs the
# setup playbooks. Relies on helpers (log, …) sourced from
# /opt/generic_bash_functions, which is cloned on first run if missing.
DEBUG=""
USAGE=1       # 1 = usage not requested; set to 0 once usage is printed
ONE_TIME=1    # 1 = skip one-time tasks; 0 = run them (--one-time)
LOG_LEVEL="d"

# Bootstrap the shared helper library if it is not installed yet.
if [ ! -d /opt/generic_bash_functions ];then
  echo "/opt/generic_bash_functions not found, attemting to clone from lanthean's github"
  pushd /opt
  sudo git clone https://github.com/lanthean/generic_bash_functions.git
  popd
  sudo chown -R $USER:staff /opt/generic_bash_functions
fi
source /opt/generic_bash_functions/generic_bash_functions

# Print CLI usage and mark it as shown.
function f_s_usage() {
  USAGE=0
  # Usage is called, do not print eof
  echo " -"
  echo "| Usage $0 {--one-time} {-v/-vv}"
  echo "| --one-time: first run tasks that should be done once (lvextend, directory preparation)"
  echo "| -v/-vv : show debug information"
  echo " --"
}

function f_get_options(){
  #'Options manager: get arguments from cli and parse them'
  while [[ $# -ge 1 ]]
  # -ge (greater or equal) because of --help (does not have pair = therefore in that case $# will be equal to 1)
  do
    #; echo "key: "$1", value: "$2", remaining number: "$#
    # NOTE(review): this pre-check flags EMPTY_VALUE whenever $2 is empty or
    # another option — but every option this script accepts is a value-less
    # flag, so a flag given as the LAST argument (e.g. plain `-v`) appears to
    # hit this branch too. Confirm intended behavior before relying on it.
    if [[ $2 == "" || $2 == "-"* ]];then
      ERR="EMPTY_VALUE"
      ERR_OPT=$1
      break
    else
      case $1 in
        -v)
          DEBUG="-vvv"
          shift
          ;;
        -vv)
          DEBUG="-vvvv"
          shift
          ;;
        -o|--one-time)
          ONE_TIME=0
          shift
          ;;
        -\?|-h|--help)
          ERR="USAGE"
          USAGE=0
          shift
          ;;
        *)
          # unknown option
          ERR="UNKNOWN_OPT"
          ERR_OPT=$@
          ;;
      esac
      shift # past argument or value
    fi
  done
  if [[ $USAGE == 0 ]];then
    ERR="USAGE"
  # elif [[ $ERR == 0 ]]; then # $ERR == False
  #   if [[ $INPUT == "" || $OUTPUT == "" ]]; then
  #     ERR="BAD_MANDATORY"
  #   else
  #     # Check if $INPUT exists
  #     if [[ ! -d $INPUT ]];then
  #       ERR="INPUT_NOT_A_DIRECTORY"
  #     fi
  #     # Check if $OUTPUT exists
  #     if [[ ! -d $OUTPUT ]];then
  #       ERR="OUTPUT_NOT_A_DIRECTORY"
  #     fi
  #   fi
  fi
}

# Map an internal $ERR code to a user-facing message (helpers come from the
# sourced generic_bash_functions, e.g. `log`).
function f_manage_err(){
  #'Error manager: take internal error and convert it to sensible output/exit code'
  case $ERR in
    "USAGE")
      f_s_usage
      ;;
    "UNKNOWN_OPT")
      log e "Unknown option used, please verify your syntax."
      log e "Input arguments used: "$@
      log e "Unknown argument: "$ERR_OPT
      f_s_usage
      ;;
    "BAD_MANDATORY")
      log e "Some mandatory parameter is missing, please consult usage."
      f_s_usage
      ;;
    "EMPTY_VALUE")
      log e "Attribute value cannot be empty. Please correct syntax of the attribute: $ERR_OPT"
      f_s_usage
      ;;
    # "INPUT_NOT_A_DIRECTORY")
    #   log e "$INPUT path does not exist. Check your INPUT directory."
    #   ;;
    # "OUTPUT_NOT_A_DIRECTORY")
    #   log w "$OUTPUT path does not exist. Creating it for you."
    #   [ $(mkdir $OUTPUT) ] && log i "$OUTPUT created"
    #   f_set_defaults
    #   f_main
    #   ;;
    *)
      log e "Unknown error."
      f_s_usage
      ;;
  esac
}

# Run the ansible playbooks: one-time VM setup (optional), then local setup.
function f_main() {
  private_key=/Users/lanthean/.ssh/id_rsa_vm
  # ansible-playbook -i hosts vm-one-time-setup.yml --extra-vars='target=all' --private-key=$private_key $DEBUG
  if [[ $ONE_TIME == 0 ]];then
    ansible-playbook -i hosts setup-vm.yml --extra-vars='target=all' --private-key=$private_key $DEBUG
  fi
  ansible-playbook -i localhost, setup-local.yml --private-key=$private_key $DEBUG
}

f_get_options $@
# NOTE(review): $ERR is never initialized to 0 in this file; `[[ $ERR == 0 ]]`
# only succeeds if the sourced generic_bash_functions sets ERR=0 — verify,
# otherwise a clean run would fall into f_manage_err's "Unknown error" branch.
if [[ $ERR == 0 ]];then
  f_main
else
  f_manage_err
fi
#!/bin/bash
# Start WebLogic ManagedServer2 in the background with tuned JVM options,
# rotating the previous nohup/GC logs first. Refuses to run as root or when
# the server is already up.

trap 'echo "${BASH_SOURCE[0]}: line ${LINENO}: status ${?}: user ${USER}: func ${FUNCNAME[0]}"' ERR
set -o errexit
set -o errtrace

SERVER_NAME="ManagedServer2"
DOMAIN_HOME="/usr/local/weblogic/user_projects/domains/base_domain"
LOG_DIR="${DOMAIN_HOME}/logs"
DATETIME="$(date +'%Y%m%d_%H%M%S')"
CURRENT_USER="$(id -un)"

# Never start WebLogic as root.
if [ "${CURRENT_USER}" == "root" ]; then
  echo "[ERROR] The current user is root!" >&2
  exit 1  # FIX: was a bare `exit` (status 0) after reporting an error
fi

# Bail out if a JVM for this server/domain is already running.
PID="$(pgrep -xa java | grep "${DOMAIN_HOME}" | grep "${SERVER_NAME}" | awk '{print $1}')"
if [ -n "${PID}" ]; then
  echo "[ERROR] The ${SERVER_NAME} (pid ${PID}) is already running!" >&2
  exit 1  # FIX: was a bare `exit` (status 0) after reporting an error
fi

# JVM memory / GC settings consumed by WebLogic start scripts via USER_MEM_ARGS.
USER_MEM_ARGS="-D${SERVER_NAME}"
USER_MEM_ARGS="${USER_MEM_ARGS} -Xms1024m -Xmx1024m"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:NewSize=384m -XX:MaxNewSize=384m"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:PermSize=256m -XX:MaxPermSize=256m"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+UseParallelGC"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:-UseAdaptiveSizePolicy"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+DisableExplicitGC"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+PrintGCDetails"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+PrintGCDateStamps"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+PrintGCTimeStamps"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+PrintHeapAtGC"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+PrintTenuringDistribution"
USER_MEM_ARGS="${USER_MEM_ARGS} -Xloggc:${LOG_DIR}/gc.${SERVER_NAME}.log"
# USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+UseGCLogFileRotation"
# USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+NumberOfGCLogFiles=30"
# USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+GCLogFileSize=1M"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:+HeapDumpOnOutOfMemoryError"
USER_MEM_ARGS="${USER_MEM_ARGS} -XX:HeapDumpPath=${LOG_DIR}/dump"
export USER_MEM_ARGS

# Additional WebLogic / JVM system properties.
JAVA_OPTIONS="${JAVA_OPTIONS} -Dweblogic.SocketReaders=4"
JAVA_OPTIONS="${JAVA_OPTIONS} -D_Offline_FileDataArchive=true"
JAVA_OPTIONS="${JAVA_OPTIONS} -Dcom.bea.wlw.netui.disableInstrumentation=true"
JAVA_OPTIONS="${JAVA_OPTIONS} -Dweblogic.connector.ConnectionPoolProfilingEnabled=false"
JAVA_OPTIONS="${JAVA_OPTIONS} -Djava.net.preferIPv4Stack=true"
JAVA_OPTIONS="${JAVA_OPTIONS} -Djava.net.preferIPv6Addresses=false"
JAVA_OPTIONS="${JAVA_OPTIONS} -Dweblogic.system.BootIdentityFile=${DOMAIN_HOME}/boot.properties"
JAVA_OPTIONS="${JAVA_OPTIONS} -Djava.security.egd=file:///dev/urandom"
export JAVA_OPTIONS

# JAVA_OPTIONS="${JAVA_OPTIONS} -verbose:class"
# JAVA_OPTIONS="${JAVA_OPTIONS} -verbose:module"
# JAVA_OPTIONS="${JAVA_OPTIONS} -verbose:jni"
# export JAVA_OPTIONS

# export EXT_PRE_CLASSPATH
# export EXT_POST_CLASSPATH

# Archive previous logs. FIX: ensure the per-server archive directory exists
# first — under errexit a missing directory would have aborted the script.
mkdir -p "${LOG_DIR}/${SERVER_NAME}"
if [ -f "${LOG_DIR}/nohup.${SERVER_NAME}.out" ]; then
  mv "${LOG_DIR}/nohup.${SERVER_NAME}.out" "${LOG_DIR}/${SERVER_NAME}/nohup.${SERVER_NAME}.${DATETIME}.out"
fi
if [ -f "${LOG_DIR}/gc.${SERVER_NAME}.log" ]; then
  mv "${LOG_DIR}/gc.${SERVER_NAME}.log" "${LOG_DIR}/${SERVER_NAME}/gc.${SERVER_NAME}.${DATETIME}.log"
fi

touch "${LOG_DIR}/nohup.${SERVER_NAME}.out"
# NOTE(review): ADMIN_URL is not set anywhere in this script — presumably it
# is expected from the caller's environment; confirm before deployment.
nohup "${DOMAIN_HOME}/bin/startManagedWebLogic.sh" "${SERVER_NAME}" ${ADMIN_URL} > "${LOG_DIR}/nohup.${SERVER_NAME}.out" 2>&1 &

tail -f "${LOG_DIR}/nohup.${SERVER_NAME}.out"
/**
 */
package PhotosMetaModel.impl;

import PhotosMetaModel.PhotosMetaModelPackage;
import PhotosMetaModel.View_a;

import org.eclipse.emf.ecore.EClass;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>View a</b></em>'.
 * <p>
 * EMF-generated implementation: {@code View_a} adds no structural features of
 * its own beyond those inherited from {@code PresentationSegment}; only the
 * static EClass lookup is overridden. Do not hand-edit methods marked
 * {@code @generated} — changes are lost on regeneration.
 * <!-- end-user-doc -->
 *
 * @generated
 */
public class View_aImpl extends PresentationSegmentImpl implements View_a {
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected View_aImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * Identifies this object's metamodel class within the generated package.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return PhotosMetaModelPackage.Literals.VIEW_A;
	}

} //View_aImpl
# Import the necessary packages
import nltk
import random
import string
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize

# FIX: fetch the stopword list and punctuation set once at module load.
# Previously `stopwords.words('english')` was re-evaluated for every token
# inside the comprehension conditions (and for every question on every user
# input), re-reading the corpus each time. Set membership is also O(1).
STOP_WORDS = set(stopwords.words('english'))
PUNCTUATION = set(string.punctuation)

# A list of common questions related to online shopping
questions = [
    'How do I find the best deals?',
    'Can I get free shipping?',
    'What payment methods are accepted?',
    'What is your return policy?',
    'Can I track my order?'
]

# List of answers to the above questions
answers = [
    'We always have great deals and promotions that you can take advantage of. Visit our website for more information.',
    'Yes, we offer free shipping to customers who spend more than $50.',
    'We accept payments made via PayPal, Visa, and Mastercard.',
    'We offer a 30-day money back guarantee if you are not satisfied with your purchase.',
    'Yes, you can track your order to check its delivery status.'
]


# A function to process and respond to user input
def process_input(user_input):
    """Return the canned answer whose question's keyword set exactly matches
    the keyword set of ``user_input``, or ``None`` when nothing matches.

    Keywords are the lower-cased tokens with English stopwords and
    punctuation removed; matching is exact set equality, as before.
    """
    tokenized_words = word_tokenize(user_input)
    filtered_words = [
        word.lower() for word in tokenized_words
        if word.lower() not in STOP_WORDS and word.lower() not in PUNCTUATION
    ]
    for i, question in enumerate(questions):
        tokenized_qs = word_tokenize(question.lower())
        comparable_words = [
            word for word in tokenized_qs
            if word not in STOP_WORDS and word not in PUNCTUATION
        ]
        if set(filtered_words) == set(comparable_words):
            return answers[i]
    # Explicit no-match result (the original fell off the end, which also
    # yields None — made explicit for readability).
    return None


# Initializes the bot
print("Hi! I'm an AI bot that can answer questions related to online shopping. How can I help you?")
while True:
    user_input = input("> ")
    response = process_input(user_input)
    if response is not None:
        print(response)
    else:
        print("I'm sorry, I don't understand. Can you please rephrase the question?")
module.exports = { useProject: true, exec: function({registry, logger, screen, docker, config, args}) { screen.info('Definitions are valid.'); var tableData = []; for(var cluster of registry.clusters) { tableData.push([cluster.id, cluster.berliozfile]); for(var service of cluster.services) { tableData.push([service.id, service.berliozfile]); } for(var database of cluster.databases) { tableData.push([database.id, database.berliozfile]); } for(var queue of cluster.queues) { tableData.push([queue.id, queue.berliozfile]); } } screen.table() .autofitColumn('Definition') .column('Location') .addRange(tableData) .output(); } }
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================

# Ascend 310 inference driver: validates arguments, sets up the Ascend
# toolchain environment, compiles the C++ inference app, runs inference on
# the exported MINDIR model, then computes accuracy against ImageNet labels.

if [[ $# -lt 4 || $# -gt 5 ]]; then
    echo "Usage: bash run_infer_310.sh [MINDIR_PATH] [NET_TYPE] [DATASET] [DATA_PATH] [DEVICE_ID]
    NET_TYPE can choose from [vit]
    DEVICE_ID is optional, it can be set by environment variable device_id, otherwise the value is zero"
exit 1
fi

# Resolve $1 to an absolute path (keeps it as-is when already absolute).
get_real_path(){
    if [ "${1:0:1}" == "/" ]; then
        echo "$1"
    else
        echo "$(realpath -m $PWD/$1)"
    fi
}

model=$(get_real_path $1)
# Only the 'vit' network is supported by this script.
if [ $2 == 'vit' ]; then
    network=$2
else
    echo "NET_TYPE can choose from [vit]"
    exit 1
fi

dataset=$3
data_path=$(get_real_path $4)

# Optional fifth argument selects the Ascend device; defaults to 0.
device_id=0
if [ $# == 5 ]; then
    device_id=$5
fi

echo "mindir name: "$model
echo "dataset path: "$data_path
echo "network: "$network
echo "dataset: "$dataset
echo "device id: "$device_id

# Select toolchain paths depending on whether this machine has the
# ascend-toolkit layout or the flat /usr/local/Ascend/latest layout.
export ASCEND_HOME=/usr/local/Ascend/
if [ -d ${ASCEND_HOME}/ascend-toolkit ]; then
    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/ascend-toolkit/latest/atc/bin:$PATH
    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/ascend-toolkit/latest/atc/lib64:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
    export TBE_IMPL_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp/op_impl/built-in/ai_core/tbe
    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:${TBE_IMPL_PATH}:$ASCEND_HOME/ascend-toolkit/latest/fwkacllib/python/site-packages:$PYTHONPATH
    export ASCEND_OPP_PATH=$ASCEND_HOME/ascend-toolkit/latest/opp
else
    export ASCEND_HOME=/usr/local/Ascend/latest/
    export PATH=$ASCEND_HOME/fwkacllib/bin:$ASCEND_HOME/fwkacllib/ccec_compiler/bin:$ASCEND_HOME/atc/ccec_compiler/bin:$ASCEND_HOME/atc/bin:$PATH
    export LD_LIBRARY_PATH=$ASCEND_HOME/fwkacllib/lib64:/usr/local/lib:$ASCEND_HOME/atc/lib64:$ASCEND_HOME/acllib/lib64:$ASCEND_HOME/driver/lib64:$ASCEND_HOME/add-ons:$LD_LIBRARY_PATH
    export PYTHONPATH=$ASCEND_HOME/fwkacllib/python/site-packages:$ASCEND_HOME/atc/python/site-packages:$PYTHONPATH
    export ASCEND_OPP_PATH=$ASCEND_HOME/opp
fi

# Build the C++ inference application (../ascend310_infer/src); output goes
# to build.log. Caller checks $? afterwards.
function compile_app()
{
    cd ../ascend310_infer/src/ || exit
    if [ -f "Makefile" ]; then
        make clean
    fi
    bash build.sh &> build.log
}

# Run inference; recreates result_Files/ and time_Result/ fresh each run.
# `cd -` returns to the directory we were in before compile_app's cd.
function infer()
{
    cd - || exit
    if [ -d result_Files ]; then
        rm -rf ./result_Files
    fi
    if [ -d time_Result ]; then
        rm -rf ./time_Result
    fi
    mkdir result_Files
    mkdir time_Result
    ../ascend310_infer/src/main --mindir_path=$model --dataset_path=$data_path --network=$network --dataset=$dataset --device_id=$device_id &> infer.log
}

# Generate the ImageNet label file, then score the inference results.
function cal_acc()
{
    python ../create_imagenet2012_label.py --img_path=$data_path
    python ../postprocess.py --dataset=$dataset --result_path=./result_Files --label_path=./imagenet_label.json &> acc.log
    if [ $? -ne 0 ]; then
        echo "calculate accuracy failed"
        exit 1
    fi
}

compile_app
if [ $? -ne 0 ]; then
    echo "compile app code failed"
    exit 1
fi
infer
if [ $? -ne 0 ]; then
    echo " execute inference failed"
    exit 1
fi
cal_acc
if [ $? -ne 0 ]; then
    echo "calculate accuracy failed"
    exit 1
fi
<reponame>jcfr/SPHARM-PDM
/*
 * author: msturm,
 * created: 16 Apr 1997
 * changes:
 *
 * contains routines for command line parsing
 * (mostly copied from gaudi toolbox, changed to templated functions)
 *
 */

#ifndef __ARGIO_H__
#define __ARGIO_H__

#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#include <string.h>
#include <ctype.h>

const int ipMAXTOKLEN = 255; // max. length of a token

// some workarounds for template function
typedef char* charp;

// atof() wrapper narrowed to float, usable as a T(*)(const char*) converter.
inline float fatof(const char *str)
{
  return (float) atof(str);
}

// command line parsing
//
// Generic lookup: scans argv for an entry *containing* keystr (strstr, so a
// substring matches, not just an exact flag), converts the following
// argument with `convert`, or returns defval when the key is absent.
// Exits with -1 when the key is present but its value is missing.
template <class T>
inline T ipGetArgument(const char **argv, const char *keystr, T (*convert) (const char *str), const T defval)
{
  for (int i=1; argv[i]; i++)
    if (strstr(argv[i],keystr))
      if (argv[i+1])
        return convert(argv[i+1]);
      else {
        fprintf(stderr,"Error: ipGetArgument: argument value of option \"%s\" is missing!\n", argv[i]);
        exit(-1);
      }
  return defval;
}

// String variant; returns a strdup'ed copy the caller owns (defval is also
// duplicated). NOTE: the commented-out template call below apparently did
// not resolve, hence the hand-expanded loop duplicated in each variant.
inline charp ipGetStringArgument(const char **argv, const char *keystr, const charp defval)
{
  char *_defval = (defval ? strdup(defval): NULL);
  //return ipGetArgument(argv, keystr, strdup, _defval);
  for (int i=1; argv[i]; i++)
    if (strstr(argv[i],keystr))
      if (argv[i+1])
        return strdup(argv[i+1]);
      else {
        fprintf(stderr,"Error: ipGetArgument: argument value of option \"%s\" is missing!\n", argv[i]);
        exit(-1);
      }
  return _defval;
}

// reads in multiple string arguments started by keystr and ended by an '-' or
// end of args
// Fills out[] (caller-allocated, capacity max) with strdup'ed values and
// returns how many were collected; 0 when keystr never occurs.
inline int ipGetStringMultipArgument(const char **argv, const char *keystr, char **out, const int max)
{
  int i = 1, num;
  while (argv[i]) {
    if (strstr(argv[i],keystr)) { // keystr found
      if (argv[i+1] && (argv[i+1])[0] != '-') {
        i++;
        num = 0;
        out[0] = strdup(argv[i]);
        i++; num++;
        while (argv[i] && num < max && (argv[i])[0] != '-') {
          out[num] = strdup(argv[i]);
          i++;num++;
        }
        return num; // jump back !
      } else {
        fprintf(stderr,"Error: ipGetArgument: argument value of option \"%s\" is missing!\n", argv[i]);
        exit(-1);
      }
    } // if keystr
    i++;
  } //while
  return 0;}

// int variant of ipGetArgument (hand-expanded, see note above).
inline int ipGetIntArgument(const char **argv, const char *keystr, const int defval)
{
  //return ipGetArgument(argv, keystr, atoi, defval);{
  for (int i=1; argv[i]; i++)
    if (strstr(argv[i],keystr))
      if (argv[i+1])
        return atoi(argv[i+1]);
      else {
        fprintf(stderr,"Error: ipGetArgument: argument value of option \"%s\" is missing!\n", argv[i]);
        exit(-1);
      }
  return defval;
}

// float variant of ipGetArgument (hand-expanded, see note above).
inline float ipGetFloatArgument(const char **argv, const char *keystr, const float defval)
{
  //return ipGetArgument(argv, keystr, fatof, defval);
  for (int i=1; argv[i]; i++)
    if (strstr(argv[i],keystr))
      if (argv[i+1])
        return fatof(argv[i+1]);
      else {
        fprintf(stderr,"Error: ipGetArgument: argument value of option \"%s\" is missing!\n", argv[i]);
        exit(-1);
      }
  return defval;
}

// double variant of ipGetArgument (hand-expanded, see note above).
inline double ipGetDoubleArgument(const char **argv, const char *keystr, const double defval)
{
  //return ipGetArgument(argv, keystr, atof, defval);
  for (int i=1; argv[i]; i++)
    if (strstr(argv[i],keystr))
      if (argv[i+1])
        return atof(argv[i+1]);
      else {
        fprintf(stderr,"Error: ipGetArgument: argument value of option \"%s\" is missing!\n", argv[i]);
        exit(-1);
      }
  return defval;
}

// Returns 1 when any argv entry contains keystr (substring match), else 0.
inline int ipExistsArgument(const char **argv, const char *keystr)
{
  for (int i=1; argv[i]; i++)
    if (strstr(argv[i],keystr))
      return 1;
  return 0;
}

// string utilities

// appends src to resized dst and returns it
inline char *ipAppendString(char *&dst, const char *src){
  if (dst) {
    dst = (char *) realloc(dst, strlen(dst) + strlen(src) + 1);
    return strcat(dst, src);
  }
  else
    return strdup(src);}

/*************************************************************
 * counts the words on one line
 ************************************************************/
inline int ipLineWordCount(const char *s){
  int n = 0;
  for (;;) {
    while (isspace(*s))
      s++;
    if (*s == '\0')
      return n;
    while (isgraph(*s))
      s++;
    n++;
  }}

/*************************************************************
 * gets the generic name of a file cutting the extension
 ************************************************************/
// NOTE(review): truncates at the FIRST '.', so "a.b.c" -> "a" and dotted
// directory names are clipped too — confirm that is the intended behavior.
inline char *ipGetBaseName(const char *string){
  unsigned int i;
  char *ret = NULL, *retp = NULL;

  if (!(ret = strdup(string))) {
    fprintf(stderr, "Error: ipGetBaseName [%s, line %d]: strdup() failed:",
            __FILE__, __LINE__);
    perror("");
    exit(errno);
  }

  for(i=0, retp=ret; i<strlen(string);i++, retp++)
    if (*retp == '.') {
      *retp = '\0';
      break;
    }

  return ret;}

/*************************************************************
 * reads a line into s (already allocated) and returns length
 * lim describes the max line length.
 * function proposed by Kernighan and Ritchie
 ************************************************************/
inline int ipfgetline(FILE* f, char* s, int lim){
  int c,i;

  for(i=0; (i<lim-1) && ((c=getc(f))!=EOF) && (c!='\n'); ++i)
    s[i]=c;
  if (c=='\n'){
    s[i]=c;
    ++i;
  }
  s[i]='\0';
  return(i);}

// extracts string tokens from a string which are either separeted by
// - white-space (isspace()) or
// - punctuation (ispunct()) except '.', '+', '-', '_'
// and converts them to type <class T>.
//
// at most n tokens will be extracted, tokenval[] has to be
// allocated
// returns number of tokens found, converted tokens in tokenval[]
template <class T>
int ipExtractTokens(T *tokenval, const char *tokenstr, const int n, T (*convert) (const char *str))
{
  char *tmp_token = new char [ipMAXTOKLEN];
  char *tmp_tokenp = tmp_token;
  const char *tokenp = tokenstr;
  int i = 0;

  while ((i < n) && (*tokenp)) {
    memset(tmp_token, '\0', ipMAXTOKLEN * sizeof(char));
    // skip leading whitespace
    while (isspace(*tokenp) && *tokenp)
      tokenp++;
    tmp_tokenp = tmp_token;
    // copy token characters (alnum plus . - + _) up to the buffer limit
    while ((*tokenp) &&
           (isalnum(*tokenp) ||
            (*tokenp == '.') ||
            (*tokenp == '-') ||
            (*tokenp == '+') ||
            (*tokenp == '_') ) &&
           ((tmp_tokenp - tmp_token) < ipMAXTOKLEN))
      *(tmp_tokenp++) = *(tokenp++);
    if(*tokenp) tokenp++; // skip separator
    tokenval[i++] = convert(tmp_token);
  }

  delete [] tmp_token;

  return i;
}

// extracts string tokens from a string which are either separeted by
// - white-space (isspace()) or
// - commas
// and converts them to type <class T>.
//
// at most n tokens will be extracted, tokenval[] has to be
// allocated
// returns number of tokens found, converted tokens in tokenval[]
template <class T>
int ipExtractSpaceSepTokens(T *tokenval, const char *tokenstr, const int n, T (*convert) (const char *str))
{
  char *tmp_token = new char [ipMAXTOKLEN];
  char *tmp_tokenp = tmp_token;
  const char *tokenp = tokenstr;
  int i = 0;

  while ((i < n) && (*tokenp)) {
    memset(tmp_token, '\0', ipMAXTOKLEN * sizeof(char));
    // skip leading whitespace
    while (isspace(*tokenp) && *tokenp)
      tokenp++;
    tmp_tokenp = tmp_token;
    // copy until a comma (NOTE(review): the `isspace(*tokenp) ||` clause
    // makes the condition true for any non-comma character — presumably
    // only ',' was meant to terminate; confirm against callers)
    while ((*tokenp) &&
           (isspace(*tokenp) ||
            (*tokenp != ',') ) &&
           ((tmp_tokenp - tmp_token) < ipMAXTOKLEN))
      *(tmp_tokenp++) = *(tokenp++);
    if(*tokenp) tokenp++; // skip separator
    tokenval[i++] = convert(tmp_token);
  }

  delete [] tmp_token;

  return i;
}

inline int ipExtractIntTokens(int *tokenval, const char *tokenstr, const int n)
{
  // return ipExtractTokens(tokenval, tokenstr, n, atoi);
  char *tmp_token = new char [ipMAXTOKLEN];
  char *tmp_tokenp = tmp_token;
  const char
*tokenp = tokenstr; int i = 0; while ((i < n) && (*tokenp)) { memset(tmp_token, '\0', ipMAXTOKLEN * sizeof(char)); while (isspace(*tokenp) && *tokenp) tokenp++; tmp_tokenp = tmp_token; while ((*tokenp) && (isalnum(*tokenp) || (*tokenp == '.') || (*tokenp == '-') || (*tokenp == '+') || (*tokenp == '_') ) && ((tmp_tokenp - tmp_token) < ipMAXTOKLEN)) *(tmp_tokenp++) = *(tokenp++); if(*tokenp) tokenp++; // skip separator tokenval[i++] = atoi(tmp_token); } delete [] tmp_token; return i; } inline int ipExtractFloatTokens(float *tokenval, const char *tokenstr, const int n) { // return ipExtractTokens(tokenval, tokenstr, n, fatof); char *tmp_token = new char [ipMAXTOKLEN]; char *tmp_tokenp = tmp_token; const char *tokenp = tokenstr; int i = 0; while ((i < n) && (*tokenp)) { memset(tmp_token, '\0', ipMAXTOKLEN * sizeof(char)); while (isspace(*tokenp) && *tokenp) tokenp++; tmp_tokenp = tmp_token; while ((*tokenp) && (isalnum(*tokenp) || (*tokenp == '.') || (*tokenp == '-') || (*tokenp == '+') || (*tokenp == '_') ) && ((tmp_tokenp - tmp_token) < ipMAXTOKLEN)) *(tmp_tokenp++) = *(tokenp++); if(*tokenp) tokenp++; // skip separator tokenval[i++] = fatof(tmp_token); } delete [] tmp_token; return i; } inline int ipExtractDoubleTokens(double *tokenval, const char *tokenstr, const int n) { // return ipExtractTokens(tokenval, tokenstr, n, atof); char *tmp_token = new char [ipMAXTOKLEN]; char *tmp_tokenp = tmp_token; const char *tokenp = tokenstr; int i = 0; while ((i < n) && (*tokenp)) { memset(tmp_token, '\0', ipMAXTOKLEN * sizeof(char)); while (isspace(*tokenp) && *tokenp) tokenp++; tmp_tokenp = tmp_token; while ((*tokenp) && (isalnum(*tokenp) || (*tokenp == '.') || (*tokenp == '-') || (*tokenp == '+') || (*tokenp == '_') ) && ((tmp_tokenp - tmp_token) < ipMAXTOKLEN)) *(tmp_tokenp++) = *(tokenp++); if(*tokenp) tokenp++; // skip separator tokenval[i++] = atof(tmp_token); } delete [] tmp_token; return i; } inline int ipExtractStringTokens(char **tokenval, const char *tokenstr, const 
int n) { // return ipExtractSpaceSepTokens(tokenval, tokenstr, n, strdup); char *tmp_token = new char [ipMAXTOKLEN]; char *tmp_tokenp = tmp_token; const char *tokenp = tokenstr; int i = 0; while ((i < n) && (*tokenp)) { memset(tmp_token, '\0', ipMAXTOKLEN * sizeof(char)); while (isspace(*tokenp) && *tokenp) tokenp++; tmp_tokenp = tmp_token; while ((*tokenp) && (isalnum(*tokenp) || (*tokenp == '.') || (*tokenp == '-') || (*tokenp == '+') || (*tokenp == '_') ) && ((tmp_tokenp - tmp_token) < ipMAXTOKLEN)) *(tmp_tokenp++) = *(tokenp++); if(*tokenp) tokenp++; // skip separator tokenval[i++] = strdup(tmp_token); } delete [] tmp_token; return i; } #endif
<reponame>insufficientchocolate/diplomat<filename>pkg/log/maybe.go package log type maybeLogger struct { parent Logger } func (m maybeLogger) Info(message string, args ...interface{}) { if m.parent != nil { m.parent.Info(message, args...) } } func (m maybeLogger) Error(message string, args ...interface{}) { if m.parent != nil { m.parent.Error(message, args...) } } func (m maybeLogger) Debug(message string, args ...interface{}) { if m.parent != nil { m.parent.Debug(message, args...) } } func MaybeLogger(logger Logger) Logger { return &maybeLogger{logger} }
package aws

import (
	"context"

	"github.com/turbot/steampipe-plugin-sdk/v3/grpc/proto"
	"github.com/turbot/steampipe-plugin-sdk/v3/plugin/transform"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/service/redshift"
	"github.com/turbot/steampipe-plugin-sdk/v3/plugin"
)

//// TABLE DEFINITION

// tableAwsRedshiftSubnetGroup defines the steampipe table
// "aws_redshift_subnet_group": list + single-row get over Redshift cluster
// subnet groups, expanded per-region via the region matrix.
func tableAwsRedshiftSubnetGroup(_ context.Context) *plugin.Table {
	return &plugin.Table{
		Name:        "aws_redshift_subnet_group",
		Description: "AWS Redshift Subnet Group",
		Get: &plugin.GetConfig{
			KeyColumns: plugin.SingleColumn("cluster_subnet_group_name"),
			IgnoreConfig: &plugin.IgnoreConfig{
				// a missing subnet group yields an empty row, not an error
				ShouldIgnoreErrorFunc: isNotFoundError([]string{"ClusterSubnetGroupNotFoundFault"}),
			},
			Hydrate: getRedshiftSubnetGroup,
		},
		List: &plugin.ListConfig{
			Hydrate: listRedshiftSubnetGroup,
		},
		GetMatrixItem: BuildRegionList,
		Columns: awsRegionalColumns([]*plugin.Column{
			{
				Name:        "cluster_subnet_group_name",
				Description: "The name of the cluster subnet group.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "subnet_group_status",
				Description: "The status of the cluster subnet group.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "description",
				Description: "The description of the cluster subnet group.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "vpc_id",
				Description: "The VPC ID of the cluster subnet group.",
				Type:        proto.ColumnType_STRING,
			},
			{
				Name:        "subnets",
				Description: "A list of the VPC Subnet elements.",
				Type:        proto.ColumnType_JSON,
			},
			{
				Name:        "tags_src",
				Description: "A list of tags attached to the subnet group.",
				Type:        proto.ColumnType_JSON,
				// tags require the per-item describe call, hence the extra hydrate
				Hydrate:   getRedshiftSubnetGroup,
				Transform: transform.FromField("Tags"),
			},

			// Standard columns
			{
				Name:        "title",
				Description: resourceInterfaceDescription("title"),
				Type:        proto.ColumnType_STRING,
				Transform:   transform.FromField("ClusterSubnetGroupName"),
			},
			{
				Name:        "tags",
				Description: resourceInterfaceDescription("tags"),
				Type:        proto.ColumnType_JSON,
				Transform:   transform.From(redshiftSubnetGroupTurbotTags),
			},
			{
				Name:        "akas",
				Description: resourceInterfaceDescription("akas"),
				Type:        proto.ColumnType_JSON,
				Hydrate:     getRedshiftSubnetGroupAkas,
				Transform:   transform.FromValue(),
			},
		}),
	}
}

//// LIST FUNCTION

// listRedshiftSubnetGroup streams every cluster subnet group in the current
// region, paging through DescribeClusterSubnetGroups and stopping early once
// the query's row limit is satisfied.
func listRedshiftSubnetGroup(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) {
	plugin.Logger(ctx).Trace("listRedshiftSubnetGroup")

	// Create Session
	svc, err := RedshiftService(ctx, d)
	if err != nil {
		return nil, err
	}

	input := &redshift.DescribeClusterSubnetGroupsInput{
		MaxRecords: aws.Int64(100),
	}

	// Reduce the basic request limit down if the user has only requested a small number of rows
	// (never below 20 — presumably the API's minimum page size; verify against AWS docs)
	limit := d.QueryContext.Limit
	if d.QueryContext.Limit != nil {
		if *limit < *input.MaxRecords {
			if *limit < 20 {
				input.MaxRecords = aws.Int64(20)
			} else {
				input.MaxRecords = limit
			}
		}
	}

	// List call
	err = svc.DescribeClusterSubnetGroupsPages(
		input,
		func(page *redshift.DescribeClusterSubnetGroupsOutput, isLast bool) bool {
			for _, subnetGroup := range page.ClusterSubnetGroups {
				d.StreamListItem(ctx, subnetGroup)

				// Context may get cancelled due to manual cancellation or if the limit has been reached
				if d.QueryStatus.RowsRemaining(ctx) == 0 {
					return false
				}
			}
			// keep paging until the SDK reports the last page
			return !isLast
		},
	)

	return nil, err
}

//// HYDRATE FUNCTIONS

// getRedshiftSubnetGroup fetches a single cluster subnet group by name
// (the table's get-key column); returns nil when nothing matches.
func getRedshiftSubnetGroup(ctx context.Context, d *plugin.QueryData, _ *plugin.HydrateData) (interface{}, error) {
	clusterSubnetGroupName := d.KeyColumnQuals["cluster_subnet_group_name"].GetStringValue()

	// Create service
	svc, err := RedshiftService(ctx, d)
	if err != nil {
		return nil, err
	}

	params := &redshift.DescribeClusterSubnetGroupsInput{
		ClusterSubnetGroupName: aws.String(clusterSubnetGroupName),
	}

	op, err := svc.DescribeClusterSubnetGroups(params)
	if err != nil {
		return nil, err
	}

	// the API returns a list even for a name-scoped describe; take the first hit
	if op.ClusterSubnetGroups != nil && len(op.ClusterSubnetGroups) > 0 {
		return op.ClusterSubnetGroups[0], nil
	}
	return nil, nil
}

// getRedshiftSubnetGroupAkas builds the resource's AKA list: a single ARN of
// the form arn:<partition>:redshift:<region>:<account>:subnetgroup:<name>.
func getRedshiftSubnetGroupAkas(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
	plugin.Logger(ctx).Trace("getRedshiftSubnetGroupAkas")
	region := d.KeyColumnQualString(matrixKeyRegion)
	data := h.Item.(*redshift.ClusterSubnetGroup)

	// partition and account id come from the cached common-columns hydrate
	getCommonColumnsCached := plugin.HydrateFunc(getCommonColumns).WithCache()
	commonData, err := getCommonColumnsCached(ctx, d, h)
	if err != nil {
		return nil, err
	}
	commonColumnData := commonData.(*awsCommonColumnData)
	arn := "arn:" + commonColumnData.Partition + ":redshift:" + region + ":" + commonColumnData.AccountId + ":subnetgroup:" + *data.ClusterSubnetGroupName

	// Get data for turbot defined properties
	akas := []string{arn}

	return akas, nil
}

//// TRANSFORM FUNCTIONS

// redshiftSubnetGroupTurbotTags flattens the AWS []Tag list into a plain
// map[string]string for the standard "tags" column; nil when untagged.
func redshiftSubnetGroupTurbotTags(_ context.Context, d *transform.TransformData) (interface{}, error) {
	data := d.HydrateItem.(*redshift.ClusterSubnetGroup)

	if data.Tags == nil {
		return nil, nil
	}

	// Get the resource tags
	// NOTE(review): this second nil check is redundant — the early return
	// above already guarantees data.Tags != nil.
	var turbotTagsMap map[string]string
	if data.Tags != nil {
		turbotTagsMap = map[string]string{}
		for _, i := range data.Tags {
			turbotTagsMap[*i.Key] = *i.Value
		}
	}

	return turbotTagsMap, nil
}
#!/usr/bin/env bash
#
# Benchmark the AIMES skeleton synapse script: run it 10 times, append the
# `time` report (which bash writes to stderr) to a results file, then strip
# error noise ("cannot"/"open" lines) and blank lines into a cleaned file.

set -u

results="$HOME/bin/time_results_skeleton_comet"
cleaned="$HOME/bin/time_results_cleaned_skeleton_comet"
synapse="$HOME/bin/Skeleton/bin/aimes-skeleton-synapse.py"

# Ten timed runs. The { ...; } group makes the `time` keyword's report go
# through the 2>> redirection into the results file.
for _ in {1..10}; do
  { time python "$synapse" serial flops 1 1715750072310 65536 65536 0 0 0; } 2>> "$results"
done

# One grep replaces the old sed|sed|grep chain: drop lines containing
# "cannot" or "open" (e.g. "cannot open ..." errors), then `grep .`
# keeps only non-blank lines.
grep -v -e cannot -e open -- "$results" | grep . > "$cleaned"