text stringlengths 1 1.05M |
|---|
// Define a new module using the `mod` keyword
mod nu {
// Module implementation
}
mod sys {
// Module implementation
}
// Import functionality from other modules using the `use` keyword
use sys::Sys;
// Make items within a module accessible from outside the module using the `pub` keyword
// NOTE(review): `use sys::Sys;` and `pub use sys::Sys;` in the same scope conflict
// (rustc E0252, duplicate import). Keep only one of the two — `pub use` alone both
// imports the name and re-exports it.
pub use sys::Sys;
# Install the packages first (shell commands, not Python — the original file had
# these as bare statements, which is a SyntaxError):
#   pip install flask
#   pip install SpeechRecognition

# Import packages
from flask import Flask, request
import speech_recognition as sr

# Recognizer instance reused across requests for voice recognition
r = sr.Recognizer()

# Set up the Flask app
app = Flask(__name__)


@app.route('/ask', methods=['POST'])
def ask_question():
    """Answer a query taken from the JSON request body; if microphone capture
    succeeds, the spoken query overrides the body value."""
    # Get the query from the request body
    query = request.json.get('query')
    # Get the query from speech
    try:
        with sr.Microphone() as source:
            r.adjust_for_ambient_noise(source, duration=0.5)
            audio = r.listen(source)
            query = r.recognize_google(audio)
    except Exception:
        # A bare `except:` would also swallow KeyboardInterrupt/SystemExit.
        print("Unable to get audio input")
    # Process the query
    # Your code here
    # Return a response (Flask serializes a returned dict as JSON)
    response = {
        'response': 'Your response here'
    }
    return response


if __name__ == '__main__':
    app.run(debug=True)
import { ErrorMessage, Field } from 'formik';
import React from 'react';
import ErrorComponent from './ErrorComponent';
const CheckboxInput = ({options, name, label }) => (
<div className="">
<div>{label}</div>
{
options.map((option) => {
return (
<label key={option.key}>
<br></br>
<Field type="checkbox" name={name} value={option.key} />
<span>{ option.value}</span>
</label>
)
})
}
<ErrorMessage name={name} component={ErrorComponent} />
</div>
);
export default CheckboxInput; |
^((\+|00)49\s?)?((\([0-9]{3}\)|[0-9]{3})\s?)?([0-9]{3}\s?[0-9]{2,4}\s?[0-9]{2,4}|[0-9]{2,4}\s?[0-9]{2,4}\s?[0-9]{2,4})$
//
// Created by baifeng on 2022/4/14.
//
#include "bmflabel.h"
#include "bmfont.h"
#include "utf8_to_unicode.h"
#include "common/log.h"
mge_begin
// A single rendered glyph: an image widget displaying one sub-rectangle
// (srcrect) of a bitmap-font page texture.
BMFChar::BMFChar(TexturePtr const& texture, SDL_Rect const& srcrect):ImageWidget(texture, srcrect) {
}
// Label starts empty and clean. Glyph children live under _textBox so padding
// can be applied by offsetting that single container. _visibleCount = 0xffff
// effectively means "all glyphs visible".
BMFLabel::BMFLabel():_textBox(new Widget), _padding({0, 0, 0, 0}), _dirty(false), _width(0), _visibleCount(0xffff) {
    addChild(Ptr(_textBox));
}
// Sets the bitmap font used for rendering and marks the layout dirty;
// the actual rebuild is deferred to the next draw_at_once().
void BMFLabel::setFont(BMFontPtr const& font) {
    _font = font;
    _dirty = true;
}
// Returns a mutable reference to the current font pointer (may hold nullptr
// before setFont() was called).
BMFLabel::BMFontPtr& BMFLabel::getFont() {
    return _font;
}
void BMFLabel::setString(std::string const& text) {
if (_text == text) {
return;
}
_text = text;
_dirty = true;
_unicodeString.clear();
auto i = 0;
while (i < text.length()) {
auto n = utf8_get_size(&text[i]);
if (n == -1) {
n++;
LOG_ERROR("BMFLabel::setString text[%d] error.\n", i);
continue;
}
_unicodeString.push_back(utf8_convert_unicode(text.c_str()+i, n));
i += n;
}
}
// Sets the inner padding (left/top/right/bottom) around the text box and
// marks the layout dirty.
void BMFLabel::setPadding(Padding const& padding) {
    _padding = padding;
    _dirty = true;
}
// Sets the wrap width: glyphs that would exceed it are moved to the next line
// during refresh(). Marks the layout dirty.
void BMFLabel::setWidth(int width) {
    _width = width;
    _dirty = true;
}
// Sets extra spacing: x is added between consecutive glyphs, y between lines.
// Marks the layout dirty.
void BMFLabel::setSpacing(Vector2i const& spacing) {
    _spacing = spacing;
    _dirty = true;
}
// Shows only the first `count` glyph children and hides the rest. Applies
// immediately to existing children; does not trigger a re-layout.
void BMFLabel::setVisibleCount(size_t count) {
    _visibleCount = count;
    // NOTE(review): `i` is int while children().size() and _visibleCount are
    // unsigned — fine while the glyph count stays below INT_MAX.
    for (int i = 0; i < _textBox->children().size(); ++i) {
        auto& child = _textBox->children()[i];
        child->setVisible(i < _visibleCount);
    }
}
// Rebuilds the glyph layout immediately if anything changed since the last
// rebuild; cheap no-op otherwise. Invoked from onUpdate() each frame.
void BMFLabel::draw_at_once() {
    if (!_dirty) {
        return;
    }
    refresh();
    _dirty = false;
}
// Returns the glyph widget at `index`, or nullptr when out of range.
// Indices follow the order glyphs were laid out by refresh().
BMFChar* BMFLabel::getChar(uint32_t index) {
    if (index < _textBox->children().size()) {
        return _textBox->children()[index]->fast_to<BMFChar>();
    }
    return nullptr;
}
// Per-frame hook: applies any pending layout rebuild (delta is unused).
void BMFLabel::onUpdate(float delta) {
    draw_at_once();
}
// Rebuilds the per-glyph child widgets from _unicodeString using the current
// font, wrap width, padding and spacing, then resizes the label to fit.
void BMFLabel::refresh() {
    if (_font == nullptr) {
        return;
    }
    Vector2f size;
    // Final width is fixed by the caller-provided wrap width plus padding.
    size.x = _padding.left + _padding.right + _width;
    _textBox->setPosition(_padding.left, _padding.top);
    _textBox->removeAllChildren();
    Vector2f offset;  // pen position, relative to _textBox
    auto& common = _font->common();
    for (int i = 0; i < _unicodeString.size(); ++i) {
        auto c = _unicodeString[i];
        if (!_font->hasChar(c)) {
            // Code point missing from the font: skipped silently.
            continue;
        }
        auto const& charset = _font->getChar(c);
        // Wrap to the next line when this glyph would overflow the wrap width.
        if (offset.x + charset.xoffset + charset.width > _width) {
            offset.x = 0;
            offset.y += common.base + _spacing.y;
        }
        auto tex = _font->getTexture(charset.page);
        auto bmfchar = New<BMFChar>(tex, SDL_Rect{charset.x, charset.y, charset.width, charset.height});
        bmfchar->setPosition(offset.x + charset.xoffset, offset.y + charset.yoffset);
        bmfchar->setVisible(i < _visibleCount);
        _textBox->addChild(bmfchar);
        // NOTE(review): the pen advance uses width + xoffset; the BMFont format
        // normally advances by the glyph's `xadvance` — confirm against bmfont.h.
        offset.x += charset.width + charset.xoffset + _spacing.x;
    }
    size.y = common.base + offset.y + _padding.top + _padding.bottom;
    this->setSize(size);
    _textBox->setSize(size.x - _padding.left - _padding.right, size.y - _padding.top - _padding.bottom);
}
mge_end |
<gh_stars>1-10
const { test } = require('ava')
const Asuha = require('../..')
const { stub } = require('sinon')
const http = require('http')

// Verifies that Asuha.http() wires its internal server through http.createServer.
test('Asuha#listen()', function (t) {
  t.plan(1)
  const FAKE_SERVER = {
    fake: true,
    listen: function () { }
  }
  // Stub http.createServer so no real server is ever created.
  const _stub = stub(http, 'createServer')
  _stub.returns(FAKE_SERVER)
  try {
    const asuha = Asuha.http()
    t.is(asuha.server(), FAKE_SERVER)
  } finally {
    // BUG FIX: the stub was never restored, leaking the patched
    // http.createServer into every test that runs after this one.
    _stub.restore()
  }
})
|
import type { RequestLogger } from './request-logger';
/**
 * Request-processing means that provide a logger.
 *
 * Mix this into a request context to make a logger available to handlers.
 *
 * @typeParam TLogger - Request logger type.
 */
export interface LoggerMeans<TLogger extends RequestLogger = RequestLogger> {
  /**
   * A logger to use during request processing.
   */
  readonly log: TLogger;
}
|
<reponame>ourcade/phaser3-typescript-examples<filename>src/camera/camera-filter/CameraTypes.d.ts<gh_stars>10-100
// Module augmentation: extends Phaser's Camera typings with a `rotation`
// property so the camera-filter example compiles. NOTE(review): presumably
// the property exists at runtime but is missing from the bundled .d.ts —
// verify against the Phaser version in use.
declare namespace Phaser.Cameras.Scene2D
{
    interface Camera
    {
        rotation: number
    }
}
|
#!/usr/bin/env bash
# shellcheck disable=SC2046
# Checks out the requested commit of amazon-ray, runs two RLlib example scripts
# with a forced single GPU, and writes a pass/fail marker file at the end.
cob="$1"
git clone https://github.com/amzn/amazon-ray.git ray
pushd ray || true
git checkout "$cob"
#bash ./ci/travis/install-bazel.sh
#BAZEL_PATH=$HOME/bin/bazel
#ray stop
SUCCESS=1
# Run all test cases, but with a forced num_gpus=1 (--test_env=RLLIB_NUM_GPUS=1).
#if ! $BAZEL_PATH test --config=ci $(./scripts/bazel_export_options) --build_tests_only --test_tag_filters=examples_A,examples_B --test_env=RAY_USE_MULTIPROCESSING_CPU_COUNT=1 --test_env=RLLIB_NUM_GPUS=1 rllib/... ; then SUCCESS=0; fi
#if ! $BAZEL_PATH test --config=ci $(./scripts/bazel_export_options) --build_tests_only --test_tag_filters=examples_C,examples_D --test_env=RAY_USE_MULTIPROCESSING_CPU_COUNT=1 --test_env=RLLIB_NUM_GPUS=1 rllib/... ; then SUCCESS=0; fi
#if ! $BAZEL_PATH test --config=ci $(./scripts/bazel_export_options) --build_tests_only --test_tag_filters=examples_E,examples_F,examples_G,examples_H,examples_I,examples_J,examples_K,examples_L,examples_M,examples_N,examples_O,examples_P --test_env=RAY_USE_MULTIPROCESSING_CPU_COUNT=1 --test_env=RLLIB_NUM_GPUS=1 rllib/... ; then SUCCESS=0; fi
#if ! $BAZEL_PATH test --config=ci $(./scripts/bazel_export_options) --build_tests_only --test_tag_filters=examples_Q,examples_R,examples_S,examples_T,examples_U,examples_V,examples_W,examples_X,examples_Y,examples_Z --test_env=RAY_USE_MULTIPROCESSING_CPU_COUNT=1 --test_env=RLLIB_NUM_GPUS=1 rllib/... ; then SUCCESS=; fi
# Run all test cases, but with a forced num_gpus=1.
export RLLIB_NUM_GPUS=1
# BUG FIX: the original checks were missing `!`, so a *successful* run set
# SUCCESS=0 (fail) while a failing run left SUCCESS=1 (pass) — inverted results.
if ! python rllib/examples/attention_net.py --as-test --stop-reward=20 --num-cpus=0; then SUCCESS=0; fi
if ! python rllib/examples/attention_net.py --framework=torch --as-test --stop-reward=20 --num-cpus=0; then SUCCESS=0; fi
popd || true
echo "{'passed': $SUCCESS}" > "${TEST_OUTPUT_JSON:-/tmp/release_test_out.json}"
exit $((1 - SUCCESS))
|
def palindrome_length(s):
    """Fill an interval DP table over substrings of ``s`` and return the sum of
    each row's maximum entry.

    ``table[lo][hi]`` is 1 on the diagonal; for longer spans it is
    ``table[lo+1][hi-1] + 2`` when the endpoint characters match, else 0.
    """
    n = len(s)
    table = [[0] * n for _ in range(n)]
    # Every single character is a palindrome of length 1.
    for i in range(n):
        table[i][i] = 1
    # Grow spans from length 2 up to the whole string.
    for span in range(2, n + 1):
        for lo in range(n - span + 1):
            hi = lo + span - 1
            if s[lo] != s[hi]:
                table[lo][hi] = 0
            elif span == 2:
                table[lo][hi] = 2
            else:
                table[lo][hi] = table[lo + 1][hi - 1] + 2
    return sum(max(row) for row in table)


print(palindrome_length("abcbab"))
import * as routes from './routes'
import express from 'express'

// The app is configured here AND exported, so it can be mounted by a parent
// server or exercised directly in tests.
const app = module.exports = express()

// Server-side rendering uses Jade templates from ./templates.
app.set('view engine', 'jade')
app.set('views', `${__dirname}/templates`)

// Article pages; the /amp variant has its own handler. Order matters:
// the more specific /amp route must be registered before /article/:slug.
app.get('/article/:slug/amp', routes.amp)
app.get('/article/:slug', routes.index)
app.get('/series/:slug', routes.index)
app.get('/series/:seriesSlug/:slug', routes.index)
app.get('/video/:slug', routes.index)
app.post('/signup/editorial', routes.editorialSignup)
// Legacy post URLs redirect to their current location.
app.get('/post/:id', routes.redirectPost)
app.get('/:id/posts', routes.redirectPost)
|
<filename>src/training/binarysearchtree/E230_Medium_KthSmallestElementInBST.java
package training.binarysearchtree;
import training.binarytree.TreeNode;
import org.junit.jupiter.api.Test;
import java.util.Deque;
import java.util.LinkedList;
import java.util.function.ToIntBiFunction;
import static training.binarytree.TreeNode.newTree;
import static org.junit.jupiter.api.Assertions.assertEquals;
/**
 * 230. Kth Smallest Element in a BST: https://leetcode-cn.com/problems/kth-smallest-element-in-a-bst/
 *
 * Given the root of a binary search tree and an integer k, design an algorithm
 * to find the kth smallest element in it (counting from 1).
 *
 * Example 1:
 * Input: root = [3,1,4,null,2], k = 1
 * Output: 1
 * Explanation:
 *    3
 *   / \
 *  1   4
 *   \
 *    2
 *
 * Example 2:
 * Input: root = [5,3,6,2,4,null,null,1], k = 3
 * Output: 3
 * Explanation:
 *        5
 *       / \
 *      3   6
 *     / \
 *    2   4
 *   /
 *  1
 *
 * Constraints:
 * - The tree has n nodes.
 * - 1 <= k <= n <= 10**4
 * - 0 <= Node.val <= 10**4
 */
public class E230_Medium_KthSmallestElementInBST {

    // Runs both example cases from the problem statement against a solution.
    static void test(ToIntBiFunction<TreeNode, Integer> method) {
        assertEquals(1, method.applyAsInt(newTree(3,1,4,null,2), 1));
        assertEquals(3, method.applyAsInt(newTree(5,3,6,2,4,null,null,1), 3));
    }

    /**
     * Not the most efficient approach; O(N) time. The optimal variant stores, in
     * each node, the size of the subtree rooted at it; node.left's size then gives
     * node's rank directly, achieving O(logN) per query.
     *
     * LeetCode runtime: 0 ms - 100.00%
     * Memory: 38.2 MB - 78.98%
     */
    public int kthSmallest(TreeNode root, int k) {
        // Iterative in-order traversal: nodes are popped in ascending order,
        // so the k-th pop is the k-th smallest value.
        Deque<TreeNode> stack = new LinkedList<>();
        while (root != null || !stack.isEmpty()) {
            while (root != null) {
                stack.push(root);
                root = root.left;
            }
            root = stack.pop();
            if (--k == 0)
                return root.val;
            root = root.right;
        }
        return -1;
    }

    @Test
    void testKthSmallest() {
        test(this::kthSmallest);
    }

    // Shared state for the recursive variant: remaining rank and the found node.
    int k;
    TreeNode find;

    /**
     * Recursive in-order traversal; stops descending once the k-th node is found.
     *
     * LeetCode runtime: 0 ms - 100.00%
     * Memory: 37.8 MB - 99.17%
     */
    public int recurMethod(TreeNode root, int k) {
        this.k = k;
        find = null;
        recur(root);
        return find.val;
    }

    private void recur(TreeNode root) {
        if (root == null || find != null)
            return;
        recur(root.left);
        if (--k == 0) {
            find = root;
            return;
        }
        recur(root.right);
    }

    @Test
    void testRecurMethod() {
        test(this::recurMethod);
    }
}
|
/**
 * Inject the SDK's node_modules path into the Node.js module resolution.
 * Reworked from eslint-config-axway.
 *
 * Monkey-patches Node's internal resolver so that every require() also
 * searches the SDK's own node_modules directory as a fallback.
 *
 * @param {String} sdkPath The path of the SDK
 */
exports.injectSDKModulePath = function(sdkPath) {
    var Module = require('module').Module;
    var origFindPath = Module._findPath;
    var path = require('path');
    var sdkDir = [
        path.join(sdkPath, 'node_modules')
    ];
    // NOTE(review): Module._findPath is a private Node API and may change between
    // Node versions; `paths` is assumed to always be an array here — confirm no
    // caller hits the null-paths fast path. Calling this twice stacks the patch.
    Module._findPath = function (request, paths, isMain) {
        return origFindPath.call(this, request, paths.concat(sdkDir), isMain);
    };
}
/**
* Parse the output of winappdeploycmd list to check if an app is installed
* @param {String} output The output from winappdeploycmd
* @param {String} tiappID The tiapp id value
* @return {Boolean} Whether the app is installed or not
*/
exports.parseListData = function(output, tiappID) {
const idregex = new RegExp(tiappID, 'i');
if (idregex.test(output)) {
// Obtain the appid
return output.split('\r\n').filter(appID => idregex.test(appID));
} else {
return false;
}
}
|
<reponame>markusmeresma/SEM-coursework<filename>src/main/java/com/napier/sem/queries/WorldQueries.java
package com.napier.sem.queries;
import com.napier.sem.objects.Country;
import javax.xml.transform.Result;
import java.sql.*;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class WorldQueries {

    /** JDBC connection shared by all queries; owned and closed by the caller. */
    private final Connection conn;

    public WorldQueries(Connection conn) {
        this.conn = conn;
    }

    /**
     * Gets top N populated countries in a continent provided by the user.
     *
     * @param continent continent name (matched with SQL LIKE)
     * @param number how many countries to return
     * @return list of countries, highest population first
     * @throws IllegalArgumentException when number exceeds the available count
     */
    public List<Country> getTopNPopulatedCountriesInAContinent(String continent, int number) {
        List<Country> result = getContinentPopulation(continent);
        // BUG FIX: was `>=`, which rejected asking for exactly all countries.
        if (number > result.size()) {
            throw new IllegalArgumentException("The provided number is invalid. The number of countries in the continent is " + result.size());
        }
        return result.subList(0, number);
    }

    /**
     * Gets top N populated countries in the world provided by the user.
     *
     * @param number how many countries to return
     * @return list of countries, highest population first
     * @throws IllegalArgumentException when number exceeds the available count
     */
    public List<Country> getTopNPopulatedCountriesInTheWorld(int number) {
        List<Country> result = getCountriesSortedDescending();
        // BUG FIX: was `>=`, which rejected asking for exactly all countries.
        if (number > result.size()) {
            throw new IllegalArgumentException("The provided number is invalid. The number of countries in the world is " + result.size());
        }
        return result.subList(0, number);
    }

    /**
     * Gets world population from highest to lowest.
     * @return List of countries
     */
    public List<Country> getPopulationDescending() {
        return getCountriesSortedDescending();
    }

    /**
     * Gets world population from lowest to highest.
     * @return sorted countries
     */
    public List<Country> getPopulationAscending() {
        List<Country> result = getCountriesSortedDescending();
        Collections.reverse(result);
        return result;
    }

    /**
     * Helper that loads all countries ordered by population, highest first.
     * Returns an empty list on SQL errors (logged to stdout).
     */
    private List<Country> getCountriesSortedDescending() {
        List<Country> result = new ArrayList<>();
        String query = "SELECT * " +
                "FROM country " +
                "ORDER BY population " +
                "DESC;";
        try (Statement statement = conn.createStatement()) {
            // USE returns no result set, so execute() (not executeQuery()) is correct.
            statement.execute("use world;");
            // try-with-resources closes the statement and result set (the
            // original leaked both).
            try (PreparedStatement preparedStatement = conn.prepareStatement(query);
                 ResultSet resultSet = preparedStatement.executeQuery()) {
                while (resultSet.next()) {
                    String countryName = resultSet.getString("name");
                    String continent = resultSet.getString("continent");
                    String region = resultSet.getString("region");
                    int population = resultSet.getInt("population");
                    result.add(new Country(countryName, continent, region, population));
                }
            }
        } catch (SQLException ex) {
            System.out.println(ex.getMessage());
        }
        return result;
    }

    /**
     * Gets a continent's population from highest to lowest.
     * @return List of countries
     */
    public List<Country> getContinentPopulationDescending(String Continent) {
        return getContinentPopulation(Continent);
    }

    /**
     * Gets a continent's population from lowest to highest.
     * @return list of sorted countries
     */
    public List<Country> getContinentPopulationAscending(String Continent) {
        List<Country> result = getContinentPopulationDescending(Continent);
        Collections.reverse(result);
        return result;
    }

    /**
     * Helper that loads a continent's countries ordered by population, highest
     * first. Returns an empty list on SQL errors (logged to stdout).
     */
    private List<Country> getContinentPopulation(String Continent) {
        List<Country> result = new ArrayList<>();
        String query = "SELECT * " +
                "FROM country " +
                "WHERE continent LIKE ? " +
                "ORDER BY population " +
                "DESC;";
        try (Statement statement = conn.createStatement()) {
            statement.execute("use world;");
            try (PreparedStatement preparedStatement = conn.prepareStatement(query)) {
                preparedStatement.setString(1, Continent);
                try (ResultSet resultSet = preparedStatement.executeQuery()) {
                    while (resultSet.next()) {
                        String countryName = resultSet.getString("name");
                        String continent = resultSet.getString("continent");
                        String region = resultSet.getString("region");
                        int population = resultSet.getInt("population");
                        result.add(new Country(countryName, continent, region, population));
                    }
                }
            }
        } catch (SQLException ex) {
            System.out.println(ex.getMessage());
        }
        return result;
    }

    /**
     * Method to get the population of a country.
     * @param name country name (matched with SQL LIKE)
     * @return matching countries with name and population set, or null on error
     * @throws IllegalArgumentException when name is null or empty
     */
    public List<Country> getCountryPopulation(String name)
    {
        if (name == null || name.isEmpty()) {
            throw new IllegalArgumentException("Country name is null or empty");
        }
        String query =
                "SELECT country.Name, country.Population "
                        + "FROM country "
                        + "WHERE country.Name LIKE ? ";
        // The original also created an unused Statement and closed nothing.
        try (PreparedStatement preparedStatement = conn.prepareStatement(query)) {
            preparedStatement.setString(1, name);
            try (ResultSet resultSet = preparedStatement.executeQuery()) {
                List<Country> result = new ArrayList<>();
                while (resultSet.next()) {
                    Country country = new Country();
                    country.setName(resultSet.getString("country.Name"));
                    country.setPopulation(resultSet.getInt("country.Population"));
                    result.add(country);
                }
                return result;
            }
        }
        catch (Exception e) {
            System.out.println(e.getMessage());
            System.out.println("Failed to get country population");
        }
        return null;
    }

    /**
     * Method to get the total world population (sum of all country populations).
     * @return the total, or 0 on error
     */
    public long getWorldPopulation()
    {
        String query =
                "SELECT SUM(country.Population) AS Population "
                        + "FROM country ";
        try (PreparedStatement preparedStatement = conn.prepareStatement(query);
             ResultSet resultSet = preparedStatement.executeQuery()) {
            long result = -1;
            while (resultSet.next()) {
                result = resultSet.getLong("Population");
            }
            return result;
        }
        catch (Exception e) {
            System.out.println(e.getMessage());
            System.out.println("Failed to get world population");
        }
        return 0;
    }
}
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.hello = (event, _context, cb) => {
const response = {
statusCode: 200,
body: JSON.stringify({
message: 'Go Serverless Webpack (Typescript) v1.0! Your function executed successfully!',
input: event,
}),
};
cb(null, response);
};
|
def flatten_list(nested_list):
    """Return a flat list with every (arbitrarily deep) nested list expanded.

    Generalized from the original, which only flattened one level; behavior on
    single-level input is unchanged. Non-list items are appended as-is.
    """
    flattened_list = []
    for item in nested_list:
        # isinstance is preferred over type(...) == list (handles subclasses).
        if isinstance(item, list):
            flattened_list += flatten_list(item)
        else:
            flattened_list.append(item)
    return flattened_list

nested_list = [1,[2,3],4]
result = flatten_list(nested_list)
print(result)
# Copy the shared boilerplate files into every types-*/ package directory.
# `echo` expands the glob; `xargs -n 1` runs one cp per directory, with the
# directory appended as cp's final (destination) argument.
echo types-*/ | xargs -n 1 cp LICENSE README.md tsconfig.json .gitignore
|
#! /bin/sh
# Example install script for Unity3D project. See the entire example: https://github.com/JonathanPorta/ci-build
# This link changes from time to time. I haven't found a reliable hosted installer package for doing regular
# installs like this. You will probably need to grab a current link from: http://unity3d.com/get-unity/download/archive
# NOTE(review): the URL is pinned to Unity 5.1.2f1 and curl errors are not
# checked — a failed download would still attempt the install below.
echo 'Downloading from http://netstorage.unity3d.com/unity/afd2369b692a/MacEditorInstaller/Unity-5.1.2f1.pkg: '
curl -o Unity.pkg http://netstorage.unity3d.com/unity/afd2369b692a/MacEditorInstaller/Unity-5.1.2f1.pkg
echo 'Installing Unity.pkg'
sudo installer -dumplog -package Unity.pkg -target /
#!/bin/bash -x
# Author: Paolo Cumani, Tarek Hassan, Abelardo Moralejo
# ATTENTION: VALUES TO CHANGE!!! CHANGE ALSO THE PATH TO THE DIFFERENT FILES!!!
# It creates the list of already processed flux files divided by particle/zenith/type of observation/direction in order
# from them to be skipped in a new call of flux.py
for zenName in 20deg 40deg; do
  for dir in north south; do
    for exten in point diff; do
      for particle in gamma proton electron; do
        if [ $dir == "north" ]; then
          direction="N"
        else
          direction="S"
        fi
        dirac-dms-user-lfns -w "*tgz" -b /vo.cta.in2p3.fr/user/p/pcumani/MC/PROD3/PARANAL/3HB9/${zenName}/${exten}/${dir}/flux/
        donename="done_${particle}_${exten}_${zenName}_${direction}.lfns"
        # Read the catalogue file directly instead of `cat file | grep` (UUOC).
        grep "${particle}" "vo.cta.in2p3.fr-user-p-pcumani-MC-PROD3-PARANAL-3HB9-${zenName}-${exten}-${dir}-flux.lfns" | awk -F "_" '{print $3}' > "${donename}"
        echo "${zenName} $particle ${exten} $dir:"
        # `less` was only acting as `cat` in a pipe; `wc -l < file` prints the bare count.
        wc -l < "${donename}"
      done
    done
  done
done
rm vo*.lfns
|
#!/bin/bash -ex
# Serve the staging repository over HTTP while running `docker build`, then
# shut the server down — even when the build fails.
cd /tmp/staging-repository && python -mSimpleHTTPServer 18080 1>>/tmp/http-logs 2>&1 &
SRV_PROCESS=$!
# BUG FIX: with -e, a failing `docker build` aborted the script before the
# explicit kill ran, leaving the HTTP server orphaned. The EXIT trap always fires.
trap 'kill $SRV_PROCESS' EXIT
if [ -n "$DOCKER_LOCALHOST" ]; then
  REPO_ENV="--build-arg FN_REPO_URL=http://$DOCKER_LOCALHOST:18080"
fi
# "$@" preserves arguments containing spaces, unlike the original unquoted $*.
# $REPO_ENV is intentionally unquoted: it must split into two words (or none).
docker build $REPO_ENV "$@"
|
<reponame>yunsean/yoga
package com.yoga.utility.sms.service;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import com.yoga.core.base.BaseService;
import com.yoga.core.exception.BusinessException;
import com.yoga.core.spring.SpringContext;
import com.yoga.core.utils.StringUtil;
import com.yoga.setting.model.Setting;
import com.yoga.setting.service.SettingService;
import com.yoga.utility.sms.mapper.SmsResultMapper;
import com.yoga.utility.sms.model.SmsResult;
import lombok.extern.log4j.Log4j;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ClassUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider;
import org.springframework.core.task.AsyncTaskExecutor;
import org.springframework.core.type.filter.AssignableTypeFilter;
import org.springframework.scheduling.annotation.Async;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.stereotype.Service;
import javax.annotation.PostConstruct;
import java.util.*;
@Slf4j
@Service
@EnableAsync
public class SmsService extends BaseService {
public final static String ModuleName = "gcf_sms";
public final static String Key_SmsConfig = "sms.setting";
@Autowired
private SpringContext springContext;
@Autowired
private SettingService settingService;
@Autowired
private SmsResultMapper smsResultMapper;
List<Class<?>> smsServices = new ArrayList<>();
@Bean
public AsyncTaskExecutor smsTaskExecutor() {
ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
executor.setThreadNamePrefix("Sms-Executor");
executor.setCorePoolSize(5);
executor.setMaxPoolSize(5);
executor.setQueueCapacity(1000);
executor.initialize();
return executor;
}
@PostConstruct
public void loadSmsService() {
log.debug("查找短信发布服务");
ClassPathScanningCandidateComponentProvider provider = new ClassPathScanningCandidateComponentProvider(false);
provider.addIncludeFilter(new AssignableTypeFilter(SmsFactory.class));
Set<BeanDefinition> beanDefinitionSet = provider.findCandidateComponents("com.yoga.**");
for (BeanDefinition beanDefinition : beanDefinitionSet) {
try {
Class<?> entityClass = ClassUtils.getClass(beanDefinition.getBeanClassName());
smsServices.add(entityClass);
log.debug("找到短信发布服务:" + entityClass.getSimpleName());
} catch (Exception ex) {
ex.printStackTrace();
}
}
log.debug("共找到" + smsServices.size() + "个短信发布服务");
}
public Setting getSetting(long tenantId) {
Setting setting = settingService.get(tenantId, ModuleName, Key_SmsConfig);
return setting;
}
public void setSetting(long tenantId, String value, String showValue) {
settingService.save(tenantId, ModuleName, Key_SmsConfig, value, showValue);
}
@Async("smsTaskExecutor")
public void sendSms(long tenantId, String mobile, String content, String action) {
SmsActor actor = getActor(tenantId);
sendSms(tenantId, actor, mobile, content, action);
}
@Async("smsTaskExecutor")
public void sendSms(long tenantId, SmsActor actor, String mobile, String content, String action) {
if (actor == null) return;
String result = actor.sendSms(mobile, content);
boolean isOk = StringUtil.isBlank(result);
SmsResult smsResult = new SmsResult(tenantId, mobile, action, content, isOk, result);
smsResultMapper.insert(smsResult);
}
public void sendSmsSync(long tenantId, String mobile, String content, String action) {
SmsActor actor = getActor(tenantId);
sendSmsSync(tenantId, actor, mobile, content, action);
}
public void sendSmsSync(long tenantId, SmsActor actor, String mobile, String content, String action) {
if (actor == null) return;
String result = actor.sendSms(mobile, content);
boolean isOk = StringUtil.isBlank(result);
SmsResult smsResult = new SmsResult(tenantId, mobile, action, content, isOk, result);
smsResultMapper.insert(smsResult);
if (!isOk) throw new BusinessException(result);
}
public SmsActor getActor(long tenantId) {
Setting setting = getSetting(tenantId);
if (setting == null) throw new BusinessException("短信网关尚未配置");
JSONObject jsonObject = JSON.parseObject(setting.getValue());
String service = null;
Map<String, String> configs = new HashMap<>();
for (String key : jsonObject.keySet()) {
if (key.equals("service")) service = jsonObject.getString(key);
else configs.put(key, jsonObject.getString(key));
}
if (StringUtil.isBlank(service)) throw new BusinessException("短信网关配置无效");
SmsFactory smsFactory = (SmsFactory) springContext.getApplicationContext().getBean(service);
if (smsFactory == null) throw new BusinessException("无法连接短信网关");
SmsActor actor = smsFactory.build(configs);
if (actor == null) throw new BusinessException("无法访问短信网关");
return actor;
}
public Map<String, String> services() {
Map<String, String> names = new HashMap<>();
for (Class<?> service : smsServices) {
SmsFactory smsFactory = (SmsFactory) springContext.getApplicationContext().getBean(service);
String[] name = springContext.getApplicationContext().getBeanNamesForType(service);
if (name == null || name.length < 1) continue;
names.put(name[0], smsFactory.getName());
}
return names;
}
public Map<String, String> configItems(String service) {
try {
SmsFactory smsFactory = (SmsFactory) springContext.getApplicationContext().getBean(service);
return smsFactory.configItems();
} catch (Exception ex) {
ex.printStackTrace();
return null;
}
}
public String getServiceName(String service) {
try {
SmsFactory smsFactory = (SmsFactory) springContext.getApplicationContext().getBean(service);
return smsFactory.getName();
} catch (Exception ex) {
ex.printStackTrace();
return null;
}
}
}
|
<filename>src/main/java/net/toshirohex/lycurgus/Lycurgus.java<gh_stars>0
package net.toshirohex.lycurgus;
import net.fabricmc.api.ModInitializer;
import net.fabricmc.fabric.api.client.itemgroup.FabricItemGroupBuilder;
import net.minecraft.block.Blocks;
import net.minecraft.item.ItemGroup;
import net.minecraft.item.ItemStack;
import net.minecraft.util.Identifier;
import net.minecraft.world.gen.YOffset;
import net.minecraft.world.gen.decorator.CountPlacementModifier;
import net.minecraft.world.gen.decorator.HeightRangePlacementModifier;
import net.minecraft.world.gen.decorator.SquarePlacementModifier;
import net.minecraft.world.gen.feature.*;
import net.toshirohex.lycurgus.registry.ModArmors;
import net.toshirohex.lycurgus.registry.ModBlocks;
import net.toshirohex.lycurgus.registry.ModItems;
public class Lycurgus implements ModInitializer {

    /** Mod identifier used as the namespace for all registry names. */
    public static final String MOD_ID = "lycurgus";

    // Creative-tab group. The icon supplier is evaluated lazily by Fabric.
    // NOTE(review): assumes ModItems.Ingots has at least 3 entries by the time
    // the tab icon is requested — confirm ModItems initialization order.
    public static final ItemGroup ITEM_GROUP = FabricItemGroupBuilder.build(
            new Identifier(MOD_ID, "general"),
            () -> new ItemStack(ModItems.Ingots[2]));

    @Override
    public void onInitialize() {
        // Registration order: items, then blocks, then armor.
        ModItems.registerItems();
        ModBlocks.registerBlocks();
        ModArmors.registerItems();
    }
}
|
#!/bin/bash
# Few-shot fine-tuning of a CTRL-mUNITER checkpoint on xGQA.
# Sweeps three learning rates; each run fine-tunes from the zero-shot GQA
# checkpoint on ${SHOT}-shot ${LANG} data and logs to a per-lr file.
TASK=15
SHOT=1
LANG=en
MODEL=ctrl_muniter
MODEL_CONFIG=ctrl_muniter_base
TASKS_CONFIG=iglue_fewshot_tasks_boxes36.dtu
TRTASK=xGQA${LANG}_${SHOT}
# Train on the few-shot split, validate on the dev split of the same language.
TEXT_TR=/home/projects/ku_00062/data/xGQA/annotations/few_shot/${LANG}/train_${SHOT}.pkl
TEXT_TE=/home/projects/ku_00062/data/xGQA/annotations/few_shot/${LANG}/dev.pkl
PRETRAINED=/home/projects/ku_00062/checkpoints/iglue/zero_shot/xgqa/${MODEL}/GQA_${MODEL_CONFIG}/pytorch_model_best.bin
here=$(pwd)
source /home/projects/ku_00062/envs/iglue/bin/activate
# train_task.py and its config_tasks/ directory live in the volta checkout.
cd ../../../../../../volta
for lr in 1e-4 5e-5 1e-5; do
OUTPUT_DIR=/home/projects/ku_00062/checkpoints/iglue/few_shot.mt/xgqa/${TRTASK}/${MODEL}/${lr}
LOGGING_DIR=/home/projects/ku_00062/logs/iglue/few_shot.mt/xgqa/${TRTASK}/${lr}/${MODEL_CONFIG}
python train_task.py \
  --bert_model /home/projects/ku_00062/huggingface/bert-base-multilingual-cased --config_file config/${MODEL_CONFIG}.json \
  --from_pretrained ${PRETRAINED} \
  --tasks_config_file config_tasks/${TASKS_CONFIG}.yml --task $TASK --num_epoch 20 \
  --train_split train_${LANG}_${SHOT} --train_annotations_jsonpath $TEXT_TR \
  --val_split dev_${LANG} --val_annotations_jsonpath $TEXT_TE \
  --lr $lr --batch_size 8 --gradient_accumulation_steps 1 --num_workers 0 --save_every_num_epochs 5 \
  --adam_epsilon 1e-6 --adam_betas 0.9 0.999 --adam_correct_bias --weight_decay 0.0001 --warmup_proportion 0.1 --clip_grad_norm 1.0 \
  --output_dir ${OUTPUT_DIR} \
  --logdir ${LOGGING_DIR} \
  &> ${here}/train.${lr}.log
done
deactivate
|
import random


def deal_cards(players, num_cards):
    """Deal ``num_cards`` from a freshly shuffled 52-card deck to each player.

    Args:
        players: iterable of hashable player identifiers.
        num_cards: number of cards dealt to each player.

    Returns:
        dict mapping each player to a list of card strings like 'AH' or '10S'.

    Raises:
        ValueError: if the deck does not hold enough cards for the request
            (the original raised an opaque IndexError from deck.pop()).
    """
    suits = ['H', 'D', 'C', 'S']
    ranks = ['2', '3', '4', '5', '6', '7', '8', '9', '10', 'J', 'Q', 'K', 'A']
    deck = [rank + suit for suit in suits for rank in ranks]  # Standard 52-card deck
    players = list(players)  # Allow any iterable, and a stable iteration order
    if len(players) * num_cards > len(deck):
        raise ValueError(
            "Not enough cards in the deck for %d players x %d cards"
            % (len(players), num_cards)
        )
    random.shuffle(deck)  # Shuffle the deck
    dealt_cards = {}
    for player in players:
        dealt_cards[player] = [deck.pop() for _ in range(num_cards)]  # Deal to each player
    return dealt_cards
import numpy as np
class LabelProcessor:
    """Post-processes sound-event-localization label arrays.

    NOTE(review): label layout is assumed to be [frames?, steps, 3 * nb_classes]
    with columns [activity | azimuth | elevation] per class — confirm against the
    data pipeline; only the azimuth/elevation slicing below evidences this.
    """

    def __init__(self, azi_only, nb_classes, xyz_def_zero, default_ele):
        # azi_only: when True, azimuth columns are converted to Cartesian x/y.
        self._azi_only = azi_only
        # nb_classes: number of event classes (determines the column slicing).
        self._nb_classes = nb_classes
        # xyz_def_zero: when True, entries at the default elevation are zeroed.
        self._xyz_def_zero = xyz_def_zero
        # default_ele: sentinel elevation value marking "no event".
        self._default_ele = default_ele

    def _split_in_seqs(self, label):
        # Implementation not shown; currently returns None, so
        # process_label_data would fail downstream until this is filled in.
        pass

    def process_label_data(self, label):
        label = self._split_in_seqs(label)
        if self._azi_only:
            # Get Cartesian coordinates from azi/ele (degrees -> radians first)
            azi_rad = label[:, :, self._nb_classes:2 * self._nb_classes] * np.pi / 180
            x = np.cos(azi_rad)
            y = np.sin(azi_rad)
            # Set default Cartesian x,y,z coordinates to 0,0,0
            if self._xyz_def_zero:
                # NOTE(review): np.where returns indices relative to the SLICED
                # elevation view, but they are applied to the full `label`
                # array — this looks like it zeroes the wrong columns. Also,
                # computed x/y above are never written back. Verify intent.
                no_ele_ind = np.where(label[:, :, 2 * self._nb_classes:] == self._default_ele)
                label[no_ele_ind] = 0  # Set default Cartesian coordinates to 0
        # Return processed label data
        return label
def find_first_unique_char_index(s):
    """Return the index of the first character that occurs exactly once in s,
    or -1 when every character repeats (or s is empty)."""
    # First pass: count occurrences of every character.
    counts = {}
    for ch in s:
        counts[ch] = counts.get(ch, 0) + 1
    # Second pass: the first position whose character has count 1 wins.
    for idx, ch in enumerate(s):
        if counts[ch] == 1:
            return idx
    return -1
#!/bin/bash
# Single reproducible dieharder run: -d selects the individual statistical
# test, -g the generator under test, and -S pins the RNG seed so the result
# repeats. (See `dieharder -l` for the test table and `dieharder -g -1` for
# the generator table; the numeric meanings depend on the installed version.)
dieharder -d 201 -g 48 -S 1474965233
|
<reponame>ndinakar/Phase4-SCSB-Gateway
package org.recap.model;
import lombok.Getter;
import lombok.Setter;
/**
 * Created by sudhishk on 16/12/16.
 *
 * Response model for an item check-in operation. NOTE(review): the field set
 * (magneticMedia, resensitize, securityInhibit, sortBin, ...) mirrors the
 * SIP2 check-in response — confirm against the ILS integration this maps to.
 */
@Getter
@Setter
public class ItemCheckinResponse extends BaseResponseItem {
    private boolean alert;
    private boolean magneticMedia;
    private boolean resensitize;
    private String dueDate;
    private String feeType ;
    private String securityInhibit;
    private String currencyType;
    private String feeAmount;
    private String mediaType;
    private String permanentLocation;
    private String sortBin;
    private String collectionCode;
    private String callNumber;
    private String destinationLocation;
    private String alertType;
    private String holdPatronId;
    private String holdPatronName;
    private boolean processed;
    private String bibId;
}
|
# -*- encoding: utf-8 -*-
'''
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
Version 2, December 2004
-
Copyright (C) 2008 <NAME> <<EMAIL>>
Everyone is permitted to copy and distribute verbatim or modified
copies of this license document, and changing it is allowed as long
as the name is changed.
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
---
0. You just DO WHAT THE FUCK YOU WANT TO.
1. Just be happy every day.
-----
Author Connard
Github https://github.com/lovelacelee
Date 2021-12-20 16:43:41
LastEditTime 2021-12-21 15:08:23
LastEditors Lee
Description
FilePath /clspy.py
Copyright 2021 Lovelace, All Rights Reserved.
'''
import sys
import os

# Make the clspy package importable when this script is launched from the
# project root (it must be run from there for this to work).
sys.path.append(os.getcwd())

from clspy import cli

# Thin launcher: all real work happens in clspy.cli.main().
if __name__ == '__main__':
    cli.main()
|
<filename>src/math/Boj4159.java
package math;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
/**
 *
 * @author exponential-e
 * Baekjoon 4159: Alaska
 *
 * @see https://www.acmicpc.net/problem/4159/
 *
 */
public class Boj4159 {
    private static final String I = "IMPOSSIBLE\n";
    private static final String P = "POSSIBLE\n";

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringBuilder sb = new StringBuilder();
        // Each test case starts with its station count; 0 terminates the input.
        while(true){
            int n = Integer.parseInt(br.readLine());
            if(n == 0) break;
            int[] recharge = new int[n];
            for(int i = 0; i < n; i++){
                recharge[i] = Integer.parseInt(br.readLine());
            }
            sb.append(reached(n, recharge));
        }
        System.out.print(sb.toString());
    }

    // Decides whether the trip is possible given the recharge-station positions.
    // NOTE(review): constants 200/1422/100 presumably encode the vehicle's range
    // between charges and the total route length from the problem statement —
    // verify against the Baekjoon 4159 description.
    private static String reached(int n, int[] arr){
        Arrays.sort(arr);
        int prev = arr[0];
        // Any gap between consecutive stations larger than 200 is fatal.
        for(int i = 1; i < n; i++){
            if(arr[i] - prev > 200) return I;
            prev = arr[i];
        }
        if(1422 - arr[n - 1] > 100) return I; // Delta Junction round trip possible?
        return P;
    }
}
|
// Bootstraps Backbone Forms with this project's custom validators, templates
// and editors. Every require() below is executed for its side effects
// (registering itself on Backbone.Form), so the ORDER of these requires is
// significant — do not reorder.
var $ = require('jquery');
var _ = require('underscore');
var Backbone = require('backbone');
require('backbone-forms');
require('rangeslider.js');
Backbone.$ = $;
// Custom validators
_.extend(
  Backbone.Form.validators,
  {
    columnType: require('./validators/column-type.js'),
    interval: require('./validators/interval.js'),
    requiredBoolean: require('./validators/required-boolean.js')
  }
);
// Keep a reference to the stock Form before the overrides below replace it.
Backbone.Form.Original = Backbone.Form;
// Requiring custom form components
require('./form.js');
require('./fieldset-template.js');
require('./field.js');
require('./editors/base.js');
require('./editors/text.js');
require('./editors/textarea.js');
require('./editors/number/number.js');
require('./editors/select/select-view.js');
require('./editors/select/select-placeholder.js');
require('./editors/select/multi-select-view.js');
require('./editors/radio/radio.js');
require('./editors/enabler/enabler-view.js');
require('./editors/toggle/toggle.js');
require('./editors/enabler-editor/enabler-editor-view.js');
require('./editors/node-dataset/node-dataset-view.js');
require('./editors/operators/operators-view.js');
require('./editors/list/list.js');
require('./editors/list/list-item.js');
require('./editors/sortable-list.js');
require('./editors/legend/category-item.js');
require('./editors/slider/slider.js');
require('./editors/fill/fill.js');
require('./editors/taglist/taglist.js');
require('./editors/datetime/datetime.js');
require('./editors/select/suggest-view.js');
require('./editors/code-editor');
require('./editors/data-observatory-measurements/data-observatory-measurements-view.js');
require('./editors/data-observatory-measurements/measurement-item.js');
require('./editors/lazy-select/lazy-select-view.js');
|
# https://developer.zendesk.com/rest_api/docs/core/bookmarks#create-bookmark
# Configures a "create bookmark" request for the surrounding zdesk request
# machinery. `method` and `url` are intentionally GLOBAL — the caller that
# invokes this function reads them afterwards — so do not mark them `local`.
zdesk_bookmark_create () {
    method=POST
    url=/api/v2/bookmarks.json
}
<filename>tapestry-ioc/src/main/java/org/apache/tapestry5/ioc/def/DecoratorDef2.java<gh_stars>10-100
// Copyright 2010 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.ioc.def;
import org.apache.tapestry5.ioc.Markable;
/**
 * Extended version of {@link org.apache.tapestry5.ioc.def.DecoratorDef} introduced to determine any
 * module method annotated with {@link org.apache.tapestry5.ioc.annotations.Decorate} as a decorator
 * method. Before version 5.2 a decorator was applied to any service whose id matched the pattern
 * provided
 * by {@link org.apache.tapestry5.ioc.annotations.Match} annotation. As of version 5.2 a service to
 * decorate may also be identified by a
 * combination of {@link org.apache.tapestry5.ioc.annotations.Decorate} annotation and
 * a set of marker annotations.
 *
 * @since 5.2.2
 */
public interface DecoratorDef2 extends DecoratorDef, Markable
{
}
|
import React, { Component } from 'react';
import Header from '../components/Header';
import YoutubePlayer from '../components/YoutubePlayer';
import RefreshButton from '../components/RefreshButton';
import GitHubRibbon from '../components/GitHubRibbon';
import fetch from 'isomorphic-fetch';
import ga from 'react-ga';
import config from '../../../config';
class App extends Component {
constructor(props) {
super(props);
this.refresh = this.refresh.bind(this);
this.videos = [];
this.viewedVideos = [];
this.showWarning = false;
}
componentDidMount() {
ga.initialize(config.gaTrackingId);
ga.pageview('/');
this.refresh();
}
updateViewedVideos() {
const id = this.videos[this.selectedIndex].id.videoId;
if (this.viewedVideos.indexOf(id) === -1) {
this.viewedVideos.unshift(id);
if (this.viewedVideos.length > 50) {
this.viewedVideos.pop();
}
}
}
refresh() {
ga.event({category: 'Video', action: 'Refresh video'});
fetch('/api/latest')
.then(response => response.json())
.then(json => this.processRefresh(json));
}
processRefresh(result) {
const items = result.items;
this.showWarning = items.length < 20 ? true : false;
// sort with most recent first
items.sort((item1, item2) => {
return item1.snippet.publishedAt < item2.snippet.publishedAt ? 1 : -1;
});
if (items.length !== 0) {
this.videos = items;
this.selectRandomVideo();
this.updateViewedVideos();
}
this.forceUpdate();
}
// Smart randomize to not select videos the user has already viewed
selectRandomVideo() {
let unviewed = [];
for (var i in this.videos) {
if (this.viewedVideos.indexOf(this.videos[i].id.videoId) === -1) {
unviewed.push(this.videos[i]);
}
}
if (unviewed.length === 0) {
unviewed = this.videos;
}
const rand = Math.floor(Math.random()*unviewed.length);
const selectedId = unviewed[rand].id.videoId;
for (var i in this.videos) {
if (this.videos[i].id.videoId === selectedId) {
this.selectedIndex = i;
break;
}
}
}
getEmbedUrl(video) {
if (video) {
const videoId = video.id.videoId;
return 'http://www.youtube.com/embed/' + videoId + '?rel=0';
}
}
render() {
const url = this.getEmbedUrl(this.videos[this.selectedIndex]);
return (
<div className='content'>
<GitHubRibbon />
<div className='hero'>
<Header />
</div>
<div className='floating-wrapper'>
<YoutubePlayer url={url} />
<RefreshButton clickHandler={this.refresh} />
</div>
{this.showWarning &&
<div className='notification warning'>
We've detected some problems with the YouTube API. You may experience issues loading new videos at the moment. Sorry :(
</div>
}
</div>
);
}
}
export default App;
|
<reponame>remiver/QuiFFT<gh_stars>10-100
package org.quifft.params;
import org.quifft.output.BadParametersException;
/**
 * Validates {@link FFTParameters} prior to the computation of FFT and throws a {@link BadParametersException} if
 * any invalid parameters are found
 */
public class ParameterValidator {

    /**
     * Runs through checklist of parameter validations and throws exception if any issues are identified.
     * The checks run in a fixed order, so for input with multiple problems the FIRST failing check
     * determines the exception message — preserve this order when editing.
     * @param params parameters of the FFT to be computed
     * @param isFFTStream true if is FFTStream, false if is FFTResult
     * @throws BadParametersException if there is an invalid parameter
     */
    public static void validateFFTParameters(FFTParameters params, boolean isFFTStream) {
        // window size must be > 0
        if(params.windowSize <= 0)
            throw new BadParametersException(String.format("Window size must be positive; " +
                    "was set to %d", params.windowSize));

        // window size must be a power of 2 if num points is not set
        if(params.numPoints == null && !isPow2(params.windowSize))
            throw new BadParametersException(String.format("If number of points is not set, window size must be a " +
                    "power of 2; was set to %1$d. \nIf you'd like to use a window of size %1$d, " +
                    "set numPoints to the next power of 2 greater than %1$d so the signal will " +
                    "be zero-padded up to that length.", params.windowSize));

        // window function cannot be null
        if(params.windowFunction == null)
            throw new BadParametersException("Window function cannot be null");

        // window overlap must be positive and less than 1
        if(params.windowOverlap < 0 || params.windowOverlap >= 1)
            throw new BadParametersException(String.format("Window overlap must be a positive value " +
                    "between 0 and 0.99; was set to %f", params.windowOverlap));

        // num points, if set, must be positive
        if(params.numPoints != null && params.numPoints < 0)
            throw new BadParametersException(String.format("Number of points in FFT must be positive; " +
                    "was set to %d", params.numPoints));

        // num points, if set, must be greater than or equal to window size
        if(params.numPoints != null && params.numPoints < params.windowSize)
            throw new BadParametersException(String.format("Number of points in FFT must be at least as large as " +
                    "window size; window size was %d but numPoints was only %d",
                    params.windowSize, params.numPoints));

        // num points, if set, must be a power of 2
        if(params.numPoints != null && !isPow2(params.numPoints))
            throw new BadParametersException(String.format("Number of points in FFT must be a power of two; " +
                    "was set to %d", params.numPoints));

        // normalization without dB scale can't be on for an FFTStream
        if(isFFTStream && !params.useDecibelScale && params.isNormalized)
            throw new BadParametersException("Normalization can't be used without also using dB scale for an FFTStream " +
                    "because it doesn't make any sense -- normalization relies on knowing the maximum amplitude across " +
                    "any frequency in the entire file, and FFTStream only knows the maximum frequency of one window " +
                    "at a time. If you'd like to use normalization with an FFTStream, it's recommended that you " +
                    "implement this yourself");
    }

    /**
     * Returns true if {@code n} is a power of two.
     * NOTE(review): the {@code n > 1} guard means 1 (= 2^0) is NOT accepted,
     * so a windowSize/numPoints of 1 is rejected — confirm this is intended.
     */
    private static boolean isPow2(int n) {
        return n > 1 && ((n & (n - 1)) == 0);
    }
}
|
# Provisioning entry point: pulls in every cloudless-box component recipe.
# NOTE(review): order appears significant — base system and accounts come
# before language runtimes, runtimes before deploy/webserver — confirm
# before reordering.
include_recipe 'cloudless-box::essentials'
include_recipe 'cloudless-box::packages'
include_recipe 'cloudless-box::accounts'
include_recipe 'cloudless-box::certificates'
# Language runtimes and their package managers
include_recipe 'cloudless-box::ruby'
include_recipe 'cloudless-box::gems'
include_recipe 'cloudless-box::elixir'
include_recipe 'cloudless-box::bower'
include_recipe 'cloudless-box::meteor'
# Data stores
include_recipe 'cloudless-box::postgresql'
include_recipe 'cloudless-box::mongodb'
include_recipe 'cloudless-box::redis'
# Application rollout and host hardening
include_recipe 'cloudless-box::environment'
include_recipe 'cloudless-box::deploy'
include_recipe 'cloudless-box::webserver'
include_recipe 'cloudless-box::firewall'
include_recipe 'cloudless-box::backup'
|
#!/bin/bash
# Benchmark driver: runs the published time_to_main binary repeatedly.
# NOTE(review): the banner says ".NET 5" but the binary path says
# netcoreapp3.0 — confirm which is correct before trusting the label.
iterations=10

# Prints the horizontal rule used around the banner and the footer.
separator() {
    echo "----------------------------------------------------------------"
}

separator
echo "Running $iterations iterations of .NET 5 (Plus 1 warm-up iteration.)."
separator

# Iteration 0 is the warm-up run, hence 0..iterations inclusive.
for i in $(seq 0 "$iterations"); do
    echo "Iteration $i"
    $MONOCMD bin/Release/netcoreapp3.0/linux-x64/publish/time_to_main
done

separator
echo "Finished."
separator
|
#!/bin/bash
# Smoke test: writes one key of an application's global state via `goal app
# call`, then reads the global state back. Requires a running local network
# with a Primary node at ../../net1/Primary.
date '+keyreg-teal-test start %Y%m%d_%H%M%S'
set -e
set -x
set -o pipefail
export SHELLOPTS
gcmd="goal -d ../../net1/Primary"
# First funded account from `goal account list` (3rd column of last row).
MAIN=$(${gcmd} account list|awk '{ print $3 }'|tail -1)
APP_ID=1
INDEX=7
VALUE=129
# create transactions
${gcmd} app call -f "$MAIN" \
    --app-id "$APP_ID" \
    --app-arg "str:set_int" \
    --app-arg "int:$INDEX" \
    --app-arg "int:$VALUE"
# read global state
${gcmd} app read --app-id "$APP_ID" --guess-format --global --from "$MAIN"
|
# Build recipe for tealdeer (tldr client written in Rust).
package set summary "Very fast implementation of tldr in Rust"
package set git.url "https://github.com/dbrgn/tealdeer.git"
package set src.url "https://github.com/dbrgn/tealdeer/archive/v1.5.0.tar.gz"
package set src.sum "00902a50373ab75fedec4578c6c2c02523fad435486918ad9a86ed01f804358a"
package set license "Apache-2.0"
package set dep.pkg "openssl"
package set dep.cmd "base64 patch"
package set bsystem "cargo"

# The base64 blob below decodes to a unified diff against src/types.rs that
# adds target_os = "android" to the platform detection. The payload must stay
# byte-exact — do not reflow or re-indent the heredoc lines.
prepare() {
    {
        cat << EOF
LS0tIGEvc3JjL3R5cGVzLnJzCisrKyBiL3NyYy90eXBlcy5ycwpAQCAtNDMsNyArNDMsNyBAQAog
fQogCiBpbXBsIFBsYXRmb3JtVHlwZSB7Ci0gICAgI1tjZmcodGFyZ2V0X29zID0gImxpbnV4Iild
CisgICAgI1tjZmcoYW55KHRhcmdldF9vcyA9ICJsaW51eCIsIHRhcmdldF9vcyA9ICJhbmRyb2lk
IikpXQogICAgIHB1YiBmbiBjdXJyZW50KCkgLT4gU2VsZiB7CiAgICAgICAgIFNlbGY6OkxpbnV4
CiAgICAgfQpAQCAtNjYsNiArNjYsNyBAQAogCiAgICAgI1tjZmcobm90KGFueSgKICAgICAgICAg
dGFyZ2V0X29zID0gImxpbnV4IiwKKyAgICAgICAgdGFyZ2V0X29zID0gImFuZHJvaWQiLAogICAg
ICAgICB0YXJnZXRfb3MgPSAibWFjb3MiLAogICAgICAgICB0YXJnZXRfb3MgPSAiZnJlZWJzZCIs
CiAgICAgICAgIHRhcmdldF9vcyA9ICJuZXRic2QiLAo=
EOF
    } | base64 -d | patch -p1
}
|
<gh_stars>0
# The Book of Ruby - http://www.sapphiresteel.com
# Even an empty module is itself an object, an instance of class Module.
module MyMod
end

puts MyMod.class
package controllers;
import models.Entity.Avatar;
import models.Entity.Entity;
import models.Occupation.Smasher;
import models.Occupation.Sneak;
import models.Occupation.Summoner;
import models.Stat.Stat;
import models.StateModel.AvatarCreationModel;
import models.StateModel.PlayStateModel;
import utilities.GameStateManager;
import utilities.KeyCommand.KeyCommand;
import utilities.State.State;
import views.PlayStateView;
import java.awt.event.KeyEvent;
/**
* Created by clayhausen on 4/16/16.
*/
public class AvatarCreationViewController extends Controller {

    /** Menu state: tracks which occupation is currently highlighted. */
    private AvatarCreationModel model;

    /**
     * @param model avatar-creation menu state driven by key input
     * @param gsm   game state manager used for state transitions
     */
    public AvatarCreationViewController(AvatarCreationModel model, GameStateManager gsm){
        super(gsm);
        this.model = model;
    }

    @Override
    public void loadKeyCommand() {
        // UP/U and DOWN/J are aliases — share one command object per
        // direction instead of duplicating the anonymous classes.
        KeyCommand moveUp = new KeyCommand() {
            @Override
            public void execute() {
                model.up();
            }
        };
        KeyCommand moveDown = new KeyCommand() {
            @Override
            public void execute() {
                model.down();
            }
        };
        keyMap.put(KeyEvent.VK_UP, moveUp);
        keyMap.put(KeyEvent.VK_DOWN, moveDown);
        keyMap.put(KeyEvent.VK_U, moveUp);
        keyMap.put(KeyEvent.VK_J, moveDown);
        keyMap.put(KeyEvent.VK_ENTER, new KeyCommand() {
            @Override
            public void execute() {
                confirmSelection();
            }
        });
    }

    /**
     * Creates a level-1 avatar for the occupation currently selected in the
     * menu and transitions into the play state. Does nothing if the selection
     * matches no known occupation.
     */
    private void confirmSelection() {
        AvatarCreationModel.AvatarCreationMenu selected = model.getSelected();
        if (selected == AvatarCreationModel.AvatarCreationMenu.SMASHER) {
            playStateTransition(new Avatar(1, new Smasher()));
        } else if (selected == AvatarCreationModel.AvatarCreationMenu.SNEAK) {
            playStateTransition(new Avatar(1, new Sneak()));
        } else if (selected == AvatarCreationModel.AvatarCreationMenu.SUMMONER) {
            playStateTransition(new Avatar(1, new Summoner()));
        }
    }

    // TODO change to appropriate state transition
    /** Wraps the new avatar in a play-state MVC triple and switches to it. */
    private void playStateTransition(Avatar avatar) {
        PlayStateModel model = new PlayStateModel(avatar);
        PlayStateView view = new PlayStateView(500,500,gsm.getCurrentView().getCanvas(),model);
        PlayStateController controller = new PlayStateController(model,gsm, avatar);
        State playState = new State(view,controller);
        gsm.changeState(playState);
    }

    /** This screen needs no per-frame model updates. */
    @Override
    public void updateModel() {
    }
}
|
#!/bin/bash
# Updates or creates a package with the given name (idempotent).
# The name is the directory it will be housed in.
# The name will have @endo/ in package.json by default, if the package is
# new.
#
# Usage: scripts/repackage.sh NAME
# Example: scripts/repackage.sh console
set -ueo pipefail
DIR=$(dirname -- "${BASH_SOURCE[0]}")
cd "$DIR/.."
NAME=$1
PKGJSON=packages/$NAME/package.json
mkdir -p "packages/$NAME/"{src,dist,test}
# Merge the existing package.json (or {} for a new package) with the
# defaults below, then store the result as a git blob whose hash we capture.
# Fix: the devDependencies object previously listed "ava" twice; the
# duplicate entry has been removed.
NEWPKGJSONHASH=$(
  if [ -f "$PKGJSON" ]; then
    cat "$PKGJSON"
  else
    echo {}
  fi | jq --arg name "$NAME" '{
    name: null,
    version: null,
    private: null,
    description: "Description forthcoming.",
    keywords: [],
    author: "Endo contributors",
    license: "Apache-2.0",
    homepage: null,
    repository: null,
    bugs: null,
    type: null,
    main: null,
    module: null,
    browser: null,
    unpkg: null,
    types: null,
    exports: {},
    scripts: {},
    dependencies: {},
    devDependencies: {},
    files: [],
    publishConfig: null,
    eslintConfig: null,
    prettier: null,
    ava: null,
  } + . + {
    name: (.name // "@endo/\($name)"),
    version: (.version // "0.1.0"),
    homepage: (.homepage // "https://github.com/endojs/endo/tree/master/packages/\($name)#readme"),
    repository: {
      type: "git",
      url: "git+https://github.com/endojs/endo.git",
    },
    bugs: {
      url: "https://github.com/endojs/endo/issues",
    },
    type: "module",
    main: "./index.js",
    module: "./index.js",
    types: "./index.d.ts",
    exports: (
      if
        .exports["./package.json"]
      then
        (.exports // {}) + {
          ".": "./index.js",
        }
      else
        ({
          ".": "./index.js",
          "./package.json": "./package.json",
        })
      end
    ),
    scripts: ((.scripts // {}) + {
      "build": "exit 0",
      "test": "ava",
      "lint": "yarn lint:types && yarn lint:js",
      "lint:types": "tsc --build jsconfig.json",
      "lint:js": "eslint .",
      "lint-fix": "eslint --fix .",
    }) | to_entries | sort_by(.key) | from_entries,
    devDependencies: ((.devDependencies // {}) + {
      "@endo/eslint-config": "^0.3.6",
      "ava": "^3.12.1",
      "babel-eslint": "^10.0.3",
      "eslint": "^7.23.0",
      "eslint-config-airbnb-base": "^14.0.0",
      "eslint-config-prettier": "^6.9.0",
      "eslint-plugin-eslint-comments": "^3.1.2",
      "eslint-plugin-import": "^2.19.1",
      "eslint-plugin-prettier": "^3.1.2",
      "prettier": "^1.19.1",
      "typescript": "^4.0.5",
    }) | to_entries | sort_by(.key) | from_entries,
    files: ((.files // []) + [
      "src",
      "LICENSE*",
      "index.js",
      "index.d.ts"
    ]) | sort | unique,
    "publishConfig": {
      "access": "public",
    },
    "eslintConfig": {
      "extends": [
        "@endo"
      ],
    },
    "prettier": {
      "trailingComma": "all",
      "singleQuote": true,
    },
    "ava": {
      "files": [
        "test/**/test-*.js"
      ],
      "timeout": "2m"
    }
  }' | git hash-object -w --stdin
)
# Materialize the merged package.json and the skeleton files.
git cat-file blob "$NEWPKGJSONHASH" > "$PKGJSON"
cp skel/index.d.ts packages/"$NAME"/index.d.ts
cp skel/jsconfig.json packages/"$NAME"/jsconfig.json
cp LICENSE packages/"$NAME"/LICENSE
touch packages/"$NAME"/README.md
touch packages/"$NAME"/NEWS.md
touch packages/"$NAME"/index.js
|
#!/bin/bash
# Copyright (c) 2015-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
# ******************************************************************************
# This is an end-to-end test intended to run on CI.
# You can also run it locally but it's slow.
# ******************************************************************************
# Start in tasks/ even if run from root directory
cd "$(dirname "$0")"
# Removes the temporary CLI/app directories created during the run.
function cleanup {
  echo 'Cleaning up.'
  cd $root_path
  # Uncomment when snapshot testing is enabled by default:
  # rm ./packages/magic-scripts/template/src/__snapshots__/App.test.js.snap
  rm -rf $temp_cli_path $temp_app_path
}
# Error messages are redirected to stderr
# Invoked by the ERR trap; $1 is the failing line number.
function handle_error {
  echo "$(basename $0): ERROR! An error was encountered executing line $1." 1>&2;
  cleanup
  echo 'Exiting with error.' 1>&2;
  exit 1
}
# Invoked on termination signals: clean up, then exit successfully.
function handle_exit {
  cleanup
  echo 'Exiting without error.' 1>&2;
  exit
}
# Runs the locally-installed create-magic-component CLI with all arguments.
# NOTE(review): $* loses argument quoting — "$@" is presumably intended; confirm.
function create_magic_component {
  node "$temp_cli_path"/node_modules/create-magic-component/index.js $*
}
# Exit the script with a helpful error message when any error is encountered
trap 'set +x; handle_error $LINENO $BASH_COMMAND' ERR
# Cleanup before exit on any termination signal
# NOTE(review): SIGKILL cannot actually be trapped (POSIX); listing it here is
# ineffective and some shells reject it — consider removing it.
trap 'set +x; handle_exit' SIGQUIT SIGTERM SIGINT SIGKILL SIGHUP
# Echo every command being executed
set -x
# Go to root
cd ..
root_path=$PWD
npm install
# Lint own code
./node_modules/.bin/eslint --ignore-path .gitignore ./
# ******************************************************************************
# First, test the create-magic-component development environment.
# This does not affect our users but makes sure we can develop it.
# ******************************************************************************
# Test local build command
npm run build
# Check for expected output
test -e build/*.html
test -e build/static/js/*.js
test -e build/static/css/*.css
test -e build/static/media/*.svg
test -e build/favicon.ico
# Run tests with CI flag
CI=true npm test
# Uncomment when snapshot testing is enabled by default:
# test -e template/src/__snapshots__/App.test.js.snap
# Test local start command
npm start -- --smoke-test
# ******************************************************************************
# Next, pack magic-scripts and create-magic-component so we can verify they work.
# ******************************************************************************
# Pack CLI
cd $root_path/packages/create-magic-component
cli_path=$PWD/`npm pack`
# Go to magic-scripts
cd $root_path/packages/magic-scripts
# Like bundle-deps, this script modifies packages/magic-scripts/package.json,
# copying own dependencies (those in the `packages` dir) to bundledDependencies
node $root_path/tasks/bundle-own-deps.js
# Finally, pack magic-scripts
scripts_path=$root_path/packages/magic-scripts/`npm pack`
# ******************************************************************************
# Now that we have packed them, create a clean app folder and install them.
# ******************************************************************************
# Install the CLI in a temporary location
# http://unix.stackexchange.com/a/84980
temp_cli_path=`mktemp -d 2>/dev/null || mktemp -d -t 'temp_cli_path'`
cd $temp_cli_path
npm install $cli_path
# Install the app in a temporary location
temp_app_path=`mktemp -d 2>/dev/null || mktemp -d -t 'temp_app_path'`
cd $temp_app_path
create_magic_component --scripts-version=$scripts_path test-app
# ******************************************************************************
# Now that we used create-magic-component to create an app depending on magic-scripts,
# let's make sure all npm scripts are in the working state.
# ******************************************************************************
# Enter the app directory
cd test-app
# Test the build
npm run build
# Check for expected output
test -e build/*.html
test -e build/static/js/*.js
test -e build/static/css/*.css
test -e build/static/media/*.svg
test -e build/favicon.ico
# Run tests with CI flag
CI=true npm test
# Uncomment when snapshot testing is enabled by default:
# test -e src/__snapshots__/App.test.js.snap
# Test the server
npm start -- --smoke-test
# ******************************************************************************
# Finally, let's check that everything still works after ejecting.
# ******************************************************************************
# Eject...
echo yes | npm run eject
# ...but still link to the local packages
npm link $root_path/packages/babel-preset-magic-component
npm link $root_path/packages/eslint-config-magic-component
npm link $root_path/packages/magic-dev-utils
npm link $root_path/packages/magic-scripts
# Test the build
npm run build
# Check for expected output
test -e build/*.html
test -e build/static/js/*.js
test -e build/static/css/*.css
test -e build/static/media/*.svg
test -e build/favicon.ico
# Run tests, overriding the watch option to disable it.
# `CI=true npm test` won't work here because `npm test` becomes just `jest`.
# We should either teach Jest to respect CI env variable, or make
# `scripts/test.js` survive ejection (right now it doesn't).
npm test -- --watch=no
# Uncomment when snapshot testing is enabled by default:
# test -e src/__snapshots__/App.test.js.snap
# Test the server
npm start -- --smoke-test
# ******************************************************************************
# Test --scripts-version with a version number
# ******************************************************************************
cd $temp_app_path
create_magic_component --scripts-version=0.4.0 test-app-version-number
cd test-app-version-number
# Check corresponding scripts version is installed.
test -e node_modules/magic-scripts
grep '"version": "0.4.0"' node_modules/magic-scripts/package.json
# ******************************************************************************
# Test --scripts-version with a tarball url
# ******************************************************************************
cd $temp_app_path
create_magic_component --scripts-version=https://registry.npmjs.org/magic-scripts/-/magic-scripts-0.4.0.tgz test-app-tarball-url
cd test-app-tarball-url
# Check corresponding scripts version is installed.
test -e node_modules/magic-scripts
grep '"version": "0.4.0"' node_modules/magic-scripts/package.json
# ******************************************************************************
# Test --scripts-version with a custom fork of magic-scripts
# ******************************************************************************
cd $temp_app_path
create_magic_component --scripts-version=magic-scripts-fork test-app-fork
cd test-app-fork
# Check corresponding scripts version is installed.
test -e node_modules/magic-scripts-fork
# Cleanup
cleanup
|
/*
* Copyright 2012-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package me.batizhao.dp.initializr.extension.dependency.springintegration;
import io.spring.initializr.generator.buildsystem.Build;
import io.spring.initializr.generator.buildsystem.Dependency;
import io.spring.initializr.generator.buildsystem.DependencyScope;
import io.spring.initializr.generator.spring.documentation.HelpDocument;
import me.batizhao.dp.initializr.support.implicit.ImplicitDependency;
import me.batizhao.dp.initializr.support.implicit.ImplicitDependency.Builder;
import java.util.Arrays;
import java.util.function.Consumer;
import java.util.stream.Collectors;
/**
 * A registry of available Spring Integration modules.
 *
 * @author <NAME>
 * @author <NAME>
 */
abstract class SpringIntegrationModuleRegistry {

    /**
     * Builds the full registry. Each entry maps one or more Initializr dependency ids to the
     * Spring Integration module that should be added implicitly, plus a reference-doc link
     * for the generated help document.
     */
    static Iterable<ImplicitDependency> create() {
        return create(
                onDependencies("activemq", "artemis").customizeBuild(addDependency("jms"))
                        .customizeHelpDocument(addReferenceLink("JMS Module", "jms")),
                onDependencies("amqp").customizeBuild(addDependency("amqp"))
                        .customizeHelpDocument(addReferenceLink("AMQP Module", "amqp")),
                onDependencies("data-jdbc", "jdbc").customizeBuild(addDependency("jdbc"))
                        .customizeHelpDocument(addReferenceLink("JDBC Module", "jdbc")),
                onDependencies("data-jpa").customizeBuild(addDependency("jpa"))
                        .customizeHelpDocument(addReferenceLink("JPA Module", "jpa")),
                onDependencies("data-mongodb", "data-mongodb-reactive").customizeBuild(addDependency("mongodb"))
                        .customizeHelpDocument(addReferenceLink("MongoDB Module", "mongodb")),
                onDependencies("data-r2dbc").customizeBuild(addDependency("r2dbc"))
                        .customizeHelpDocument(addReferenceLink("R2DBC Module", "r2dbc")),
                onDependencies("data-redis", "data-redis-reactive").customizeBuild(addDependency("redis"))
                        .customizeHelpDocument(addReferenceLink("Redis Module", "redis")),
                onDependencies("geode").customizeBuild(addDependency("gemfire"))
                        .customizeHelpDocument(addReferenceLink("Apache Geode Module", "gemfire")),
                onDependencies("integration").customizeBuild(addDependency("test", DependencyScope.TEST_COMPILE))
                        .customizeHelpDocument(addReferenceLink("Test Module", "testing")),
                onDependencies("kafka", "kafka-streams").customizeBuild(addDependency("kafka"))
                        .customizeHelpDocument(addReferenceLink("Apache Kafka Module", "kafka")),
                onDependencies("mail").customizeBuild(addDependency("mail"))
                        .customizeHelpDocument(addReferenceLink("Mail Module", "mail")),
                onDependencies("rsocket").customizeBuild(addDependency("rsocket"))
                        .customizeHelpDocument(addReferenceLink("RSocket Module", "rsocket")),
                onDependencies("security").customizeBuild(addDependency("security"))
                        .customizeHelpDocument(addReferenceLink("Security Module", "security")),
                onDependencies("web").customizeBuild(addDependency("http"))
                        .customizeHelpDocument(addReferenceLink("HTTP Module", "http")),
                onDependencies("webflux").customizeBuild(addDependency("webflux"))
                        .customizeHelpDocument(addReferenceLink("WebFlux Module", "webflux")),
                onDependencies("websocket").customizeBuild(addDependency("stomp").andThen(addDependency("websocket")))
                        .customizeHelpDocument(addReferenceLink("STOMP Module", "stomp")
                                .andThen(addReferenceLink("WebSocket Module", "websocket"))),
                onDependencies("web-services").customizeBuild(addDependency("ws"))
                        .customizeHelpDocument(addReferenceLink("Web Services Module", "ws")));
    }

    /** Finalizes each builder into its {@link ImplicitDependency}. */
    private static Iterable<ImplicitDependency> create(ImplicitDependency.Builder... dependencies) {
        return Arrays.stream(dependencies).map(Builder::build).collect(Collectors.toList());
    }

    /** Starts a builder that triggers when ANY of the given dependency ids is selected. */
    private static ImplicitDependency.Builder onDependencies(String... dependencyIds) {
        return new Builder().matchAnyDependencyIds(dependencyIds);
    }

    /** Adds the compile-scoped spring-integration-&lt;id&gt; artifact to the build. */
    private static Consumer<Build> addDependency(String id) {
        return addDependency(id, DependencyScope.COMPILE);
    }

    /** Adds the spring-integration-&lt;id&gt; artifact to the build in the given scope. */
    private static Consumer<Build> addDependency(String id, DependencyScope scope) {
        return (build) -> build.dependencies().add("integration-" + id,
                Dependency.withCoordinates("org.springframework.integration", "spring-integration-" + id).scope(scope));
    }

    /** Appends a "getting started" link to the module's reference documentation. */
    private static Consumer<HelpDocument> addReferenceLink(String name, String id) {
        return (helpDocument) -> {
            String href = String.format("https://docs.spring.io/spring-integration/reference/html/%s.html", id);
            String description = String.format("Spring Integration %s Reference Guide", name);
            helpDocument.gettingStarted().addReferenceDocLink(href, description);
        };
    }
}
|
/**
 * Canned GraphQL request bodies used as test fixtures.
 * Each export is the JSON-serialized POST payload for one query.
 */
const toRequestBody = (query) => JSON.stringify({ query });

export const user = toRequestBody('{user(id: 1) {login}}');
export const users = toRequestBody('{users {login}}');
export const event = toRequestBody('{event(id: 1) {title}}');
export const events = toRequestBody('{events {title}}');
export const room = toRequestBody('{room(id: 1) {title}}');
export const rooms = toRequestBody('{rooms {title}}');
|
#!/bin/bash
set -ev

# LOCAL_PKG must point at the (CI-cached) directory holding the build env.
if [ -z ${LOCAL_PKG+x} ] || [ -z "$LOCAL_PKG" ]; then
    echo "LOCAL_PKG is not set. Aborting..."
    exit 1
fi

echo "=== Updating the build environment in $LOCAL_PKG ==="

#echo "=== Installing from external package sources ==="
#wget -O - http://apt.llvm.org/llvm-snapshot.gpg.key|sudo apt-key add -
#echo "deb http://apt.llvm.org/trusty/ llvm-toolchain-trusty-3.9 main" | sudo tee -a /etc/apt/sources.list
#sudo add-apt-repository -y ppa:lttng/ppa
#sudo apt-get update -qq
#sudo apt-get install -y clang-3.9 clang-tidy-3.9

echo "=== Installing python packages ==="
pip install --user cpp-coveralls

# Increase the environment version to force a rebuild of the packages
# The version is written to the cache file after every build of the dependencies
ENV_VERSION="1"
ENV_INSTALLED=""
if [ -e "$LOCAL_PKG/.build_env" ]; then
    # Fix: this branch runs when the cache file EXISTS, but the original
    # logged "No cached build environment" here.
    echo "=== Found a cached build environment ==="
    read -r ENV_INSTALLED < "$LOCAL_PKG/.build_env"
fi

# travis caches the $LOCAL_PKG dir. If it is loaded, we don't need to reinstall the packages
if [ "$ENV_VERSION" = "$ENV_INSTALLED" ]; then
    echo "=== The build environment is current ==="
    # Print version numbers
    clang --version
    g++ --version
    cppcheck --version
    valgrind --version
    exit 0
fi

echo "=== The build environment is outdated ==="

# Clean up
# additional safety measure to avoid rm -rf on root
# only execute it on travis; ${LOCAL_PKG:?} aborts if the variable is empty
if ! [ -z ${TRAVIS+x} ]; then
    rm -rf "${LOCAL_PKG:?}"/*
fi

# create cached flag
echo "=== Store cache flag ==="
echo $ENV_VERSION > "$LOCAL_PKG/.build_env"

# Print version numbers
echo "=== Installed versions are ==="
clang --version
g++ --version
cppcheck --version
valgrind --version
|
<gh_stars>0
#include "fastfetch.h"
#define FF_LOCALIP_MODULE_NAME "Local IP"
#define FF_LOCALIP_NUM_FORMAT_ARGS 1
#include <sys/types.h>
#include <ifaddrs.h>
#include <netinet/in.h>
#include <string.h>
#include <arpa/inet.h>
// Prints one "key: address" output line for a single network interface.
// The key is either the default "Local IP (<ifname>)" or the user-supplied
// localIpKey format string, which receives the interface name as its only
// format argument.
static void printValue(FFinstance* instance, const char* ifaName, const char* addressBuffer)
{
    FF_STRBUF_CREATE(key);

    if (instance->config.localIpKey.length == 0) {
        // No custom key format configured: "Local IP (<ifname>)".
        ffStrbufSetF(&key, FF_LOCALIP_MODULE_NAME " (%s)", ifaName);
    } else {
        ffParseFormatString(&key, &instance->config.localIpKey, NULL, 1, (FFformatarg[]){
            {FF_FORMAT_ARG_TYPE_STRING, ifaName}
        });
    }

    if (instance->config.localIpFormat.length == 0) {
        // Default value format: key followed by the raw address string.
        ffPrintLogoAndKey(instance, FF_LOCALIP_MODULE_NAME, 0, &key);
        puts(addressBuffer);
    } else {
        // Custom value format: the address is the single format argument.
        ffPrintFormatString(instance, FF_LOCALIP_MODULE_NAME, 0, &key, &instance->config.localIpFormat, NULL, FF_LOCALIP_NUM_FORMAT_ARGS, (FFformatarg[]){
            {FF_FORMAT_ARG_TYPE_STRING, addressBuffer}
        });
    }

    ffStrbufDestroy(&key);
}
/* Enumerate the host's network interfaces and print one line per IPv4/IPv6
 * address, subject to the localIpShowLoop / localIpShowIpV4 / localIpShowIpV6
 * configuration switches. */
void ffPrintLocalIp(FFinstance* instance)
{
    struct ifaddrs* addrs = NULL;
    int ret = getifaddrs(&addrs);
    if (ret < 0) {
        ffPrintError(instance, FF_LOCALIP_MODULE_NAME, 0, &instance->config.localIpKey, &instance->config.localIpFormat, FF_LOCALIP_NUM_FORMAT_ARGS, "getifaddrs(&ifAddrStruct) < 0 (%i)", ret);
        return;
    }
    for (struct ifaddrs* entry = addrs; entry != NULL; entry = entry->ifa_next) {
        /* Entries without an address (e.g. some tunnel interfaces) are skipped. */
        if (!entry->ifa_addr)
            continue;
        /* Skip the loopback interface unless the user asked for it. */
        if (strcmp(entry->ifa_name, "lo") == 0 && !instance->config.localIpShowLoop)
            continue;
        if (entry->ifa_addr->sa_family == AF_INET) {
            /* A valid IPv4 address. */
            if (!instance->config.localIpShowIpV4)
                continue;
            char buffer[INET_ADDRSTRLEN];
            inet_ntop(AF_INET, &((struct sockaddr_in*) entry->ifa_addr)->sin_addr, buffer, INET_ADDRSTRLEN);
            printValue(instance, entry->ifa_name, buffer);
        } else if (entry->ifa_addr->sa_family == AF_INET6) {
            /* A valid IPv6 address. */
            if (!instance->config.localIpShowIpV6)
                continue;
            char buffer[INET6_ADDRSTRLEN];
            inet_ntop(AF_INET6, &((struct sockaddr_in6*) entry->ifa_addr)->sin6_addr, buffer, INET6_ADDRSTRLEN);
            printValue(instance, entry->ifa_name, buffer);
        }
    }
    if (addrs)
        freeifaddrs(addrs);
}
|
<gh_stars>1-10
package com.abubusoft.kripton.examplea0.data.model;
import java.util.Date;
public class SMS {
public Date date;
public String from;
public String message;
public String to;
public SMS(String paramString1, String paramString2, String paramString3,Date paramDate) {
this.from = paramString1;
this.to = paramString2;
this.message = paramString3;
this.date = paramDate;
}
} |
#!/bin/bash
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Git pre-push hook: when pushing to the canonical amfeedpay/amfeed remote,
# refuse the push unless verify-commits.sh accepts the commit chain.
# $2 is the remote URL; only enforce on the official repository.
if ! [[ "$2" =~ ^(git@)?(www.)?github.com(:|/)amfeedpay/amfeed(.git)?$ ]]; then
    exit 0
fi
# stdin carries one "<local ref> <local sha1> <remote ref> <remote sha1>"
# line per ref being pushed.
while read LINE; do
    # Prefix a dummy word so the fields land at $2..$5.
    # NOTE(review): with the "A" prefix, $3 should be the local sha1 and $4
    # the remote ref -- confirm against git's pre-push hook contract.
    set -- A $LINE
    # Only verify pushes to master.
    if [ "$4" != "refs/heads/master" ]; then
        continue
    fi
    # First run silently; on failure re-run to show the error, then abort.
    if ! ./contrib/verify-commits/verify-commits.sh $3 > /dev/null 2>&1; then
        echo "ERROR: A commit is not signed, can't push"
        ./contrib/verify-commits/verify-commits.sh
        exit 1
    fi
done < /dev/stdin
|
package org.jaudiotagger.tag.id3.framebody;
import org.jaudiotagger.AbstractTestCase;
import org.jaudiotagger.tag.id3.ID3v23Frames;
import org.jaudiotagger.tag.id3.valuepair.TextEncoding;
/**
 * Tests for FrameBodyXSOT, the iTunes-style title sort order frame body.
 */
public class FrameBodyXSOTTest extends AbstractTestCase
{
    public static final String TITLE_SORT = "titlesort";

    /** Builds a frame body pre-populated with the test title-sort value. */
    public static FrameBodyXSOT getInitialisedBody()
    {
        FrameBodyXSOT body = new FrameBodyXSOT();
        body.setText(TITLE_SORT);
        return body;
    }

    /** Constructing with explicit encoding and text must not throw and must
     *  expose the expected identifier, encoding and text. */
    public void testCreateFrameBody()
    {
        Exception thrown = null;
        FrameBodyXSOT body = null;
        try
        {
            body = new FrameBodyXSOT(TextEncoding.ISO_8859_1, TITLE_SORT);
        }
        catch (Exception e)
        {
            thrown = e;
        }
        assertNull(thrown);
        assertEquals(ID3v23Frames.FRAME_ID_V3_TITLE_SORT_ORDER_MUSICBRAINZ, body.getIdentifier());
        assertEquals(TextEncoding.ISO_8859_1, body.getTextEncoding());
        assertEquals(TITLE_SORT, body.getText());
    }

    /** The no-arg constructor plus setText must produce the same state. */
    public void testCreateFrameBodyEmptyConstructor()
    {
        Exception thrown = null;
        FrameBodyXSOT body = null;
        try
        {
            body = new FrameBodyXSOT();
            body.setText(TITLE_SORT);
        }
        catch (Exception e)
        {
            thrown = e;
        }
        assertNull(thrown);
        assertEquals(ID3v23Frames.FRAME_ID_V3_TITLE_SORT_ORDER_MUSICBRAINZ, body.getIdentifier());
        assertEquals(TextEncoding.ISO_8859_1, body.getTextEncoding());
        assertEquals(TITLE_SORT, body.getText());
    }
}
|
#!/usr/bin/bash
# Copyright (c) 2021. Huawei Technologies Co.,Ltd.ALL rights reserved.
# This program is licensed under Mulan PSL v2.
# You can use it according to the terms and conditions of the Mulan PSL v2.
# http://license.coscl.org.cn/MulanPSL2
# THIS PROGRAM IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
# #############################################
# @Author : wenjun
# @Contact : 1009065695@qq.com
# @Date : 2021/10/25
# @License : Mulan PSL v2
# @Desc : verify the uasge of ntfscat ntfs-3g.probe command
# ############################################
source "common/common_lib.sh"
# Prepare the test environment: pick a spare disk (exported as ${disk1} by
# the get_disk helper from common_lib.sh).
function pre_test() {
    LOG_INFO "Start to prepare the test environment."
    get_disk
    LOG_INFO "Finish preparing the test environment."
}
# Exercise the documented options of ntfscat and ntfs-3g.probe against the
# disk selected by pre_test; every step is validated with CHECK_RESULT.
function run_test() {
    LOG_INFO "Start to run test."
    ntfscat --help 2>&1 | grep "Usage: ntfscat \[options\]"
    CHECK_RESULT $? 0 0 "Check ntfscat --help failed."
    ntfscat --version 2>&1 | grep "ntfscat v"
    CHECK_RESULT $? 0 0 "Check ntfscat --version failed."
    ntfscat /dev/${disk1} --inode 5 --attribute INDEX_ROOT
    CHECK_RESULT $? 0 0 "Check ntfscat --attribute failed."
    ntfscat /dev/${disk1} --inode 15 --attribute-name ""
    CHECK_RESULT $? 0 0 "Check ntfscat --attribute-name failed."
    ntfscat /dev/${disk1} --inode 4 --force
    CHECK_RESULT $? 0 0 "Check ntfscat --force failed."
    ntfscat /dev/${disk1} --inode 4 --quiet
    CHECK_RESULT $? 0 0 "Check ntfscat --quiet failed."
    ntfscat /dev/${disk1} --inode 4 --verbose
    CHECK_RESULT $? 0 0 "Check ntfscat --verbose failed."
    ntfs-3g.probe --help 2>&1 | grep "Usage:.*ntfs-3g.probe"
    CHECK_RESULT $? 0 0 "Check ntfs-3g.probe --help failed."
    ntfs-3g.probe --readonly /dev/${disk1}
    CHECK_RESULT $? 0 0 "Check ntfs-3g.probe --readonly failed."
    ntfs-3g.probe --readwrite /dev/${disk1}
    # Fixed message: this step checks ntfs-3g.probe, not ntfscat.
    CHECK_RESULT $? 0 0 "Check ntfs-3g.probe --readwrite failed."
    LOG_INFO "End of the test."
}
# Restore the environment: reformat the test disk as ext4 (answering the
# mkfs confirmation prompt via expect) and remove installed packages.
function post_test() {
    LOG_INFO "Start to restore the test environment."
    expect <<EOF
spawn mkfs.ext4 /dev/${disk1}
send "y\n"
expect eof
EOF
    DNF_REMOVE
    LOG_INFO "Finish restoring the test environment."
}
# Quote the argument list so arguments containing whitespace survive intact
# (unquoted $@ re-splits every argument on IFS).
main "$@"
|
import _ from 'lodash';
import * as dotenv from 'dotenv';
import * as winston from 'winston';
// An externally installed GDAL can export GDAL_DATA globally, which
// interferes with CRS parsing in Harmony, so the inherited variable is
// dropped here BEFORE dotenv runs: a GDAL_DATA key in .env (loaded below)
// still takes effect.
if (Object.prototype.hasOwnProperty.call(process.env, 'GDAL_DATA')) {
  winston.warn('Found a GDAL_DATA environment variable. This is usually from an external GDAL '
    + 'installation and can interfere with CRS parsing in Harmony, so we will ignore it. '
    + 'If you need to override the GDAL_DATA location for Harmony, provide a GDAL_DATA key in '
    + 'your .env file.');
  delete process.env.GDAL_DATA;
}
// Load .env overrides; a missing .env file only warrants a warning
// (e.g. in deployed environments everything comes from real env vars).
if (dotenv.config().error) {
  winston.warn('Did not read a .env file');
}
/**
 * Typed shape of Harmony's environment-derived configuration. The object is
 * populated dynamically (see makeConfigVar below), so every entry in the
 * ENV_VAR table must have a matching camelCase property here.
 */
interface HarmonyEnv {
  logLevel: string;
  stagingBucket: string;
  artifactBucket: string;
  maxSynchronousGranules: number;
  maxGranuleLimit: number;
  objectStoreType: string;
  awsDefaultRegion: string;
  sameRegionAccessRole: string;
  maxPostFields: number;
  maxPostFileSize: number;
  maxPostFileParts: number;
  nodeEnv: string;
  adminGroupId: string;
  harmonyClientId: string;
  isDevelopment: boolean;
  uploadBucket: string;
  argoUrl: string;
  cmrEndpoint: string;
  oauthHost: string;
  oauthUid: string;
  useLocalstack: boolean;
  localstackHost: string;
  callbackUrlRoot: string;
  syncRequestPollIntervalMs: number;
  defaultImagePullPolicy: string;
  cmrGranuleLocatorImagePullPolicy: string;
  sharedSecretKey: string;
  defaultBatchSize: number;
  defaultParallelism: number;
  jobReaperPeriodSec: number;
  reapableJobAgeMinutes: number;
  defaultArgoPodTimeoutSecs: number;
  builtInTaskPrefix: string;
  builtInTaskVersion: string;
  cmrMaxPageSize: number;
  // FEEDBACK_URL is set by the config table but was missing from this
  // interface; optional because its default is null.
  feedbackUrl?: string;
}
const envVars: HarmonyEnv = {} as HarmonyEnv;
/**
 * Add a symbol to module.exports with an appropriate value. The exported symbol will be in
 * camel case, e.g., `maxPostFileSize`. This approach has the drawback that these
 * config variables don't show up in VS Code autocomplete, but the reduction in repeated
 * boilerplate code is probably worth it.
 *
 * @param envName - The environment variable corresponding to the config variable in
 *   CONSTANT_CASE form
 * @param defaultValue - The value to use if the environment variable is not set. Only strings
 *   and integers are supported
 */
function makeConfigVar(envName: string, defaultValue?: string | number): void {
  const envValue = process.env[envName];
  let value: string | number | undefined;
  if (!envValue) {
    // Unset (or empty-string) variables fall back to the default.
    value = defaultValue;
  } else if (typeof defaultValue === 'number') {
    // A numeric default marks the variable as numeric; the raw env string
    // is parsed as a base-10 integer.
    value = parseInt(envValue, 10);
  } else {
    value = envValue;
  }
  // envVars is declared with the concrete HarmonyEnv shape, so route the
  // dynamic (camelCase) key assignment through a record view; this keeps the
  // assignment legal under strict compiler settings.
  (envVars as unknown as Record<string, string | number | undefined>)[_.camelCase(envName)] = value;
}
// create exported config variables
// Each entry is [ENV_VAR, DEFAULT_VALUE]; entries with no default resolve to
// undefined when the variable is unset. `apply` spreads the pair onto
// makeConfigVar(envName, defaultValue).
[
  // ENV_VAR, DEFAULT_VALUE
  ['ARGO_URL', 'http://localhost:4276'],
  ['LOG_LEVEL', 'debug'],
  ['STAGING_BUCKET', 'local-staging-bucket'],
  ['ARTIFACT_BUCKET', 'local-artifact-bucket'],
  ['MAX_SYNCHRONOUS_GRANULES', 1],
  ['MAX_GRANULE_LIMIT', 350],
  ['OBJECT_STORE_TYPE', 's3'],
  ['AWS_DEFAULT_REGION', 'us-west-2'],
  ['SAME_REGION_ACCESS_ROLE'],
  ['JOB_REAPER_PERIOD_SEC', 360],
  ['REAPABLE_JOB_AGE_MINUTES', 60],
  ['LOCALSTACK_HOST', 'localhost'],
  // shapefile upload related configs
  ['MAX_POST_FIELDS', 100],
  ['MAX_POST_FILE_SIZE', 2000000000],
  ['MAX_POST_FILE_PARTS', 100],
  ['NODE_ENV', 'development'],
  ['ADMIN_GROUP_ID', null],
  ['CMR_ENDPOINT', 'https://cmr.uat.earthdata.nasa.gov'],
  ['OAUTH_HOST', 'https://uat.urs.earthdata.nasa.gov'],
  ['OAUTH_UID', null],
  ['CALLBACK_URL_ROOT', null],
  ['SYNC_REQUEST_POLL_INTERVAL_MS', 100],
  ['DEFAULT_BATCH_SIZE', 2000],
  ['DEFAULT_IMAGE_PULL_POLICY', 'Always'],
  ['CMR_GRANULE_LOCATOR_IMAGE_PULL_POLICY', 'Always'],
  ['DEFAULT_PARALLELISM', 2],
  ['SHARED_SECRET_KEY', null],
  ['DEFAULT_ARGO_POD_TIMEOUT_SECS', 14400],
  ['BUILT_IN_TASK_PREFIX', ''],
  ['BUILT_IN_TASK_VERSION', 'latest'],
  ['CMR_MAX_PAGE_SIZE', 2000],
  ['FEEDBACK_URL', null],
].forEach((value) => makeConfigVar.apply(this, value));
// special cases
// These values need transformation (renaming, fallbacks, or boolean
// coercion) that the generic table above cannot express.
envVars.harmonyClientId = process.env.CLIENT_ID || 'harmony-unknown';
envVars.isDevelopment = process.env.NODE_ENV === 'development';
// Shapefile uploads fall back to the staging bucket when no dedicated
// upload bucket is configured.
envVars.uploadBucket = process.env.UPLOAD_BUCKET || process.env.STAGING_BUCKET || 'local-staging-bucket';
envVars.useLocalstack = process.env.USE_LOCALSTACK === 'true';
export = envVars;
|
export {};
declare global {
  interface Array<T> {
    /**
     * [Extension method]
     * Removes duplicate elements from the array.
     * @return A new array with duplicates removed
     */
    distinct(): T[];
    /**
     * [Extension method]
     * Removes duplicates by comparing the key the selector produces for
     * each element.
     * @param keySelector Produces the value used for the duplicate check;
     *        when omitted, elements themselves are compared.
     * @return A new array with duplicates removed
     */
    distinctBy<K>(keySelector?: (obj: T) => K): T[];
  }
}
/**
 * Remove duplicate elements, keeping the first occurrence of each value.
 * Non-array receivers and empty arrays yield an empty array.
 */
Array.prototype.distinct = function <T>(): T[] {
  const items = this as T[];
  if (!Array.isArray(items) || items.length === 0) return [];
  // Set-based de-duplication is O(n) instead of the previous O(n^2)
  // indexOf scan, and it fixes a defect where every NaN element was
  // silently dropped (indexOf can never locate NaN).
  return [...new Set(items)];
};
/**
 * Remove duplicates by comparing the key the selector produces for each
 * element, keeping the first element per distinct key. Falls back to
 * distinct() when no selector is given.
 */
Array.prototype.distinctBy = function <T, K>(keySelector?: (obj: T) => K): T[] {
  const items = this as T[];
  if (!Array.isArray(items) || items.length === 0) return [];
  if (!keySelector) return items.distinct();
  // Single pass with a Set of seen keys: O(n) instead of the previous
  // O(n^2) indexOf scan, and the selector runs exactly once per element
  // (the old code invoked it twice, which misbehaved when the selector
  // returned a fresh object each call).
  const seen = new Set<K>();
  const result: T[] = [];
  for (const item of items) {
    const key = keySelector(item);
    if (!seen.has(key)) {
      seen.add(key);
      result.push(item);
    }
  }
  return result;
};
|
<reponame>jeanfredrik/boardgame.io
/*
* Copyright 2018 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
import React from 'react';
import Token from './token';
import Enzyme from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';
Enzyme.configure({ adapter: new Adapter() });
// Verifies that a Token interpolates its (x, y) position over time: the
// origin is pinned to (0, 0) at t=0, and _animate(t) returns a frame
// callback that eases toward the target (1, 2).
test('animation', () => {
  const token = Enzyme.shallow(
    <Token x={1} y={2} animate={true}>
      <p>foo</p>
    </Token>
  );
  token.setState({
    ...token.state(),
    originX: 0,
    originY: 0,
    originTime: 0,
  });
  const inst = token.instance();
  // Early in the animation: barely moved from the origin.
  inst._animate(150)();
  expect(token.state('x')).toBeCloseTo(0.032, 3);
  expect(token.state('y')).toBeCloseTo(0.064, 3);
  // Midpoint: exactly halfway to the target.
  inst._animate(375)();
  expect(token.state('x')).toEqual(0.5);
  expect(token.state('y')).toEqual(1.0);
  // Near the end: almost at the target.
  inst._animate(600)();
  expect(token.state('x')).toBeCloseTo(0.968, 3);
  expect(token.state('y')).toBeCloseTo(1.936, 3);
  // Past the duration: clamped to the target position.
  inst._animate(1000)();
  expect(token.state('x')).toEqual(1);
  expect(token.state('y')).toEqual(2);
});
// Changing props records the previous position as the animation origin.
test('props change', () => {
  const token = Enzyme.shallow(
    <Token x={1} y={2} animate={true}>
      <p>foo</p>
    </Token>
  );
  token.setProps({ x: 0, y: 2 });
  expect(token.state('originX')).toEqual(1);
  expect(token.state('originY')).toEqual(2);
});
// A repeated identical setProps must not reset the animation origin.
// NOTE(review): both assertion groups are identical; the test only proves
// the origin is stable across the duplicate update.
test('debounce', () => {
  const token = Enzyme.shallow(
    <Token x={1} y={2} animate={true}>
      <p>foo</p>
    </Token>
  );
  token.setProps({ x: 0, y: 2 });
  expect(token.state('originX')).toEqual(1);
  expect(token.state('originY')).toEqual(2);
  token.setProps({ x: 0, y: 2 });
  expect(token.state('originX')).toEqual(1);
  expect(token.state('originY')).toEqual(2);
});
// The onClick prop is wired through to DOM click events.
test('click handler', () => {
  const onClick = jest.fn();
  const token = Enzyme.mount(
    <Token x={0} y={0} animate={false} onClick={onClick}>
      <p>foo</p>
    </Token>
  );
  token.simulate('click');
  expect(onClick).toHaveBeenCalled();
});
// The onMouseOver prop is wired through to mouseOver events.
test('mouse over handler', () => {
  const onMouseOver = jest.fn();
  const token = Enzyme.mount(
    <Token x={0} y={0} animate={false} onMouseOver={onMouseOver}>
      <p>foo</p>
    </Token>
  );
  token.simulate('mouseOver');
  expect(onMouseOver).toHaveBeenCalled();
});
// The onMouseOut prop is wired through to mouseOut events.
test('mouse out handler', () => {
  const onMouseOut = jest.fn();
  const token = Enzyme.mount(
    <Token x={0} y={0} animate={false} onMouseOut={onMouseOut}>
      <p>foo</p>
    </Token>
  );
  token.simulate('mouseOut');
  expect(onMouseOut).toHaveBeenCalled();
});
|
package oidc.management.service;
import oidc.management.model.UserAccount;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import java.util.List;
import java.util.Optional;
/**
 * User account service.
 *
 * @author <NAME>
 * @since 27-03-2022
 * @see UserAccount
 * @see oidc.management.service.impl.DefaultUserAccountService
 */
public interface UserAccountService {

    /**
     * Creates a builder for new {@link UserAccount} entities.
     *
     * @return A fresh {@link UserAccount.UserAccountBuilder}.
     */
    public UserAccount.UserAccountBuilder entityBuilder();

    /**
     * Find all the {@link UserAccount}s.
     *
     * @return List of {@link UserAccount}s.
     */
    public List<UserAccount> findAll();

    /**
     * Find a page of @{@link UserAccount}s.
     *
     * @param pageable Pageable object.
     * @param search Search string.
     * @return Page of {@link UserAccount}s.
     */
    public Page<UserAccount> findAll(Pageable pageable, String search);

    /**
     * Find a {@link UserAccount} by its id.
     *
     * @param id The {@link UserAccount} id.
     * @return An {@link Optional} containing the {@link UserAccount} if it
     * exists, or an empty {@link Optional} if it does not.
     */
    public Optional<UserAccount> findById(String id);

    /**
     * Find a user account by its username.
     *
     * @param username Username.
     * @return An {@link Optional} containing the {@link UserAccount} if it
     * exists, or an empty {@link Optional} if it does not.
     */
    public Optional<UserAccount> findByUsername(String username);

    /**
     * Save a {@link UserAccount}.
     *
     * @param userAccount The {@link UserAccount} to save.
     * @return The saved {@link UserAccount}.
     */
    public UserAccount save(UserAccount userAccount);

    /**
     * Deletes a {@link UserAccount} by it's {@link String} id.
     *
     * @param id The {@link UserAccount} id.
     */
    public void deleteById(String id);
}
|
// Barrel module: re-exports the numeric helper functions of this package.
import abs from "./abs";
import byteFmt from "./byte-fmt";
import degrees from "./degrees";
import kbFmt from "./kb-fmt";
import max from "./max";
import min from "./min";
import percent from "./percent";
import radians from "./radians";
import radix from "./radix";
import shortFmt from "./short-fmt";
import sum from "./sum";

export {
  abs,
  byteFmt,
  degrees,
  kbFmt,
  max,
  min,
  percent,
  radians,
  radix,
  shortFmt,
  sum,
};
|
<filename>src/personalfinance/gui/table/TransactionTableData.java
package personalfinance.gui.table;
import personalfinance.gui.table.model.TransactionTableModel;
import personalfinance.gui.table.renderer.MainTableCellRenderer;
import personalfinance.settings.Style;
import personalfinance.settings.Text;
import javax.swing.*;
import java.awt.*;
/**
 * Table definition (columns, icons and cell renderers) for the transaction
 * table of the personal-finance GUI.
 */
public class TransactionTableData extends TableData{
    // Raw column keys; display names come from Text.get(...) translations.
    private static final String[] columns = new String[]{"DATE", "ACCOUNT", "ARTICLE", "AMOUNT", "NOTICE"};
    private static final ImageIcon[] icons = new ImageIcon[]{Style.ICON_DATE, Style.ICON_ACCOUNT, Style.ICON_ARTICLE, Style.ICON_AMOUNT, Style.ICON_NOTICE};

    public TransactionTableData() {
        super(new TransactionTableModel(columns), columns, icons);
        init();
    }

    // count: number of rows the underlying model is created with.
    public TransactionTableData(int count) {
        super(new TransactionTableModel(columns, count), columns, icons);
        init();
    }

    @Override
    protected final void init() {
        // Install the colored renderer on the amount column.
        // NOTE(review): the lookup uses the translated name Text.get("AMOUNT")
        // while the model was constructed with the raw key "AMOUNT" -- confirm
        // that TableData translates column headers before this lookup.
        getColumn(Text.get("AMOUNT")).setCellRenderer(new TableCellAmountRenderer());
    }

    /** Renders amounts in the expense color when negative, income color otherwise. */
    private class TableCellAmountRenderer extends MainTableCellRenderer {
        @Override
        public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) {
            Component renderer = super.getTableCellRendererComponent(table, value, isSelected, hasFocus, row, column);
            // A minus sign anywhere in the rendered text marks an expense.
            if (( value.toString()).contains("-")) renderer.setForeground(Style.COLOR_EXP);
            else renderer.setForeground(Style.COLOR_INCOME);
            return renderer;
        }
    }
}
|
<reponame>jjjdewan/leaflet-challenge<filename>static/js/config.js
// API key for map tiles from www.mapbox.com; the placeholder must be
// replaced with a real token before the map layers will load.
// const API_KEY = "<KEY>";
const API_KEY = "<KEY>";
|
package com.yamatokataoka.xroaddrive.api.repository;
import com.yamatokataoka.xroaddrive.api.domain.Metadata;
import org.springframework.data.mongodb.repository.MongoRepository;
/**
 * Spring Data MongoDB repository for {@link Metadata} documents keyed by a
 * {@link String} id; CRUD operations are inherited from {@link MongoRepository}.
 */
public interface MetadataRepository extends MongoRepository<Metadata, String> {}
#!/bin/bash
# Fixed shebang: the script uses bash-only constructs ([[ ... =~ ]] and ==),
# which break under a strict POSIX /bin/sh.
#
# Automatic build script for libssl and libcrypto
# for iPhoneOS and iPhoneSimulator
#
# Created by Felix Schulze on 16.12.10.
# Copyright 2010 Felix Schulze. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
# Change values here                                                      #
#                                                                         #
VERSION="1.0.1e"                                                          #
SDKVERSION="6.1"                                                          #
#                                                                         #
###########################################################################
#                                                                         #
# Don't change anything under this line!                                  #
#                                                                         #
###########################################################################
CURRENTPATH=`pwd`
ARCHS="i386 armv7 armv7s"
DEVELOPER=`xcode-select -print-path`
if [ ! -d "$DEVELOPER" ]; then
    echo "xcode path is not set correctly $DEVELOPER does not exist (most likely because of xcode > 4.3)"
    echo "run"
    echo "sudo xcode-select -switch <xcode path>"
    echo "for default installation:"
    echo "sudo xcode-select -switch /Applications/Xcode.app/Contents/Developer"
    exit 1
fi
# Abort on the first failing command.
set -e
if [ ! -e openssl-${VERSION}.tar.gz ]; then
    echo "Downloading openssl-${VERSION}.tar.gz"
    curl -O http://www.openssl.org/source/openssl-${VERSION}.tar.gz
else
    echo "Using openssl-${VERSION}.tar.gz"
fi
mkdir -p "${CURRENTPATH}/src"
mkdir -p "${CURRENTPATH}/bin"
mkdir -p "${CURRENTPATH}/lib"
tar zxf openssl-${VERSION}.tar.gz -C "${CURRENTPATH}/src"
cd "${CURRENTPATH}/src/openssl-${VERSION}"
for ARCH in ${ARCHS}
do
    if [ "${ARCH}" == "i386" ];
    then
        PLATFORM="iPhoneSimulator"
    else
        # Patch ui_openssl.c so it builds for device targets.
        # NOTE(review): "sed -ie" leaves a backup file with an "e" suffix
        # (e.g. ui_openssl.ce) -- probably intended as "-i -e"; left as-is.
        sed -ie "s!static volatile sig_atomic_t intr_signal;!static volatile intr_signal;!" "crypto/ui/ui_openssl.c"
        PLATFORM="iPhoneOS"
    fi
    export CROSS_TOP="${DEVELOPER}/Platforms/${PLATFORM}.platform/Developer"
    export CROSS_SDK="${PLATFORM}${SDKVERSION}.sdk"
    echo "Building openssl-${VERSION} for ${PLATFORM} ${SDKVERSION} ${ARCH}"
    echo "Please stand by..."
    export CC="${CROSS_TOP}/usr/bin/gcc -arch ${ARCH}"
    mkdir -p "${CURRENTPATH}/bin/${PLATFORM}${SDKVERSION}-${ARCH}.sdk"
    LOG="${CURRENTPATH}/bin/${PLATFORM}${SDKVERSION}-${ARCH}.sdk/build-openssl-${VERSION}.log"
    # OpenSSL 1.0.0.x has no iphoneos-cross configuration target.
    # Fixed regex: the dots are now escaped so "1.0.0." only matches literal
    # version strings instead of any four characters in those positions.
    if [[ "$VERSION" =~ 1\.0\.0\. ]]; then
        ./Configure BSD-generic32 --openssldir="${CURRENTPATH}/bin/${PLATFORM}${SDKVERSION}-${ARCH}.sdk" > "${LOG}" 2>&1
    else
        ./Configure iphoneos-cross --openssldir="${CURRENTPATH}/bin/${PLATFORM}${SDKVERSION}-${ARCH}.sdk" > "${LOG}" 2>&1
    fi
    # add -isysroot to CC=
    sed -ie "s!^CFLAG=!CFLAG=-isysroot ${CROSS_TOP}/SDKs/${CROSS_SDK} !" "Makefile"
    make >> "${LOG}" 2>&1
    make install >> "${LOG}" 2>&1
    make clean >> "${LOG}" 2>&1
done
echo "Build library..."
# Merge the per-architecture static libraries into fat binaries.
lipo -create ${CURRENTPATH}/bin/iPhoneSimulator${SDKVERSION}-i386.sdk/lib/libssl.a ${CURRENTPATH}/bin/iPhoneOS${SDKVERSION}-armv7.sdk/lib/libssl.a ${CURRENTPATH}/bin/iPhoneOS${SDKVERSION}-armv7s.sdk/lib/libssl.a -output ${CURRENTPATH}/lib/libssl.a
lipo -create ${CURRENTPATH}/bin/iPhoneSimulator${SDKVERSION}-i386.sdk/lib/libcrypto.a ${CURRENTPATH}/bin/iPhoneOS${SDKVERSION}-armv7.sdk/lib/libcrypto.a ${CURRENTPATH}/bin/iPhoneOS${SDKVERSION}-armv7s.sdk/lib/libcrypto.a -output ${CURRENTPATH}/lib/libcrypto.a
mkdir -p ${CURRENTPATH}/include
cp -R ${CURRENTPATH}/bin/iPhoneSimulator${SDKVERSION}-i386.sdk/include/openssl ${CURRENTPATH}/include/
echo "Building done."
echo "Cleaning up..."
rm -rf ${CURRENTPATH}/src/openssl-${VERSION}
echo "Done."
|
// Re-export the mcs-face model from the addon so the host app can resolve it.
export { default } from 'ember-facial-recognition/models/mcs-face';
|
package com.example.batchforscience.service;
import java.math.BigDecimal;
/** Read-only client lookups used by the batch jobs. */
public interface ClientService {
    /** Returns the client's current outstanding debt. */
    BigDecimal getDebt(Long clientId);
    /** Returns how many orders the client currently has open. */
    long getNumberOfCurrentOrders(Long clientId);
}
|
# Override of the "exit" builtin: removes test marker files and stops the
# syslog tracer (when one was started) before delegating to the real builtin.
exit ()
{
    if [ -n "$_CLEANUP_TEST" ]; then
        rm -f "$WORKSPACE_ROOT/.testing" 2>/dev/null
        rm -f "$WORKSPACE_ROOT/.testing.$$" 2>/dev/null
    fi
    if [ -n "$_SYSLOG_TRACE_PID" ]; then
        # Only on platforms where the tracer is known to react to SIGTERM.
        case "$DISTRIBUTION" in
            debian | \
            ubuntu | \
            redhat | \
            centos | \
            sl | \
            opensuse | \
            suse | \
            freebsd | \
            netbsd | \
            openbsd | \
            sunos )
                kill -TERM "$_SYSLOG_TRACE_PID" 2>/dev/null &&
                {
                    wait "$_SYSLOG_TRACE_PID"
                    unset _SYSLOG_TRACE_PID
                }
                ;;
        esac
    fi
    # Forward the original exit status arguments to the real builtin.
    builtin exit $*
}
# Append directory $1 to PATH unless an identical component is already
# present. Fixed: the previous grep-based check treated $1 as a regular
# expression and matched substrings, so paths containing metacharacters
# (or prefix components like /usr vs /usr/local) could be misjudged; the
# case pattern below matches whole colon-separated components exactly.
append_path ()
{
    if [ -d "$1" ]; then
        if [ -n "$PATH" ]; then
            case ":$PATH:" in
                *":$1:"*) return ;;
            esac
            PATH="$PATH:$1"
        else
            PATH="$1"
        fi
        export PATH
    fi
}
# Prepend directory $1 to PATH unless an identical component is already
# present. Fixed: exact component matching via a case pattern replaces the
# previous grep regex check (see append_path).
prepend_path ()
{
    if [ -d "$1" ]; then
        if [ -n "$PATH" ]; then
            case ":$PATH:" in
                *":$1:"*) return ;;
            esac
            PATH="$1:$PATH"
        else
            PATH="$1"
        fi
        export PATH
    fi
}
# Append flag $1 to CFLAGS unless it already appears there.
# NOTE(review): the $GREP checks treat $1 as a regular expression and match
# substrings; flags containing regex metacharacters may be misjudged. The
# same applies to the three sibling functions below.
append_cflags ()
{
    if [ -n "$1" ]; then
        if [ -n "$CFLAGS" ]; then
            echo "$CFLAGS" | $GREP -q -- "$1 " 2>/dev/null && return;
            echo "$CFLAGS" | $GREP -q -- "$1\$" 2>/dev/null && return;
            CFLAGS="$CFLAGS $1"
        else
            CFLAGS="$1"
        fi
        export CFLAGS
    fi
}
# Append flag $1 to CPPFLAGS unless it already appears there.
append_cppflags ()
{
    if [ -n "$1" ]; then
        if [ -n "$CPPFLAGS" ]; then
            echo "$CPPFLAGS" | $GREP -q -- "$1 " 2>/dev/null && return;
            echo "$CPPFLAGS" | $GREP -q -- "$1\$" 2>/dev/null && return;
            CPPFLAGS="$CPPFLAGS $1"
        else
            CPPFLAGS="$1"
        fi
        export CPPFLAGS
    fi
}
# Append flag $1 to LDFLAGS unless it already appears there.
append_ldflags ()
{
    if [ -n "$1" ]; then
        if [ -n "$LDFLAGS" ]; then
            echo "$LDFLAGS" | $GREP -q -- "$1 " 2>/dev/null && return;
            echo "$LDFLAGS" | $GREP -q -- "$1\$" 2>/dev/null && return;
            LDFLAGS="$LDFLAGS $1"
        else
            LDFLAGS="$1"
        fi
        export LDFLAGS
    fi
}
# Append directory $1 to LD_LIBRARY_PATH unless it already appears there;
# only existing directories are added.
append_ld_library_path ()
{
    if [ -d "$1" ]; then
        if [ -n "$LD_LIBRARY_PATH" ]; then
            echo "$LD_LIBRARY_PATH" | $GREP -q -- "$1:" 2>/dev/null && return;
            echo "$LD_LIBRARY_PATH" | $GREP -q -- "$1\$" 2>/dev/null && return;
            LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$1"
        else
            LD_LIBRARY_PATH="$1"
        fi
        export LD_LIBRARY_PATH
    fi
}
# Derive WORKSPACE_ROOT from $WORKSPACE by walking up the directory path
# until the trailing component is literally "workspace" (at most 20 levels).
# Sets WORKSPACE_ROOT on success; returns 1 (with a message) otherwise.
find_jenkins_workspace_root ()
{
    if [ -z "$WORKSPACE" -o ! -d "$WORKSPACE" ]; then
        echo "find_jenkins_workspace_root: Unable to find workspace root since no WORKSPACE has been defined" >&2
        return 1
    fi
    local workspace="$WORKSPACE"
    local max_iter=20
    local currdir
    while [ "$max_iter" -gt 0 ] 2>/dev/null; do
        # check if the last dir on the path is workspace
        currdir=`echo "$workspace" | sed 's%.*/%%' 2>/dev/null`
        if [ "$currdir" = "workspace" ]; then
            break
        fi
        # remove the last dir on the path
        workspace=`echo "$workspace" | sed 's%/[^/]*$%%' 2>/dev/null`
        max_iter=$(( max_iter - 1))
    done
    if [ -n "$workspace" -a "$max_iter" -gt 0 ] 2>/dev/null; then
        WORKSPACE_ROOT="$workspace"
        return 0
    fi
    echo "find_jenkins_workspace_root: Failed to find workspace root in WORKSPACE=$WORKSPACE" >&2
    return 1
}
# Resolve program name $1 to an executable path via `which`; prints the path
# and returns 0, or returns 1 when not found.
# NOTE(review): `which` is not POSIX; `command -v` would be more portable,
# but changing it could also pick up shell functions -- left as-is.
find_program ()
{
    if [ -n "$1" ]; then
        local path=`which "$1" 2>/dev/null`
        if [ -n "$path" -a -x "$path" ]; then
            echo "$path"
            return 0
        fi
    fi
    return 1
}
# Locate a usable make implementation and export it as $MAKE.
# GNU make is preferred; on FreeBSD only gmake is acceptable (the system
# make is BSD make).
find_make ()
{
    local make
    local program
    local programs="gmake make"
    case "$DISTRIBUTION" in
        freebsd )
            programs="gmake"
            ;;
    esac
    for program in $programs; do
        make=`find_program "$program"`
        if [ -n "$make" ]; then
            export MAKE="$make"
            return 0
        fi
    done
    return 1
}
# Locate an m4 macro processor (GNU m4 first) and export it as $M4.
find_m4 ()
{
    local candidate
    local name
    for name in gm4 m4; do
        candidate=`find_program "$name"`
        if [ -n "$candidate" ]; then
            export M4="$candidate"
            return 0
        fi
    done
    return 1
}
# Locate an MD5 checksum tool and export the full command in $MD5SUM.
# BSD/macOS "md5" needs -q to print only the digest.
find_md5sum ()
{
    local md5sum
    local program
    for program in md5sum gmd5sum md5; do
        md5sum=`find_program "$program"`
        if [ -n "$md5sum" ]; then
            MD5SUM="$md5sum"
            case "$program" in
                md5)
                    MD5SUM="$MD5SUM -q"
                    ;;
            esac
            export MD5SUM
            return 0
        fi
    done
    return 1
}
# Locate a SHA-1 checksum tool and export the full command in $SHA1SUM.
# Perl "shasum" needs the algorithm selected (-a 1); BSD "sha1" needs -q
# to print only the digest.
find_sha1sum ()
{
    local shasum
    local program
    for program in sha1sum gsha1sum shasum sha1; do
        shasum=`find_program "$program"`
        if [ -n "$shasum" ]; then
            SHA1SUM="$shasum"
            case "$program" in
                shasum)
                    SHA1SUM="$SHA1SUM -a 1"
                    ;;
                sha1)
                    SHA1SUM="$SHA1SUM -q"
                    ;;
            esac
            export SHA1SUM
            return 0
        fi
    done
    return 1
}
# Locate a SHA-256 checksum tool and export the full command in $SHA256SUM.
# Perl "shasum" needs the algorithm selected (-a 256); BSD "sha256" needs
# -q to print only the digest.
find_sha256sum ()
{
    local sha256sum
    local program
    for program in sha256sum gsha256sum shasum sha256; do
        sha256sum=`find_program "$program"`
        if [ -n "$sha256sum" ]; then
            # Bug fix: the original assigned "$shasum", a variable that is
            # never set in this function, leaving SHA256SUM empty.
            SHA256SUM="$sha256sum"
            case "$program" in
                shasum)
                    SHA256SUM="$SHA256SUM -a 256"
                    ;;
                sha256)
                    SHA256SUM="$SHA256SUM -q"
                    ;;
            esac
            export SHA256SUM
            return 0
        fi
    done
    return 1
}
# Locate wget and export its path as $WGET.
find_wget ()
{
    local candidate
    local name
    for name in wget; do
        candidate=`find_program "$name"`
        if [ -n "$candidate" ]; then
            export WGET="$candidate"
            return 0
        fi
    done
    return 1
}
# Locate ccache; when found, export it as $CCACHE and prepend the first
# existing ccache compiler-symlink directory to PATH so subsequent cc/c++
# lookups resolve to the cached wrappers.
find_ccache ()
{
    local ccache
    local program
    local path
    for program in ccache; do
        ccache=`find_program "$program"`
        if [ -n "$ccache" ]; then
            export CCACHE="$ccache"
            for path in /usr/lib64/ccache /usr/lib/ccache /usr/local/lib64/ccache /usr/local/lib/ccache; do
                if [ -d "$path" ]; then
                    prepend_path "$path"
                    break
                fi
            done
            return 0
        fi
    done
    return 1
}
# Locate a C compiler (cc preferred, which may be a ccache wrapper if
# find_ccache adjusted PATH) and export it as $CC.
find_cc ()
{
    local cc
    local program
    for program in cc gcc; do
        cc=`find_program "$program"`
        if [ -n "$cc" ]; then
            export CC="$cc"
            return 0
        fi
    done
    return 1
}
# Locate a C++ compiler and export it as $CXX.
find_cxx ()
{
    local cxx
    local program
    for program in c++ g++; do
        cxx=`find_program "$program"`
        if [ -n "$cxx" ]; then
            export CXX="$cxx"
            return 0
        fi
    done
    return 1
}
# Locate tee and export it as $TEE.
find_tee ()
{
    local tee
    local program
    for program in tee; do
        tee=`find_program "$program"`
        if [ -n "$tee" ]; then
            export TEE="$tee"
            return 0
        fi
    done
    return 1
}
# Locate a date implementation that supports epoch output (+%s) and export
# it as $DATE; the sanity check runs it once and requires a positive number.
find_date ()
{
    local date
    local program
    local time_now
    for program in date; do
        date=`find_program "$program"`
        if [ -n "$date" ]; then
            time_now=`$date '+%s' 2>/dev/null`
            if [ -n "$time_now" -a "$time_now" -gt 0 ] 2>/dev/null; then
                export DATE="$date"
                return 0
            fi
        fi
    done
    return 1
}
# Locate tail and build a follow-by-name command line for it, exporting
# both as $TAIL and $TAIL_FOLLOW. Flags differ per platform: GNU-style
# systems use --follow=name, the BSDs use -f (with -F where available);
# Solaris needs GNU tail (gtail). Returns 1 for unknown platforms.
find_tail ()
{
    local tail
    local tail_follow
    local program
    local programs="tail"
    case "$DISTRIBUTION" in
        sunos )
            programs="gtail"
            ;;
    esac
    for program in $programs; do
        tail=`find_program "$program"`
        if [ -n "$tail" ]; then
            break
        fi
    done
    if [ -z "$tail" ]; then
        return 1
    fi
    case "$DISTRIBUTION" in
        debian | \
        ubuntu | \
        redhat | \
        centos | \
        sl | \
        opensuse | \
        suse | \
        sunos )
            tail_follow="$tail --follow=name -n 0"
            ;;
        freebsd | \
        netbsd )
            tail_follow="$tail -f -F -n 0"
            ;;
        openbsd )
            tail_follow="$tail -f -n 0"
            ;;
    esac
    if [ -z "$tail_follow" ]; then
        return 1
    fi
    export TAIL="$tail"
    export TAIL_FOLLOW="$tail_follow"
    return 0
}
# Locate grep (GNU ggrep preferred on BSD/Solaris) and export it as $GREP.
find_grep ()
{
    local grep
    local program
    for program in ggrep grep; do
        grep=`find_program "$program"`
        if [ -n "$grep" ]; then
            export GREP="$grep"
            # Consistency fix: return an explicit success status like every
            # sibling find_* helper (the original used a bare "return").
            return 0
        fi
    done
    return 1
}
# Locate pgrep and export its path as $PGREP.
find_pgrep ()
{
    local candidate
    local name
    for name in pgrep; do
        candidate=`find_program "$name"`
        if [ -n "$candidate" ]; then
            export PGREP="$candidate"
            return 0
        fi
    done
    return 1
}
# Locate sort (GNU gsort preferred) and export its path as $SORT.
find_sort ()
{
    local candidate
    local name
    for name in gsort sort; do
        candidate=`find_program "$name"`
        if [ -n "$candidate" ]; then
            export SORT="$candidate"
            return 0
        fi
    done
    return 1
}
# Establish INSTALL_ROOT for the current build. An already-set INSTALL_ROOT
# just needs to exist; otherwise $WORKSPACE_ROOT/root/$INSTALL_TAG is
# created and its bin/sbin/lib*/include subdirectories (when present) are
# wired into PATH, LDFLAGS, LD_LIBRARY_PATH and C(PP)FLAGS.
setup_install_root ()
{
    if [ -n "$INSTALL_ROOT" ]; then
        if [ -d "$INSTALL_ROOT" ]; then
            return 0
        else
            return 1
        fi
    fi
    if [ ! -d "$WORKSPACE_ROOT/root" ]; then
        if ! mkdir -p "$WORKSPACE_ROOT/root" 2>/dev/null; then
            echo "setup_install_root: Unable to create INSTALL_ROOT at $WORKSPACE_ROOT/root" >&2
            return 1
        fi
    fi
    if [ -n "$INSTALL_TAG" ]; then
        if [ ! -d "$WORKSPACE_ROOT/root/$INSTALL_TAG" ]; then
            if ! mkdir -p "$WORKSPACE_ROOT/root/$INSTALL_TAG" 2>/dev/null; then
                echo "setup_install_root: Unable to create INSTALL_ROOT at $WORKSPACE_ROOT/root/$INSTALL_TAG" >&2
                return 1
            fi
        fi
        INSTALL_ROOT="$WORKSPACE_ROOT/root/$INSTALL_TAG"
        # Make previously installed tools and libraries visible to builds.
        if [ -d "$INSTALL_ROOT/bin" ]; then
            prepend_path "$INSTALL_ROOT/bin"
        fi
        if [ -d "$INSTALL_ROOT/sbin" ]; then
            prepend_path "$INSTALL_ROOT/sbin"
        fi
        if [ -d "$INSTALL_ROOT/lib64" ]; then
            append_ldflags "-L$INSTALL_ROOT/lib64"
            append_ld_library_path "$INSTALL_ROOT/lib64"
        fi
        if [ -d "$INSTALL_ROOT/lib" ]; then
            append_ldflags "-L$INSTALL_ROOT/lib"
            append_ld_library_path "$INSTALL_ROOT/lib"
        fi
        if [ -d "$INSTALL_ROOT/include" ]; then
            append_cflags "-I$INSTALL_ROOT/include"
            append_cppflags "-I$INSTALL_ROOT/include"
        fi
        return 0
    fi
    echo "setup_install_root: INSTALL_TAG or INSTALL_ROOT is not set, need to know in where to build/test" >&2
    return 1
}
# Populate REVISION when unset, preferring the SCM plugin exports:
# Subversion's SVN_REVISION first, then Git's GIT_COMMIT.
detect_revision ()
{
    if [ -z "$REVISION" ]; then
        if [ -n "$SVN_REVISION" ]; then
            REVISION="$SVN_REVISION"
        elif [ -n "$GIT_COMMIT" ]; then
            REVISION="$GIT_COMMIT"
        fi
    fi
}
# Identify the operating system / distribution and export the result as
# $DISTRIBUTION (e.g. debian, ubuntu, centos, fedora, sl, redhat, opensuse,
# suse, freebsd, sunos, openbsd, netbsd, or UNKNOWN). Requires $GREP.
detect_distribution ()
{
    DISTRIBUTION="UNKNOWN"
    if [ -f "/etc/debian_version" ]; then
        # Ubuntu also ships /etc/debian_version; uname distinguishes it.
        if uname -a 2>/dev/null | $GREP -q -i ubuntu 2>/dev/null; then
            DISTRIBUTION="ubuntu"
        else
            DISTRIBUTION="debian"
        fi
    elif [ -f "/etc/redhat-release" ]; then
        if $GREP -q -i centos /etc/redhat-release 2>/dev/null; then
            DISTRIBUTION="centos"
        elif $GREP -q -i fedora /etc/redhat-release 2>/dev/null; then
            DISTRIBUTION="fedora"
        elif $GREP -q -i "scientific linux" /etc/redhat-release 2>/dev/null; then
            DISTRIBUTION="sl"
        else
            DISTRIBUTION="redhat"
        fi
    elif [ -f "/etc/os-release" ]; then
        if $GREP -q -i opensuse /etc/os-release 2>/dev/null; then
            DISTRIBUTION="opensuse"
        fi
    elif [ -f "/etc/SuSE-release" ]; then
        if $GREP -q -i "suse linux enterprise" /etc/SuSE-release 2>/dev/null; then
            DISTRIBUTION="suse"
        fi
    elif uname -a 2>/dev/null | $GREP -q -i freebsd 2>/dev/null; then
        DISTRIBUTION="freebsd"
    elif uname -a 2>/dev/null | $GREP -q -i sunos 2>/dev/null; then
        DISTRIBUTION="sunos"
    elif uname -a 2>/dev/null | $GREP -q -i openbsd 2>/dev/null; then
        DISTRIBUTION="openbsd"
    elif uname -a 2>/dev/null | $GREP -q -i netbsd 2>/dev/null; then
        DISTRIBUTION="netbsd"
    fi
    export DISTRIBUTION
}
init ()
{
    # One-time environment setup: reset state, locate required tools
    # (grep, make, cc, ...), detect revision/distribution, and resolve
    # the workspace and install roots. The find_* helpers (defined
    # elsewhere in this file) exit non-zero if a tool is missing.
    unset _CLEANUP_TEST
    unset _SYSLOG_TRACE_PID
    unset PRE_TEST
    unset POST_TEST
    find_grep || exit 1
    detect_revision
    detect_distribution
    find_jenkins_workspace_root || exit 1
    setup_install_root || exit 1
    find_make || exit 1
    find_m4 || exit 1
    find_wget || exit 1
    find_md5sum || exit 1
    find_sha1sum || exit 1
    find_sha256sum || exit 1
    find_ccache # ccache needs to be found before cc/cxx
    find_cc || exit 1
    find_cxx || exit 1
    find_tee || exit 1
    find_date || exit 1
    find_tail || exit 1
    find_pgrep || exit 1
    find_sort || exit 1
    # prevent CTRL-Z and CTRL-C
    trap "" SIGINT 2>/dev/null >/dev/null
    trap "" SIGTSTP 2>/dev/null >/dev/null
    return 0
}
finish ()
{
    # Make any core dumps left behind under the workspace or install root
    # world-readable so the CI user can collect them afterwards.
    local dump
    find "$WORKSPACE" "$INSTALL_ROOT" -name '*core' -type f 2>/dev/null |
    while read dump; do
        chmod a+r "$dump" 2>/dev/null
    done
}
check_if_built ()
{
    # Return 0 when <name tag> was last built at the current $REVISION,
    # i.e. its .build stamp file records the same revision string.
    local name_tag="$1"
    if [ -z "$name_tag" ]; then
        echo "usage: check_if_built <name tag>" >&2
        exit 1
    fi
    if [ -z "$REVISION" ]; then
        echo "check_if_built: No REVISION is set, can't check if build is ok!" >&2
        exit 1
    fi
    local stamp="$INSTALL_ROOT/.$name_tag.build"
    if [ -f "$stamp" ]; then
        local build_rev=`cat "$stamp" 2>/dev/null`
        if [ "$REVISION" = "$build_rev" ]; then
            return 0
        fi
    fi
    return 1
}
start_build ()
{
    # Begin a build of <name tag>: clear any stale ok-stamp so a crashed
    # build cannot later be mistaken for a successful one.
    if [ -z "$1" ]; then
        echo "usage: start_build <name tag>" >&2
        exit 1
    fi
    local name_tag="$1"
    local ok_stamp="$INSTALL_ROOT/.$name_tag.ok"
    if [ -f "$ok_stamp" ] && ! rm "$ok_stamp" 2>/dev/null; then
        echo "start_build: can't remove old ok file $INSTALL_ROOT/.$name_tag.ok !" >&2
        exit 1
    fi
    echo "start_build: Starting build for $name_tag on $DISTRIBUTION"
    return 0
}
set_build_ok ()
{
    # Record a successful build of <name tag>: write the current $REVISION
    # into the .build stamp, read it back to verify it landed on disk, and
    # only then create the .ok marker that check_if_built/require rely on.
    if [ -z "$1" ]; then
        echo "usage: set_build_ok <name tag>" >&2
        exit 1
    fi
    if [ -z "$REVISION" ]; then
        echo "set_build_ok: No REVISION is set, can't check if build is ok!" >&2
        exit 1
    fi
    local name_tag="$1"
    # start_build removes the .ok marker; if it still exists the caller
    # skipped start_build and the stamp protocol cannot be trusted.
    if [ -f "$INSTALL_ROOT/.$name_tag.ok" ]; then
        echo "set_build_ok: Build already mark ok, this should not happend. Did you forget to start_build?" >&2
        exit 1
    fi
    echo "$REVISION" > "$INSTALL_ROOT/.$name_tag.build"
    if [ -f "$INSTALL_ROOT/.$name_tag.build" ]; then
        local build_rev=`cat "$INSTALL_ROOT/.$name_tag.build" 2>/dev/null`
        if [ "$REVISION" = "$build_rev" ]; then
            if ! touch "$INSTALL_ROOT/.$name_tag.ok" 2>/dev/null; then
                echo "set_build_ok: Can't tag build ok $INSTALL_ROOT/.$name_tag.ok !" >&2
                return 1
            fi
            return 0
        fi
    fi
    echo "set_build_ok: Was not able to tag build ok!" >&2
    return 1
}
check_if_tested ()
{
    # Return 0 when <name tag> was last *tested* at the current $REVISION.
    # On success, restore the saved junit report (if one was kept by
    # set_test_ok) into $WORKSPACE so Jenkins can still pick it up.
    if [ -z "$1" ]; then
        echo "usage: check_if_tested <name tag>" >&2
        exit 1
    fi
    if [ -z "$REVISION" ]; then
        echo "check_if_tested: No REVISION is set, can't check if test is ok!" >&2
        exit 1
    fi
    local name_tag="$1"
    if [ -f "$INSTALL_ROOT/.$name_tag.test" ]; then
        local build_rev=`cat "$INSTALL_ROOT/.$name_tag.test" 2>/dev/null`
        if [ "$REVISION" = "$build_rev" ]; then
            # BUG FIX: the original tested [ "...junit" ] -- a non-empty
            # string, which is always true -- so the cp below was always
            # attempted and failed when no report had been saved, making
            # an up-to-date test look stale. Test file existence instead.
            if [ -f "$INSTALL_ROOT/.$name_tag.junit" ]; then
                if ! cp -- "$INSTALL_ROOT/.$name_tag.junit" "$WORKSPACE/junit.xml" 2>/dev/null; then
                    return 1
                fi
            fi
            if [ -f "$WORKSPACE/junit.xml" ]; then
                # Refresh the timestamp so the report is not seen as stale.
                touch "$WORKSPACE/junit.xml"
            fi
            return 0
        fi
    fi
    return 1
}
start_test ()
{
    # Begin a test run for <name tag>: remove any stale .ok.test marker so
    # an interrupted run cannot masquerade as a pass. The commented block
    # below is a disabled cross-job file-locking scheme (it pairs with the
    # one in stop_test) kept for reference.
    if [ -z "$1" ]; then
        echo "usage: start_test <name tag>" >&2
        exit 1
    fi
    local name_tag="$1"
    # local time_start=`$DATE '+%s' 2>/dev/null`
    # local timeout=3600
    # local time_stop=$(( time_start + timeout ))
    # local time_now
    # local build_tag
    #
    # echo "$BUILD_TAG $$" > "$WORKSPACE_ROOT/.testing.$$"
    # build_tag=`cat "$WORKSPACE_ROOT/.testing.$$" 2>/dev/null`
    # if [ "$build_tag" != "$BUILD_TAG $$" ]; then
    #     echo "start_test: Unable to add test lock!" >&2
    #     rm -f "$WORKSPACE_ROOT/.testing.$$" 2>/dev/null
    #     return 1
    # fi
    #
    # while true; do
    #     if [ ! -f "$WORKSPACE_ROOT/.testing" ]; then
    #         if ln -s "$WORKSPACE_ROOT/.testing.$$" "$WORKSPACE_ROOT/.testing" 2>/dev/null; then
    #             build_tag=`cat "$WORKSPACE_ROOT/.testing" 2>/dev/null`
    #             if [ "$build_tag" = "$BUILD_TAG $$" ]; then
    #                 if [ -f "$INSTALL_ROOT/.$name_tag.ok.test" ]; then
    #                     if ! rm "$INSTALL_ROOT/.$name_tag.ok.test" 2>/dev/null; then
    #                         echo "start_test: can't remove old ok file $INSTALL_ROOT/.$name_tag.ok.test !" >&2
    #                         exit 1
    #                     fi
    #                 fi
    #                 export _CLEANUP_TEST=1
    #                 return 0
    #             fi
    #         fi
    #     fi
    #
    #     if [ -z "$time_now" ]; then
    #         echo "start_test: waiting for other tests to finish (timeout $timeout)"
    #     fi
    #
    #     time_now=`$DATE '+%s' 2>/dev/null`
    #     if [ "$time_now" -ge "$time_stop" ] 2>/dev/null; then
    #         break
    #     fi
    #     if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
    #         echo "start_test: Invalid timestamp from date!" >&2
    #         exit 1
    #     fi
    #     sleep 2
    # done
    #
    # echo "start_test: Unable to get test lock, timeout" >&2
    # rm -f "$WORKSPACE_ROOT/.testing.$$" 2>/dev/null
    # exit 1
    if [ -f "$INSTALL_ROOT/.$name_tag.ok.test" ]; then
        if ! rm "$INSTALL_ROOT/.$name_tag.ok.test" 2>/dev/null; then
            echo "start_test: can't remove old ok file $INSTALL_ROOT/.$name_tag.ok.test !" >&2
            exit 1
        fi
    fi
    echo "start_test: Starting test for $name_tag on $DISTRIBUTION"
    return 0
}
stop_test ()
{
    # Counterpart of start_test. The lock-release logic is disabled (the
    # commented block pairs with the disabled lock acquisition in
    # start_test), so this is currently a no-op that always succeeds.
    # local build_tag
    #
    # if [ ! -f "$WORKSPACE_ROOT/.testing" ]; then
    #     echo "stop_test: Called without a test lock file, this should not happen!" >&2
    #     return 1
    # fi
    #
    # build_tag=`cat "$WORKSPACE_ROOT/.testing.$$" 2>/dev/null`
    # if [ "$build_tag" != "$BUILD_TAG $$" ]; then
    #     echo "stop_test: Our test lock does not exist or is not our own!" >&2
    #     return 1
    # fi
    #
    # build_tag=`cat "$WORKSPACE_ROOT/.testing" 2>/dev/null`
    # if [ "$build_tag" != "$BUILD_TAG $$" ]; then
    #     echo "stop_test: Content of test lock changed during test!" >&2
    #     rm -f "$WORKSPACE_ROOT/.testing.$$" 2>/dev/null
    #     return 1
    # fi
    #
    # rm -f "$WORKSPACE_ROOT/.testing" 2>/dev/null
    # rm -f "$WORKSPACE_ROOT/.testing.$$" 2>/dev/null
    # export _CLEANUP_TEST=""
    return 0
}
set_test_ok ()
{
    # Record a successful test run of <name tag>: write $REVISION into the
    # .test stamp, verify it landed on disk, create the .ok.test marker,
    # and stash the workspace junit.xml so check_if_tested can restore it.
    if [ -z "$1" ]; then
        echo "usage: set_test_ok <name tag>" >&2
        exit 1
    fi
    if [ -z "$REVISION" ]; then
        echo "set_test_ok: No REVISION is set, can't check if test is ok!" >&2
        exit 1
    fi
    local name_tag="$1"
    # start_test removes the .ok.test marker; if it still exists the
    # caller skipped start_test.
    if [ -f "$INSTALL_ROOT/.$name_tag.ok.test" ]; then
        echo "set_test_ok: Test already mark ok, this should not happend. Did you forget to start_test?" >&2
        exit 1
    fi
    echo "$REVISION" > "$INSTALL_ROOT/.$name_tag.test"
    if [ -f "$INSTALL_ROOT/.$name_tag.test" ]; then
        local test_rev=`cat "$INSTALL_ROOT/.$name_tag.test" 2>/dev/null`
        if [ "$REVISION" = "$test_rev" ]; then
            if ! touch "$INSTALL_ROOT/.$name_tag.ok.test" 2>/dev/null; then
                echo "set_test_ok: Can't tag test ok $INSTALL_ROOT/.$name_tag.ok.test !" >&2
                return 1
            fi
            if [ -f "$WORKSPACE/junit.xml" ]; then
                if ! cp -- "$WORKSPACE/junit.xml" "$INSTALL_ROOT/.$name_tag.junit" 2>/dev/null; then
                    echo "set_test_ok: Can't copy $WORKSPACE/junit.xml to $INSTALL_ROOT/.$name_tag.junit !" >&2
                    return 1
                fi
            fi
            return 0
        fi
    fi
    echo "set_test_ok: Was not able to tag test ok!" >&2
    return 1
}
require ()
{
    # Abort unless dependency <name tag> has been built successfully.
    # Folds the dependency's build revision into $REVISION so our own
    # build/test stamps are invalidated whenever a dependency changes.
    if [ -z "$1" ]; then
        echo "usage: require <name tag>" >&2
        exit 1
    fi
    local name_tag="$1"
    local ok_file="$INSTALL_ROOT/.$name_tag.ok"
    local build_file="$INSTALL_ROOT/.$name_tag.build"
    if [ ! -f "$ok_file" ]; then
        echo "require: Required program $name_tag not found or not built!" >&2
        exit 1
    fi
    if [ ! -f "$build_file" ]; then
        echo "require: Required program $name_tag corrupt, can't find build version!" >&2
        exit 1
    fi
    local require_rev=`cat "$build_file" 2>/dev/null`
    if [ -z "$require_rev" ]; then
        echo "require: There is no build version for $name_tag!" >&2
        exit 1
    fi
    export REVISION="$REVISION-$name_tag:$require_rev"
}
check_hash ()
{
    # Verify <filename> against <hash> of the given <type> (md5, sha1 or
    # sha256). Returns 0 on match; 1 on mismatch, missing file, or an
    # unknown hash type. Uses the tool paths found by init ($MD5SUM etc).
    if [ -z "$1" -o -z "$2" -o -z "$3" ]; then
        echo "usage: check_hash <filename> <type> <hash>" >&2
        exit 1
    fi
    local filename="$1"
    local type="$2"
    local hash="$3"
    local file_hash=""
    if [ ! -f "$filename" ]; then
        return 1
    fi
    case "$type" in
        md5)    file_hash=`$MD5SUM "$filename" 2>/dev/null | awk '{print $1}'` ;;
        sha1)   file_hash=`$SHA1SUM "$filename" 2>/dev/null | awk '{print $1}'` ;;
        sha256) file_hash=`$SHA256SUM "$filename" 2>/dev/null | awk '{print $1}'` ;;
    esac
    if [ -n "$file_hash" -a "$hash" = "$file_hash" ]; then
        return 0
    fi
    return 1
}
fetch_src ()
{
    # Download <url> into the shared $WORKSPACE_ROOT/cache unless a copy
    # with a matching checksum is already cached; prints the cached path
    # on stdout. Exits (rather than returns) on any failure, since
    # callers capture stdout and cannot easily check a return code.
    if [ -z "$1" -o -z "$2" -o -z "$3" -o -z "$4" ]; then
        echo "usage: fetch_src <url> <filename> <type> <hash>" >&2
        exit 1
    fi
    local url="$1"
    local filename="$2"
    local type="$3"
    local hash="$4"
    local path_filename
    if [ ! -d "$WORKSPACE_ROOT/cache" ]; then
        if ! mkdir -p "$WORKSPACE_ROOT/cache" 2>/dev/null; then
            echo "fetch_src: Unable to create cache directory $WORKSPACE_ROOT/cache!" >&2
            exit 1
        fi
    fi
    path_filename="$WORKSPACE_ROOT/cache/$filename"
    # Cache hit: reuse only if the checksum still matches; otherwise the
    # cached copy is corrupt and must be removed before re-downloading.
    if [ -f "$path_filename" ]; then
        if check_hash "$path_filename" "$type" "$hash"; then
            echo "$path_filename"
            return 0
        fi
        if ! rm "$path_filename" 2>/dev/null; then
            echo "fetch_src: Unable to remove old invalid file $path_filename!" >&2
            exit 1
        fi
    fi
    if ! $WGET -O "$path_filename" "$url" 2>/dev/null; then
        echo "fetch_src: wget failed!" >&2
        rm -f "$path_filename" 2>/dev/null
        exit 1
    fi
    if [ ! -f "$path_filename" ]; then
        echo "fetch_src: File at $url not found at $path_filename!" >&2
        exit 1
    fi
    if ! check_hash "$path_filename" "$type" "$hash"; then
        echo "fetch_src: Checksum does not match for $path_filename!" >&2
        exit 1
    fi
    echo "$path_filename"
}
log_init ()
{
    # Create (or touch) the per-build stdout/stderr log files for
    # <log name> so later log_grep/log_waitfor calls have files to read.
    if [ -z "$1" ]; then
        # BUG FIX: the usage message used to say "log_this".
        echo "usage: log_init <log name>" >&2
        exit 1
    fi
    local name="$1"
    local log_stderr="_log.$BUILD_TAG.$name.stderr"
    local log_stdout="_log.$BUILD_TAG.$name.stdout"
    touch "$log_stderr" "$log_stdout"
}
log_this ()
{
    # Run a command, appending its stdout/stderr to the per-build log
    # files named after <log name>; returns the command's exit status.
    if [ -z "$1" -o -z "$2" ]; then
        echo "usage: log_this <log name> <command> [options ...]" >&2
        exit 1
    fi
    local name="$1"
    local log_stderr="_log.$BUILD_TAG.$name.stderr"
    local log_stdout="_log.$BUILD_TAG.$name.stdout"
    shift
    touch "$log_stderr" "$log_stdout"
    echo "log_this: logging $name for command: $*"
    # NOTE(review): unquoted $* re-splits arguments on whitespace, so an
    # argument containing spaces is broken apart -- callers appear to rely
    # on word splitting for whole command lines; confirm before tightening
    # this to "$@".
    $* 2>>"$log_stderr" >>"$log_stdout"
}
log_this_timeout ()
{
    # Like log_this, but kill the command if it runs longer than
    # <timeout> seconds (max 3600): SIGTERM first, SIGKILL after a 1s
    # grace. Returns the command's status if it finished in time,
    # 1 on timeout.
    if [ -z "$1" -o -z "$2" ]; then
        echo "usage: log_this_timeout <log name> <timeout in seconds> <command ... >" >&2
        exit 1
    fi
    local name="$1"
    local log_stderr="_log.$BUILD_TAG.$name.stderr"
    local log_stdout="_log.$BUILD_TAG.$name.stdout"
    local time_start=`$DATE '+%s' 2>/dev/null`
    local time_stop
    local time_now
    local timeout="$2"
    local pid
    shift 2
    if [ ! "$time_start" -gt 0 ] 2>/dev/null; then
        echo "log_this_timeout: Unable to get start time!" >&2
        exit 1
    fi
    if [ ! "$timeout" -gt 0 ] 2>/dev/null; then
        echo "log_this_timeout: Wrong timeout value or 0!" >&2
        exit 1
    fi
    if [ "$timeout" -gt 3600 ] 2>/dev/null; then
        echo "log_this_timeout: Too long timeout used, can't be over 3600 seconds!" >&2
        exit 1
    fi
    time_stop=$(( time_start + timeout ))
    touch "$log_stderr" "$log_stdout"
    echo "log_this_timeout: logging $name with timeout $timeout for command: $*"
    # Run the command in a background subshell so it can be polled.
    ( $* 2>>"$log_stderr" >>"$log_stdout" ) &
    pid="$!"
    if [ -z "$pid" -o "$pid" -le 0 ] 2>/dev/null; then
        echo "log_this_timeout: No pid from backgrounded program?" >&2
        return 1
    fi
    # Poll once a second: reap and propagate the status if the command
    # exits; fall through to the kill path once the deadline passes.
    while true; do
        time_now=`$DATE '+%s' 2>/dev/null`
        if [ "$time_now" -ge "$time_stop" ] 2>/dev/null; then
            break
        fi
        if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
            echo "log_this_timeout: Invalid timestamp from date!" >&2
            exit 1
        fi
        if ! kill -0 "$pid" 2>/dev/null; then
            wait "$pid"
            return "$?"
        fi
        sleep 1
    done
    kill -TERM "$pid"
    sleep 1
    if kill -0 "$pid" 2>/dev/null; then
        kill -KILL "$pid"
    fi
    return 1
}
log_force_stop ()
{
    # Give the logger helpers for <log name> up to [grace period] seconds
    # (default 5) to remove their own pid files, then SIGTERM whatever pid
    # is still recorded and delete the pid files. Always returns 0 unless
    # the date command misbehaves.
    if [ -z "$1" ]; then
        echo "usage: log_force_stop <log name> [grace period in seconds]" >&2
        exit 1
    fi
    local name="$1"
    local grace="$2"
    local log_stderr_pid="_log_pid.$BUILD_TAG.$name.stderr"
    local log_stdout_pid="_log_pid.$BUILD_TAG.$name.stdout"
    local stderr_pid
    local stdout_pid
    local time_start=`$DATE '+%s' 2>/dev/null`
    local time_stop=$(( time_start + 5 ))
    local time_now
    if [ -n "$grace" -a "$grace" -gt 0 ] 2>/dev/null; then
        time_stop=$(( time_start + grace ))
    fi
    # Wait for graceful shutdown: both pid files gone, or deadline hit.
    while true; do
        if [ ! -f "$log_stderr_pid" -a ! -f "$log_stdout_pid" ]; then
            break
        fi
        time_now=`$DATE '+%s' 2>/dev/null`
        if [ "$time_now" -ge "$time_stop" ] 2>/dev/null; then
            break;
        fi
        if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
            echo "log_force_stop: Invalid timestamp from date!" >&2
            return 1
        fi
        sleep 1
    done
    # Forcefully terminate anything still registered.
    if [ -f "$log_stderr_pid" ]; then
        stderr_pid=`cat "$log_stderr_pid"`
        if [ "$stderr_pid" -gt 0 ] 2>/dev/null; then
            kill -TERM "$stderr_pid" 2>/dev/null
        fi
        rm -f "$log_stderr_pid"
    fi
    if [ -f "$log_stdout_pid" ]; then
        stdout_pid=`cat "$log_stdout_pid"`
        if [ "$stdout_pid" -gt 0 ] 2>/dev/null; then
            kill -TERM "$stdout_pid" 2>/dev/null
        fi
        rm -f "$log_stdout_pid"
    fi
    return 0
}
log_grep ()
{
    # Grep the named log(s) for <grep string>. With -o, print the
    # matching lines; without it, run quietly and only return grep's
    # status (0 = found). Returns 1 if the requested log file(s) do not
    # exist yet.
    local output=""
    OPTIND=1
    while getopts ":o" opt; do
        case "$opt" in
            o)
                output=1
                ;;
            \?)
                echo "log_grep: Invalid option: -$OPTARG" >&2
                exit 1
                ;;
        esac
    done
    shift $((OPTIND-1))
    if [ -z "$1" -o -z "$2" -o -z "$3" ]; then
        echo "usage: log_grep [-o] <log name> <stdout|stderr|both> <grep string ...>" >&2
        exit 1
    fi
    local name="$1"
    local log_stderr="_log.$BUILD_TAG.$name.stderr"
    local log_stdout="_log.$BUILD_TAG.$name.stdout"
    local type="$2"
    local grep_string="$3"
    local log_files
    # Map the type argument to the concrete file list; an unknown type
    # leaves log_files empty and is reported below.
    case "$type" in
        stdout)
            if [ ! -f "$log_stdout" ]; then
                return 1
            fi
            log_files="$log_stdout"
            ;;
        stderr)
            if [ ! -f "$log_stderr" ]; then
                return 1
            fi
            log_files="$log_stderr"
            ;;
        both)
            if [ ! -f "$log_stdout" -a ! -f "$log_stderr" ]; then
                return 1
            fi
            log_files="$log_stdout $log_stderr"
            ;;
    esac
    if [ -z "$log_files" ]; then
        echo "log_grep: Wrong type of log file specified, should be stdout, stderr or both!" >&2
        exit 1
    fi
    if [ -n "$output" ]; then
        $GREP -- "$grep_string" $log_files 2>/dev/null
    else
        echo "log_grep: greping in $name for: $grep_string"
        $GREP -q -- "$grep_string" $log_files 2>/dev/null
    fi
}
log_grep_count ()
{
    # Succeed (0) when <grep string> occurs exactly <count> times in the
    # chosen log file(s) for <log name>; return 1 otherwise.
    # BUG FIXES: the argument check tested $3 twice and never $4, and
    # count/grep_string were read in the opposite order from the
    # documented usage line (and from the sibling syslog_grep_count);
    # both now follow the usage: <log name> <type> <count> <grep string>.
    if [ -z "$1" -o -z "$2" -o -z "$3" -o -z "$4" ]; then
        echo "usage: log_grep_count <log name> <stdout|stderr|both> <count> <grep string ...>" >&2
        exit 1
    fi
    local name="$1"
    local log_stderr="_log.$BUILD_TAG.$name.stderr"
    local log_stdout="_log.$BUILD_TAG.$name.stdout"
    local type="$2"
    local count="$3"
    local grep_string="$4"
    local log_files
    local count_found
    case "$type" in
        stdout)
            if [ ! -f "$log_stdout" ]; then
                return 1
            fi
            log_files="$log_stdout"
            ;;
        stderr)
            if [ ! -f "$log_stderr" ]; then
                return 1
            fi
            log_files="$log_stderr"
            ;;
        both)
            if [ ! -f "$log_stdout" -a ! -f "$log_stderr" ]; then
                return 1
            fi
            log_files="$log_stdout $log_stderr"
            ;;
    esac
    if [ -z "$log_files" ]; then
        echo "log_grep_count: Wrong type of log file specified, should be stdout, stderr or both!" >&2
        exit 1
    fi
    echo "log_grep_count: greping in $name, should find $count of: $grep_string"
    count_found=`$GREP -- "$grep_string" $log_files 2>/dev/null | wc -l 2>/dev/null`
    if [ "$count_found" -eq "$count" ] 2>/dev/null; then
        return 0
    fi
    return 1
}
log_waitfor ()
{
    # Poll (every 2s, up to <timeout> seconds, max 3600) until
    # <grep string> appears in the named log(s). Returns 0 when found,
    # 1 on timeout or missing log files.
    # BUG FIX: the argument check tested $3 twice and never $4, so a
    # missing grep string slipped past the usage check.
    if [ -z "$1" -o -z "$2" -o -z "$3" -o -z "$4" ]; then
        echo "usage: log_waitfor <log name> <stdout|stderr|both> <timeout in seconds> <grep string ...>" >&2
        exit 1
    fi
    local name="$1"
    local log_stderr="_log.$BUILD_TAG.$name.stderr"
    local log_stdout="_log.$BUILD_TAG.$name.stdout"
    local type="$2"
    local timeout="$3"
    local grep_string="$4"
    local time_start=`$DATE '+%s' 2>/dev/null`
    local time_stop
    local time_now
    local log_files
    case "$type" in
        stdout)
            if [ ! -f "$log_stdout" ]; then
                return 1
            fi
            log_files="$log_stdout"
            ;;
        stderr)
            if [ ! -f "$log_stderr" ]; then
                return 1
            fi
            log_files="$log_stderr"
            ;;
        both)
            if [ ! -f "$log_stdout" -a ! -f "$log_stderr" ]; then
                return 1
            fi
            log_files="$log_stdout $log_stderr"
            ;;
    esac
    if [ -z "$log_files" ]; then
        echo "log_waitfor: Wrong type of log file specified, should be stdout, stderr or both!" >&2
        exit 1
    fi
    if [ ! "$time_start" -gt 0 ] 2>/dev/null; then
        echo "log_waitfor: Unable to get start time!" >&2
        exit 1
    fi
    if [ ! "$timeout" -gt 0 ] 2>/dev/null; then
        echo "log_waitfor: Wrong timeout value or 0!" >&2
        exit 1
    fi
    if [ "$timeout" -gt 3600 ] 2>/dev/null; then
        echo "log_waitfor: Too long timeout used, can't be over 3600 seconds!" >&2
        exit 1
    fi
    time_stop=$(( time_start + timeout ))
    echo "log_waitfor: waiting for log $name to contain (timeout $timeout): $grep_string"
    while true; do
        if $GREP -q -- "$grep_string" $log_files 2>/dev/null; then
            return 0
        fi
        time_now=`$DATE '+%s' 2>/dev/null`
        if [ "$time_now" -ge "$time_stop" ] 2>/dev/null; then
            break
        fi
        if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
            echo "log_waitfor: Invalid timestamp from date!" >&2
            exit 1
        fi
        sleep 2
    done
    return 1
}
log_cleanup ()
{
    # Terminate any logger helper processes recorded in _log_pid* files,
    # remove those pid files, then delete this build's log files.
    local pid_file
    local pid
    for pid_file in _log_pid*; do
        [ -f "$pid_file" ] || continue
        pid=`cat $pid_file 2>/dev/null`
        if [ -n "$pid" -a "$pid" -gt 0 ] 2>/dev/null; then
            kill -TERM "$pid" 2>/dev/null
            rm -f "$pid_file" 2>/dev/null
        fi
    done
    rm -f "_log.$BUILD_TAG"* 2>/dev/null
}
log_remove ()
{
    # Delete the per-build stdout/stderr log files for <log name>.
    if [ -z "$1" ]; then
        echo "usage: log_remove <log name>" >&2
        exit 1
    fi
    local name="$1"
    rm -f "_log.$BUILD_TAG.$name.stderr" "_log.$BUILD_TAG.$name.stdout" 2>/dev/null
}
log_save_try ()
{
    # Rename this build's log files to *-try-<try> so a test retry starts
    # with fresh logs while keeping the failed attempt's output around.
    if [ -z "$1" ]; then
        echo "usage: log_save_try <try>" >&2
        exit 1
    fi
    local try="$1"
    local log_file
    # BUG FIX: the original piped ls into "while read", so the loop body
    # ran in a pipeline subshell and its "return 1" never reached the
    # caller -- mv failures were silently swallowed and the function
    # always returned 0. Iterate with a glob in the current shell.
    for log_file in "_log.$BUILD_TAG"*; do
        [ -e "$log_file" ] || continue
        if ! mv "$log_file" "$log_file-try-$try" 2>/dev/null; then
            echo "log_save_try: Unable to save log file $log_file to $log_file-try-$try"
            return 1
        fi
    done
    return 0
}
run_tests ()
{
    # Discover and run every numbered test directory under
    # <tests directory> (each must contain a test.sh; an "off" file marks
    # it skipped), with optional retries and PRE/POST/INTERRUPT hooks,
    # and emit a JUnit-style junit.xml into $WORKSPACE.
    # Returns 1 if any test failed or no active tests were found.
    if [ -z "$1" ]; then
        echo "usage: run_tests <tests directory>" >&2
        exit 1
    fi
    local test_dir="$1"
    local entry
    local test=()
    local test_num=0
    local test_iter=0
    local test_path
    local test_status
    local test_failed=0
    local test_start
    local test_stop
    local test_time
    local pwd=`pwd`
    local pwd2
    local retry
    local junit="$WORKSPACE/junit.xml"
    local junit_head="$WORKSPACE/junit.xml.head"
    local junit_test="$WORKSPACE/junit.xml.test"
    local junit_foot="$WORKSPACE/junit.xml.foot"
    local tail_pid
    local test_name
    local test_classname
    # Hooks must name shell functions; anything else is dropped.
    if [ -n "$PRE_TEST" ]; then
        if ! declare -F "$PRE_TEST" >/dev/null 2>/dev/null; then
            unset PRE_TEST
        fi
    fi
    if [ -n "$POST_TEST" ]; then
        if ! declare -F "$POST_TEST" >/dev/null 2>/dev/null; then
            unset POST_TEST
        fi
    fi
    if [ -n "$INTERRUPT_TEST" ]; then
        if ! declare -F "$INTERRUPT_TEST" >/dev/null 2>/dev/null; then
            unset INTERRUPT_TEST
        fi
    fi
    # Sanitize retry knobs: RETRY_TEST retries per test (default 0),
    # RETRY_SLEEP seconds between attempts (default 10).
    if [ -n "$RETRY_TEST" ]; then
        if [ ! "$RETRY_TEST" -gt 0 ] 2>/dev/null; then
            RETRY_TEST=0
        fi
    else
        RETRY_TEST=0
    fi
    if [ -n "$RETRY_SLEEP" ]; then
        if [ ! "$RETRY_SLEEP" -ge 0 ] 2>/dev/null; then
            RETRY_SLEEP=10
        fi
    else
        RETRY_SLEEP=10
    fi
    if ! cd "$test_dir" 2>/dev/null; then
        echo "run_tests: unable to change to test directory $test_dir!" >&2
        return 1
    fi
    rm -f "$junit" "$junit_test"
    echo '<?xml version="1.0" encoding="UTF-8"?>' > "$junit_head"
    # BUG FIX: this second echo used '>' and truncated the header file,
    # throwing away the XML declaration written just above; append.
    echo '<testsuites>' >> "$junit_head"
    # BUG FIX: '^[0-9]*' matches every name (zero-or-more digits); use
    # '^[0-9]' so only the numbered test directories are selected.
    ls -1 2>/dev/null | $GREP '^[0-9]' | $GREP -v '\.off$' 2>/dev/null >"_tests.$BUILD_TAG"
    while read entry; do
        if [ -d "$entry" -a -f "$entry/test.sh" ]; then
            if [ -f "$entry/off" ]; then
                # Disabled test: record it as skipped in the report.
                test_name=`echo "$entry"|sed 's%\.% %g'|awk '{print $3}'`
                if [ -z "$test_name" ]; then
                    test_name='unknown'
                fi
                test_classname=`echo "$entry"|sed 's%\.% %g'|awk '{print $1 "." $2}'`
                if [ -z "$test_classname" ]; then
                    test_classname='unknown.unknown'
                fi
                echo '<testsuite name="'"$entry"'" tests="1" skip="1">' >> "$junit_test"
                echo '<testcase name="'"$test_name"'" classname="'"$test_classname"'">' >> "$junit_test"
                echo '<skipped message="Skipped">Test skipped, disabled with off file</skipped>' >> "$junit_test"
                echo '</testcase>' >> "$junit_test"
                echo '</testsuite>' >> "$junit_test"
            else
                test[test_num]="$entry"
                test_num=$(( test_num + 1 ))
            fi
        fi
    done <"_tests.$BUILD_TAG"
    rm -f "_tests.$BUILD_TAG" 2>/dev/null
    if [ "$test_num" -le 0 ] 2>/dev/null; then
        echo "run_tests: no active tests found!" >&2
        cd "$pwd"
        # Do not generate JUnit if there is no tests or all tests skipped because
        # Jenkins might mark it failed otherwise
        rm -f "$junit_head" "$junit_test" "$junit_foot"
        return 1
    fi
    if [ -n "$INTERRUPT_TEST" ]; then
        STOP_TEST=0
        trap "STOP_TEST=1" SIGINT
    fi
    echo "Running tests ..."
    while [ "$test_iter" -lt "$test_num" ] 2>/dev/null; do
        retry=0
        test_path="${test[test_iter]}"
        test_iter=$(( test_iter + 1 ))
        test_start=`date +%s`
        # Directory names look like <group>.<subgroup>.<name>; split on dots.
        test_name=`echo "$test_path"|sed 's%\.% %g'|awk '{print $3}'`
        if [ -z "$test_name" ]; then
            test_name='unknown'
        fi
        test_classname=`echo "$test_path"|sed 's%\.% %g'|awk '{print $1 "." $2}'`
        if [ -z "$test_classname" ]; then
            test_classname='unknown.unknown'
        fi
        echo "##### `date` $test_iter/$test_num $test_path ... "
        pwd2=`pwd`
        cd "$test_path" 2>/dev/null &&
        rm -f "_test.$BUILD_TAG" &&
        touch "_test.$BUILD_TAG" &&
        while [ "$retry" -le "$RETRY_TEST" ] 2>/dev/null; do
            if [ "$retry" -gt 0 ] 2>/dev/null; then
                # Retry: stop tracing, show the failed output, stash the
                # previous attempt's logs, then start clean after a nap.
                syslog_stop &&
                cat "_test.$BUILD_TAG" &&
                mv "_test.$BUILD_TAG" "_test.$BUILD_TAG-try-$retry" &&
                log_save_try "$retry" &&
                syslog_save_try "$retry" ||
                {
                    echo "##### `date` $test_iter/$test_num $test_path ... Unable to retry"
                    test_status=1
                    break
                }
                echo "##### `date` $test_iter/$test_num $test_path ... RETRY $retry in $RETRY_SLEEP seconds"
                sleep "$RETRY_SLEEP"
                rm -f "_test.$BUILD_TAG"
                touch "_test.$BUILD_TAG"
            fi
            syslog_trace &&
            if [ -n "$PRE_TEST" ]; then
                $PRE_TEST "$test_path"
            fi &&
            ( source ./test.sh ) >> "_test.$BUILD_TAG" 2>&1
            test_status="$?"
            if [ -n "$INTERRUPT_TEST" -a "$STOP_TEST" = "1" ]; then
                cat "_test.$BUILD_TAG"
                echo "##### `date` $test_iter/$test_num $test_path ... INTERRUPTED"
                break
            fi
            if [ "$test_status" -eq 0 ] 2>/dev/null; then
                break
            fi
            retry=$(( retry + 1 ))
        done
        test_stop=`date +%s`
        test_time=0
        if [ "$test_start" -gt 0 -a "$test_stop" -gt 0 ] 2>/dev/null; then
            test_time=$(( test_stop - test_start ))
        fi
        syslog_stop
        if [ -n "$INTERRUPT_TEST" -a "$STOP_TEST" = "1" ]; then
            $INTERRUPT_TEST "$test_path"
            test_failed=1
            break
        elif [ -n "$POST_TEST" ]; then
            $POST_TEST "$test_path" "$test_status"
        fi
        if [ "$test_status" -eq 0 ] 2>/dev/null; then
            cat "_test.$BUILD_TAG"
            echo "##### `date` $test_iter/$test_num $test_path ... OK"
            log_cleanup
            syslog_cleanup
            echo '<testsuite name="'"$test_path"'" tests="1" time="'"$test_time"'">' >> "$junit_test"
            echo '<testcase name="'"$test_name"'" classname="'"$test_classname"'" time="'"$test_time"'">' >> "$junit_test"
            echo '</testcase>' >> "$junit_test"
            echo '<system-out>' >> "$junit_test"
            # BUG FIX: the replacements '\&', '\<' and '\>' are literal
            # no-ops in sed, so raw &, < and > leaked into the XML and
            # produced malformed reports; escape to XML entities.
            cat "_test.$BUILD_TAG" | sed 's%&%\&amp;%g' | sed 's%<%\&lt;%g' | sed 's%>%\&gt;%g' >> "$junit_test" 2>/dev/null
            echo '</system-out>' >> "$junit_test"
            echo '</testsuite>' >> "$junit_test"
            rm -f "_test.$BUILD_TAG"*
        else
            test_failed=$(( test_failed + 1 ))
            cat "_test.$BUILD_TAG"
            echo "##### `date` $test_iter/$test_num $test_path ... FAILED!"
            echo '<testsuite name="'"$test_path"'" tests="1" time="'"$test_time"'">' >> "$junit_test"
            echo '<testcase name="'"$test_name"'" classname="'"$test_classname"'" time="'"$test_time"'">' >> "$junit_test"
            echo '<failure message="Failed">Test failed, exit code '"$test_status"'</failure>' >> "$junit_test"
            echo '</testcase>' >> "$junit_test"
            echo '<system-err>' >> "$junit_test"
            # BUG FIX: escape &, < and > properly (see above).
            cat "_test.$BUILD_TAG" | sed 's%&%\&amp;%g' | sed 's%<%\&lt;%g' | sed 's%>%\&gt;%g' >> "$junit_test" 2>/dev/null
            echo '</system-err>' >> "$junit_test"
            echo '</testsuite>' >> "$junit_test"
        fi
        if ! cd "$pwd2" 2>/dev/null; then
            echo "run_tests: unable to change back to test directory $pwd2 after running a test!" >&2
            test_failed=1
            break
        fi
    done
    if [ -n "$INTERRUPT_TEST" ]; then
        trap "" SIGINT
    fi
    echo '</testsuites>' > "$junit_foot"
    cat "$junit_head" "$junit_test" "$junit_foot" > "$junit" 2>/dev/null
    rm -f "$junit_head" "$junit_test" "$junit_foot"
    if ! cd "$pwd" 2>/dev/null; then
        echo "run_tests: unable to change back to directory $pwd after running tests!" >&2
        return 1
    fi
    if [ "$test_failed" -gt 0 ] 2>/dev/null; then
        return 1
    fi
}
run_test ()
{
    # Run a single test: source <test directory>/test.sh in a subshell
    # under a syslog trace, honoring the PRE/POST/INTERRUPT hooks.
    # Returns the test's status (0 = OK).
    if [ -z "$1" -o -z "$2" ]; then
        echo "usage: run_test <test name> <test directory>" >&2
        exit 1
    fi
    local test_name="$1"
    local test_dir="$2"
    local test_status
    local pwd=`pwd`
    # Hooks must name shell functions; anything else is dropped.
    if [ -n "$PRE_TEST" ]; then
        if ! declare -F "$PRE_TEST" >/dev/null 2>/dev/null; then
            unset PRE_TEST
        fi
    fi
    if [ -n "$POST_TEST" ]; then
        if ! declare -F "$POST_TEST" >/dev/null 2>/dev/null; then
            unset POST_TEST
        fi
    fi
    if [ -n "$INTERRUPT_TEST" ]; then
        if ! declare -F "$INTERRUPT_TEST" >/dev/null 2>/dev/null; then
            unset INTERRUPT_TEST
        fi
    fi
    if [ ! -f "$test_dir/test.sh" ]; then
        echo "run_test: no test.sh in test $test_name ($test_dir)!" >&2
        return 1
    fi
    if ! cd "$test_dir" 2>/dev/null; then
        echo "run_test: unable to change to test $test_name directory $test_dir!" >&2
        return 1
    fi
    if [ -n "$INTERRUPT_TEST" ]; then
        STOP_TEST=0
        trap "STOP_TEST=1" SIGINT
    fi
    echo "##### Running test $test_name ..."
    if [ -n "$PRE_TEST" ]; then
        $PRE_TEST "$test_name"
    fi &&
    syslog_trace &&
    ( source ./test.sh )
    test_status="$?"
    syslog_stop
    if [ -n "$INTERRUPT_TEST" -a "$STOP_TEST" = "1" ]; then
        # BUG FIX: this branch referenced $test_iter/$test_num/$test_path,
        # which are locals of run_tests and are unset here; report and
        # pass the test name this function was actually given.
        echo "##### `date` Test $test_name ... INTERRUPTED"
        $INTERRUPT_TEST "$test_name"
        trap "" SIGINT
        return 1
    elif [ -n "$POST_TEST" ]; then
        $POST_TEST "$test_name" "$test_status"
    fi
    if [ "$test_status" -eq 0 ] 2>/dev/null; then
        echo "##### Test $test_name ... OK"
        log_cleanup
        syslog_cleanup
    else
        echo "##### Test $test_name ... FAILED!"
    fi
    if ! cd "$pwd" 2>/dev/null; then
        echo "run_test: unable to change back to directory $pwd after running test $test_name!" >&2
        return 1
    fi
    if [ "$test_status" -ne 0 ] 2>/dev/null; then
        return 1
    fi
}
syslog_trace ()
{
    # Start following the host's syslog into _syslog.$BUILD_TAG so tests
    # can grep/wait on syslog output. Records the tail pid in
    # _SYSLOG_TRACE_PID; syslog_stop must be called to end the trace.
    # Exits if a trace is already running or the log file is unreadable.
    if [ -n "$_SYSLOG_TRACE_PID" ]; then
        echo "syslog_trace: Syslog trace already running (pid $_SYSLOG_TRACE_PID)!" >&2
        exit 1
    fi
    local syslog_file
    # Pick the syslog location for the detected distribution.
    case "$DISTRIBUTION" in
        debian | \
        ubuntu )
            syslog_file="/var/log/syslog"
            ;;
        redhat | \
        centos | \
        sl | \
        opensuse | \
        suse | \
        freebsd | \
        netbsd | \
        openbsd )
            syslog_file="/var/log/messages"
            ;;
        sunos )
            syslog_file="/var/adm/messages"
            ;;
    esac
    if [ -z "$syslog_file" ]; then
        echo "syslog_trace: Unable to start trace of syslog: no syslog file set" >&2
        exit 1
    fi
    if [ ! -r "$syslog_file" ]; then
        echo "syslog_trace: Unable to start trace of syslog: no access to $syslog_file" >&2
        exit 1
    fi
    # $TAIL_FOLLOW is resolved elsewhere (by the find_tail helper).
    $TAIL_FOLLOW "$syslog_file" >"_syslog.$BUILD_TAG" 2>/dev/null &
    _SYSLOG_TRACE_PID="$!"
    if [ -z "$_SYSLOG_TRACE_PID" -o ! "$_SYSLOG_TRACE_PID" -gt 0 ] 2>/dev/null; then
        echo "syslog_trace: Unable to start trace of syslog!" >&2
        exit 1
    fi
    # Make sure the tail did not die immediately (e.g. bad options).
    if ! kill -0 "$_SYSLOG_TRACE_PID" 2>/dev/null >/dev/null; then
        wait "$_SYSLOG_TRACE_PID"
        echo "syslog_trace: Unable to start trace of syslog: exited with status $?"
        exit 1
    fi
    echo "syslog_trace: trace started (pid $_SYSLOG_TRACE_PID)"
}
syslog_stop ()
{
    # Stop the background syslog tail started by syslog_trace and clear
    # _SYSLOG_TRACE_PID. Exits if no trace is running, or if the kill
    # fails (in which case the pid variable is left set and detected
    # by the second check below).
    if [ -z "$_SYSLOG_TRACE_PID" ]; then
        echo "syslog_stop: Syslog trace not started!" >&2
        exit 1
    fi
    if kill -TERM "$_SYSLOG_TRACE_PID" 2>/dev/null; then
        wait "$_SYSLOG_TRACE_PID" 2>/dev/null
        unset _SYSLOG_TRACE_PID
    fi
    if [ -n "$_SYSLOG_TRACE_PID" ]; then
        echo "syslog_stop: Unable to stop trace of syslog!" >&2
        exit 1
    fi
    echo "syslog_stop: trace stopped"
}
syslog_waitfor ()
{
    # Poll (every 2s, up to <timeout> seconds, max 3600) until the traced
    # syslog contains <grep string>. Returns 0 when found; 1 on timeout
    # or when the trace has stopped (_SYSLOG_TRACE_PID unset).
    if [ -z "$1" -o -z "$2" ]; then
        echo "usage: syslog_waitfor <timeout in seconds> <grep string ...>" >&2
        exit 1
    fi
    local time_start=`$DATE '+%s' 2>/dev/null`
    local time_stop
    local time_now
    local timeout="$1"
    local grep_string="$2"
    if [ ! -f "_syslog.$BUILD_TAG" ]; then
        echo "syslog_waitfor: No syslog file to grep from!" >&2
        exit 1
    fi
    if [ ! "$time_start" -gt 0 ] 2>/dev/null; then
        echo "syslog_waitfor: Unable to get start time!" >&2
        exit 1
    fi
    if [ ! "$timeout" -gt 0 ] 2>/dev/null; then
        echo "syslog_waitfor: Wrong timeout value or 0!" >&2
        exit 1
    fi
    if [ "$timeout" -gt 3600 ] 2>/dev/null; then
        echo "syslog_waitfor: Too long timeout used, can't be over 3600 seconds!" >&2
        exit 1
    fi
    time_stop=$(( time_start + timeout ))
    echo "syslog_waitfor: waiting for syslog to contain (timeout $timeout): $grep_string"
    while true; do
        if $GREP -q -- "$grep_string" "_syslog.$BUILD_TAG" 2>/dev/null; then
            return 0
        fi
        time_now=`$DATE '+%s' 2>/dev/null`
        # Give up early if the trace is no longer running -- no new lines
        # can appear in the capture file.
        if [ -z "$_SYSLOG_TRACE_PID" -o "$time_now" -ge "$time_stop" ] 2>/dev/null; then
            break
        fi
        if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
            echo "syslog_waitfor: Invalid timestamp from date!" >&2
            exit 1
        fi
        sleep 2
    done
    return 1
}
syslog_waitfor_count ()
{
    # Poll (every 2s, up to <timeout> seconds, max 3600) until the traced
    # syslog contains exactly <count> occurrences of <grep string>.
    # Returns 0 on an exact match; 1 on timeout or a stopped trace.
    if [ -z "$1" -o -z "$2" -o -z "$3" ]; then
        echo "usage: syslog_waitfor_count <timeout in seconds> <count> <grep string ...>" >&2
        exit 1
    fi
    local time_start=`$DATE '+%s' 2>/dev/null`
    local time_stop
    local time_now
    local timeout="$1"
    local count="$2"
    local grep_string="$3"
    local count_found
    if [ ! -f "_syslog.$BUILD_TAG" ]; then
        echo "syslog_waitfor_count: No syslog file to grep from!" >&2
        exit 1
    fi
    if [ ! "$time_start" -gt 0 ] 2>/dev/null; then
        echo "syslog_waitfor_count: Unable to get start time!" >&2
        exit 1
    fi
    if [ ! "$timeout" -gt 0 ] 2>/dev/null; then
        echo "syslog_waitfor_count: Wrong timeout value or 0!" >&2
        exit 1
    fi
    if [ "$timeout" -gt 3600 ] 2>/dev/null; then
        echo "syslog_waitfor_count: Too long timeout used, can't be over 3600 seconds!" >&2
        exit 1
    fi
    time_stop=$(( time_start + timeout ))
    echo "syslog_waitfor_count: waiting for syslog to contain $count counts of (timeout $timeout): $grep_string"
    while true; do
        count_found=`$GREP -- "$grep_string" "_syslog.$BUILD_TAG" 2>/dev/null | wc -l 2>/dev/null`
        # Exact match required -- an overshoot past <count> never succeeds.
        if [ "$count_found" -eq "$count" ] 2>/dev/null; then
            return 0
        fi
        time_now=`$DATE '+%s' 2>/dev/null`
        if [ -z "$_SYSLOG_TRACE_PID" -o "$time_now" -ge "$time_stop" ] 2>/dev/null; then
            break
        fi
        if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
            echo "syslog_waitfor_count: Invalid timestamp from date!" >&2
            exit 1
        fi
        sleep 2
    done
    return 1
}
syslog_grep ()
{
    # Quietly grep the captured syslog trace for <grep string>; the exit
    # status is grep's (0 = found, 1 = not found).
    if [ -z "$1" ]; then
        echo "usage: syslog_grep <grep string ...>" >&2
        exit 1
    fi
    local grep_string="$1"
    local trace_file="_syslog.$BUILD_TAG"
    if [ ! -f "$trace_file" ]; then
        echo "syslog_grep: No syslog file to grep from!" >&2
        exit 1
    fi
    echo "syslog_grep: greping syslog for: $grep_string"
    $GREP -q -- "$grep_string" "$trace_file" 2>/dev/null
}
syslog_grep_count ()
{
    # Succeed (0) when the captured syslog trace contains exactly <count>
    # lines matching <grep string>; return 1 otherwise.
    if [ -z "$1" -o -z "$2" ]; then
        echo "usage: syslog_grep_count <count> <grep string ...>" >&2
        exit 1
    fi
    local count="$1"
    local grep_string="$2"
    local trace_file="_syslog.$BUILD_TAG"
    local count_found
    if [ ! -f "$trace_file" ]; then
        echo "syslog_grep_count: No syslog file to grep from!" >&2
        exit 1
    fi
    echo "syslog_grep_count: greping syslog, should find $count of: $grep_string"
    count_found=`$GREP -- "$grep_string" "$trace_file" 2>/dev/null | wc -l 2>/dev/null`
    if [ "$count_found" -eq "$count" ] 2>/dev/null; then
        return 0
    fi
    return 1
}
syslog_grep_count2 ()
{
    # Print (on stdout) how many lines of the captured syslog trace match
    # <grep string>; always returns 0 unless wc produced garbage.
    if [ -z "$1" ]; then
        echo "usage: syslog_grep_count2 <grep string ...>" >&2
        exit 1
    fi
    local grep_string="$1"
    local count_found
    if [ ! -f "_syslog.$BUILD_TAG" ]; then
        echo "syslog_grep_count2: No syslog file to grep from!" >&2
        exit 1
    fi
    count_found=`$GREP -- "$grep_string" "_syslog.$BUILD_TAG" 2>/dev/null | wc -l 2>/dev/null | awk '{print $1}'`
    if [ "$count_found" -lt 0 ] 2>/dev/null; then
        # BUG FIX: this error used to identify itself as "syslog_grep_count".
        echo "syslog_grep_count2: Invalid count returned from wc -l '$count_found'" >&2
        exit 1
    fi
    echo "$count_found"
    return 0
}
syslog_cleanup ()
{
    # Remove this build's syslog capture and any saved -try-<n> copies.
    rm -f "_syslog.$BUILD_TAG"* 2>/dev/null
}
syslog_save_try ()
{
    # Preserve the current syslog capture as _syslog.$BUILD_TAG-try-<try>
    # before a retry, so the failed attempt's trace is kept.
    if [ -z "$1" ]; then
        echo "usage: syslog_save_try <try>" >&2
        exit 1
    fi
    local try="$1"
    local src="_syslog.$BUILD_TAG"
    local dst="_syslog.$BUILD_TAG-try-$try"
    if mv "$src" "$dst" 2>/dev/null; then
        return 0
    fi
    echo "syslog_save_try: Unable to save syslog file _syslog.$BUILD_TAG to _syslog.$BUILD_TAG-try-$try"
    return 1
}
apply_parameter ()
{
    # Replace every @<parameter tag>@ placeholder with <parameter value>
    # in the given files. Two passes: validate all targets first, then
    # rewrite each via a $$-suffixed temporary so a failed sed never
    # clobbers the original.
    # NOTE(review): parameter_value is spliced straight into a sed
    # replacement, so values containing '%', '\' or '&' would corrupt the
    # expression -- confirm callers only pass plain paths/strings.
    if [ -z "$1" -o -z "$2" -o -z "$3" ]; then
        echo "usage: apply_parameter <parameter tag> <parameter value> <files ... >" >&2
        echo " ex: apply_parameter \"INSTALL_ROOT\" \"\$INSTALL_ROOT\" conf.xml" >&2
        exit 1
    fi
    local parameter_tag="$1"
    local parameter_value="$2"
    shift 2
    local files="$*"
    local file
    # '@' delimits placeholders, so it cannot appear inside the tag.
    if echo "$parameter_tag" | $GREP -q "@" 2>/dev/null; then
        echo "apply_parameter: parameter tag contains '@', it must not" >&2
        return 1
    fi
    for file in $files; do
        if [ ! -f "$file" ]; then
            echo "apply_parameter: File $file not found" >&2
            return 1
        fi
        if [ -f "$file.$$" ]; then
            echo "apply_parameter: Temporary file $file.$$ exists but it should not" >&2
            return 1
        fi
    done
    for file in $files; do
        sed 's%@'"$parameter_tag"'@%'"$parameter_value"'%g' "$file" > "$file.$$" 2>/dev/null &&
        mv "$file.$$" "$file" 2>/dev/null ||
        {
            echo "apply_parameter: Unable to apply parameter $parameter_tag value $parameter_value to file $file" >&2
            return 1
        }
    done
    return 0
}
sed_inplace ()
{
    # Apply a sed <expression> in place to each listed file, going through
    # a $$-suffixed temporary so a failed sed never clobbers the original.
    if [ -z "$1" -o -z "$2" ]; then
        echo "usage: sed_inplace <expression> <files ... >" >&2
        exit 1
    fi
    local expression="$1"
    shift 1
    local files="$*"
    local file
    # First pass: validate every target before touching anything.
    for file in $files; do
        if [ ! -f "$file" ]; then
            echo "sed_inplace: File $file not found" >&2
            return 1
        fi
        if [ -f "$file.$$" ]; then
            echo "sed_inplace: Temporary file $file.$$ exists but it should not" >&2
            return 1
        fi
    done
    # Second pass: rewrite each file through its temporary.
    for file in $files; do
        if sed "$expression" "$file" > "$file.$$" 2>/dev/null &&
           mv "$file.$$" "$file" 2>/dev/null; then
            continue
        fi
        echo "sed_inplace: Unable to sed inplace file $file" >&2
        return 1
    done
    return 0
}
# try_run <timeout in seconds> <command ...>
# Runs the command in a background subshell and waits up to <timeout> seconds
# (max 3600). On normal completion returns the command's exit status; on
# timeout sends TERM (escalating to KILL) and returns 1.
try_run ()
{
if [ -z "$1" -o -z "$2" ]; then
echo "usage: try_run <timeout in seconds> <command ... >" >&2
exit 1
fi
local time_start=`$DATE '+%s' 2>/dev/null`
local time_stop
local time_now
local timeout="$1"
local pid
shift
if [ ! "$time_start" -gt 0 ] 2>/dev/null; then
echo "try_run: Unable to get start time!" >&2
exit 1
fi
if [ ! "$timeout" -gt 0 ] 2>/dev/null; then
echo "try_run: Wrong timeout value or 0!" >&2
exit 1
fi
if [ "$timeout" -gt 3600 ] 2>/dev/null; then
echo "try_run: Too long timeout used, can't be over 3600 seconds!" >&2
exit 1
fi
time_stop=$(( time_start + timeout ))
# Run in a subshell so there is a single PID to monitor.
( $* ) &
pid="$!"
if [ -z "$pid" -o "$pid" -le 0 ] 2>/dev/null; then
echo "try_run: No pid from backgrounded program?" >&2
return 1
fi
# Poll once per second until the command exits or the deadline passes.
while true; do
time_now=`$DATE '+%s' 2>/dev/null`
if [ "$time_now" -ge "$time_stop" ] 2>/dev/null; then
break
fi
if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
echo "try_run: Invalid timestamp from date!" >&2
exit 1
fi
# kill -0 only probes whether the process still exists.
if ! kill -0 "$pid" 2>/dev/null; then
wait "$pid"
return "$?"
fi
sleep 1
done
# Deadline reached: terminate, escalate to KILL if still alive.
kill -TERM "$pid"
sleep 1
if kill -0 "$pid" 2>/dev/null; then
kill -KILL "$pid"
fi
return 1
}
# waitfor_this <file to grep> <timeout in seconds> <grep pattern>
# Polls <file> every 2 seconds until it contains <pattern> (returns 0) or
# <timeout> (max 3600) seconds elapse (returns 1).
waitfor_this ()
{
if [ -z "$1" -o -z "$2" -o -z "$3" ]; then
echo "usage: waitfor_this <file to grep> <timeout in seconds> <grep string ...>" >&2
exit 1
fi
local time_start=`$DATE '+%s' 2>/dev/null`
local time_stop
local time_now
local file="$1"
local timeout="$2"
local grep_string="$3"
if [ ! -f "$file" ]; then
echo "waitfor_this: No file to grep from!" >&2
exit 1
fi
if [ ! "$time_start" -gt 0 ] 2>/dev/null; then
echo "waitfor_this: Unable to get start time!" >&2
exit 1
fi
if [ ! "$timeout" -gt 0 ] 2>/dev/null; then
echo "waitfor_this: Wrong timeout value or 0!" >&2
exit 1
fi
if [ "$timeout" -gt 3600 ] 2>/dev/null; then
echo "waitfor_this: Too long timeout used, can't be over 3600 seconds!" >&2
exit 1
fi
time_stop=$(( time_start + timeout ))
echo "waitfor_this: waiting for $file to contain (timeout $timeout): $grep_string"
while true; do
if $GREP -q -- "$grep_string" "$file" 2>/dev/null; then
return 0
fi
time_now=`$DATE '+%s' 2>/dev/null`
if [ "$time_now" -ge "$time_stop" ] 2>/dev/null; then
break
fi
if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
echo "waitfor_this: Invalid timestamp from date!" >&2
exit 1
fi
sleep 2
done
return 1
}
# waitfor_count_this <file to grep> <timeout in seconds> <count> <grep pattern>
# Polls <file> every 2 seconds until exactly <count> lines match <pattern>
# (returns 0) or <timeout> (max 3600) seconds elapse (returns 1).
waitfor_count_this ()
{
if [ -z "$1" -o -z "$2" -o -z "$3" -o -z "$4" ]; then
echo "usage: waitfor_count_this <file to grep> <timeout in seconds> <count> <grep string ...>" >&2
exit 1
fi
local time_start=`$DATE '+%s' 2>/dev/null`
local time_stop
local time_now
local file="$1"
local timeout="$2"
local count="$3"
local grep_string="$4"
local count_found
if [ ! -f "$file" ]; then
echo "waitfor_count_this: No file to grep from!" >&2
exit 1
fi
if [ ! "$time_start" -gt 0 ] 2>/dev/null; then
echo "waitfor_count_this: Unable to get start time!" >&2
exit 1
fi
if [ ! "$timeout" -gt 0 ] 2>/dev/null; then
echo "waitfor_count_this: Wrong timeout value or 0!" >&2
exit 1
fi
if [ "$timeout" -gt 3600 ] 2>/dev/null; then
echo "waitfor_count_this: Too long timeout used, can't be over 3600 seconds!" >&2
exit 1
fi
time_stop=$(( time_start + timeout ))
echo "waitfor_count_this: waiting for $file to contain $count counts of (timeout $timeout): $grep_string"
while true; do
count_found=`$GREP -- "$grep_string" "$file" 2>/dev/null | wc -l 2>/dev/null`
if [ "$count_found" -eq "$count" ] 2>/dev/null; then
return 0
fi
time_now=`$DATE '+%s' 2>/dev/null`
if [ "$time_now" -ge "$time_stop" ] 2>/dev/null; then
break
fi
if [ -z "$time_now" -o ! "$time_now" -lt "$time_stop" ] 2>/dev/null; then
echo "waitfor_count_this: Invalid timestamp from date!" >&2
exit 1
fi
sleep 2
done
return 1
}
# grep_this <file to grep> <grep pattern>
# Returns grep's exit status: 0 when <file> contains <pattern>, else non-zero.
grep_this ()
{
if [ -z "$1" -o -z "$2" ]; then
echo "usage: grep_this <file to grep> <grep string ...>" >&2
exit 1
fi
local file="$1"
local grep_string="$2"
if [ ! -f "$file" ]; then
echo "grep_this: No file to grep from!" >&2
exit 1
fi
echo "grep_this: greping in $file for: $grep_string"
$GREP -q -- "$grep_string" "$file" 2>/dev/null
}
# grep_count_this <file to grep> <count> <grep pattern>
# Succeeds (0) only when <file> contains exactly <count> matching lines.
grep_count_this ()
{
if [ -z "$1" -o -z "$2" -o -z "$3" ]; then
echo "usage: grep_count_this <file to grep> <count> <grep string ...>" >&2
exit 1
fi
local file="$1"
local count="$2"
local grep_string="$3"
local count_found
if [ ! -f "$file" ]; then
echo "grep_count_this: No file to grep from!" >&2
exit 1
fi
echo "grep_count_this: greping in $file, should find $count of: $grep_string"
count_found=`$GREP -- "$grep_string" "$file" 2>/dev/null | wc -l 2>/dev/null`
if [ "$count_found" -eq "$count" ] 2>/dev/null; then
return 0
fi
return 1
}
# grep_count_this2 <file to grep> <grep pattern>
# Prints the number of lines in <file> matching <pattern> (0 when none).
# The awk pass strips the leading whitespace some wc implementations emit.
grep_count_this2 ()
{
    if [ -z "$1" -o -z "$2" ]; then
        echo "usage: grep_count_this2 <file to grep> <grep string ...>" >&2
        exit 1
    fi
    local file="$1"
    local grep_string="$2"
    local count_found
    if [ ! -f "$file" ]; then
        echo "grep_count_this2: No file to grep from!" >&2
        exit 1
    fi
    # Fix: the awk program was missing its closing quote ("awk '{print $1}`"),
    # which made the whole command substitution a shell syntax error.
    count_found=`$GREP -- "$grep_string" "$file" 2>/dev/null | wc -l 2>/dev/null | awk '{print $1}'`
    if [ "$count_found" -lt 0 ] 2>/dev/null; then
        echo "grep_count_this2: Invalid count returned from wc -l '$count_found'" >&2
        exit 1
    fi
    echo "$count_found"
    return 0
}
|
// Application entry point: registers global styles, plugins and instance
// helpers, then mounts the root Vue instance onto #app.
import Vue from 'vue'
import 'normalize.css/normalize.css' // A modern alternative to CSS resets
import ElementUI from 'element-ui'
import 'element-ui/lib/theme-chalk/index.css'
import locale from 'element-ui/lib/locale/lang/zh-CN' // lang i18n
import '@/styles/index.scss' // global css
import App from './App'
import store from './store'
import router from './router'
import '@/icons' // icon
import '@/permission' // permission control
import echarts from 'echarts'
import '@/assets/ali_icons/iconfont.css'
import '@/assets/ali_svg/iconfont.js'
// Expose ECharts on every component instance as this.$echarts.
Vue.prototype.$echarts = echarts
import { baseUrl } from './const'
// Expose the API base URL on every component instance as this.$baseUrl.
Vue.prototype.$baseUrl = baseUrl
// set ElementUI lang to EN
Vue.use(ElementUI, { locale })
import pdf from 'vue-pdf'
Vue.use(pdf)
Vue.config.productionTip = false
// Root Vue instance (also exported at the bottom of this file).
const vm = new Vue({
el: '#app',
router,
store,
render: h => h(App)
})
Date.prototype.format = function(fmt) {
var o = {
'M+': this.getMonth() + 1, //月份
'd+': this.getDate(), //日
'h+': this.getHours(), //小时
'm+': this.getMinutes(), //分
's+': this.getSeconds(), //秒
'q+': Math.floor((this.getMonth() + 3) / 3), //季度
S: this.getMilliseconds(), //毫秒
};
if (/(y+)/.test(fmt))
fmt = fmt.replace(
RegExp.$1,
(this.getFullYear() + '').substr(4 - RegExp.$1.length),
);
for (var k in o)
if (new RegExp('(' + k + ')').test(fmt))
fmt = fmt.replace(
RegExp.$1,
RegExp.$1.length == 1 ? o[k] : ('00' + o[k]).substr(('' + o[k]).length),
);
return fmt;
};
export default vm;
|
import { Component, OnInit } from '@angular/core';
import { ValidateService} from '../../services/validate.service';
import { AuthenticationService} from '../../services/authentication.service';
import { FlashMessagesService} from 'angular2-flash-messages';
import { Router} from '@angular/router';
/**
 * Registration form component. Validates the form client-side and submits
 * the new user through the authentication service.
 */
@Component({
  selector: 'app-register',
  templateUrl: './register.component.html',
  styleUrls: ['./register.component.css']
})
export class RegisterComponent implements OnInit {
  // Form fields bound from the template.
  // Fixed: use the primitive type `string` instead of the boxed `String` wrapper.
  fname: string;
  lname: string;
  uname: string;
  email: string;
  pword: string;
  rpword: string;

  constructor(
    private valService: ValidateService,
    private authService: AuthenticationService,
    private FlashMsg: FlashMessagesService,
    private router: Router
  ) { }

  /**
   * Sends already-authenticated users straight to the dashboard.
   * (The original returned `true` from this void lifecycle hook; Angular
   * ignores the return value, so it has been dropped.)
   */
  ngOnInit() {
    if (this.authService.loggedIn()) {
      this.router.navigate(['/dashboard']);
    }
  }

  /**
   * Validates all fields and registers the user, showing a flash message
   * and aborting on the first failed validation.
   */
  onRegisterSubmit() {
    const user = {
      fname: this.fname,
      lname: this.lname,
      uname: this.uname,
      email: this.email,
      pword: this.pword,
      rpword: this.rpword
    };
    if (!this.valService.valRegister(user)) {
      this.FlashMsg.show('All Fields Are Required', {cssClass: 'msg-danger msg', timeout: 5000});
      return false;
    }
    if (!this.valService.regEmailValidate(this.email)) {
      this.FlashMsg.show('Invalid Email', {cssClass: 'msg-danger msg', timeout: 5000});
      return false;
    }
    if (!this.valService.regPasswordValidate(this.pword)) {
      this.FlashMsg.show('Password Not Strong Enough', {cssClass: 'msg-danger msg', timeout: 5000});
      return false;
    }
    if (!this.valService.regRPasswordValidate(this.pword, this.rpword)) {
      this.FlashMsg.show('Passwords Must Match', {cssClass: 'msg-danger msg', timeout: 5000});
      return false;
    }
    this.authService.registerUser(user).subscribe(data => {
      if (data.success) {
        this.FlashMsg.show('Registration Complete. Go Login!', {cssClass: 'msg-accept msg', timeout: 5000});
        this.router.navigate(['/login']);
      } else {
        this.FlashMsg.show('Registration Failed!!', {cssClass: 'msg-danger msg', timeout: 5000});
        this.router.navigate(['/register']);
      }
    });
  }

  /** Live email validation; shows a flash and returns false when invalid. */
  EmailVal() {
    if (!this.valService.regEmailValidate(this.email)) {
      this.FlashMsg.show('Invalid Email', {cssClass: 'msg-danger msg', timeout: 5000});
      return false;
    }
  }

  /** Live password-strength validation; returns false when too weak. */
  PasswordVal() {
    if (!this.valService.regPasswordValidate(this.pword)) {
      this.FlashMsg.show('Password Not Strong Enough', {cssClass: 'msg-danger msg', timeout: 5000});
      return false;
    }
  }

  /** Live repeat-password validation; returns false when the two differ. */
  RPasswordVal() {
    if (!this.valService.regRPasswordValidate(this.pword, this.rpword)) {
      this.FlashMsg.show('Passwords Must Match', {cssClass: 'msg-danger msg', timeout: 5000});
      return false;
    }
  }
}
|
<reponame>carlos-eduardo-dev/proffy
// Removes the schedule field that wraps the clicked delete button, then
// re-evaluates whether the remaining delete buttons should stay visible.
function removeField($event) {
  $event.target.parentNode.remove()
  removeButtonClose()
}
// Hides every ".remove-time" button once only a single schedule item is
// left, so the last row cannot be deleted.
function removeButtonClose() {
  const scheduleItems = document.querySelectorAll('.schedule-item')
  if (scheduleItems.length > 1) {
    return
  }
  for (const button of document.querySelectorAll('.remove-time')) {
    button.style.display = 'none'
  }
}
package com.tuya.iot.suite.web.model.request.role;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import java.io.Serializable;
/**
 * Request payload for creating a role.
 *
 * @author mickey
 * @date 2021-06-01 15:00
 */
@NoArgsConstructor
@AllArgsConstructor
@Data
@Builder
public class RoleAddReq implements Serializable {
/**
 * Role display name.
 */
@ApiModelProperty("角色名称")
private String roleName;
/**
 * Role type: /admin /manager /normal.
 */
@ApiModelProperty("角色类型 /admin /manager /normal")
private String roleType;
/**
 * Free-form remark attached to the role.
 */
@ApiModelProperty("角色备注")
private String roleRemark;
}
|
package com.ceiba.reserva.modelo.dto;
import lombok.AllArgsConstructor;
import lombok.Getter;
import java.time.LocalDateTime;
/**
 * Read-only transfer object describing a reservation (getters generated by
 * Lombok; all fields set through the generated all-args constructor).
 */
@Getter
@AllArgsConstructor
public class DtoReserva {
// Reservation identifier.
private Long id;
// Identifier of the reserved combo.
private Long idCombo;
// Final price of the reservation.
private double precioFinalReserva;
// When the reservation record was created.
private LocalDateTime fechaCreacionReserva;
// Date/time the reservation is for.
private LocalDateTime fechaReservacion;
// When the reservation expires.
private LocalDateTime fechaExpiracion;
// Customer details captured with the reservation.
private String nombrePersonaReserva;
private String idPersonaReserva;
private String telefonoPersonReserva;
private String direccionPersonaReserva;
}
|
<filename>src/helpers/firestore.helper.ts
import { DocumentData } from './../specifics/exports';
/**
 * Converts a Firestore document's data, in place, into plain JS values:
 * Timestamp-like values (anything with a toDate() function) become Dates,
 * and nested objects/arrays are converted recursively — except Firestore
 * DocumentReferences, which are left untouched. Returns the same object.
 */
export function convertDataFromDb(data: DocumentData): DocumentData {
  if (!data) {
    return data;
  }
  for (const key of Object.keys(data)) {
    const value = data[key];
    if (!value) {
      continue;
    }
    if (typeof (value as any).toDate === 'function') {
      // Firebase Timestamp -> native Date.
      data[key] = (value as any).toDate();
    } else if (typeof value === 'object') {
      // Recurse into non-empty objects/arrays, but never into references.
      if (Object.keys(value).length > 0 && !isDocumentReference(value)) {
        data[key] = convertDataFromDb(value);
      }
    }
  }
  return data;
}
/**
 * Heuristic check for a Firestore DocumentReference: a reference exposes
 * id, parent, path and firestore properties, all truthy.
 */
export function isDocumentReference(data: any): boolean {
  if (!data) {
    return false;
  }
  const markers = [data.id, data.parent, data.path, data.firestore];
  return markers.every((marker) => Boolean(marker));
}
|
<reponame>jamievullo/Tag-Sale
module ApplicationHelper
  # Builds a sortable column-header link, flipping the sort direction when
  # the column is already the active sort key.
  def sortable(column, title = nil)
    title ||= column.titleize
    active_asc = (column == sort_column) && (sort_direction == "asc")
    link_to title, :sort => column, :direction => (active_asc ? "desc" : "asc")
  end

  # Human-readable condition label for an item.
  def item_condition(item)
    item.new ? "New" : "Used"
  end

  # Renders the item's attached image (nil when no image is attached).
  def image_present?(item)
    image_tag(item.image, style: 'width:30%') if item.image.attached?
  end

  # Site logo image tag.
  def logo
    image_tag("tagsalelogo1.png", height: "50")
  end

  # Chooses the navbar partial based on the authentication state.
  def render_navbar
    logged_in? ? render('layouts/logged_in_navbar') : render('layouts/logged_out_navbar')
  end
end
|
#!/bin/bash
# CI driver: build dependencies, load the environment, start the scenario
# test server in the background, then run the raw_id check against it.
make depend
cat env.sh
source env.sh
# Background DNS test server serving the deckard_raw_id scenario.
python3 -m pydnstest.testserver --scenario $(pwd)/tests/deckard_raw_id.rpl &
sleep 1
python3 -m ci.raw_id
<gh_stars>10-100
// interface for emr.
// jQuery-driven controller for the Enable Media Replace upload form:
// previews the chosen replacement file, validates its size and mime type,
// and wires up drag-and-drop, custom-date and replace-option handlers.
var emrIf = function ($)
{
var source_type;
var source_is_image;
var target_type;
var target_is_image;
var is_debug = false;
var is_dragging = false;
// Entry point: bind all form handlers and initialise state from the
// current source attachment placeholder.
this.init = function()
{
if ( emr_options.is_debug)
{
this.is_debug = true;
this.debug('EMR Debug is active');
}
$('input[name="timestamp_replace"]').on('change', $.proxy(this.checkCustomDate, this));
$('input[name="replace_type"]').on('change', $.proxy(this.showReplaceOptions, this));
$('input[name="userfile"]').on('change', $.proxy(this.handleImage, this));
// DragDrop
$('.wrap.emr_upload_form').on('dragover', $.proxy(this.dragOverArea, this));
$('.wrap.emr_upload_form').on('dragleave', $.proxy(this.dragOutArea, this));
$('.emr_drop_area').on('drop', $.proxy(this.fileDrop, this));
this.checkCustomDate();
this.loadDatePicker();
var source = $('.image_placeholder').first();
if (typeof( $(source).data('filetype') ) !== 'undefined')
{
source_type = $(source).data('filetype').trim();
this.debug('detected type - ' + source_type);
}
if (source.hasClass('is_image'))
{
source_is_image = true;
}
this.updateTextLayer(source, false);
this.showReplaceOptions();
}
// Initialise the jQuery UI datepicker used by the custom-date option.
this.loadDatePicker = function()
{
$('#emr_datepicker').datepicker({
dateFormat: emr_options.dateFormat,
onClose: function() {
var date = $(this).datepicker( 'getDate' );
if (date) {
var formattedDate = (date.getFullYear()) + "-" +
(date.getMonth()+1) + "-" +
date.getDate();
$('input[name="custom_date_formatted"]').val(formattedDate);
//$('input[name="custom_date"]').val($.datepicker.parseDate( emr_options.dateFormat, date));
}
},
});
},
// Show the custom-date field only when the radio with value 3 is selected.
this.checkCustomDate = function()
{
if ($('input[name="timestamp_replace"]:checked').val() == 3)
this.showCustomDate();
else
this.hideCustomDate();
},
this.showCustomDate = function()
{
$('.custom_date').css('visibility', 'visible').fadeTo(100, 1);
},
this.hideCustomDate = function()
{
$('.custom_date').fadeTo(100,0,
function ()
{
$('.custom_date').css('visibility', 'hidden');
});
}
// Change handler for the file input: validate the chosen file and refresh
// the preview and the submit-button state.
this.handleImage = function(e)
{
this.toggleErrors(false);
var target = e.target;
var file = target.files[0];
if (! target.files || target.files.length <= 0) // FileAPI appears to be not present, handle files on backend.
{
if ($('input[name="userfile"]').val().length > 0)
this.checkSubmit();
console.log('FileAPI not detected');
return false;
}
var status = this.checkUpload(file);
this.debug('check upload status ' + status);
if (status)
{
this.updatePreview(file);
}
else {
this.updatePreview(null);
}
this.checkSubmit();
},
// Render the replacement-file preview: thumbnail plus dimensions for
// images, a document icon for other types, an error icon when file is null.
this.updatePreview = function(file)
{
var preview = $('.image_placeholder').last();
$(preview).find('img').remove();
$(preview).removeClass('is_image not_image is_document');
var is_empty = false;
if (file !== null) /// file is null when empty, or error
{
target_is_image = (file.type.indexOf('image') >= 0) ? true : false;
target_type = file.type.trim();
}
else
{
is_empty = true;
}
// If image, load thumbnail and get dimensions.
if (file && target_is_image)
{
var img = new Image();
img.src = window.URL.createObjectURL(file);
// NOTE(review): 'self' is assigned without var/let and leaks to the
// global scope — should probably be 'var self = this;'.
self = this;
img.setAttribute('style', 'max-width:100%; max-height: 100%;');
img.addEventListener("load", function () {
// with formats like svg it can be rough.
var width = img.naturalWidth;
var height = img.naturalHeight;
if (width == 0)
width = img.width;
if (height == 0)
height = img.height;
// $(preview).find('.textlayer').text(img.naturalWidth + ' x ' + img.naturalHeight );
self.updateTextLayer(preview, width + ' x ' + height);
});
$(preview).prepend(img);
$(preview).addClass('is_image');
}
else if(file === null)
{
$(preview).addClass('not_image');
$(preview).find('.dashicons').removeClass().addClass('dashicons dashicons-no');
//$(preview).find('.textlayer').text('');
this.updateTextLayer(preview, '');
this.debug('File is null');
}
else { // not an image
$(preview).addClass('not_image is_document');
$(preview).find('.dashicons').removeClass().addClass('dashicons dashicons-media-document');
//$(preview).find('.textlayer').text(file.name);
this.updateTextLayer(preview, file.name);
this.debug('Not image, media document');
}
// Warn (non-blocking) when the replacement's mime type differs from the
// original's or is not in the allowed list.
if (! is_empty && target_type != source_type)
{
this.debug(target_type + ' not ' + source_type);
var falsePositive = this.checkFalsePositiveType(source_type, target_type);
if (! falsePositive)
this.warningFileType();
}
if (! is_empty && emr_options.allowed_mime.indexOf(target_type) == -1)
{
this.debug(target_type + ' not ' + ' in allowed types ');
var falsePositive = this.checkFalsePositiveType(source_type, target_type);
if (! falsePositive)
this.warningMimeType();
}
// this.debug(emr_options.allowed_mime);
}
// Treat certain mime-type mismatches as equivalent (e.g. the differing
// zip mime types reported by different platforms).
this.checkFalsePositiveType = function(source_type, target_type)
{
// windows (sigh) reports application/zip as application/x-zip-compressed. Or something else, why not.
if (source_type.indexOf('zip') >= 0 && target_type.indexOf('zip') >= 0)
{
this.debug('Finding ' + source_type + ' ' + target_type + ' close enough, false positive');
return true;
}
return false;
}
// replace the text, check if text is there ( or hide ), and fix the layout.
this.updateTextLayer = function (preview, newtext)
{
textlayer = $(preview).find('.textlayer');
textlayer.css('opacity', '0');
if (newtext !== false)
textlayer.text(newtext);
if (textlayer.text() !== '')
{
textlayer.css('opacity', '0.7');
// textlayer.css('margin-left', '-' + (textlayer.width() / 2 ) + 'px');
}
},
// Enable the submit button only when a file has been chosen.
this.checkSubmit = function()
{
var check = ($('input[name="userfile"]').val().length > 0) ? true : false;
if (check)
{
$('input[type="submit"]').prop('disabled', false);
}
else {
$('input[type="submit"]').prop('disabled', true);
}
},
// Hide any visible error/warning banners.
this.toggleErrors = function(toggle)
{
$('.form-error').fadeOut();
$('.form-warning').fadeOut();
},
// Client-side validation of the picked file (presence + size limit).
this.checkUpload = function(fileItem)
{
var maxsize = emr_options.maxfilesize;
if ($('input[name="userfile"]').val().length <= 0)
{
console.info('[EMR] - Upload file value not set in form. Pick a file');
$('input[name="userfile"]').val('');
return false;
}
if (fileItem.size > maxsize)
{
console.info('[EMR] - File too big for uploading - exceeds upload limits');
this.errorFileSize(fileItem);
$('input[name="userfile"]').val('');
return false;
}
return true;
},
this.errorFileSize = function(fileItem)
{
$('.form-error.filesize').find('.fn').text(fileItem.name);
$('.form-error.filesize').fadeIn();
}
this.warningFileType = function(fileItem)
{
$('.form-warning.filetype').fadeIn();
}
this.warningMimeType = function(fileItem)
{
$('.form-warning.mimetype').fadeIn();
}
this.debug = function(message)
{
console.debug(message);
}
// Show the search-and-replace location options only for the
// "replace and search" mode.
this.showReplaceOptions = function(e)
{
$('section.options .location_option').hide();
var replace_option = $('input[name="replace_type"]:checked').val();
if (replace_option == 'replace_and_search')
{
$('section.options .location_option').show();
}
}
// Highlight the drop area while a file is dragged over the form.
this.dragOverArea = function(e)
{
e.preventDefault();
e.stopPropagation();
if ( this.is_dragging)
return;
//this.debug('dragover');
//$('.emr_drop_area').css('border-color', '#83b4d8');
$('.emr_drop_area').addClass('drop_breakout');
this.is_dragging = true;
}
this.dragOutArea = function(e)
{
e.preventDefault();
e.stopPropagation();
// this.debug('dragout');
//$('.emr_drop_area').css('border-color', '#b4b9be');
$('.emr_drop_area').removeClass('drop_breakout');
this.is_dragging = false;
}
// Drop handler: push the dropped files into the hidden file input and
// reuse the normal change-handling path.
this.fileDrop = function (e)
{
var ev = e.originalEvent;
this.dragOutArea(e);
ev.preventDefault();
e.preventDefault();
if (ev.dataTransfer.items) {
// Use DataTransferItemList interface to access the file(s)
document.getElementById('userfile').files = ev.dataTransfer.files;
$('input[name="userfile"]').trigger('change');
}
}
} // emrIf
// Standard bootstrap: create and initialise the controller once the DOM is ready.
jQuery(document).ready(function($)
{
window.enableMediaReplace = new emrIf($);
window.enableMediaReplace.init();
});
// Fallback initializer: if the document-ready hook above did not create the
// global instance (e.g. due to script-ordering issues), create it here.
function emrDelayedInit() {
  console.log('Checking delayed init ');
  if (typeof window.enableMediaReplace == "undefined") {
    console.log(emrIf);
    window.enableMediaReplace = new emrIf(jQuery);
    window.enableMediaReplace.init();
  }
  else if (typeof window.enableMediaReplace !== 'undefined')
  {
    // All fine.
  }
  else { // Nothing yet, try again.
    // Fixed: the retry referenced a misspelled 'emrdelayedInit', which would
    // throw a ReferenceError if this branch were ever reached. (The two
    // checks above cover both cases, so this branch is effectively dead.)
    setTimeout(emrDelayedInit, 3000);
  }
}
setTimeout(emrDelayedInit, 3000);
|
// Babel helper: shallow-merges own enumerable properties of each source
// argument into `target` (Object.assign ponyfill used by the JSX spread below).
function _extends() { _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; }; return _extends.apply(this, arguments); }
/* THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. */
// Turkey flag SVG icon component; the ref is forwarded to the root <svg> element.
import * as React from 'react';
var SvgTurkey = React.forwardRef(function (props, svgRef) {
return React.createElement("svg", _extends({
width: "1em",
height: "1em",
viewBox: "0 0 40 32",
ref: svgRef
}, props), React.createElement("path", {
fill: "#be1e2d",
d: "M0 32h40V0H0z"
}), React.createElement("path", {
fill: "#fff",
d: "M19.187 14.589l.298-1.789 1.186 1.367 1.729-.219-.853 1.57.771 1.654-1.714-.397-1.253 1.241-.205-1.815-1.547-.887z"
}), React.createElement("path", {
fill: "#fff",
d: "M17.816 9.76a6.635 6.635 0 0 0-2.333-.42c-3.687 0-6.677 2.998-6.677 6.697s2.989 6.697 6.677 6.697c.821 0 1.607-.149 2.333-.42a7.965 7.965 0 0 1-5.004 1.76c-4.425 0-8.012-3.598-8.012-8.036s3.587-8.036 8.012-8.036c1.893 0 3.633.659 5.004 1.76z"
}), React.createElement("path", {
fill: "#2d2926",
opacity: 0.15,
d: "M38 30H2V2h36v28zm2-30H0v32h40V0z"
}));
});
import TaskView from '../views/taskView';
import TaskModel from '../models/taskModel';
import showAlert from '../helpers/showAlert';
import getElement from '../helpers/getElement';
import dragHandler from '../helpers/dragHandler';
import getElementAll from '../helpers/getElementAll';
import refactorIndex from '../helpers/refactorIndex';
/**
 * Builds the to-do list controller: wires DOM event handlers (add, toggle,
 * clear-completed, inline edit, delete, drag) to the TaskModel store and
 * renders the current list through TaskView.
 */
export default () => ({
  taskView: new TaskView(),
  // Buffer holding the text of an in-progress inline edit.
  newDescription: '',

  /** Adds a new task on form submit; rejects blank or duplicate descriptions. */
  handleAddTask() {
    getElement('.task-form').addEventListener('submit', (event) => {
      event.preventDefault();
      const description = getElement('.descInput').value;
      const taskDescriptions = TaskModel.tasks.map((task) => task.description);
      if (!description) {
        showAlert("Task description can't be blank!", 'danger');
      } else if (description === taskDescriptions.find((t) => t === String(description))) {
        showAlert('Duplicate task descriptions not accepted!', 'danger');
      } else {
        const task = new TaskModel(description);
        TaskModel.addTask(task);
        getElement('.descInput').value = '';
        window.location.reload();
      }
    });
  },

  /** Toggles a task's completed flag when its checkbox changes. */
  handletoggleTaskStatus() {
    getElementAll('.checkbox').forEach((checkbox) => {
      checkbox.addEventListener('change', () => {
        // data-id is 1-based; the tasks array is 0-based.
        const task = TaskModel.tasks[Number(checkbox.getAttribute('data-id')) - 1];
        TaskModel.toggleTaskStatus(task);
        window.location.reload();
      });
    });
  },

  /** Removes all completed tasks and re-indexes the remainder. */
  handleClearCompleted() {
    getElement('.clear-button').addEventListener('click', () => {
      if (TaskModel.tasks.filter((task) => task.complete === true).length > 0) {
        const notComplete = TaskModel.tasks.filter((task) => task.complete === false);
        refactorIndex(notComplete);
        TaskModel.tasks = notComplete;
        TaskModel.refreshStorage();
        window.location.reload();
      } else {
        // Fixed typo in the user-facing message: "taks" -> "tasks".
        showAlert('No complete tasks at the moment', 'danger');
      }
    });
  },

  /** Saves an inline description edit when the editable loses focus. */
  handleEditTask() {
    const editables = getElementAll('.editable');
    editables.forEach((editable) => {
      editable.addEventListener('input', (event) => {
        this.newDescription = event.target.textContent;
      });
    });
    editables.forEach((editable) => {
      editable.addEventListener('focusout', (event) => {
        if (this.newDescription) {
          const task = TaskModel.tasks[Number(event.target.getAttribute('data-id')) - 1];
          TaskModel.editTask(task, this.newDescription);
          this.newDescription = '';
          window.location.reload();
        }
      });
    });
  },

  /** Deletes the clicked task and re-indexes the remaining ones. */
  handleDeleteTask() {
    document.querySelectorAll('.delete').forEach((delBtn) => {
      delBtn.addEventListener('click', (event) => {
        const tasks = [...TaskModel.tasks];
        tasks.splice(Number(event.target.getAttribute('data-id')) - 1, 1);
        refactorIndex(tasks);
        TaskModel.tasks = tasks;
        TaskModel.refreshStorage();
        window.location.reload();
      });
    });
  },

  /** Initial render plus registration of every handler. */
  init() {
    if (localStorage.getItem('tasks') === null) {
      TaskModel.refreshStorage();
    }
    this.taskView.showTasks(TaskModel.tasks);
    dragHandler(TaskModel.tasks);
    this.handleAddTask();
    this.handletoggleTaskStatus();
    this.handleClearCompleted();
    this.handleEditTask();
    this.handleDeleteTask();
  },
});
<gh_stars>1-10
package com.ielson.djiBote;
import android.content.Context;
import android.util.Log;
import android.widget.Toast;
import org.ros.message.MessageListener;
import org.ros.namespace.GraphName;
import org.ros.node.AbstractNodeMain;
import org.ros.node.ConnectedNode;
import org.ros.node.topic.Subscriber;
import dji.common.error.DJIError;
import dji.common.flightcontroller.virtualstick.FlightControlData;
import dji.common.util.CommonCallbacks;
import geometry_msgs.Twist;
import static com.ielson.djiBote.MainActivity.mFlightController;
/**
 * ROS node that subscribes to /cmd_vel Twist messages and forwards them to
 * the DJI flight controller as virtual-stick control data.
 */
public class CmdVelListener extends AbstractNodeMain {
private Context context;
public CmdVelListener(Context context) {
this.context = context;
}
@Override
public GraphName getDefaultNodeName() {
return GraphName.of("djiBote/cmdVelListener");
}
@Override
public void onStart(ConnectedNode connectedNode) {
Log.e("CMDVEL", "on Start");
// Subscribe to the standard velocity-command topic.
Subscriber<geometry_msgs.Twist> subscriber = connectedNode.newSubscriber("/cmd_vel", Twist._TYPE);
subscriber.addMessageListener(new MessageListener<Twist>() {
@Override
public void onNewMessage(Twist twist) {
Log.e("CMDVEL", "new msg: " + twist.getAngular().getY() + " " + twist.getAngular().getX() + " " +twist.getAngular().getZ() + " " +twist.getLinear().getZ());
// Forward only when the controller exists and virtual-stick mode is fully enabled.
if (mFlightController != null) {
Log.e("CMDVEL", "sending virtual stick control data");
if (MainActivity.state == MainActivity.State.VIRTUALSTICKCOMPLETE) {
mFlightController.sendVirtualStickFlightControlData(
new FlightControlData(
(float) twist.getLinear().getY(), (float) twist.getLinear().getX(), (float) twist.getAngular().getZ(), (float) twist.getLinear().getZ()
), new CommonCallbacks.CompletionCallback() {
@Override
public void onResult(DJIError djiError) {
if (djiError != null) {
Log.e("CMDVEL", "djiError: " + djiError.getDescription());
} else {
Log.e("CMDVEL", "cmd sent");
}
}
}
);
}
}
}
});
}
}
|
<reponame>JeromeParadis/django-dynasite<filename>setup.py
from distutils.core import setup
# Packaging script for the django-dynasite distribution.
# NOTE(review): distutils is removed in Python 3.12 — consider migrating to setuptools.
setup(
name="django-dynasite",
# Version is read from the package's own __version__ attribute.
version=__import__("dynasite").__version__,
description="Tools to dynamically manage multiple Django Web sites in a single app the way you need to.",
#long_description=open("docs/usage.txt").read(),
author="<NAME>",
author_email="<EMAIL>",
url="http://github.com/JeromeParadis/django-dynasite",
license='LICENSE.txt',
packages=[
"dynasite",
],
package_dir={"dynasite": "dynasite"},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Framework :: Django",
],
install_requires=[
"Django >= 1.4.1",
],
package_data={'dynasite': [] },
)
|
<?php
namespace Metaregistrar\EPP;
/**
 * EPP domain-create request. Holds the domain name and registrant
 * information and is responsible for sending the create command to the
 * registry (sendRequest is not yet implemented).
 */
class eppCreateRequest extends eppRequest {
// Fully qualified domain name to create.
private $domainName;
// Registrant contact details for the new domain.
private $registrantInfo;
function __construct() {
parent::__construct();
}
function __destruct() {
parent::__destruct();
}
/** Sets the domain name to be created. */
public function setDomainName($domainName) {
$this->domainName = $domainName;
}
/** Sets the registrant information for the new domain. */
public function setRegistrantInfo($registrantInfo) {
$this->registrantInfo = $registrantInfo;
}
/** Builds and transmits the EPP create request. */
public function sendRequest() {
// Code to send the EPP request to the domain registrar's server
// This method should use the domain name and registrant information to construct and send the EPP request
}
}
import React from 'react';
import { convertToRaw, Editor, EditorState, Modifier, RichUtils } from 'draft-js';
class DocumentEditor extends React.Component {
constructor(props) {
super(props);
this.state = { editorState: EditorState.createEmpty() };
this.onChange = (editorState) => this.setState({ editorState });
this.handleKeyCommand = this.handleKeyCommand.bind(this);
this.save = this.save.bind(this);
this.print = this.print.bind(this);
}
handleKeyCommand(command, editorState) {
const newState = RichUtils.handleKeyCommand(editorState, command);
if (newState) {
this.onChange(newState);
return true;
}
return false;
}
save() {
const content = this.state.editorState.getCurrentContent();
alert('Document saved : ', JSON.stringify(convertToRaw(content)));
}
print() {
const content = this.state.editorState.getCurrentContent();
const html = convertToHTML(content);
printWindow = window.open('', '', 'height=400,width=600');
printWindow.document.write('<html><head><title>Print Document</title></head>');
printWindow.document.write('<body >');
printWindow.document.write(html);
printWindow.document.write('</body></html>');
printWindow.document.close();
printWindow.focus();
printWindow.print();
printWindow.close();
}
render() {
return (
<div>
<Editor editorState={this.state.editorState} handleKeyCommand={this.handleKeyCommand} onChange={this.onChange} />
<button onClick={this.save}>Save</button>
<button onClick={this.print}>Print</button>
</div>
);
}
}
export default DocumentEditor; |
package com.bv.eidss;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import android.support.v4.app.NavUtils;
import android.support.v4.view.PagerTabStrip;
import android.support.v4.view.ViewPager;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.EditText;
import com.bv.eidss.data.EidssDatabase;
import com.bv.eidss.generated.Species_binding;
import com.bv.eidss.generated.ASSession_binding;
import com.bv.eidss.model.ASSession;
import com.bv.eidss.model.CaseSerializer;
import com.bv.eidss.model.CaseStatus;
import com.bv.eidss.model.CaseType;
import com.bv.eidss.model.ASSession;
import com.bv.eidss.model.interfaces.IFieldChanged;
import com.bv.eidss.model.interfaces.IGet;
import com.bv.eidss.model.interfaces.IToChange;
import com.bv.eidss.model.interfaces.ValidateCode;
import com.bv.eidss.model.interfaces.ValidateResult;
import com.bv.eidss.utils.DateHelpers;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStreamWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Vector;
public class ASSessionActivity extends EidssBaseBlockTimeoutActivity
implements EidssAndroidHelpers.DialogDoneListener,
EidssAndroidHelpers.DialogDoneDateListener,
IGet<Object>
{
private final int DELETE_DIALOG_ID = 1;
private final int SAVE_DIALOG_ID = 2;
public final int CANCEL_DIALOG_ID = 3;
public final int BACK_DIALOG_ID = 4;
private final int SYNCHRONIZE_SAVE_DIALOG_ID = 5;
private final int FILE_SAVE_DIALOG_ID = 6;
public ASSession mCase;
private DialogFragment mReturningWithResult;
//private TabHost mTabHost;
/* private class TabFactory implements TabHost.TabContentFactory {
private final Context mContext;
public TabFactory(Context context) {
mContext = context;
}
public View createTabContent(String tag) {
View v = new View(mContext);
v.setMinimumWidth(0);
v.setMinimumHeight(0);
return v;
}
}*/
/**
 * Pager adapter serving the fixed list of fragments passed at construction
 * time; tab titles come from the ASSessionPage* string resources.
 */
private class MyFragmentPagerAdapter extends FragmentPagerAdapter {
private List<Fragment> fragments;
public MyFragmentPagerAdapter(FragmentManager fm, List<Fragment> fragments) {
super(fm);
this.fragments = fragments;
}
@Override
public Fragment getItem(int position) {
//if (position == 2)
// return SpeciesFragment.newInstance();
//return VetCaseFragment.newInstance(position);
return this.fragments.get(position);
}
@Override
public int getCount() {
return this.fragments.size();
}
@Override
public CharSequence getPageTitle(int position) {
switch (position) {
case 0:
return getResources().getString(R.string.ASSessionPage0);
case 1:
return getResources().getString(R.string.ASSessionPage1);
case 2:
return getResources().getString(R.string.ASSessionPage2);
case 3:
return getResources().getString(R.string.ASSessionPage3);
case 4:
return getResources().getString(R.string.ASSessionPage4);
default:
return "";
}
}
@Override
public int getItemPosition(Object item)
{
// Force fragments flagged "ToChange" to be recreated by the pager
// (POSITION_NONE tells the ViewPager to rebuild the page).
if (item instanceof ASSessionFragment && ((ASSessionFragment) item).ToChange)
{
((ASSessionFragment) item).ToChange = false;
return POSITION_NONE;
}
if (item instanceof IToChange && ((IToChange) item).ToChange())
{
((IToChange) item).setToChange(false);
return POSITION_NONE;
}
return super.getItemPosition(item);
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.case_main_layout);
View filter = findViewById(R.id.spinner_list_filter);
if (filter != null)
filter.setVisibility(View.GONE);
final Intent intent = getIntent();
if (savedInstanceState != null)
mCase = savedInstanceState.getParcelable("case");
else {
long id = intent.getLongExtra(getResources().getString(R.string.EXTRA_ID_ASSESSION), 0L);
// load from database full case - with all its lists
if (id != 0) {
EidssDatabase db = new EidssDatabase(this);
List<ASSession> ret = db.ASSessionSelect(id);
db.close();
if (ret.size() == 1){
mCase = ret.get(0);
} else{
mCase = null;
}
}
}
if (mCase == null) {
mCase = ASSession.CreateNew();
}
setTitle("");
Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setIcon(R.drawable.eidss_ic_as_big);
getSupportActionBar().setDisplayShowHomeEnabled(true);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
// Intialise ViewPager
this.initializeViewPager();
/* // Initialise the TabHost
this.initialiseTabHost();
if (savedInstanceState != null) {
mTabHost.setCurrentTabByTag(savedInstanceState.getString("tab")); //set the tab as per the saved state
}*/
initializePagerTabStrip();
// business rules
final Activity _this = this;
mCase.setFieldChangedHandler(new IFieldChanged() {
@Override
public void FieldChanged(String name, Object oldVal, Object newVal) {
switch (name) {
}
}
});
}
protected void onSaveInstanceState(Bundle outState) {
//outState.putString("tab", mTabHost.getCurrentTabTag()); //save the tab selected
outState.putParcelable("case", mCase);
super.onSaveInstanceState(outState);
}
private void initializeViewPager() {
List<Fragment> fragments = new Vector<>();
fragments.add(ASSessionFragment.newInstance(0));
fragments.add(ASSessionFragment.newInstance(1));
fragments.add(ASDiseasesFragment.newInstance());
fragments.add(FarmsFragment.newInstance());
fragments.add(ASSamplesFragment.newInstance());
final ViewPager pager = (ViewPager) super.findViewById(R.id.pager);
pagerAdapter = new MyFragmentPagerAdapter(super.getSupportFragmentManager(), fragments);
pager.setAdapter(pagerAdapter);
//this.pager.setOnPageChangeListener(this);
pager.setOnPageChangeListener(new ViewPager.OnPageChangeListener() {
@Override
public void onPageSelected(int position) {
Log.d("ASSession", "onPageSelected, position = " + position);
}
@Override
public void onPageScrolled(int position, float positionOffset,
int positionOffsetPixels) {
}
@Override
public void onPageScrollStateChanged(int state) {
}
});
}
private MyFragmentPagerAdapter pagerAdapter;
private void initializePagerTabStrip() {
final PagerTabStrip title = (PagerTabStrip) findViewById(R.id.pagerTabStrip);
title.setTabIndicatorColor(getResources().getColor(R.color.CommonBackground));
title.setNonPrimaryAlpha(0.48f);
title.setTextSpacing(4);
title.setMinimumHeight(100);
}
public void ReloadReadOnlyTabs(){
if (!((ASSessionFragment)pagerAdapter.getItem(1)).ToChange || !((IToChange)pagerAdapter.getItem(2)).ToChange()) {
((ASSessionFragment) pagerAdapter.getItem(1)).ToChange = true;
((IToChange) pagerAdapter.getItem(2)).setToChange(true);
pagerAdapter.notifyDataSetChanged();
}
}
@Override
public void onBackPressed() {
if (mCase.getChanged())
EidssAndroidHelpers.AlertOkResultDialog.ShowQuestion(getSupportFragmentManager(), BACK_DIALOG_ID, R.string.ConfirmCancelCase);
else
FinishThis(RESULT_CANCELED);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == getResources().getInteger(R.integer.ACTIVITY_ID_SYNCHRONIZE_CASE)) {
if (resultCode == Activity.RESULT_OK) {
final Intent intent = new Intent(this, ASSessionActivity.class);
intent.putExtra(getResources().getString(R.string.EXTRA_ID_ASSESSION), mCase.getId());
FinishThis(Activity.RESULT_OK);
startActivity(intent);
}
} else if (requestCode == getResources().getInteger(R.integer.FILE_BROWSER_MODE_SAVE)) {
String fullFilename = data.getStringExtra(getResources().getString(R.string.EXTRA_ID_FILENAME));
if (resultCode == Activity.RESULT_OK && !fullFilename.isEmpty()) {
SaveInFile(fullFilename);
}
}
super.onActivityResult(requestCode, resultCode, data);
}
@Override
protected void onResume() {
super.onResume();
if (mReturningWithResult != null) {
// Commit your transactions here.
mReturningWithResult.show(getSupportFragmentManager(), "dialog");
}
// Reset the boolean flag back to false for next time.
mReturningWithResult = null;
}
public void FinishThis(int result) {
if (result == Activity.RESULT_OK) {
getIntent().putExtra(getResources().getString(R.string.EXTRA_ID_ASSESSION), mCase);
}
setResult(result, getIntent());
mCase.setFieldChangedHandler(null);
//finish();
Intent intent = NavUtils.getParentActivityIntent(this);
intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
}
private void Synchronize() {
Intent intent = new Intent(this, SynchronizeCasesActivity.class);
intent.putExtra("Id", mCase.getId());
intent.putExtra("Type", R.integer.SYNCHRONIZATION_TYPE_ASSESSION);
startActivityForResult(intent, getResources().getInteger(R.integer.ACTIVITY_ID_SYNCHRONIZE_CASE));
}
private void SaveCase() {
EidssDatabase db = new EidssDatabase(this);
if (mCase.getId() == 0) {
db.ASSessionInsert(mCase);
} else {
if (mCase.getStatus() != CaseStatus.NEW)
mCase.setStatusChanged();
db.ASSessionUpdate(mCase);
}
db.close();
}
private void DeleteCase() {
EidssDatabase db = new EidssDatabase(this);
Long[] id = new Long[]{mCase.getId()};
db.ASSessionDelete(id);
db.close();
}
public void Save() {
if (ValidateCase()) {
if (mCase.getChanged())
EidssAndroidHelpers.AlertOkResultDialog.ShowQuestion(getSupportFragmentManager(), SAVE_DIALOG_ID, R.string.ConfirmSaveCase);
else
FinishThis(Activity.RESULT_OK);
}
}
public void OffLine() {
if (mCase.getChanged())
EidssAndroidHelpers.AlertOkResultDialog.ShowQuestion(getSupportFragmentManager(), FILE_SAVE_DIALOG_ID, R.string.ConfirmSaveSynchCase);
else
SaveToFile();
}
public void OnLine() {
if (mCase.getChanged())
EidssAndroidHelpers.AlertOkResultDialog.ShowQuestion(getSupportFragmentManager(), SYNCHRONIZE_SAVE_DIALOG_ID, R.string.ConfirmSaveSynchCase);
else
Synchronize();
}
public void Remove() {
int title = mCase.getMonitoringSession() == 0 ? R.string.ConfirmToDeleteNewSession : R.string.ConfirmToDeleteSynSession;
EidssAndroidHelpers.AlertOkResultDialog.ShowQuestion(getSupportFragmentManager(), DELETE_DIALOG_ID, title);
}
public void Home() {
if (mCase.getChanged())
EidssAndroidHelpers.AlertOkResultDialog.ShowQuestion(getSupportFragmentManager(), CANCEL_DIALOG_ID, R.string.ConfirmCancelCase);
else
FinishThis(Activity.RESULT_CANCELED);
}
private Boolean ValidateCase() {
ValidateResult vc = mCase.Validate(EidssDatabase.GetMandatoryFields(), EidssDatabase.GetInvisibleFields());
if (vc.getCode() == ValidateCode.OK)
return true;
switch (vc.getCode()) {
case FieldMandatory:
String errMes = String.format(getResources().getString(R.string.FieldMandatory), getResources().getString(vc.getMandatory()));
EidssAndroidHelpers.AlertOkDialog.Warning(getSupportFragmentManager(), errMes);
break;
case FieldMandatoryStr:
String errMess = String.format(getResources().getString(R.string.FieldMandatory), vc.getMandatoryStr());
EidssAndroidHelpers.AlertOkDialog.Warning(getSupportFragmentManager(), errMess);
break;
case DateOfSessionStartCheckSessionEnd:
EidssAndroidHelpers.AlertOkDialog.Warning(getSupportFragmentManager(), R.string.DateOfSessionStartCheckSessionEnd);
break;
default:
break;
}
return false;
}
private void SaveToFile() {
Intent intent = new Intent(this, FileBrowser.class);
int md = getResources().getInteger(R.integer.FILE_BROWSER_MODE_SAVE);
intent.putExtra("mode", md);
intent.putExtra("mask", "Case.eidss");
startActivityForResult(intent, md);
}
public void SaveInFile(String fullFilename) {
EidssDatabase db = new EidssDatabase(this);
try {
long country = db.GisCountry(DeploymentCountry.getDefCountry()).idfsBaseReference;
List<ASSession> ass = new ArrayList<>();
ass.add(mCase);
String content = CaseSerializer.writeXml(null, null, ass, country, true);
File file = new File(fullFilename);
FileOutputStream filecon = new FileOutputStream(file);
OutputStreamWriter writer = new OutputStreamWriter(filecon);
writer.write(content);
writer.close();
filecon.close();
if (mCase.getStatus() == CaseStatus.NEW || mCase.getStatus() == CaseStatus.CHANGED) {
mCase.setStatusUploaded();
db.ASSessionUpdate(mCase);
}
db.close();
mReturningWithResult = EidssAndroidHelpers.AlertOkDialog.Show(R.string.CasesUnloaded);
} catch (Exception e) {
db.close();
mReturningWithResult = EidssAndroidHelpers.AlertOkDialog.Warning(R.string.ErrorCasesUnloaded);
}
}
private Fragment GetFragment(int fragmentPosition) {
FragmentManager fm = getSupportFragmentManager();
return fm.findFragmentByTag(getFragmentTag(R.id.pager, fragmentPosition));
}
private static String getFragmentTag(int viewPagerId, int fragmentPosition) {
return "android:switcher:" + viewPagerId + ":" + fragmentPosition;
}
@Override
public void onDone(int idDialog, boolean isPositive) {
switch (idDialog) {
case CANCEL_DIALOG_ID:
if (isPositive) {
FinishThis(RESULT_CANCELED);
}
break;
case BACK_DIALOG_ID:
if (isPositive) {
FinishThis(RESULT_CANCELED);
}
break;
case DELETE_DIALOG_ID:
if (isPositive) {
DeleteCase();
FinishThis(Activity.RESULT_FIRST_USER + 1);
}
break;
case SAVE_DIALOG_ID:
if (isPositive) {
SaveCase();
FinishThis(Activity.RESULT_OK);
}
break;
case SYNCHRONIZE_SAVE_DIALOG_ID:
if (!isPositive) {
Synchronize();
} else if (ValidateCase()) {
SaveCase();
Synchronize();
}
break;
case FILE_SAVE_DIALOG_ID:
if (!isPositive) {
SaveToFile();
} else if (ValidateCase()) {
SaveCase();
SaveToFile();
}
break;
case R.string.SessionPeriodDoesNotMatchCampaignPeriod:
case R.string.SessionContainsSpeciesAbsentInCampaign:
((ASSessionFragment) GetFragment(0)).CampaignLookupSetSelection();
break;
case R.string.SessionDiseasesDiffersFromCampaignDiseases:
case R.string.CampaignDiseasesListIsBlank:
if (isPositive) {
((ASSessionFragment) GetFragment(0)).SetCampaign();
} else {
((ASSessionFragment) GetFragment(0)).CampaignLookupSetSelection();
}
break;
}
}
@Override
public void onDone(int idDialog, int year, int month, int day) {
switch (idDialog) {
/*place snippet here!*/
case ASSession_binding.datEndDate_DialogID:
mCase.setEndDate(DateHelpers.Date(year, month, day));
DateHelpers.DisplayDate(R.id.datEndDate, this, mCase.getEndDate());
break;
case ASSession_binding.datStartDate_DialogID:
mCase.setStartDate(DateHelpers.Date(year, month, day));
DateHelpers.DisplayDate(R.id.datStartDate, this, mCase.getStartDate());
break;
/*end of place snippet here!*/
}
}
@Override
public Object get() {
return mCase;
}
}
|
<gh_stars>0
#ifndef _SIRIKIATA_IO_UTIL_HH_
#define _SIRIKIATA_IO_UTIL_HH_
#ifndef _WIN32
#include <unistd.h>
#include <sys/errno.h>
#include <sys/stat.h>
#include <fcntl.h>
#endif
#include "../vp8/util/nd_array.hh"
#include "MuxReader.hh"
namespace Sirikata {
class DecoderReader;
class DecoderWriter;
}
namespace IOUtil {
//#ifdef _WIN32
// typedef void* HANDLE_or_fd;
//#else
typedef int HANDLE_or_fd;
//#endif
// Repeatedly calls reader->Read until 'size' bytes have been copied into
// vdata, the reader reports an error, or it returns zero bytes (EOF).
// Returns the number of bytes actually copied, which may be < size.
inline Sirikata::uint32 ReadFull(Sirikata::DecoderReader * reader, void * vdata, Sirikata::uint32 size) {
    using namespace Sirikata;
    unsigned char * data = reinterpret_cast<unsigned char*>(vdata);
    uint32 copied = 0;
    while (copied < size) {
        std::pair<Sirikata::uint32, Sirikata::JpegError> status = reader->Read(data + copied, size - copied);
        copied += status.first;
        // Stop on any error or a zero-byte read (prevents spinning at EOF).
        if (status.second != JpegError::nil() || status.first == 0) {
            return copied;
        }
    }
    return copied;
}
// DecoderReader backed by a raw POSIX file descriptor (file or socket).
// Optionally enforces an upper bound on the total number of bytes read.
class FileReader : public Sirikata::DecoderReader {
    int fp;               // underlying file descriptor
    uint32_t total_read;  // bytes consumed so far through this reader
    uint32_t max_read;    // 0 = unlimited; otherwise hard cap on total_read
    bool is_fd_socket;    // true when fp refers to a socket rather than a file
public:
    FileReader(int ff, int max_read_allowed, bool is_socket) {
        fp = ff;
        this->is_fd_socket = is_socket;
        total_read = 0;
        max_read = max_read_allowed;
    }
    bool is_socket()const {
        return is_fd_socket;
    }
    // Reads up to 'size' bytes, retrying on EINTR.  Requests are clamped so
    // the running total never exceeds max_read; once the cap is reached an
    // errEOF is returned.  Returns (bytes_read, error).
    std::pair<Sirikata::uint32, Sirikata::JpegError> Read(Sirikata::uint8*data, unsigned int size) {
        if (max_read && total_read + size > max_read) {
            size = max_read - total_read;
            if (size == 0) {
                return std::pair<Sirikata::uint32,
                                 Sirikata::JpegError>(0, Sirikata::JpegError::errEOF());
            }
        }
        using namespace Sirikata;
        do {
            signed long nread = read(fp, data, size);
            if (nread <= 0) {
                if (errno == EINTR) {
                    continue;
                }
                // NOTE(review): nread == 0 is EOF, not necessarily a failure;
                // it is reported as "Short read" here — confirm callers treat
                // this error as end-of-stream.
                return std::pair<Sirikata::uint32, JpegError>(0, MakeJpegError("Short read"));
            }
            total_read += nread;
            return std::pair<Sirikata::uint32, JpegError>(nread, JpegError::nil());
        } while(true); // while not EINTR
    }
    // Maximum number of bytes this reader will deliver (0 = unbounded).
    unsigned int bound() const {
        return max_read;
    }
    size_t length() {
        return total_read;
    }
    size_t getsize() {
        return total_read;
    }
    int get_fd() const {
        return fp;
    }
    // Adjusts the running total when bytes were consumed outside this reader
    // (e.g. a header read directly from the fd before handing it over).
    void mark_some_bytes_already_read(uint32_t num_bytes) {
        total_read += num_bytes;
    }
};
// DecoderWriter backed by a raw POSIX file descriptor (file or socket).
// Optionally closes the descriptor when Close() is invoked.
class FileWriter : public Sirikata::DecoderWriter {
    int fp;             // underlying file descriptor (-1 after Close())
    int total_written;  // total bytes successfully written
    bool close_stream;  // whether Close() should close(fp)
    bool is_fd_socket;  // true when fp refers to a socket
public:
    FileWriter(int ff, bool do_close_stream, bool is_fd_socket) {
        this->is_fd_socket = is_fd_socket;
        fp = ff;
        total_written = 0;
        close_stream = do_close_stream;
    }
    bool is_socket() const {
        return is_fd_socket;
    }
    void Close() {
        if (close_stream) {
            // Retry close() interrupted by signals.
            while (close(fp) == -1 && errno == EINTR){}
            // not always useful (eg during SECCOMP)
        }
        fp = -1;
    }
    // Writes all 'size' bytes, retrying on EINTR and on partial writes.
    // Returns (size, nil) on success; on failure returns the byte count
    // written so far together with an error.
    std::pair<Sirikata::uint32, Sirikata::JpegError> Write(const Sirikata::uint8*data, unsigned int size) {
        using namespace Sirikata;
        size_t data_written = 0;
        while (data_written < size) {
            signed long nwritten = write(fp, data + data_written, size - data_written);
            if (nwritten <= 0) {
                if (errno == EINTR) {
                    continue;
                }
                // The size_t -> Sirikata::uint32 cast is safe because sizeof(size) is <= sizeof(Sirikata::uint32)
                return std::pair<Sirikata::uint32, JpegError>(static_cast<Sirikata::uint32>(data_written), JpegError::errShortHuffmanData());
            }
            data_written += nwritten;
        }
        total_written += size;
        return std::pair<Sirikata::uint32, JpegError>(size, JpegError::nil());
    }
    size_t getsize() {
        return total_written;
    }
    int get_fd() const {
        return fp;
    }
};
//SIRIKATA_FUNCTION_EXPORT FileReader * OpenFileOrPipe(const char * filename, int is_pipe, int max_size_read);
//SIRIKATA_FUNCTION_EXPORT FileWriter * OpenWriteFileOrPipe(const char * filename, int is_pipe);
SIRIKATA_FUNCTION_EXPORT FileReader * BindFdToReader(int fd, uint32_t max_size_read, bool is_socket);
SIRIKATA_FUNCTION_EXPORT FileWriter * BindFdToWriter(int fd, bool is_socket);
Sirikata::Array1d<uint8_t, 16> send_and_md5_result(const uint8_t *data,
size_t data_size,
HANDLE_or_fd send_to_subprocess,
HANDLE_or_fd recv_from_subprocess,
size_t *output_size);
// returns the md5sum of the input and tee'd input stores the output in the ResizableByteBuffer
Sirikata::Array1d<uint8_t, 16> transfer_and_md5(Sirikata::Array1d<uint8_t, 2> header,
size_t start_byte,
size_t end_byte,
bool send_header,
int input, HANDLE_or_fd input_tee,
HANDLE_or_fd output, size_t *input_size,
Sirikata::MuxReader::ResizableByteBuffer *stored_outpt,
bool is_socket);
// Handles to a spawned subprocess's standard streams plus its pid.
struct SubprocessConnection {
    HANDLE_or_fd pipe_stdin;   // write end connected to the child's stdin
    HANDLE_or_fd pipe_stdout;  // read end connected to the child's stdout
    HANDLE_or_fd pipe_stderr;  // read end connected to the child's stderr
    int sub_pid;               // child process id
};
SubprocessConnection start_subprocess(int argc, const char **argv, bool pipe_stder, bool stderr_to_nul=false);
}
#endif
|
// https://open.kattis.com/problems/rockpaperscissors
#include <iomanip>
#include <iostream>
#include <vector>
using namespace std;
typedef vector<int> vi;
int main() {
bool first = true;
cout << fixed << setprecision(3);
while (true) {
int n, k;
cin >> n;
if (!n) break;
cin >> k;
vi w(n, 0);
vi l(n, 0);
for (int i = 0; i < k * n * (n - 1) / 2; i++) {
int p1, p2;
string m1, m2;
cin >> p1 >> m1 >> p2 >> m2;
if (m1 == m2) continue;
else if (m1 == "paper" && m2 == "rock") {
w[p1 - 1]++;
l[p2 - 1]++;
} else if (m1 == "rock" && m2 == "paper") {
w[p2 - 1]++;
l[p1 - 1]++;
} else if (m1 == "scissors" && m2 == "paper") {
w[p1 - 1]++;
l[p2 - 1]++;
} else if (m1 == "paper" && m2 == "scissors") {
w[p2 - 1]++;
l[p1 - 1]++;
} else if (m1 == "rock" && m2 == "scissors") {
w[p1 - 1]++;
l[p2 - 1]++;
} else if (m1 == "scissors" && m2 == "rock") {
w[p2 - 1]++;
l[p1 - 1]++;
}
}
if (first) first = false;
else cout << endl;
for (int i = 0; i < n; i++) {
if (!(w[i] + l[i])) cout << "-";
else cout << double(w[i]) / (w[i] + l[i]);
cout << endl;
}
}
}
|
#!/bin/bash
# Scans the Launchpad mirror list in parallel and reports each mirror that
# serves the requested DIST/REPO/ARCH package index.
# Usage: script.sh <arch> <dist> <repo>
# URL of the Launchpad mirror list
MIRROR_LIST=https://launchpad.net/ubuntu/+archivemirrors
# Set to the architecture you're looking for (e.g., amd64, i386, arm64, armhf, armel, powerpc, ...).
# See https://wiki.ubuntu.com/UbuntuDevelopment/PackageArchive#Architectures
ARCH=$1
# Set to the Ubuntu distribution you need (e.g., precise, saucy, trusty, ...)
# See https://wiki.ubuntu.com/DevelopmentCodeNames
DIST=$2
# Set to the repository you're looking for (main, restricted, universe, multiverse)
# See https://help.ubuntu.com/community/Repositories/Ubuntu
REPO=$3
mirrorList=()
# First, we retrieve the Launchpad mirror list, and massage it to obtain a newline-separated list of HTTP mirrors
# NOTE(review): 'ggrep' is GNU grep as installed by Homebrew on macOS; on
# Linux this is usually plain 'grep -P' — confirm availability on the target.
for url in $(curl -s $MIRROR_LIST | ggrep -Po 'http://.*(?=">http</a>)'); do
    mirrorList+=( "$url" )
done
# Probe every mirror concurrently; each subshell prints FOUND on success.
for url in "${mirrorList[@]}"; do
    (
        # If you like some output while the script is running (feel free to comment out the following line)
        echo "Processing $url..."
        # retrieve the header for the URL $url/dists/$DIST/$REPO/binary-$ARCH/; check if status code is of the form 2.. or 3..
        if curl --connect-timeout 1 -m 1 -s --head "$url/dists/$DIST/$REPO/binary-$ARCH/" | head -n 1 | grep -q "HTTP/1.[01] [23]..";
        then
            echo "FOUND: $url"
        fi
    ) &
done
# Block until every background probe has finished.
wait
echo "All done!"
|
<gh_stars>0
# == Schema Information
#
# Table name: steps
#
# id :bigint not null, primary key
# job_id :bigint not null
# name :string(255) not null
# start_at :datetime
# end_at :datetime
# created_at :datetime not null
# updated_at :datetime not null
#
# Indexes
#
# index_steps_on_job_id (job_id)
#
# A single named step belonging to a Job, with optional start/end timestamps.
# Its lifecycle state is tracked via StepTransition rows driven by
# StepStateMachine (presumably Statesman-style — confirm).
class Step < ApplicationRecord
  belongs_to :job
  # Transition history; autosave is off so transitions are persisted by the
  # state machine itself, not implicitly when the Step is saved.
  has_many :transitions, class_name: "StepTransition", autosave: false
  validates_presence_of :name
  # An end time without a corresponding start time would be inconsistent.
  validates_presence_of :start_at, if: Proc.new { |step| step.end_at.present? }
  # Expose the state machine's query/transition API directly on the model.
  delegate :can_transition_to?, :current_state, :history, :last_transition, :transition_to!, :transition_to, :in_state?, to: :state_machine
  def state_machine
    # Memoized per-instance machine bound to this Step's transition records.
    @state_machine ||= StepStateMachine.new(
      self, transition_class: StepTransition, association_name: :transitions
    )
  end
end
|
'use strict';

jest.autoMockOff();

const getProjectDependencies = require('../getProjectDependencies');
const path = require('path');

describe('getProjectDependencies', () => {
  // 'react-native' itself must be filtered out of the reported dependencies.
  it('should return an array of project dependencies', () => {
    jest.setMock(
      path.join(process.cwd(), './package.json'),
      { dependencies: { lodash: '^6.0.0', 'react-native': '^16.0.0' }}
    );
    expect(getProjectDependencies()).toEqual(['lodash']);
  });
  // A package.json with no "dependencies" key must not throw.
  it('should return an empty array when no dependencies set', () => {
    jest.setMock(path.join(process.cwd(), './package.json'), {});
    expect(getProjectDependencies()).toEqual([]);
  });
});
|
#!/system/bin/sh
# cpapk: A command-line APK 'extraction' tool that copies a desired application to /sdcard for Android devices.
# Author: Sativa (https://github.com/suhtiva)
# Source: https://github.com/suhtiva/cpapk/
# Main flow: resolve the package name, locate its base.apk, copy to /sdcard.
# Fix: all variable expansions are now quoted so app names and paths that
# contain spaces or glob characters no longer break word splitting.
if [[ $1 != "" ]]; then
    echo "Attempting to copy app '$1' to /sdcard/$1.apk"
    # Grab package name matching the user's search term
    package="$(pm list packages | grep -i "$1" | awk -F ':' '{print $2}')"
    if [[ $package != "" ]]; then
        echo "Package found: $package"
        # Grab APK path for the package
        # NOTE(review): the '==/base.apk' pattern looks unusual for 'pm path'
        # output ("package:/...base.apk") — confirm on the target Android build.
        apk_file="$(pm path "$package" | awk -F ':' '{print $2}' | grep -i '==/base.apk')"
        if [[ $apk_file != "" ]]; then
            echo "APK file found: $apk_file"
            # Copy APK file to /sdcard
            echo "Copying $apk_file to /sdcard/$1.apk"
            cp "$apk_file" "/sdcard/$1.apk"
            echo "Copy successful"
        else
            echo "APK file could not be located"
        fi
    else
        echo "Could not find package for $1, searching through System apps"
        # Fall back: look for a matching directory under /system/app
        apk_file="$(ls /system/app | grep -i "$1")"
        if [[ $apk_file != "" ]]; then
            echo "System APK file found: $apk_file.apk"
            # Copy APK file to /sdcard
            echo "Copying /system/app/$apk_file/$apk_file.apk to /sdcard/$apk_file.apk"
            cp "/system/app/$apk_file/$apk_file.apk" /sdcard/
            echo "Copy successful"
        else
            echo "System APK file could not be located"
        fi
    fi
    echo "Attempt completed"
else
    echo "usage: cpapk app_name"
fi
import { IExchangeratesapiParams, IExchangeratesapiRates, IExchangeratesapiTimeseriesRates } from "@ittkm/exchangeratesapi";

/** Parameters accepted by the legacy (pre-access-key) exchangeratesapi endpoints. */
export declare type IExchangeratesapiOldParams = IExchangeratesapiParams;

/** Shape of a legacy single-date rates response. */
export interface IExchangeratesapiOldResponse {
    base: string;
    date: string;
    rates: IExchangeratesapiRates;
}

/** Parameters for the legacy /history endpoint (inclusive date range). */
export interface IExchangeratesapiOldHistoryParams {
    /** Range start, formatted YYYY-MM-DD (presumably — confirm with the API docs). */
    start_at: string;
    /** Range end, formatted YYYY-MM-DD (presumably — confirm with the API docs). */
    end_at: string;
    base?: string;
    symbols?: string | string[];
}

/** Shape of a legacy date-range (timeseries) response. */
export interface IExchangeratesapiOldHistoryResponse {
    base: string;
    rates: IExchangeratesapiTimeseriesRates;
}

/**
 * Wrapper exposing the old exchangeratesapi.io surface (latest / historical /
 * history) on top of the newer access-key-based client.
 */
export declare class exchangeratesapiWrapper {
    constructor(ACCESS_KEY: string);
    latest(params?: IExchangeratesapiOldParams): Promise<IExchangeratesapiOldResponse>;
    historical(params: IExchangeratesapiOldParams): Promise<IExchangeratesapiOldResponse>;
    history(params: IExchangeratesapiOldHistoryParams): Promise<IExchangeratesapiOldHistoryResponse>;
}
export default exchangeratesapiWrapper;
|
package com.yohan.espressotest;
import android.content.Context;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Build;
import android.util.Log;
import android.view.View;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
/**
 * Captures a View's drawing cache and writes it to disk as a PNG.
 */
public class ScreenShooter {
    private static final String TAG = ScreenShooter.class.getSimpleName();
    // Display-density cutoffs used to pick the drawing-cache quality.
    private static final float HIGH_DENSITY_THRESHOLD = 2.5f;
    private static final float MEDIUM_DENSITY_THRESHOLD = 1.8f;

    private Context context;

    public ScreenShooter(Context context) {
        this.context = context;
    }

    /**
     * Creates a PNG image on disk with the specified view
     * @param view the view
     * @return a {@link Uri} to the location of the image on disk
     */
    public Uri shoot(View view) {
        if (!view.isDrawingCacheEnabled()) {
            view.setDrawingCacheEnabled(true);
            //noinspection ResourceType
            view.setDrawingCacheQuality(determineQualityOfDrawingCache());
        }
        Bitmap bitmap = view.getDrawingCache();
        Uri location = convertBitmapToPng(bitmap);
        return location;
    }

    /**
     * Writes the bitmap to "test-screenshot.png" in the cache directory.
     * @return the file's Uri, or null when writing fails.
     */
    private Uri convertBitmapToPng(Bitmap bitmap) {
        File screenshot = new File(getCacheDir(), "test-screenshot.png");
        // try-with-resources guarantees the stream is closed even when
        // compress()/flush() throws (previous code leaked the stream then).
        try (FileOutputStream fos = new FileOutputStream(screenshot)) {
            // PNG is lossless, so the quality argument is ignored by the
            // platform; 100 states the intent better than the previous 0.
            bitmap.compress(Bitmap.CompressFormat.PNG, 100, fos);
            fos.flush();
            Log.d(TAG, "location of screenshot: " + screenshot.getAbsolutePath());
            return Uri.fromFile(screenshot);
        } catch (IOException e) {
            Log.e(TAG, "Unable to write bitmap data to file", e);
            return null;
        }
    }

    /**
     * Picks (and creates if needed) a writable cache directory: internal
     * storage on API 23+, external cache otherwise.
     */
    private String getCacheDir() {
        String cacheDir;
        if (Build.VERSION.SDK_INT >= 23) {
            cacheDir = context.getFilesDir().getAbsolutePath();
        }
        else {
            cacheDir = context.getExternalCacheDir().getAbsolutePath();
        }
        new File(cacheDir).mkdirs();
        return cacheDir;
    }

    /** Maps the display density to a drawing-cache quality constant. */
    private int determineQualityOfDrawingCache() {
        float density = context.getResources().getDisplayMetrics().density;
        if (density > HIGH_DENSITY_THRESHOLD) {
            return View.DRAWING_CACHE_QUALITY_HIGH;
        }
        else if (density > MEDIUM_DENSITY_THRESHOLD){
            return View.DRAWING_CACHE_QUALITY_AUTO;
        }
        else {
            return View.DRAWING_CACHE_QUALITY_LOW;
        }
    }
}
|
<gh_stars>1-10
// This file is part of SWGANH which is released under the MIT license.
// See file LICENSE or go to http://swganh.com/LICENSE
#pragma once
#include <cstdint>
#include "swganh/byte_buffer.h"
#include "base_swg_message.h"
namespace swganh {
namespace messages {
// SWG "ParametersMessage" packet: a single uint32 flag field.
struct ParametersMessage : public BaseSwgMessage
{
    uint16_t Opcount() const { return 2; }
    uint32_t Opcode() const { return 0x487652DA; }

    uint32_t parameter_flag; // default: 900 = decimal, 384 = hex, 1110000100 = binary

    // Appends the flag to the wire buffer.
    void OnSerialize(swganh::ByteBuffer& buffer) const
    {
        buffer.write(parameter_flag);
    }

    // Reads the flag back from the wire buffer.
    void OnDeserialize(swganh::ByteBuffer& buffer)
    {
        parameter_flag = buffer.read<uint32_t>();
    }
};
}} // namespace swganh::messages
|
// Calculate the total number of pages
// NOTE(review): divides objPagination.length by objPagination.pageSize —
// presumably 'length' is the total record count; confirm it is not the
// length of the currently loaded page array.
const totalPages = Math.ceil(this.objPagination.length / this.objPagination.pageSize);
// Generate pagination controls
const paginationContainer = document.getElementById('pagination-container');
paginationContainer.innerHTML = ''; // Clear previous pagination controls
// One button per page, labelled 1..totalPages.
for (let i = 1; i <= totalPages; i++) {
    const pageButton = document.createElement('button');
    pageButton.textContent = i;
    pageButton.addEventListener('click', () => {
        // Fetch data for the selected page and update UI
        const offset = (i - 1) * this.objPagination.pageSize;
        // NOTE(review): if fetchDataForPage is asynchronous this passes a
        // Promise to listData rather than the data itself — confirm.
        const nextPageData = fetchDataForPage(offset, this.objPagination.pageSize); // Assume a function fetchDataForPage(offset, limit) to fetch data
        this.dataService.listData(nextPageData);
    });
    paginationContainer.appendChild(pageButton);
}
#!/bin/bash
# Scaffolding migration: wires the CountryOfEstablishmentFromEu page into the
# play-frontend project by appending routes and messages, and splicing
# Arbitrary instances into the test generator traits with awk.
echo ""
echo "Applying migration CountryOfEstablishmentFromEu"
echo "Adding routes to conf/app.routes"
echo "" >> ../conf/app.routes
echo "GET        /:period/countryOfEstablishmentFromEu                        controllers.CountryOfEstablishmentFromEuController.onPageLoad(mode: Mode = NormalMode, period: Period)" >> ../conf/app.routes
echo "POST       /:period/countryOfEstablishmentFromEu                        controllers.CountryOfEstablishmentFromEuController.onSubmit(mode: Mode = NormalMode, period: Period)" >> ../conf/app.routes
echo "GET        /:period/changeCountryOfEstablishmentFromEu                  controllers.CountryOfEstablishmentFromEuController.onPageLoad(mode: Mode = CheckMode, period: Period)" >> ../conf/app.routes
echo "POST       /:period/changeCountryOfEstablishmentFromEu                  controllers.CountryOfEstablishmentFromEuController.onSubmit(mode: Mode = CheckMode, period: Period)" >> ../conf/app.routes
echo "Adding messages to conf.messages"
echo "" >> ../conf/messages.en
echo "countryOfEstablishmentFromEu.title = countryOfEstablishmentFromEu" >> ../conf/messages.en
echo "countryOfEstablishmentFromEu.heading = countryOfEstablishmentFromEu" >> ../conf/messages.en
echo "countryOfEstablishmentFromEu.option1 = Option 1" >> ../conf/messages.en
echo "countryOfEstablishmentFromEu.option2 = Option 2" >> ../conf/messages.en
echo "countryOfEstablishmentFromEu.checkYourAnswersLabel = countryOfEstablishmentFromEu" >> ../conf/messages.en
echo "countryOfEstablishmentFromEu.error.required = Select countryOfEstablishmentFromEu" >> ../conf/messages.en
echo "countryOfEstablishmentFromEu.change.hidden = CountryOfEstablishmentFromEu" >> ../conf/messages.en
# Each awk below prints its injected lines immediately after a trait/val
# marker line, then copies the rest of the file unchanged ('next }1').
echo "Adding to UserAnswersEntryGenerators"
awk '/trait UserAnswersEntryGenerators/ {\
    print;\
    print "";\
    print "  implicit lazy val arbitraryCountryOfEstablishmentFromEuUserAnswersEntry: Arbitrary[(CountryOfEstablishmentFromEuPage.type, JsValue)] =";\
    print "    Arbitrary {";\
    print "      for {";\
    print "        page  <- arbitrary[CountryOfEstablishmentFromEuPage.type]";\
    print "        value <- arbitrary[CountryOfEstablishmentFromEu].map(Json.toJson(_))";\
    print "      } yield (page, value)";\
    print "    }";\
    next }1' ../test/generators/UserAnswersEntryGenerators.scala > tmp && mv tmp ../test/generators/UserAnswersEntryGenerators.scala
echo "Adding to PageGenerators"
awk '/trait PageGenerators/ {\
    print;\
    print "";\
    print "  implicit lazy val arbitraryCountryOfEstablishmentFromEuPage: Arbitrary[CountryOfEstablishmentFromEuPage.type] =";\
    print "    Arbitrary(CountryOfEstablishmentFromEuPage)";\
    next }1' ../test/generators/PageGenerators.scala > tmp && mv tmp ../test/generators/PageGenerators.scala
echo "Adding to ModelGenerators"
awk '/trait ModelGenerators/ {\
    print;\
    print "";\
    print "  implicit lazy val arbitraryCountryOfEstablishmentFromEu: Arbitrary[CountryOfEstablishmentFromEu] =";\
    print "    Arbitrary {";\
    print "      Gen.oneOf(CountryOfEstablishmentFromEu.values.toSeq)";\
    print "    }";\
    next }1' ../test/generators/ModelGenerators.scala > tmp && mv tmp ../test/generators/ModelGenerators.scala
echo "Adding to UserAnswersGenerator"
awk '/val generators/ {\
    print;\
    print "    arbitrary[(CountryOfEstablishmentFromEuPage.type, JsValue)] ::";\
    next }1' ../test/generators/UserAnswersGenerator.scala > tmp && mv tmp ../test/generators/UserAnswersGenerator.scala
echo "Migration CountryOfEstablishmentFromEu completed"
|
<filename>packages/vue/src/components/organisms/SfProductCard/SfProductCard.spec.ts
import { shallowMount } from "@vue/test-utils";
import SfProductCard from "@/components/organisms/SfProductCard/SfProductCard.vue";
const title = "Product A";
const wishlistIconButtonClass = ".sf-product-card__wishlist-icon";
const clickEventName = "click:wishlist";
describe("SfProductCard.vue", () => {
  // Smoke test: the component mounts and is a visible Vue instance when
  // only the required "title" prop is supplied.
  it("renders Product Card", () => {
    const component = shallowMount(SfProductCard, {
      propsData: {
        title
      }
    });
    expect(component.exists()).toBe(true);
    expect(component.isVueInstance()).toBe(true);
    expect(component.isVisible()).toBe(true);
  });
});
describe("SfProductCard.vue: Wish list icon button", () => {
  // Every spec shallow-mounts the card the same way; centralise that here.
  // Extra props are merged over the mandatory `title`, extra mount options
  // (e.g. slots) over the defaults.
  const mountCard = (extraProps = {}, extraOptions = {}) =>
    shallowMount(SfProductCard, {
      propsData: { title, ...extraProps },
      ...extraOptions
    });

  it("renders Product Card", () => {
    const card = mountCard();
    expect(card.exists()).toBe(true);
    expect(card.isVueInstance()).toBe(true);
    expect(card.isVisible()).toBe(true);
  });

  it("has correct CSS class for container", () => {
    expect(mountCard().classes()).toContain("sf-product-card");
  });

  it("has default slot content when no custom content for wish list icon is passed", () => {
    const button = mountCard().find(wishlistIconButtonClass);
    expect(button.find("sficon-stub").exists()).toBe(true);
  });

  it("has default wish list icon when none is passed (isOnWishlist=false)", () => {
    const card = mountCard();
    const icon = card.find(wishlistIconButtonClass).find("sficon-stub");
    // The stub must receive the component's default `wishlistIcon` prop.
    expect(icon.props().icon).toBe(card.props().wishlistIcon);
  });

  it("has default wish list icon when none is passed (isOnWishlist=true)", () => {
    const card = mountCard({ isOnWishlist: true });
    const icon = card.find(wishlistIconButtonClass).find("sficon-stub");
    expect(icon.props().icon).toBe(card.props().isOnWishlistIcon);
  });

  it("has custom wish list icon when SVG path is passed (isOnWishlist=false)", () => {
    const customPath =
      "M6.04545 0L7 1.12025L1.92207 7L7 12.8798L6.04545 14L0 6.99994L6.04545 0Z";
    const card = mountCard({ wishlistIcon: customPath });
    const icon = card.find(wishlistIconButtonClass).find("sficon-stub");
    expect(icon.props().icon).toBe(customPath);
  });

  it("has custom wish list icon when SVG path is passed (isOnWishlist=true)", () => {
    const customPath =
      "M0.954545 14L0 12.8798L5.07793 7L0 1.12024L0.954545 0L7 7.00006L0.954545 14Z";
    const card = mountCard({ isOnWishlistIcon: customPath, isOnWishlist: true });
    const icon = card.find(wishlistIconButtonClass).find("sficon-stub");
    expect(icon.props().icon).toBe(customPath);
  });

  it("has no wish list button when wishlistIcon is false (isOnWishlist=false)", () => {
    const card = mountCard({ wishlistIcon: false });
    expect(card.find(wishlistIconButtonClass).exists()).toBe(false);
  });

  it("has no wish list button when wishlistIcon is false (isOnWishlist=true)", () => {
    const card = mountCard({ wishlistIcon: false, isOnWishlist: true });
    expect(card.find(wishlistIconButtonClass).exists()).toBe(false);
  });

  it("has custom slot content when slot is used", () => {
    const slotText = "Wish List";
    const card = mountCard({}, { slots: { "wishlist-icon": `<b>${slotText}</b>` } });
    const button = card.find(wishlistIconButtonClass);
    // Slot content must replace (not wrap) the default SfIcon.
    expect(button.find("sficon-stub").exists()).toBe(false);
    expect(button.text()).toBe(slotText);
  });

  it("emits click:wishlist event on button click", () => {
    const card = mountCard();
    card.find(wishlistIconButtonClass).trigger("click");
    expect(card.emitted()[clickEventName].length).toBe(1);
  });

  it("emits click:wishlist event with payload=true on button click when isOnWishlist=false", () => {
    const card = mountCard();
    card.find(wishlistIconButtonClass).trigger("click");
    expect(card.emitted()[clickEventName][0][0]).toBe(true);
  });

  it("emits click:wishlist event with payload=false on button click when isOnWishlist=true", () => {
    const card = mountCard({ isOnWishlist: true });
    card.find(wishlistIconButtonClass).trigger("click");
    expect(card.emitted()[clickEventName][0][0]).toBe(false);
  });
});
|
import * as gamify from '../../../src/actions/gamify'
import * as types from '../../../src/actions/types'
import * as faker from 'faker'
describe('gamify action should', () => {
  it('update score should dispatch points in event UPDATE_SCORE', () => {
    // Arrange: a random score and a spy standing in for redux's dispatch.
    const points = faker.random.number(200)
    const dispatchSpy = jest.fn()
    // Act: updateScore returns a thunk; invoke it with the spy.
    gamify.updateScore(points)(dispatchSpy)
    // Assert: the thunk dispatched UPDATE_SCORE carrying the points.
    expect(dispatchSpy).toBeCalledWith({payload: points, type: types.UPDATE_SCORE})
  })
})
// Redis client wiring for this persistence module. Uses the bundled
// red-cli driver and node's legacy `sys` module (a deprecated alias of
// `util` — TODO confirm the target node version still ships it).
var redis_lib = require('./red-cli/lib/redis-client.js'),
    sys = require("sys"),
    undef; // deliberately left unassigned: used below as an `undefined` sentinel
redis_lib.debugMode = false;
// Single shared client for every model mixed in by this module.
var redis = redis_lib.createClient();
exports.debugMode = true;   // toggles debug() logging below
exports.useCache = false;   // NOTE(review): exported but never consulted in this chunk
/**
 * Converts a snake_case string to PascalCase, e.g. "user_name" -> "UserName".
 *
 * @param {String} str snake_case identifier
 * @return {String} the segments capitalised and concatenated with no separator
 */
function ucwords(str) {
    // Fix: the original `for (i = 0; ...)` loop leaked `i` as an implicit
    // global; map/join needs no index variable at all.
    return str.split("_").map(function (part) {
        return part.substring(0, 1).toUpperCase() + part.substring(1).toLowerCase();
    }).join('');
}
// Log `m` through sys.debug, but only while exports.debugMode is switched on.
function debug (m) {
    if (!exports.debugMode) {
        return;
    }
    sys.debug(m);
}
/**
 * Serialises a property value for storage in redis, based on the property's
 * declared type. json/Date/String/Number values become utf-8 Buffers; any
 * other type is reduced to a string, storing an associated object's `id`
 * when one is present.
 *
 * @param properties schema map: attr name -> { type: ... }
 * @param attr attribute name being written
 * @param data value to serialise
 * @return Buffer for the typed cases, String otherwise
 */
function castForDatabase(properties, attr, data) {
    var type = properties[attr].type;
    // Types may be declared as constructors (String, Number, ...) or as
    // plain strings ('json'); normalise to the type's name.
    switch (typeof type == 'function' ? type.name : type) {
        case 'json':
            // Fix: Buffer.from replaces the deprecated/unsafe new Buffer().
            return Buffer.from(JSON.stringify(data), 'utf-8');
        case 'Date':
        case 'String':
        case 'Number':
            // `data == null` covers both null and undefined (the original
            // compared against a module-level `undef` sentinel as well).
            return Buffer.from((data == null ? '' : data).toString(), 'utf-8');
        default:
            // Associations are stored by id; otherwise fall back to string
            // coercion, with '' for falsy values.
            return data && data.id ? data.id.toString() : data ? data.toString() : '';
    }
}
/**
 * Decodes a raw redis value back into the JS type declared for `attr`.
 * Returns undefined when the attribute is not part of the schema.
 */
function castFromDatabase(properties, attr, data) {
    var descriptor = properties[attr];
    if (!descriptor) {
        return;
    }
    var type = descriptor.type;
    // Types may be constructors (Number, Date, ...) or strings ('json').
    var typeName = typeof type === 'function' ? type.name : type;
    if (typeName === 'Date') {
        // Empty string means "no date stored"; new Date(null) keeps the
        // original behaviour (epoch) for that case.
        if (data == '') data = null;
        return new Date(data);
    }
    if (typeName === 'String') {
        return (data || '').toString();
    }
    if (typeName === 'Boolean') {
        return data == 'true' || data == '1';
    }
    if (typeName === 'json') {
        try {
            return JSON.parse(data.toString('utf-8'));
        } catch (e) {
            console.log(data.toString('binary'));
            throw e;
        }
    }
    // 'Number' and any unrecognised type both parse as a base-10 integer.
    return parseInt(data, 10);
}
// Mixes redis-backed persistence into `Model`: class-level helpers
// (create/find/exists/all/allInstances) and instance methods
// (save/updateAttribute/reload/destroy/propertyChanged/toJSON), all driven
// by the `description` metadata object.
exports.mixPersistMethods = function (Model, description) {
    var model_name = description.className,
        model_name_lowercase = model_name.toLowerCase(),
        primary_key = description.primaryKey || 'id',
        table_name = description.tableName,
        properties = description.properties,
        associations = description.associations,
        scopes = description.scopes;
    // Per-model identity map (id -> instance) so repeated finds share objects.
    // NOTE(review): entries are only removed by destroy(); nothing bounds it.
    var cache = {};
    // Expose the shared client/driver on the class, hidden from enumeration.
    Object.defineProperty(Model, 'connection', {
        enumerable: false,
        value: redis
    });
    Object.defineProperty(Model, 'redis', {
        enumerable: false,
        value: redis_lib
    });
    Model.prototype.connection = redis;
    // define primary key: fall back to a numeric `id` when none is declared
    var pk_defined = false;
    for (var i in properties) {
        if (properties[i].primary) {
            pk_defined = true;
        }
    }
    if (!pk_defined) {
        properties['id'] = {type: Number, primary: true};
    }
    // initializer: (re)defines per-attribute accessors on the instance.
    // With paramsOnly set, attributes absent from `params` are left untouched.
    Model.prototype.initialize = function (params, paramsOnly) {
        params = params || {};
        Object.keys(properties).forEach(function (attr) {
            var _attr = '_' + attr,
                attr_was = attr + '_was';
            if (paramsOnly && !params.hasOwnProperty(attr)) {
                return;
            }
            // Hidden property to store current value
            Object.defineProperty(this, _attr, {
                writable: true,
                enumerable: false,
                configurable: true,
                value: params[attr] !== undef ? params[attr] : (this[attr] !== undef ? this[attr] : null)
            });
            // Public setters and getters
            Object.defineProperty(this, attr, {
                get: function () {
                    return this[_attr];
                },
                set: function (value) {
                    this[_attr] = value;
                },
                configurable: true,
                enumerable: true
            });
            // Getter for initial ("was") value, used by propertyChanged()
            Object.defineProperty(this, attr_was, {
                get: function () { return params[attr]; },
                configurable: true,
                enumerable: false
            });
        }.bind(this));
    };
    // Define class methods
    // NOTE(review): stubbed — Model.build currently does nothing.
    Model.build = function () {
    };
    /**
     * Create new object in storage.
     * Allocates the next id via `INCR ids:<model>`, caches the instance and
     * delegates persistence to save(). The callback receives (id, instance).
     */
    Model.create = function (params) {
        var callback = arguments[arguments.length - 1];
        if (arguments.length == 0 || params === callback) {
            params = {};
        }
        if (typeof callback !== 'function') {
            callback = function () {};
        }
        debug("create new " + model_name_lowercase + "");
        var self = new Model;
        redis.incr('ids:' + model_name_lowercase, function (err, id) {
            if (!err) {
                debug("fetched next id for " + model_name_lowercase + ":" + id);
                cache[id] = self;
                self.id = id;
                params.id = id;
                // Maintain timestamps only when the schema declares them.
                if (properties.created_at) self.created_at = new Date;
                if (properties.updated_at) self.updated_at = new Date;
                self.save(params, callback.bind(self, id, self));
            } else {
                // NOTE(review): the callback is never invoked on INCR failure.
                debug('can not fetch next id for ' + model_name_lowercase);
            }
        });
    };
    /**
     * Find object in database
     * @param {Number} id identifier of record
     * @param {Function} callback(err) Function will be called after search
     * it takes two arguments:
     * - error
     * - found object
     * * applies to found object
     */
    Model.findById = Model.find = function (id, callback) {
        if (!id) {
            throw new Error(model_name + '.find(): `id` param required');
        }
        if (typeof callback !== 'function') {
            throw new Error(model_name + '.find(): `callback` param required');
        }
        // check cache
        if (cache[id]) {
            // ok, we got it, sync with database
            cache[id].reload(function () {
                callback.call(this, null, this);
            });
            return;
        }
        // load new object from the redis hash `<model>:<id>`
        redis.hgetall(model_name_lowercase + ':' + id, function (err, hash) {
            var found = false;
            if (!err) {
                var obj = {};
                obj.id = id;
                for (var attr in hash) {
                    found = true;
                    obj[attr] = castFromDatabase(properties, attr, hash[attr]);
                }
                var object = new Model(obj);
                cache[id] = object;
                // NOTE(review): "not found" is reported as err === true
                // (a bare boolean, not an Error object).
                callback.call(found ? object: null, found ? null : true, found ? object : null);
            } else {
                callback.call(null, true);
            }
        });
    };
    /**
     * Checks whether record with given id exists in database
     * @param id - primary id of record
     * @param callback - NOTE(review): documented upstream as (err, exists),
     * but only `exists` is actually passed through; `err` is dropped.
     */
    Model.exists = function (id, callback) {
        redis.exists(model_name_lowercase + ':' + id, function (err, exists) {
            if (typeof callback == 'function') {
                callback(exists);
            }
        });
    };
    // Define instance methods
    /**
     * Checks is property changed based on current property and initial value
     * @param {attr} String - property name
     * @return Boolean
     */
    Model.prototype.propertyChanged = function (attr) {
        return this['_' + attr] !== this[attr + '_was'];
    };
    /**
     * Exports all defined properties to JSON
     * @return JSON string
     */
    Model.prototype.toJSON = function () {
        var data = {};
        Object.keys(properties).forEach(function (attr) {
            data[attr] = this[attr];
        }.bind(this));
        return JSON.stringify(data);
    };
    /**
     * Check whether object is new record (no id assigned yet)
     * @return Boolean
     */
    Model.prototype.isNewRecord = function () {
        return !this.id;
    };
    // Re-reads the backing hash and rebuilds accessors via initialize().
    // Calls back with `true` (as the error) for unsaved records.
    Model.prototype.reload = function (callback) { // TODO test, doc, refactor to do not use `new`
        if (this.isNewRecord()) {
            if (typeof callback == 'function') {
                callback.call(this, true);
            }
            return;
        }
        redis.hgetall(model_name_lowercase + ':' + this.id, function (err, hash) {
            var obj = {};
            for (var attr in hash) {
                obj[attr] = castFromDatabase(properties, attr, hash[attr]);
            }
            this.initialize(obj);
            callback.call(this, err);
        }.bind(this));
    };
    /**
     * Destroy record (delete from persistence)
     * @param callback -- function to call after operation
     * takes two params:
     * - err
     * - succ
     */
    Model.prototype.destroy = function (callback) {
        redis.del(model_name_lowercase + ':' + this.id, function (err, succ) {
            delete cache[this.id];
            // NOTE(review): `delete this` is a no-op in JavaScript — the
            // instance is not (and cannot be) freed here.
            delete this;
            callback(err, succ);
        }.bind(this));
    };
    // Persists dirty attributes; new records are routed through Model.create.
    Model.prototype.save = function (data, callback) {
        var wait = 0, error = false;
        if (typeof data == 'function') {
            callback = data;
            data = {};
        }
        if (callback === undef) {
            callback = function () {};
        }
        if (data === undef) {
            data = {};
        }
        if (this.isNewRecord()) {
            for (var i in data) {
                this[i] = data[i];
            }
            debug('new record, should fetch id first');
            Model.create(this, function (id) {
                callback.call(this, !id);
            });
            return;
        }
        if (properties.hasOwnProperty('updated_at')) this.updated_at = new Date;
        debug('saving ' + model_name_lowercase);
        // Fan out one updateAttribute per set property; the callback fires
        // once the last pending write reports back (wait drops to 0).
        Object.keys(properties).forEach(function (attr) {
            if (this[attr] !== undef || data[attr] !== undef) {
                ++wait;
                if (data[attr] !== undef) {
                    this[attr] = data[attr];
                }
                process.nextTick(function () {
                    this.updateAttribute(attr, this[attr], function (err) {
                        --wait;
                        error = error || err;
                        if (wait === 0) {
                            callback.call(this, error);
                        }
                    });
                }.bind(this));
            }
        }.bind(this));
    };
    // Writes a single attribute with HSET, but only when it actually changed.
    Model.prototype.updateAttribute = function accessor(attr, value, callback) {
        debug(model_name + '[' + this.id + '].updateAttribute(' + attr + ')');
        debug(value);
        this[attr] = value;
        if (typeof callback !== 'function') {
            callback = function () {};
        }
        if (this.propertyChanged(attr)) {
            redis.hset(
                model_name_lowercase + ':' + this.id,
                attr,
                castForDatabase(properties, attr, value),
                function (err) {
                    // Refresh the "was" snapshot so the attribute reads clean.
                    var fix = {};
                    fix[attr] = value;
                    this.initialize(fix, true);
                    callback.call(this, err);
                }.bind(this)
            );
        } else {
            debug('property `' + attr + '` is not modified');
            callback.call(this, false);
        }
    };
    // Lists all persisted ids by scanning keys `<model>:*`.
    // NOTE(review): KEYS is a blocking full-keyspace scan; the commented-out
    // sharded variant below was one attempted alternative.
    Model.all = function (callback) {
        redis.keys(model_name_lowercase + ':*', function (error, ids) {
            for (var i in ids) ids[i] = parseInt(ids[i].toString().split(':')[1], 10);
            callback.call(null, ids);
        });
    };
    // Model.all = function (options, callback) {
    //     if (arguments.length == 1) {
    //         callback = options;
    //         options = {};
    //     }
    //     var page = options.page || 0,
    //         shape_size = options.shape_size || 2;
    //     redis.get('ids:' + model_name_lowercase, function (err, value) {
    //         if (!value) {
    //             value = 0;
    //         }
    //         value = value.toString();
    //         if (value.length > shape_size) {
    //             var mask = value.slice(0, -shape_size);
    //             for (var i = 0; i < shape_size; i++) {
    //                 mask += '?';
    //             }
    //         } else {
    //             var mask = '*';
    //         }
    //         redis.keys(model_name_lowercase + ':' + mask, function (error, ids) {
    //             for (var i in ids) ids[i] = parseInt(ids[i].toString().split(':')[1], 10);
    //             callback.call(null, ids);
    //         });
    //     });
    // };
    // Loads every instance; optionally sorts by `options.order`, which may
    // be a comparator function or a property name.
    Model.allInstances = Model.all_instances = function (options, callback) {
        if (arguments.length == 1) {
            callback = options;
            options = {};
        }
        var result = [];
        Model.all(function (ids) {
            var count = ids ? ids.length : 0;
            if (count > 0) {
                for (var i in ids) {
                    Model.find(ids[i], function () {
                        result.push(this);
                        count -= 1;
                        if (count == 0) {
                            if (options.order) {
                                if (typeof options.order == 'function') {
                                    result.sort(options.order);
                                } else {
                                    // NOTE(review): comparator returns a boolean,
                                    // not -1/0/1 — ordering is engine-dependent.
                                    result.sort(function (a, b) {
                                        return a[options.order] > b[options.order];
                                    });
                                }
                            }
                            callback(result);
                        }
                    });
                }
            } else {
                callback([]);
            }
        });
    };
};
|
/**
 * Merges two individually sorted int arrays into a single sorted array.
 */
public class MergeTwoArrays {

    /**
     * Standard two-pointer merge of two ascending arrays.
     *
     * @param arr1 first sorted input
     * @param arr2 second sorted input
     * @return a new array holding every element of both inputs, in order
     */
    public static int[] mergeArrays(int[] arr1, int[] arr2) {
        int[] merged = new int[arr1.length + arr2.length];
        int i = 0;
        int j = 0;
        int k = 0;
        // Take the smaller head element until one input runs dry.
        while (i < arr1.length && j < arr2.length) {
            merged[k++] = (arr1[i] <= arr2[j]) ? arr1[i++] : arr2[j++];
        }
        // Drain whichever input still has elements left.
        while (i < arr1.length) {
            merged[k++] = arr1[i++];
        }
        while (j < arr2.length) {
            merged[k++] = arr2[j++];
        }
        return merged;
    }

    /** Small demo: prints the merge of two sample arrays. */
    public static void main(String[] args) {
        int[] first = {1, 4, 8};
        int[] second = {2, 3, 7};
        int[] merged = mergeArrays(first, second);
        System.out.println("Merged array : ");
        for (int value : merged) {
            System.out.print(value + " ");
        }
    }
}
#!/bin/sh
# Backup job: tar+gzip ./backup-source into /backup-dest with a timestamped
# filename. Exits on the first failing command (set -e).
set -e

echo "Job started: $(date)"

DATE=$(date +%Y%m%d_%H%M%S)
FILE="/backup-dest/backup-$DATE.tar.gz"

# Fix: quote the expansion so the path survives word splitting / globbing.
tar -zcvf "$FILE" backup-source/

echo "Job finished: $(date)"
# Application routing table.
Rails.application.routes.draw do
  # Landing page.
  root to: "dashboard#index"

  # Authentication routes (registration, sessions, ...) via Devise.
  devise_for :users

  # CRUD resources; :show and (for users) :destroy are deliberately not routed.
  resources :users, except: [:destroy, :show]
  resources :genres, except: [:show]
  resources :authors, except: [:show]
  resources :books, except: [:show] do
    member do
      # PATCH /books/:id/toggle_featured — flips a book's featured flag.
      patch :toggle_featured
    end
  end
end
|
// source repo: wj2061/leetcode
// Given two strings s and t, determine if they are isomorphic.
//
// Two strings are isomorphic if the characters in s can be replaced to get t.
//
// All occurrences of a character must be replaced with another character while preserving the order of characters. No two characters may map to the same character but a character may map to itself.
//
// For example,
// Given "egg", "add", return true.
//
// Given "foo", "bar", return false.
//
// Given "paper", "title", return true.
//
// Note:
// You may assume both s and t have the same length.
/**
 * @param {string} s
 * @param {string} t
 * @return {boolean} true when s and t are isomorphic (a consistent,
 * bijective character mapping turns s into t)
 */
var isIsomorphic = function(s, t) {
    // Different lengths can never map; identical strings trivially do.
    if (s.length !== t.length) {
        return false;
    }
    if (s === t) {
        return true;
    }
    // Track the mapping in both directions so no two characters of s may
    // map onto the same character of t (and vice versa).
    const forward = new Map();
    const backward = new Map();
    for (let i = 0; i < s.length; i++) {
        const a = s.charAt(i);
        const b = t.charAt(i);
        if (forward.has(a)) {
            if (forward.get(a) !== b) return false;
        } else {
            forward.set(a, b);
        }
        if (backward.has(b)) {
            if (backward.get(b) !== a) return false;
        } else {
            backward.set(b, a);
        }
    }
    return true;
};
|
# Admin UI for editing the current site's settings.
class Admin::SettingsController < Admin::BaseController
  # GET: renders the settings form (no data to load here).
  def show
  end

  # PATCH/PUT: applies the submitted settings to Current.site.
  def update
    if Current.site.update site_params
      # NOTE(review): the notice key says "created" for an update action —
      # confirm 'flash.site_is_successfully_created' is the intended message.
      redirect_to admin_settings_path, notice: I18n.t('flash.site_is_successfully_created')
    else
      render 'update_form'
    end
  end

  private

  # Strong parameters: only these site attributes may be mass-assigned.
  def site_params
    params.require(:site).permit(:title, :description, :logo, :icon)
  end
end
|
// gh_stars: 0
import React from 'react';
import {
FaUsb,
FaSketch,
FaUserAlt,
FaRegSun,
FaChartArea,
FaChartLine,
FaBullhorn,
FaInfoCircle,
FaRegEnvelope,
FaStarHalfAlt,
} from 'react-icons/fa';
import Container from './Container';
import Menu from './Menu';
import MenuTitle from './MenuTitle';
import MenuItem from './MenuItem';
// Static admin side navigation: four titled groups (HOME, LEARNING, MANAGE,
// CONFIGURE) of icon + label menu items. Purely presentational — no state
// and no click handlers are wired up in this component.
// NOTE(review): the rendered label "Analitycs" is misspelled ("Analytics"?);
// left unchanged here because it is user-visible runtime text.
function SideMenu() {
  return (
    <Container>
      <Menu>
        <MenuTitle style={{ color: '#42b549' }}>HOME</MenuTitle>
      </Menu>
      <Menu>
        <MenuTitle>LEARNING</MenuTitle>
        <MenuItem>
          <FaSketch size={20} style={{ margin: '0px 10px 0px 0' }} /> Courses
        </MenuItem>
        <MenuItem>
          <FaUsb size={20} style={{ margin: '0px 10px 0px 0' }} /> Learning
          plans
        </MenuItem>
      </Menu>
      <Menu>
        <MenuTitle>MANAGE</MenuTitle>
        <MenuItem>
          <FaUserAlt size={20} style={{ margin: '0px 10px 0px 0' }} /> User
        </MenuItem>
        <MenuItem>
          <FaRegSun size={20} style={{ margin: '0px 10px 0px 0' }} /> Skills
        </MenuItem>
        <MenuItem>
          <FaChartArea size={20} style={{ margin: '0px 10px 0px 0' }} /> Reports
        </MenuItem>
        <MenuItem>
          <FaChartLine size={20} style={{ margin: '0px 10px 0px 0' }} />{' '}
          Analitycs
        </MenuItem>
        <MenuItem>
          <FaBullhorn size={20} style={{ margin: '0px 10px 0px 0' }} />{' '}
          Announcements
        </MenuItem>
      </Menu>
      <Menu>
        <MenuTitle>CONFIGURE</MenuTitle>
        <MenuItem>
          <FaStarHalfAlt size={20} style={{ margin: '0px 10px 0px 0' }} />{' '}
          Points
        </MenuItem>
        <MenuItem>
          <FaRegSun size={20} style={{ margin: '0px 10px 0px 0' }} /> Rewards
        </MenuItem>
        <MenuItem>
          <FaRegEnvelope size={20} style={{ margin: '0px 10px 0px 0' }} /> Email
          templates
        </MenuItem>
        <MenuItem>
          <FaInfoCircle size={20} style={{ margin: '0px 10px 0px 0' }} />{' '}
          Company info
        </MenuItem>
        <MenuItem>
          <FaUserAlt size={20} style={{ margin: '0px 10px 0px 0' }} /> Billing
        </MenuItem>
      </Menu>
    </Container>
  );
}

export default SideMenu;
|
package com.ride.myride.roomDB;
import androidx.room.Entity;
import androidx.room.PrimaryKey;
import java.util.Date;
/**
 * Room entity recording a recently used place (name + timestamp).
 *
 * NOTE(review): {@code id} is the primary key but is never assigned in the
 * constructor and the annotation lacks {@code autoGenerate = true}, so every
 * insert would carry id 0 — confirm whether auto-generation was intended.
 * NOTE(review): Room cannot persist {@code java.util.Date} without a
 * registered {@code @TypeConverter}; verify one exists in the database class.
 */
@Entity
public class RecentPlacesEntity {
    // Primary key; see class-level note about auto-generation.
    @PrimaryKey
    private int id;
    // Display name of the place.
    private String name;
    // When the place was used/recorded.
    private Date date;

    public RecentPlacesEntity(String name, Date date) {
        this.name = name;
        this.date = date;
    }

    public Date getDate() {
        return date;
    }

    public void setDate(Date date) {
        this.date = date;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.