text stringlengths 1 1.05M |
|---|
helm repo add argo https://argoproj.github.io/argo-helm
helm install argocd argo/argo-cd |
'use strict';
// Babel-transpiled module: marks this CommonJS module as an ES module so that
// `import MarkdownEditor from ...` interop resolves the `default` export.
Object.defineProperty(exports, "__esModule", {
value: true
});
// Babel helper: Object.assign ponyfill — shallow-merges own enumerable props.
var _extends = Object.assign || function (target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i]; for (var key in source) { if (Object.prototype.hasOwnProperty.call(source, key)) { target[key] = source[key]; } } } return target; };
// Babel helper: installs prototype/static members with class-like descriptors.
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _propTypes = require('prop-types');
var _propTypes2 = _interopRequireDefault(_propTypes);
var _Editor = require('../components/Editor');
var _Editor2 = _interopRequireDefault(_Editor);
var _MarkdownPreview = require('../components/MarkdownPreview');
var _MarkdownPreview2 = _interopRequireDefault(_MarkdownPreview);
// Babel helper: wraps CommonJS exports so `.default` access works uniformly.
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
// Babel helper: throws when a transpiled class constructor is called without `new`.
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
// Babel helper: returns the super() call result (if object/function) or `this`.
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
// Babel helper: wires prototype chain and static inheritance for subclassing.
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; } /**
*
* @authors ZiQiangWang
* @email <EMAIL>
* @date 2017-07-11 16:23:02
*/
// MarkdownEditor: side-by-side CodeMirror editor and markdown preview with
// two-way scroll synchronization. The `owner` field records which pane the
// mouse last entered, so only that pane drives the other one's scrolling.
// (Transpiled from an ES2015 React class; behavior lives in the arrow-function
// fields assigned in the constructor.)
var MarkdownEditor = function (_Component) {
_inherits(MarkdownEditor, _Component);
function MarkdownEditor(props) {
_classCallCheck(this, MarkdownEditor);
var _this = _possibleConstructorReturn(this, (MarkdownEditor.__proto__ || Object.getPrototypeOf(MarkdownEditor)).call(this, props));
// Editor change handler: mirrors the CodeMirror content into state (which
// feeds the preview) and notifies the owner via the onArticleChange prop.
_this.onMarkdownChange = function (cm, change) {
_this.setState(_extends({}, _this.state, {
markdownSrc: cm.getValue()
}));
_this.props.onArticleChange(cm.getValue());
// Origin 'setValue' means content was replaced programmatically; clear the
// undo history so users cannot undo past that replacement.
if (change.origin === 'setValue') {
cm.clearHistory();
}
};
// Editor scroll handler: when the editor owns the scroll, locate the preview
// element tagged with the matching line-number attribute and align to it.
_this.onEditorScroll = function (cm) {
if (_this.owner === 'editor') {
var scrollInfo = _this.editor.getScrollInfo();
_this.editorLine = _this.editor.lineAtHeight(scrollInfo.top, 'local');
var previewPos = _this.preview.querySelector('[line-number="' + (_this.editorLine + 1) + '"]');
if (previewPos != null) {
_this.preview.scrollTop = previewPos.offsetTop - 10;
}
}
};
// Preview scroll handler: when the preview owns the scroll, find the last
// '.line-number' element above the scroll position and scroll the editor to
// the corresponding source line.
_this.onPreviewScroll = function (e) {
if (_this.owner === 'preview') {
var lineNumbers = _this.preview.getElementsByClassName('line-number');
var line = 0;
/* eslint-disable no-restricted-syntax */
// Transpiled for...of loop over the live HTMLCollection (with Babel's
// iterator cleanup/error bookkeeping).
var _iteratorNormalCompletion = true;
var _didIteratorError = false;
var _iteratorError = undefined;
try {
for (var _iterator = lineNumbers[Symbol.iterator](), _step; !(_iteratorNormalCompletion = (_step = _iterator.next()).done); _iteratorNormalCompletion = true) {
var ele = _step.value;
if (_this.preview.scrollTop > ele.offsetTop) {
line = ele.getAttribute('line-number');
}
}
} catch (err) {
_didIteratorError = true;
_iteratorError = err;
} finally {
try {
if (!_iteratorNormalCompletion && _iterator.return) {
_iterator.return();
}
} finally {
if (_didIteratorError) {
throw _iteratorError;
}
}
}
// NOTE: `line` is a string attribute value here; `line - 1` coerces it to a
// number before parseInt truncates it.
var height = _this.editor.heightAtLine(parseInt(line - 1, 10), 'local');
_this.editor.scrollTo(null, height);
}
};
// Mouse-enter handlers: record which pane currently drives scroll sync.
_this.previewOwner = function () {
_this.owner = 'preview';
};
_this.editorOwner = function () {
_this.owner = 'editor';
};
_this.state = {
markdownSrc: ''
};
_this.owner = 'editor';
_this.editorLine = 0;
return _this;
}
_createClass(MarkdownEditor, [{
key: 'componentDidMount',
// Grabs the underlying editor/preview instances via legacy string refs once
// the children have mounted; the scroll handlers use them directly.
value: function componentDidMount() {
this.editor = this.refs.editor.editorInstance();
this.preview = this.refs.preview.previewInstance();
}
/* eslint-disable no-unused-vars */
}, {
key: 'render',
value: function render() {
var _props = this.props,
height = _props.height,
width = _props.width,
value = _props.value,
showEditor = _props.showEditor,
showEditorNav = _props.showEditorNav,
showPreview = _props.showPreview,
showOrder = _props.showOrder,
markBtns = _props.markBtns,
registMarkBtns = _props.registMarkBtns,
markedOptions = _props.markedOptions,
codemirrorOptions = _props.codemirrorOptions;
// showOrder flips the flex direction, i.e. editor-left vs editor-right.
return _react2.default.createElement(
'div',
{
className: 'markdown-editor',
style: { flexDirection: showOrder ? 'row' : 'row-reverse', height: height }
},
_react2.default.createElement(_Editor2.default, {
ref: 'editor',
show: showEditor,
showNav: showEditorNav,
options: codemirrorOptions,
markBtns: this.props.markBtns,
registMarkBtns: this.props.registMarkBtns,
value: value,
onChange: this.onMarkdownChange,
onMouseEnter: this.editorOwner,
onScroll: this.onEditorScroll
}),
_react2.default.createElement('span', { className: 'split' }),
_react2.default.createElement(_MarkdownPreview2.default, {
ref: 'preview',
show: showPreview,
source: this.state.markdownSrc,
onMouseEnter: this.previewOwner,
onScroll: this.onPreviewScroll,
options: markedOptions
})
);
}
}]);
return MarkdownEditor;
}(_react.Component);
// Default prop values applied when the owner omits a prop.
MarkdownEditor.defaultProps = {
value: '',
height: '400px',
width: '100%',
showEditor: true,
showEditorNav: true,
showPreview: true,
showOrder: true,
markBtns: ['*'],
registMarkBtns: {},
markedOptions: {},
codemirrorOptions: {}
};
// Runtime prop validation for the component's public API.
MarkdownEditor.propTypes = {
value: _propTypes2.default.string,
height: _propTypes2.default.string,
width: _propTypes2.default.string,
showEditor: _propTypes2.default.bool,
showEditorNav: _propTypes2.default.bool,
showPreview: _propTypes2.default.bool,
showOrder: _propTypes2.default.bool,
markedOptions: _propTypes2.default.object,
codemirrorOptions: _propTypes2.default.object,
markBtns: _propTypes2.default.array,
registMarkBtns: _propTypes2.default.object,
/* eslint-disable react/require-default-props */
onArticleChange: _propTypes2.default.func
};
exports.default = MarkdownEditor; |
#!/bin/bash
# SLURM batch script: run param_stats.py for the GRQA river-quality processing
# pipeline on one node (1 task, 64 GB memory, 1 hour wall-time limit).
#SBATCH -p main
#SBATCH -J param_stats
#SBATCH -N 1
#SBATCH --ntasks-per-node=1
#SBATCH -t 01:00:00
#SBATCH --mem=64G
# Work from the GRQA processing scripts directory.
cd /gpfs/space/home/holgerv/gis_holgerv/river_quality/scripts/grqa_processing
# Reset the module environment and load Python 3.7.
module purge
module load python-3.7.1
# Activate the project conda environment, then invoke its interpreter
# explicitly so the right Python runs even if activation changes PATH oddly.
source activate river_quality
~/.conda/envs/river_quality/bin/python param_stats.py
|
import React, { useState, useEffect } from 'react';
import axios from 'axios';
const ProductSearch = () => {
const [products, setProducts] = useState([]);
const [searchTerm, setSearchTerm] = useState("");
const [error, setError] = useState(null);
const handleChange = (event) => {
setSearchTerm(event.target.value);
};
useEffect(() => {
const fetchProducts = async () => {
try {
const response = await axios.get("https://my-api-endpoint/products?search=${searchTerm}");
setProducts(response.data.products);
} catch (err) {
setError(err);
}
};
if (searchTerm) {
fetchProducts();
}
}, [searchTerm]);
return (
<div>
<input type="text" onChange={handleChange} value={searchTerm} />
{products.map((product) => (
<div key={product.id}>{product.title}</div>
))}
{error && <div>{error.message}</div>}
</div>
);
};
export default ProductSearch; |
<gh_stars>0
import React, { Component, PropTypes } from 'react'
import { connect } from 'react-redux'
import { browserHistory } from 'react-router'
import NavBar from '../components/NavBar'
class App extends Component {
render() {
const { children } = this.props
return (
<div className="app-wrapper">
<NavBar />
<div className="container">
{children}
</div>
</div>
)
}
}
App.propTypes = {
children: PropTypes.node
}
export default connect()(App)
|
package com.partyrgame.authservice.controller;
import java.util.Map;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.partyrgame.authservice.service.AuthService;
import com.partyrgame.userservice.model.PartyrUser;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import lombok.extern.slf4j.Slf4j;
@Slf4j
@RestController
@RequestMapping(value = "/api")
public class AuthController {
@Autowired
AuthService authService;
/**
* googleSignIn.
*/
@PostMapping(value = "/google-authenticate")
public ResponseEntity<Boolean> googleSignIn(@RequestBody Map<String, String> body, HttpServletResponse res)
throws Exception {
try {
String idToken = body.get("idToken");
PartyrUser user = authService.googleSignIn(idToken);
log.info(user.toString()); // TODO: change this method return back to int or bool!!
if (user != null) {
Cookie authCookie = new Cookie("AUTH_ID_TOKEN", idToken);
authCookie.setPath("/");
authCookie.setHttpOnly(true);
Cookie loggedInCookie = new Cookie("LOGGED_IN", "TRUE");
loggedInCookie.setHttpOnly(true);
loggedInCookie.setPath("/");
res.addCookie(authCookie);
res.addCookie(loggedInCookie);
return ResponseEntity.ok(true);
}
} catch (Exception e) {
log.error(e.getMessage());
}
return new ResponseEntity<>(false, HttpStatus.UNAUTHORIZED);
}
/**
* checkAuthToken.
*/
@GetMapping(value = "check-auth")
public ResponseEntity<Boolean> checkAuthToken(HttpServletRequest req) throws Exception {
Cookie[] cookies = req.getCookies();
Boolean authorized = false;
try {
for (Cookie cookie : cookies)
if (cookie.getName().equalsIgnoreCase("AUTH_ID_TOKEN"))
authorized = authService.checkAuthToken(cookie.getValue());
} catch (Exception e) {
log.error("Error getting id token");
}
return ResponseEntity.ok(authorized);
}
} |
# Initialize language version managers (rbenv/plenv/pyenv/...) only when installed.
if which rbenv > /dev/null; then eval "$(rbenv init -)"; fi
if which plenv > /dev/null; then eval "$(plenv init -)"; fi
if which pyenv > /dev/null; then eval "$(pyenv init --path)"; fi
if which nodenv > /dev/null; then eval "$(nodenv init -)"; fi
if which goenv > /dev/null; then eval "$(goenv init -)"; fi
if which jenv > /dev/null; then eval "$(jenv init -)"; fi
# Shell helpers: per-directory environments, smart cd, prompt.
if which direnv > /dev/null; then eval "$(direnv hook zsh)"; fi
if which zoxide > /dev/null; then eval "$(zoxide init zsh)"; fi
if which starship > /dev/null; then eval "$(starship init zsh)"; fi
# Source optional Homebrew-installed zsh plugins when readable.
if [ -r '/usr/local/opt/zsh-navigation-tools/share/zsh-navigation-tools/zsh-navigation-tools.plugin.zsh' ]; then source '/usr/local/opt/zsh-navigation-tools/share/zsh-navigation-tools/zsh-navigation-tools.plugin.zsh'; fi
if [ -r '/usr/local/opt/fzf/shell/completion.zsh' ]; then source '/usr/local/opt/fzf/shell/completion.zsh'; fi
if [ -r '/usr/local/opt/fzf/shell/key-bindings.zsh' ]; then source '/usr/local/opt/fzf/shell/key-bindings.zsh'; fi
if [ -r '/usr/local/opt/zsh-autosuggestions/share/zsh-autosuggestions/zsh-autosuggestions.zsh' ]; then source '/usr/local/opt/zsh-autosuggestions/share/zsh-autosuggestions/zsh-autosuggestions.zsh'; fi
if [ -r '/usr/local/opt/zsh-syntax-highlighting/share/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh' ]; then source '/usr/local/opt/zsh-syntax-highlighting/share/zsh-syntax-highlighting/zsh-syntax-highlighting.zsh'; fi
if [ -r '/usr/local/share/zsh/site-functions/_aws' ]; then source '/usr/local/share/zsh/site-functions/_aws'; fi
# Use solarized dircolors if the repo checkout exists.
if [ -r "$HOME/Repositories/github.com/seebi/dircolors-solarized" ]; then eval $(gdircolors "$HOME/Repositories/github.com/seebi/dircolors-solarized"); fi
# Shell completion for terraform.
complete -o nospace -C /usr/local/bin/terraform terraform
# Print a blank line before each command (zsh preexec hook).
preexec() { print '' }
|
import type { AccountId } from '@polkadot/types/interfaces';
import type { AnyJson } from '@polkadot/types/types';
import { ApiPromise } from '@polkadot/api';
import { Abi } from '../Abi';
import { Contract as BaseContract } from '../base';
/**
 * Promise-flavoured contract wrapper: binds an ABI (raw JSON or parsed Abi)
 * and an on-chain address to an ApiPromise instance.
 */
export declare class Contract extends BaseContract<'promise'> {
constructor(api: ApiPromise, abi: AnyJson | Abi, address: string | AccountId);
}
|
<reponame>troopaloop8/react-speech-1<filename>src/style.js<gh_stars>0
module.exports = {
container: {
width: '100%'
},
text: {
width: '100%',
display: ''
},
play: {
hover: {
backgroundColor: 'GhostWhite'
},
button: {
width: '50',
height: '44',
cursor: 'pointer',
pointerEvents: 'none',
outline: 'none',
backgroundColor: 'black',
backgroundImage: url('https://i.imgur.com/uBz9rp9.png'),
border: 'solid 1px rgba(255,255,255,1)',
borderRadius: 6
}
}
};
|
# Input tables: collected phenotypes, covariates, predicted IDPs, individuals.
phenotype=/vol/bmd/meliao/data/psychiatric_trait_phenotypes/2020-04-10_collected-phenotypes.txt
covariate=/vol/bmd/data/ukbiobank/psychiatric_traits/2019-12-17_psychiatric-trait_covariates.csv
idp=/vol/bmd/yanyul/UKB/ukb_idp_genetic_arch/prediction/pred_idp.gw_ridge.parquet
indiv_list=/vol/bmd/yanyul/UKB/predicted_expression_tf2/British.txt
tmp_indiv=tmp_indiv.txt
# Strip the header row and keep the first space-delimited column (individual IDs).
cat $indiv_list | tail -n +2 | cut -f 1 -d ' ' > $tmp_indiv
thisdir=`pwd`
# conda activate ukb_idp
# Make the shared python utilities importable by run_imagexcan.py.
export PYTHONPATH=/vol/bmd/yanyul/GitHub/misc-tools/pyutil
# Run ImageXcan restricted to the first 30 IDPs; writes output.test_run.csv.
python ../run_imagexcan.py \
--covariate_table $covariate eid \
--phenotype_table $phenotype eid \
--covariate_yaml covar.yaml \
--phenotype_yaml pheno.yaml \
--individual_list $tmp_indiv \
--idp_table $idp indiv \
--first_30_idp \
--output output.test_run.csv
|
/**
 * Returns the characters of {@code str} in reverse order.
 * Reverses in place on a char array using a two-pointer swap.
 */
public static String reverseString(String str) {
    char[] chars = str.toCharArray();
    // Walk inward from both ends, swapping pairs until the pointers meet.
    for (int lo = 0, hi = chars.length - 1; lo < hi; lo++, hi--) {
        char tmp = chars[lo];
        chars[lo] = chars[hi];
        chars[hi] = tmp;
    }
    return String.valueOf(chars);
}
String str = "Hello World";
System.out.println(reverseString(str)); |
<gh_stars>1-10
<?hh // strict
namespace Waffle\Log\__Private;
use namespace HH\Lib\Str;
use type Waffle\Lib\Json;
use type DateTimeInterface;
use type Throwable;
use function is_object;
use function get_resource_type;
use function get_class;
use function gettype;
// Renders arbitrary values (scalars, containers, exceptions, dates, objects,
// resources) to a readable single-line string for log output.
final class VariableNormalizer
{
    // Default timestamp format: ISO-8601 with microseconds and UTC offset.
    const string SIMPLE_DATE = "Y-m-d\TH:i:s.uP";
    // Format applied when normalizing DateTimeInterface values.
    private string $dateFormat;
    public function __construct(?string $dateFormat = null)
    {
        $this->dateFormat = $dateFormat ?? VariableNormalizer::SIMPLE_DATE;
    }
    // Normalizes $value by type: bools become true/false, strings are quoted,
    // floats get one decimal, containers are JSON-encoded, Throwables are
    // expanded recursively, other objects/resources print their type name.
    // NOTE: the branch order matters — e.g. Throwable/DateTimeInterface must be
    // checked before the generic object fallback.
    public function normalize(mixed $value): string
    {
        if ($value is bool) {
            $value = ($value ? 'true' : 'false');
        } elseif ($value is string) {
            $value = '"'.$value.'"';
        } elseif ($value is num) {
            $value = $value is int ? $value : Str\format_number($value, 1);
        } elseif ($value is resource) {
            $value = 'resource['.get_resource_type($value).']';
        } elseif (null === $value) {
            $value = 'null';
        } elseif (is_object($value) && !$value is Container<_>) {
            if ($value is Throwable) {
                // Recursively normalize every interesting Throwable field,
                // including the chained previous exception.
                $value = get_class($value).'['.
                'message=' . $this->normalize($value->getMessage()) .
                ', code=' . $this->normalize($value->getCode()) .
                ', file=' . $this->normalize($value->getFile()) .
                ', line=' . $this->normalize($value->getLine()) .
                ', trace= ' . $this->normalize($value->getTrace()) .
                ', previous=' . $this->normalize($value->getPrevious()) .
                ']';
            } elseif ($value is DateTimeInterface) {
                $value = get_class($value) . '['. $value->format($this->dateFormat) .']';
            } else {
                $value = 'object[' . get_class($value) . ']';
            }
        } elseif ($value is Container<_>) {
            $value = Json::encode($value, false);
        } else {
            // Unknown type: prefix with '!' so odd values stand out in logs.
            $value = '!' . gettype($value).Json::encode($value, false);
        }
        return (string) $value;
    }
}
|
#include <stdio.h>
/* Finds and prints the largest element of a fixed sample array. */
int main()
{
    int values[] = {5, 6, 4, 8, 9};
    int count = (int)(sizeof(values) / sizeof(values[0]));
    int largest = values[0];
    int idx;

    /* Keep a running maximum over the remaining elements. */
    for (idx = 1; idx < count; idx++) {
        if (values[idx] > largest) {
            largest = values[idx];
        }
    }

    printf("Largest element = %d", largest);
    return 0;
}
#!/usr/bin/env bash
# Build the docs with mkdocs-material inside Docker, pinning the image tag to
# the MKDOCS_VERSION declared in .cirrus.yml (grep/cut extracts the 4th
# space-delimited field of that line) so CI and local builds match.
docker run --rm -it -v ${PWD}:/docs squidfunk/mkdocs-material:$(grep 'MKDOCS_VERSION:' .cirrus.yml | cut -d\ -f4) build
|
from django.db import models
from response.core.models import Incident


class Notification(models.Model):
    """A per-incident notification record.

    Tracks when a notification fires for an incident, how many times it has
    repeated, and whether it is completed.
    """

    incident = models.ForeignKey(Incident, on_delete=models.CASCADE)
    # Notification type identifier; unique together with incident (see Meta).
    key = models.CharField(max_length=30)
    # When the notification fires / fired.
    time = models.DateTimeField()
    # How many times this notification has repeated so far.
    repeat_count = models.IntegerField(default=0)
    # True once the notification no longer needs to fire.
    completed = models.BooleanField(default=False)

    class Meta:
        # At most one notification of a given key per incident.
        unique_together = ("incident", "key")

    def __str__(self):
        """Human-readable summary: timestamp, incident and key."""
        nice_date = self.time.strftime('%Y-%m-%d %H:%M:%S')
        return f"{nice_date} - {self.incident} - {self.key}"
|
var fs = require('fs');
var _ = require('underscore');
module.exports = {
//handle errors in express routers
handleError: function(err,res) {
if(err) {
if (_.has(err,'message'))
print(err.message,'ERROR');
else
print(err,'ERROR');
//if res defined, also give server answer
if (res) {
res.setHeader('Content-Type','application/json');
res.status(500).send({ error: err });
}
return true;
}
return false;
},
//escape paths in mongoose pre save middleware
escapePath: function(doc, path) {
elements = doc.get(path);
if (_.isArray(elements))
doc.set(path, _.map(elements,_.escape))
else
doc.set(path, _.escape(elements));
}
} |
<filename>viewer/javascript/lib/highchart-7.2.0/es-modules/mixins/centered-series.js
/* *
*
* (c) 2010-2019 <NAME>
*
* License: www.highcharts.com/license
*
* !!!!!!! SOURCE GETS TRANSPILED BY TYPESCRIPT. EDIT TS FILE ONLY. !!!!!!!
*
* */
'use strict';
import H from '../parts/Globals.js';
/**
* @private
* @interface Highcharts.RadianAngles
*/ /**
* @name Highcharts.RadianAngles#end
* @type {number}
*/ /**
* @name Highcharts.RadianAngles#start
* @type {number}
*/
import U from '../parts/Utilities.js';
var isNumber = U.isNumber;
var deg2rad = H.deg2rad, pick = H.pick, relativeLength = H.relativeLength;
/* eslint-disable valid-jsdoc */
/**
* @private
* @mixin Highcharts.CenteredSeriesMixin
*/
// Mixin shared by circular series types (pie, polar, gauge, sunburst) for
// computing plot-area-relative centers/sizes and angular ranges.
H.CenteredSeriesMixin = {
/**
 * Get the center of the pie based on the size and center options relative
 * to the plot area. Borrowed by the polar and gauge series types.
 *
 * @private
 * @function Highcharts.CenteredSeriesMixin.getCenter
 *
 * @return {Array<number>}
 */
getCenter: function () {
var options = this.options, chart = this.chart, slicingRoom = 2 * (options.slicedOffset || 0), handleSlicingRoom, plotWidth = chart.plotWidth - 2 * slicingRoom, plotHeight = chart.plotHeight - 2 * slicingRoom, centerOption = options.center, positions = [
pick(centerOption[0], '50%'),
pick(centerOption[1], '50%'),
options.size || '100%',
options.innerSize || 0
], smallestSize = Math.min(plotWidth, plotHeight), i, value;
// Resolve each entry against its reference length, adding the sliced-offset
// room where it applies (centers always; size only when given as percent).
for (i = 0; i < 4; ++i) {
value = positions[i];
handleSlicingRoom = i < 2 || (i === 2 && /%$/.test(value));
// i == 0: centerX, relative to width
// i == 1: centerY, relative to height
// i == 2: size, relative to smallestSize
// i == 3: innerSize, relative to size
positions[i] = relativeLength(value, [plotWidth, plotHeight, smallestSize, positions[2]][i]) + (handleSlicingRoom ? slicingRoom : 0);
}
// innerSize cannot be larger than size (#3632)
if (positions[3] > positions[2]) {
positions[3] = positions[2];
}
return positions;
},
/**
 * getStartAndEndRadians - Calculates start and end angles in radians.
 * Used in series types such as pie and sunburst.
 *
 * @private
 * @function Highcharts.CenteredSeriesMixin.getStartAndEndRadians
 *
 * @param {number} [start]
 *        Start angle in degrees.
 *
 * @param {number} [end]
 *        End angle in degrees.
 *
 * @return {Highcharts.RadianAngles}
 *         Returns an object containing start and end angles as radians.
 */
getStartAndEndRadians: function (start, end) {
var startAngle = isNumber(start) ? start : 0, // must be a number
endAngle = ((isNumber(end) && // must be a number
end > startAngle && // must be larger than the start angle
// difference must be less than 360 degrees
(end - startAngle) < 360) ?
end :
startAngle + 360), correction = -90; // rotate so 0° points straight up
return {
start: deg2rad * (startAngle + correction),
end: deg2rad * (endAngle + correction)
};
}
};
|
def convert_list_to_string(list_of_words, separator=", "):
    """Join a list of words into a single string.

    Args:
        list_of_words: Iterable of strings to join.
        separator: String placed between words. Defaults to ", ",
            preserving the original behavior for existing callers.

    Returns:
        The joined string; an empty string for an empty list.
    """
    return separator.join(list_of_words)
<reponame>alklimenko/calculator
package ru.alklimenko.calculator;

/**
 * Immutable numeric leaf term of the calculator expression tree.
 */
public class Number extends Term {

    /** The wrapped numeric value. */
    private final double value;

    Number(double value) {
        this.value = value;
    }

    /** Returns the wrapped value. */
    double getValue() {
        return value;
    }

    @Override
    public String toString() {
        // String concatenation renders the double exactly like Double.toString.
        return " " + value;
    }

    /** Writes this term's textual form to stdout (no newline). */
    public void print() {
        System.out.print(this);
    }

    @Override
    public int getType() {
        return NUMBER;
    }
}
|
<reponame>YoffieYF/yf_album<filename>android/src/main/java/plugin/album/utils/downloader/AsyncDownLoader.java<gh_stars>1-10
package plugin.album.utils.downloader;
import java.io.File;
/**
 * Static helpers for starting asynchronous file downloads on the shared
 * DownLoadTask thread pool executor.
 */
public class AsyncDownLoader {
    /** Starts downloading {@code url} into {@code file} with default timeouts. */
    public static DownLoadTask downLoad(String url, File file, DownLoaderListener listener) {
        DownLoadTask download = new DownLoadTask(file, listener);
        download.executeOnExecutor(DownLoadTask.THREAD_POOL_EXECUTOR, url);
        return download;
    }
    /**
     * Starts downloading with explicit read/connect timeouts
     * (presumably milliseconds — confirm against DownLoadTask).
     */
    public static DownLoadTask downLoad(String url, File file, DownLoaderListener listener, int readTimeOut, int connectTimeOut) {
        DownLoadTask download = new DownLoadTask(file, listener, readTimeOut, connectTimeOut);
        download.executeOnExecutor(DownLoadTask.THREAD_POOL_EXECUTOR, url);
        return download;
    }
    /** Callbacks for download completion, failure and progress reporting. */
    public interface DownLoaderListener {
        void onSuccess(File file);
        void onFailed(int code, File file);
        void onProgress(long current, long total);
    }
}
|
<gh_stars>1-10
// ────────────────────────────────────────────────────────────────────────────
// │ Copyright (c) 2021 by the author of the React-weather project. All rights reserved. │
// │ This owner-supplied source code has no limitations on the condition imposed on the  │
// │ maintenance of the copyright notice.                                                │
// │ For more information, read the LICENSE file at the root of the project.             │
// │ Written by author <NAME> <<EMAIL>>.                                                 │
// ────────────────────────────────────────────────────────────────────────────
import axios from 'axios';
// Axios instance preconfigured for the OpenWeatherMap "current weather" endpoint.
const api_openWeatherMap = axios.create({
  baseURL: 'https://api.openweathermap.org/data/2.5/weather',
  params: {
    lang: 'pt_br',
    units: 'metric',
    appid: process.env.REACT_APP_API_OPENWAETHERMAP,
  },
});

export default class OpenWeatherMap {
  /**
   * Fetches the current weather for `city` and maps the raw API payload into
   * the app's { status, location, astro, element, condition } shape.
   * Rejects with the API error body (or the underlying error for network
   * failures) when the request fails.
   */
  public static async GetCity(city: string) {
    const options = {
      params: {
        q: city,
      },
    };
    try {
      // async/await replaces the explicit `new Promise(...)` wrapper around an
      // already-existing promise (the promise-constructor anti-pattern).
      const { data } = await api_openWeatherMap.request(options);
      return {
        status: {
          dt: data.dt,
          cod: data.cod,
          message: '',
        },
        location: {
          id: data.id,
          name: data.name,
          country: data.sys.country,
          coord_lon: data.coord.lon,
          coord_lat: data.coord.lat,
          timezone: data.timezone,
          temp: Math.floor(data.main.temp),
        },
        astro: {
          sunrise: data.sys.sunrise,
          sunset: data.sys.sunset,
        },
        element: {
          feels_like: Math.floor(data.main.feels_like),
          temp_min: Math.floor(data.main.temp_min),
          temp_max: Math.floor(data.main.temp_max),
          pressure: data.main.pressure,
          humidity: data.main.humidity,
          wind_speed: data.wind.speed,
          wind_deg: data.wind.deg,
          clouds: data.clouds.all,
        },
        condition: {
          id: data.weather[0].id,
          name: data.weather[0].main,
          description: data.weather[0].description,
          icon: data.weather[0].icon,
        },
      };
    } catch (err: any) {
      // BUG FIX: network failures carry no `response`; the old code crashed
      // reading `response.data` inside the catch and rejected with a TypeError
      // instead of the actual cause. Fall back to the error itself.
      const payload = err?.response?.data ?? err;
      console.log(payload);
      throw payload;
    }
  }
}
|
package nl.pvanassen.steam.store.history;
import java.util.Date;
import java.util.Objects;
/**
* A row in the steam history
*
* @author <NAME>
*/
public class HistoryRow {
private final String rowName;
private final Date listed;
private final Date acted;
private final int price;
HistoryRow(String rowName, Date listed, Date acted, int price) {
super();
this.rowName = rowName;
this.listed = listed;
this.acted = acted;
this.price = price;
}
/**
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (!(obj instanceof HistoryRow)) {
return false;
}
HistoryRow other = (HistoryRow) obj;
if (acted == null) {
if (other.acted != null) {
return false;
}
}
else if (!acted.equals(other.acted)) {
return false;
}
if (listed == null) {
if (other.listed != null) {
return false;
}
}
else if (!listed.equals(other.listed)) {
return false;
}
if (price != other.price) {
return false;
}
if (rowName == null) {
if (other.rowName != null) {
return false;
}
}
else if (!rowName.equals(other.rowName)) {
return false;
}
return true;
}
/**
* @return the acted
*/
public final Date getActed() {
return new Date(acted.getTime());
}
/**
* @return the listed
*/
public final Date getListed() {
return new Date(listed.getTime());
}
/**
* @return the price
*/
public final int getPrice() {
return price;
}
/**
* @return the steamId
*/
public final String getRowName() {
return rowName;
}
/**
* @see java.lang.Object#hashCode()
*/
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = (prime * result) + ((acted == null) ? 0 : acted.hashCode());
result = (prime * result) + ((listed == null) ? 0 : listed.hashCode());
result = (prime * result) + price;
result = (prime * result) + ((rowName == null) ? 0 : rowName.hashCode());
return result;
}
/**
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
return "HistoryRow [steamId=" + rowName + ", listed=" + listed + ", acted=" + acted + ", price=" + price + "]";
}
}
|
const postcss = require('postcss')
module.exports = postcss.plugin('trim', function (opts) {
return function (css) {
css.walk(function (node) {
if (node.type === 'rule' || node.type === 'atrule') {
node.raws.before = node.raws.after = '\n'
}
})
}
})
|
<gh_stars>0
#include <pybind11/pybind11.h>
#include <iostream>
#include <sstream>
#include <pybind11/numpy.h>
#include "nifty/python/graph/undirected_list_graph.hxx"
#include "nifty/python/graph/undirected_grid_graph.hxx"
#include "nifty/graph/node_weighted_watersheds.hxx"
#include "nifty/python/converter.hxx"
namespace py = pybind11;
namespace nifty{
namespace graph{

    // Registers the node-weighted watershed segmentation binding for one
    // concrete graph type GRAPH.
    template<class GRAPH>
    void exportNodeWeightedWatershedsT(py::module & module) {
        // function
        module.def("nodeWeightedWatershedsSegmentation",
            [](
                const GRAPH & graph,
                nifty::marray::PyView<uint64_t,1> seeds,
                nifty::marray::PyView<float,1> nodeWeights
            ){
                // Output label array: one entry per node (same length as seeds).
                nifty::marray::PyView<uint64_t> labels({seeds.shape(0)});
                nodeWeightedWatershedsSegmentation(graph, nodeWeights, seeds, labels);
                return labels;
            },
            py::arg("graph"),
            py::arg("seeds"),
            py::arg("nodeWeights"),
            "Node weighted watershed on a graph\n\n"
            "Arguments:\n\n"
            "   graph : the input graph\n"
            "   seeds (numpy.ndarray): the seeds\n"
            "   nodeWeights (numpy.ndarray): the node weights\n\n"
            "Returns:\n\n"
            "   numpy.ndarray : the segmentation"
        );
    }

    // Instantiates the binding for the plain undirected list graph and the
    // 2D/3D undirected grid graphs.
    void exportNodeWeightedWatersheds(py::module & module) {
        {
            typedef UndirectedGraph<> GraphType;
            exportNodeWeightedWatershedsT<GraphType>(module);
        }
        {
            typedef UndirectedGridGraph<2, true> GraphType;
            exportNodeWeightedWatershedsT<GraphType>(module);
        }
        {
            typedef UndirectedGridGraph<3, true> GraphType;
            exportNodeWeightedWatershedsT<GraphType>(module);
        }
    }
}
}
|
<reponame>JoshFrankfurth/ecal<filename>app/rec/rec_server_core/src/rec_server.cpp
/* ========================= eCAL LICENSE =================================
*
* Copyright (C) 2016 - 2019 Continental Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* ========================= eCAL LICENSE =================================
*/
#include <rec_server_core/rec_server.h>
#include "rec_server_impl.h"
namespace eCAL
{
namespace rec_server
{
////////////////////////////////////
// Constructor & Destructor
////////////////////////////////////
RecServer::RecServer() : rec_server_impl_(std::make_unique<RecServerImpl>()) {}
RecServer::~RecServer() {}
////////////////////////////////////
// Client management
////////////////////////////////////
bool RecServer::SetEnabledRecClients(const std::map<std::string, ClientConfig>& enabled_rec_clients) { return rec_server_impl_->SetEnabledRecClients(enabled_rec_clients); }
std::map<std::string, ClientConfig> RecServer::GetEnabledRecClients() const { return rec_server_impl_->GetEnabledRecClients(); }
bool RecServer::SetHostFilter(const std::string& hostname, const std::set<std::string>& host_filter) { return rec_server_impl_->SetHostFilter(hostname, host_filter); }
std::set<std::string> RecServer::GetHostFilter(const std::string& hostname) const { return rec_server_impl_->GetHostFilter(hostname); }
bool RecServer::SetConnectionToClientsActive(bool active) { return rec_server_impl_->SetConnectionToClientsActive(active); }
bool RecServer::IsConnectionToClientsActive() const { return rec_server_impl_->IsConnectionToClientsActive(); }
////////////////////////////////////
// Recorder control
////////////////////////////////////
bool RecServer::ConnectToEcal () { return rec_server_impl_->ConnectToEcal(); }
bool RecServer::DisconnectFromEcal () { return rec_server_impl_->DisconnectFromEcal(); }
bool RecServer::SavePreBufferedData() { return rec_server_impl_->SavePreBufferedData(); }
bool RecServer::StartRecording () { return rec_server_impl_->StartRecording(); }
bool RecServer::StopRecording () { return rec_server_impl_->StopRecording(); }
bool RecServer::IsConnectedToEcal () const { return rec_server_impl_->IsConnectedToEcal(); }
// NOTE: RecServer is a thin facade — every member below forwards
// directly to the implementation object rec_server_impl_ (pimpl idiom).
bool RecServer::IsRecording () const { return rec_server_impl_->IsRecording(); }
int64_t RecServer::GetCurrentlyRecordingMeasId() const { return rec_server_impl_->GetCurrentlyRecordingMeasId(); }
bool RecServer::IsAnyRequestPending () const { return rec_server_impl_->IsAnyRequestPending(); }
std::set<std::string> RecServer::GetHostsWithPendingRequests() const { return rec_server_impl_->GetHostsWithPendingRequests(); }
void RecServer::WaitForPendingRequests () const { rec_server_impl_->WaitForPendingRequests(); }
////////////////////////////////////
// Status
////////////////////////////////////
eCAL::rec_server::RecorderStatusMap_T RecServer::GetRecorderStatuses() const { return rec_server_impl_->GetRecorderStatuses(); }
eCAL::rec::RecorderStatus RecServer::GetBuiltInRecorderInstanceStatus() const { return rec_server_impl_->GetBuiltInRecorderInstanceStatus(); }
TopicInfoMap_T RecServer::GetTopicInfo() const { return rec_server_impl_->GetTopicInfo(); }
HostsRunningEcalRec_T RecServer::GetHostsRunningEcalRec() const { return rec_server_impl_->GetHostsRunningEcalRec(); }
std::list<eCAL::rec_server::JobHistoryEntry> RecServer::GetJobHistory() const { return rec_server_impl_->GetJobHistory(); }
RecServerStatus RecServer::GetStatus() const { return rec_server_impl_->GetStatus(); }
////////////////////////////////////
// General Client Settings
////////////////////////////////////
void RecServer::SetMaxPreBufferLength (std::chrono::steady_clock::duration max_pre_buffer_length) { rec_server_impl_->SetMaxPreBufferLength(max_pre_buffer_length); }
void RecServer::SetPreBufferingEnabled(bool pre_buffering_enabled) { rec_server_impl_->SetPreBufferingEnabled(pre_buffering_enabled); }
bool RecServer::SetRecordMode (eCAL::rec::RecordMode record_mode) { return rec_server_impl_->SetRecordMode(record_mode); }
bool RecServer::SetRecordMode (eCAL::rec::RecordMode record_mode, const std::set<std::string>& listed_topics) { return rec_server_impl_->SetRecordMode(record_mode, listed_topics); }
bool RecServer::SetListedTopics (std::set<std::string> listed_topics) { return rec_server_impl_->SetListedTopics(listed_topics); }
std::chrono::steady_clock::duration RecServer::GetMaxPreBufferLength () const { return rec_server_impl_->GetMaxPreBufferLength(); }
bool RecServer::GetPreBufferingEnabled() const { return rec_server_impl_->GetPreBufferingEnabled(); }
eCAL::rec::RecordMode RecServer::GetRecordMode () const { return rec_server_impl_->GetRecordMode(); }
std::set<std::string> RecServer::GetListedTopics () const { return rec_server_impl_->GetListedTopics(); }
////////////////////////////////////
// Job Settings
////////////////////////////////////
void RecServer::SetMeasRootDir (std::string meas_root_dir) { rec_server_impl_->SetMeasRootDir(meas_root_dir); }
void RecServer::SetMeasName (std::string meas_name) { rec_server_impl_->SetMeasName(meas_name); }
void RecServer::SetMaxFileSizeMib (unsigned int max_file_size_mib) { rec_server_impl_->SetMaxFileSizeMib(max_file_size_mib); }
void RecServer::SetDescription (std::string description) { rec_server_impl_->SetDescription(description); }
std::string RecServer::GetMeasRootDir () const { return rec_server_impl_->GetMeasRootDir(); }
std::string RecServer::GetMeasName () const { return rec_server_impl_->GetMeasName(); }
unsigned int RecServer::GetMaxFileSizeMib() const { return rec_server_impl_->GetMaxFileSizeMib(); }
std::string RecServer::GetDescription () const { return rec_server_impl_->GetDescription(); }
////////////////////////////////////
// Server Settings
////////////////////////////////////
void RecServer::SetMonitoringUpdateCallback(PostUpdateCallback_T post_update_callback) { rec_server_impl_->SetMonitoringUpdateCallback(post_update_callback); }
bool RecServer::SetUsingBuiltInRecorderEnabled(bool enabled) { return rec_server_impl_->SetUsingBuiltInRecorderEnabled(enabled); }
bool RecServer::IsUsingBuiltInRecorderEnabled() const { return rec_server_impl_->IsUsingBuiltInRecorderEnabled(); }
////////////////////////////////////
// Measurement Upload
////////////////////////////////////
void RecServer::SetUploadConfig(const UploadConfig& upload_config) { rec_server_impl_->SetUploadConfig(upload_config); }
UploadConfig RecServer::GetUploadConfig() const { return rec_server_impl_->GetUploadConfig(); }
int RecServer::GetInternalFtpServerOpenConnectionCount() const { return rec_server_impl_->GetInternalFtpServerOpenConnectionCount(); }
uint16_t RecServer::GetInternalFtpServerPort() const { return rec_server_impl_->GetInternalFtpServerPort(); }
eCAL::rec::Error RecServer::UploadMeasurement(int64_t meas_id) { return rec_server_impl_->UploadMeasurement(meas_id); }
bool RecServer::CanUploadMeasurement(int64_t meas_id) const { return rec_server_impl_->CanUploadMeasurement(meas_id); };
eCAL::rec::Error RecServer::SimulateUploadMeasurement(int64_t meas_id) const { return rec_server_impl_->SimulateUploadMeasurement(meas_id); };
int RecServer::UploadNonUploadedMeasurements() { return rec_server_impl_->UploadNonUploadedMeasurements(); };
bool RecServer::HasAnyUploadError(int64_t meas_id) const { return rec_server_impl_->HasAnyUploadError(meas_id); }
////////////////////////////////////
// Comments
////////////////////////////////////
// All members below forward to the implementation object rec_server_impl_.
eCAL::rec::Error RecServer::AddComment(int64_t meas_id, const std::string& comment) { return rec_server_impl_->AddComment(meas_id, comment); }
bool RecServer::CanAddComment(int64_t meas_id) const { return rec_server_impl_->CanAddComment(meas_id); }
eCAL::rec::Error RecServer::SimulateAddComment(int64_t meas_id) const { return rec_server_impl_->SimulateAddComment(meas_id); }
////////////////////////////////////
// Delete measurement
////////////////////////////////////
bool RecServer::CanDeleteMeasurement(int64_t meas_id) const { return rec_server_impl_->CanDeleteMeasurement(meas_id); }
eCAL::rec::Error RecServer::SimulateDeleteMeasurement(int64_t meas_id) const { return rec_server_impl_->SimulateDeleteMeasurement(meas_id); }
eCAL::rec::Error RecServer::DeleteMeasurement(int64_t meas_id) { return rec_server_impl_->DeleteMeasurement(meas_id); }
////////////////////////////////////
// Config Save / Load
////////////////////////////////////
bool RecServer::ClearConfig () { return rec_server_impl_->ClearConfig(); }
bool RecServer::SaveConfigToFile (const std::string& path) const { return rec_server_impl_->SaveConfigToFile(path); }
bool RecServer::LoadConfigFromFile(const std::string& path) { return rec_server_impl_->LoadConfigFromFile(path); }
std::string RecServer::GetLoadedConfigPath() const { return rec_server_impl_->GetLoadedConfigPath(); }
int RecServer::GetLoadedConfigVersion() const { return rec_server_impl_->GetLoadedConfigVersion(); }
int RecServer::GetNativeConfigVersion() const { return rec_server_impl_->GetNativeConfigVersion(); }
}
} |
/*
*
* PromosPage actions
*
*/
import {
DEFAULT_ACTION,
} from './constants';
/**
 * Creates the default (no-op) action for the PromosPage reducer.
 *
 * @returns {{type: string}} a plain action carrying only DEFAULT_ACTION
 */
export function defaultAction() {
  const action = { type: DEFAULT_ACTION };
  return action;
}
|
//
// Created by devilox on 11/20/20.
//
//-----------------------------//
#include <stdexcept>
#include "dVulkanMesh.h"
//-----------------------------//
// Builds a mesh by uploading the given vertex and index data into
// device-local buffers (staging copy submitted on the transfer queue).
dVulkanMesh::dVulkanMesh( VkPhysicalDevice tGPU, VkDevice tLogicalGPU,
                          VkQueue tTransferQueue, VkCommandPool tTransferCommandPool,
                          const std::vector <Vertex>& tVertices, const std::vector <uint32_t>& tIndices) {
    mGPU            = tGPU;
    mLogicalGPU     = tLogicalGPU;
    // Counts are captured up front; the create*Buffer helpers size their
    // buffers from these members rather than from the vectors directly.
    mVertexCount    = tVertices.size();
    mIndexCount     = tIndices.size();

    createVertexBuffer(tTransferQueue, tTransferCommandPool, tVertices);
    createIndexBuffer(tTransferQueue, tTransferCommandPool, tIndices);
}
//-----------------------------//
// Number of vertices captured at construction time.
size_t dVulkanMesh::getVertexCount() {
    return mVertexCount;
}

// Device-local vertex buffer handle.
VkBuffer dVulkanMesh::getVertexBuffer() {
    return mVertexBuffer;
}

// Number of indices captured at construction time.
size_t dVulkanMesh::getIndexCount() {
    return mIndexCount;
}

// Device-local index buffer handle.
VkBuffer dVulkanMesh::getIndexBuffer() {
    return mIndexBuffer;
}
// Releases the vertex/index buffers and their backing memory.
// NOTE(review): the handles are not reset to VK_NULL_HANDLE afterwards,
// so calling this twice would double-free — confirm callers invoke it
// exactly once.
void dVulkanMesh::destroyBuffers() {
    vkDestroyBuffer(mLogicalGPU, mVertexBuffer, nullptr);
    vkFreeMemory(mLogicalGPU, mVertexBufferMemory, nullptr);

    vkDestroyBuffer(mLogicalGPU, mIndexBuffer, nullptr);
    vkFreeMemory(mLogicalGPU, mIndexBufferMemory, nullptr);
}
//-----------------------------//
// Uploads tVertices into a device-local vertex buffer using the classic
// staging pattern: memcpy into a host-visible staging buffer, copy it to
// the device-local destination on the transfer queue, then free the
// staging resources.
void dVulkanMesh::createVertexBuffer(VkQueue tTransferQueue, VkCommandPool tTransferCommandPool, const std::vector <Vertex>& tVertices) {
    VkDeviceSize BufferSize = sizeof(Vertex) * mVertexCount;

    VkBuffer StagingBuffer;
    VkDeviceMemory StagingBufferMemory;
    void* Data;

    // Host-visible, host-coherent staging buffer (transfer source).
    createBuffer(
        BufferSize,
        VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
        &StagingBuffer,
        &StagingBufferMemory
    );

    // Copy the vertex data into the mapped staging memory.
    vkMapMemory(mLogicalGPU, StagingBufferMemory, 0, BufferSize, 0, &Data);
    memcpy(Data, tVertices.data(), static_cast <size_t>(BufferSize));
    vkUnmapMemory(mLogicalGPU, StagingBufferMemory);

    // Device-local destination (transfer destination + vertex buffer).
    createBuffer(
        BufferSize,
        VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
        &mVertexBuffer,
        &mVertexBufferMemory
    );

    copyBuffer(tTransferQueue, tTransferCommandPool, StagingBuffer, mVertexBuffer, BufferSize);

    // Staging resources are no longer needed once the copy has completed
    // (copyBuffer waits on the queue before returning).
    vkDestroyBuffer(mLogicalGPU, StagingBuffer, nullptr);
    vkFreeMemory(mLogicalGPU, StagingBufferMemory, nullptr);
}
// Uploads tIndices into a device-local index buffer; same staging pattern
// as createVertexBuffer, with VK_BUFFER_USAGE_INDEX_BUFFER_BIT as usage.
void dVulkanMesh::createIndexBuffer(VkQueue tTransferQueue, VkCommandPool tTransferCommandPool, const std::vector <uint32_t>& tIndices) {
    VkDeviceSize BufferSize = sizeof(uint32_t) * mIndexCount;

    VkBuffer StagingBuffer;
    VkDeviceMemory StagingBufferMemory;
    void* Data;

    // Host-visible, host-coherent staging buffer (transfer source).
    createBuffer(
        BufferSize,
        VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
        &StagingBuffer,
        &StagingBufferMemory
    );

    // Copy the index data into the mapped staging memory.
    vkMapMemory(mLogicalGPU, StagingBufferMemory, 0, BufferSize, 0, &Data);
    memcpy(Data, tIndices.data(), static_cast <size_t>(BufferSize));
    vkUnmapMemory(mLogicalGPU, StagingBufferMemory);

    // Device-local destination (transfer destination + index buffer).
    createBuffer(
        BufferSize,
        VK_BUFFER_USAGE_TRANSFER_DST_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
        &mIndexBuffer,
        &mIndexBufferMemory
    );

    copyBuffer(tTransferQueue, tTransferCommandPool, StagingBuffer, mIndexBuffer, BufferSize);

    vkDestroyBuffer(mLogicalGPU, StagingBuffer, nullptr);
    vkFreeMemory(mLogicalGPU, StagingBufferMemory, nullptr);
}
// Picks the index of a device memory type that is allowed by
// tAllowedTypes (a bitmask: bit i set <=> memory type i allowed) and has
// all of the requested property flags.
//
// Throws std::runtime_error when no suitable type exists. FIX: the
// original fell off the end of the function in that case, which is
// undefined behaviour for a value-returning function.
uint32_t dVulkanMesh::findMemoryTypeIndex(uint32_t tAllowedTypes, VkMemoryPropertyFlags tFlags) {
    VkPhysicalDeviceMemoryProperties MemoryProperties;
    vkGetPhysicalDeviceMemoryProperties(mGPU, &MemoryProperties);

    for (uint32_t i = 0; i < MemoryProperties.memoryTypeCount; i++) {
        if ((tAllowedTypes & (1 << i)) && (MemoryProperties.memoryTypes[i].propertyFlags & tFlags) == tFlags) {
            return i;
        }
    }

    throw std::runtime_error("Failed to find a suitable memory type!");
}
// Creates a VkBuffer of tBufferSize bytes with the given usage flags,
// allocates memory with the requested property flags, and binds it at
// offset 0. The out-params tBuffer / tMemory receive the new handles;
// the caller owns them and must destroy/free them.
//
// Throws std::runtime_error when creation, allocation or binding fails.
void dVulkanMesh::createBuffer(VkDeviceSize tBufferSize, VkBufferUsageFlags tUsageFlags, VkMemoryPropertyFlags tPropertyFlags, VkBuffer* tBuffer, VkDeviceMemory* tMemory) {
    VkBufferCreateInfo BufferCreateInfo {
        .sType          = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
        .pNext          = nullptr,
        .flags          = 0,
        .size           = tBufferSize,
        .usage          = tUsageFlags,
        .sharingMode    = VK_SHARING_MODE_EXCLUSIVE
    };

    if (vkCreateBuffer(mLogicalGPU, &BufferCreateInfo, nullptr, tBuffer)) {
        throw std::runtime_error("Failed to create buffer!");
    }

    VkMemoryRequirements MemoryRequirements;
    vkGetBufferMemoryRequirements(mLogicalGPU, *tBuffer, &MemoryRequirements);

    VkMemoryAllocateInfo MemoryAllocateInfo {
        .sType              = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext              = nullptr,
        .allocationSize     = MemoryRequirements.size,
        .memoryTypeIndex    = findMemoryTypeIndex(MemoryRequirements.memoryTypeBits, tPropertyFlags)
    };

    if (vkAllocateMemory(mLogicalGPU, &MemoryAllocateInfo, nullptr, tMemory)) {
        throw std::runtime_error("Failed to allocate buffer memory!");
    }

    // FIX: the bind result was previously ignored; a failed bind left the
    // buffer silently unusable.
    if (vkBindBufferMemory(mLogicalGPU, *tBuffer, *tMemory, 0)) {
        throw std::runtime_error("Failed to bind buffer memory!");
    }
}
// Records and submits a one-time command buffer on the transfer queue
// that copies tBufferSize bytes from tSrcBuffer to tDstBuffer. Blocks
// (vkQueueWaitIdle) until the copy has finished, then frees the
// temporary command buffer.
// FIX: renamed the source-buffer parameter from the typo "tScrBuffer";
// a definition-side parameter rename does not affect callers in C++.
void dVulkanMesh::copyBuffer(VkQueue tTransferQueue, VkCommandPool tTransferCommandPool, VkBuffer tSrcBuffer, VkBuffer tDstBuffer, VkDeviceSize tBufferSize) {
    VkCommandBuffer TransferCommandBuffer;

    VkCommandBufferAllocateInfo AllocateInfo {
        .sType              = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .pNext              = nullptr,
        .commandPool        = tTransferCommandPool,
        .level              = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = 1
    };

    vkAllocateCommandBuffers(mLogicalGPU, &AllocateInfo, &TransferCommandBuffer);

    // One-time-submit: the command buffer is recorded once and discarded.
    VkCommandBufferBeginInfo BeginInfo {
        .sType  = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext  = nullptr,
        .flags  = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT
    };

    vkBeginCommandBuffer(TransferCommandBuffer, &BeginInfo);
    {
        VkBufferCopy BufferCopyRegion {
            .srcOffset  = 0,
            .dstOffset  = 0,
            .size       = tBufferSize
        };

        vkCmdCopyBuffer(TransferCommandBuffer, tSrcBuffer, tDstBuffer, 1, &BufferCopyRegion);
    }
    vkEndCommandBuffer(TransferCommandBuffer);

    VkSubmitInfo SubmitInfo {
        .sType                  = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext                  = nullptr,
        .waitSemaphoreCount     = 0,
        .pWaitSemaphores        = nullptr,
        .pWaitDstStageMask      = nullptr,
        .commandBufferCount     = 1,
        .pCommandBuffers        = &TransferCommandBuffer
    };

    vkQueueSubmit(tTransferQueue, 1, &SubmitInfo, VK_NULL_HANDLE);
    // Simple synchronous upload: wait for the transfer to complete before
    // the staging resources are destroyed by the caller.
    vkQueueWaitIdle(tTransferQueue);

    vkFreeCommandBuffers(mLogicalGPU, tTransferCommandPool, 1, &TransferCommandBuffer);
}
<filename>genie_pkg/delimited_genie.py
import csv
from genie_pkg.generators import (generate_email_id, one_of,
random_date_from_today, random_float,
random_geo_coords, random_integer,
random_mastercard_number, random_string,
random_string_with_special_chars,
random_visacard_number)
def _gen(data_type, optional):
    """Produce one random value for ``data_type``.

    Args:
        data_type (str): generator selector (e.g. ``'email'``, ``'int'``,
            ``'geo_coord'``); any unknown value falls back to
            ``random_string``.
        optional (list): positional arguments forwarded to the generator.

    Returns:
        The generated value; numeric types are stringified, ``geo_coord``
        yields a coordinate pair.
    """
    dispatch = {
        'email': generate_email_id,
        'int': lambda *args: str(random_integer(*args)),
        'float': lambda *args: str(random_float(*args)),
        'date': random_date_from_today,
        'special_string': random_string_with_special_chars,
        'geo_coord': random_geo_coords,
        # mastercard generator takes no arguments; extras are ignored,
        # matching the original behaviour
        'cc_mastercard': lambda *args: random_mastercard_number(),
        'cc_visacard': random_visacard_number,
        'one_of': one_of,
    }
    generator = dispatch.get(data_type, random_string)
    return generator(*optional)
def _generate_columns(colspecs):
    """Build one row of string values from the column specifications.

    A ``geo_coord`` spec expands into two columns (x then y); every other
    spec contributes exactly one column.
    """
    row_data = []
    for data_type, *optional in colspecs:
        value = _gen(data_type, optional)
        if data_type == 'geo_coord':
            row_data.extend(str(coordinate) for coordinate in value)
        else:
            row_data.append(value)
    return row_data
def anonymise_columns(row: bytes, anonymous_col_specs, encoding='utf-8', delimiter=',') -> bytes:
    """Replace selected columns of a delimited row with generated data.

    Args:
        row (bytes): Encoded bytes of the row data
        anonymous_col_specs (tuple-> (col_index, type, optional)): list of
            specifications; ``col_index`` is the 0-based column to replace,
            ``type`` selects the generator and the remaining items are its
            arguments. A ``geo_coord`` spec writes into ``col_index`` and
            ``col_index + 1``.
        encoding (str): Required encoding
        delimiter (str): Defaults to ,

    Returns:
        bytes: Mutated row, re-joined with ``delimiter`` and re-encoded.
    """
    lines = row.decode(encoding).splitlines()
    # Parse with the csv module so quoted fields containing the delimiter
    # survive; only the first line of the input is processed.
    anonymised_csv = list(csv.reader(lines, delimiter=delimiter))[0]
    for ac in anonymous_col_specs:
        col_index, data_type, *optional = ac
        data = _gen(data_type, optional)
        if data_type == 'geo_coord':
            # geo coordinates occupy two adjacent columns
            x0, y0 = data
            anonymised_csv[col_index] = str(x0)
            anonymised_csv[col_index+1] = str(y0)
        else:
            anonymised_csv[col_index] = data
    anonymised = delimiter.join(anonymised_csv)
    return anonymised.encode(encoding)
def generate(colspecs, nrows, encoding='utf-8', delimiter=','):
    """Generate delimited data for the provided specification.

    Args:
        colspecs (tuple-> (type, optional)): List of column specifications;
            ``type`` selects the generator, the remaining items are its
            arguments.
        nrows (int): Number of desired rows.
        encoding (str): Required encoding
        delimiter (str): Defaults to ,

    Returns:
        data: Iterator yielding ``nrows`` encoded, delimiter-joined rows.
    """
    for _ in range(nrows):
        row_data = _generate_columns(colspecs)
        yield delimiter.join(row_data).encode(encoding)
|
<gh_stars>0
import numpy as np
from sparse_time_approx import Approx
from matplotlib import pyplot as plt
import seaborn as sns
sns.set_theme()
def find_best_k(A, k):
    """Best rank-k approximation of ``A`` via truncated SVD
    (Eckart-Young): keep the k largest singular triplets and rebuild.
    """
    u, s, v = np.linalg.svd(A, full_matrices=False)
    # Scaling the kept columns of u by the kept singular values is the
    # same as u[:, :k] @ diag(s[:k]).
    return np.dot(u[:, :k] * s[:k], v[:k, :])
def Perf(A, A_, k):
    """Empirical lower bound for (1 + epsilon) in
    || A - (C C^+ A)_k ||_F^2 <= (1 + epsilon) || A - A_k ||_F^2.
    """
    best_rank_k = find_best_k(A, k)
    # Project A onto the column space of A_ (via its pseudo-inverse),
    # then truncate the projection to rank k.
    projection = np.matmul(np.matmul(A_, np.linalg.pinv(A_)), A)
    projected_rank_k = find_best_k(projection, k)
    err_best = np.linalg.norm(A - best_rank_k, ord="fro") ** 2
    err_projected = np.linalg.norm(A - projected_rank_k, ord="fro") ** 2
    # The tiny epsilon in the denominator guards against division by zero.
    return err_projected / (err_best + 1e-8)
# Experiment 1: fixed 500x500 random matrices, sweep k = 10..100.
# Each point averages the (1 + epsilon) lower bound over 100 trials.
P = []
for k in range(10, 101, 10):
    p = 0
    for i in range(100):
        print("k = {}, i = {}".format(k, i), flush=True, end="\r")
        A = np.matrix(np.random.rand(500, 500))
        A_w = Approx(A, k).getApprox()
        p += Perf(A, A_w, k)
    p /= 100
    P.append(p)

plt.figure(figsize=(10, 5))
plt.plot(range(10, 101, 10), P, '--o')
plt.xlabel("k")
plt.ylabel(r"$(1+\epsilon)$")
plt.title(r"Mean $(1+\epsilon)$ values for 500x500 matrix (100 trials)")
plt.savefig("plot_for_k.png")

# Experiment 2: sweep the column count c = 500..5000 with k = c/8 and
# 5 trials per point.
# NOTE(review): the matrices are built with c//2 rows, but the plot title
# says "(C/4)xC" — confirm which is intended.
P = []
for c in range(500, 5001, 500):
    k = c//8
    p = 0
    for i in range(5):
        print("c = {}, i = {}".format(c, i), flush=True, end="\r")
        A = np.matrix(np.random.rand(c//2, c))
        A_w = Approx(A, k).getApprox()
        p += Perf(A, A_w, k)
    p /= 5
    print(p)
    P.append(p)

plt.figure(figsize=(10, 5))
plt.plot(range(500, 5001, 500), P, '--o')
plt.xlabel("# Columns")
plt.ylabel(r"$(1+\epsilon)$")
plt.title(r"Mean $(1+\epsilon)$ values for (C/4)xC matrix with k = C/8 (5 trials)")
plt.savefig("plot_for_C.png")
package x
import (
"net/http"
"github.com/ory/herodot"
)
// PseudoPanic is returned when the code reaches a state that should be
// unreachable; users are asked to report it as a bug.
var PseudoPanic = herodot.DefaultError{
	StatusField: http.StatusText(http.StatusInternalServerError),
	ErrorField:  "Code Bug Detected",
	ReasonField: "The code ended up at a place where it should not have. Please report this as an issue at https://github.com/ory/kratos",
	// FIX: was http.StatusConflict (409), contradicting StatusField,
	// which is the 500 status text. A code bug is a server error.
	CodeField: http.StatusInternalServerError,
}
|
#!/usr/bin/env bash
# shellcheck disable=SC2154,SC2034
# these top lines are moved during build
# --- Modified Version ---
# Name : checksec.sh
# Version : 1.7.0
# Author : Brian Davis
# Date    : February 2014
# Download: https://github.com/slimm609/checksec.sh
#
# --- Modified Version ---
# Name : checksec.sh
# Version : based on 1.5
# Author : Robin David
# Date : October 2013
# Download: https://github.com/RobinDavid/checksec
#
# --- Original version ---
# Name : checksec.sh
# Version : 1.5
# Author : Tobias Klein
# Date : November 2011
# Download: http://www.trapkit.de/tools/checksec.html
# Changes : http://www.trapkit.de/tools/checksec_changes.txt
|
package com.ibm.socialcrm.notesintegration.ui.dialogs;
/****************************************************************
* IBM OpenSource
*
* (C) Copyright IBM Corp. 2012
*
* Licensed under the Apache License v2.0
* http://www.apache.org/licenses/LICENSE-2.0
*
***************************************************************/
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.layout.GridLayoutFactory;
import org.eclipse.swt.SWT;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Monitor;
import org.eclipse.swt.widgets.Shell;
import com.ibm.socialcrm.notesintegration.core.SugarContact;
import com.ibm.socialcrm.notesintegration.utils.SFAImageManager;
import com.ibm.socialcrm.notesintegration.utils.UtilsPlugin;
import com.ibm.socialcrm.notesintegration.utils.UtilsPluginNLSKeys;
/**
 * Modal confirmation dialog shown before overwriting an existing Sugar
 * contact. Displays an error icon plus a message built from the contact's
 * name, and relabels the OK button as "Update".
 */
public class OverwriteContactDialog extends Dialog {

	/** Contact whose data would be overwritten; used to build the message. */
	private SugarContact contact;

	public OverwriteContactDialog(Shell shell, SugarContact contact) {
		super(shell);
		this.contact = contact;
	}

	@Override
	protected void configureShell(Shell newShell) {
		super.configureShell(newShell);
		centerOnPrimaryMonitor(newShell);
		newShell.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.ADD_CONTACT_TITLE));
	}

	/** Positions the given shell in the middle of the primary monitor. */
	private void centerOnPrimaryMonitor(Shell shell) {
		Monitor primaryMonitor = Display.getDefault().getPrimaryMonitor();
		Rectangle screenBounds = primaryMonitor.getBounds();
		Rectangle shellBounds = shell.getBounds();
		int left = screenBounds.x + (screenBounds.width - shellBounds.width) / 2;
		int top = screenBounds.y + (screenBounds.height - shellBounds.height) / 2;
		shell.setLocation(left, top);
	}

	@Override
	protected Control createDialogArea(Composite parent) {
		Composite container = new Composite(parent, SWT.NONE);
		container.setLayout(GridLayoutFactory.fillDefaults().numColumns(2).margins(10, 10).spacing(15, 0).create());
		container.setLayoutData(GridDataFactory.fillDefaults().grab(true, true).create());

		// Left column: error icon.
		Label iconLabel = new Label(container, SWT.NONE);
		iconLabel.setImage(SFAImageManager.getImage(SFAImageManager.LARGE_ERROR));

		// Right column: wrapped overwrite-confirmation message.
		Label messageLabel = new Label(container, SWT.WRAP);
		messageLabel.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.OVERWRITE_CONTACT_MESSAGE, new String[] { contact.getName(), contact.getFirstName() }));
		messageLabel.setLayoutData(GridDataFactory.fillDefaults().grab(false, true).hint(400, SWT.DEFAULT).create());

		return container;
	}

	@Override
	protected Button createButton(Composite parent, int id, String label, boolean defaultButton) {
		Button button = super.createButton(parent, id, label, defaultButton);
		if (id == Dialog.OK) {
			// Relabel OK as "Update" so the destructive action is explicit.
			button.setText(UtilsPlugin.getDefault().getResourceString(UtilsPluginNLSKeys.UPDATE));
		}
		return button;
	}
}
|
<reponame>neuling/fso-livetest-chrome-extension<gh_stars>10-100
function(page,done)
{
var do_not_run_string = '%DO_NOT_RUN_DOMAINS%'; //some websites have request limits that block your IP if you do to many requests, these domains can be added here, comma seperated i.e. ecco-verde,vitalabo
let do_not_run = [];
var that = this;
const url = page.getURL('last');
let u = new URL(url);
const sdom = page.getStaticDom();
let msg_partial = "";
let type = 'info';
let canonical = false;
var self_reference = false;
let max_wait_time = 10000;
let onpage_hreflang = '';
let page_vs_canonical = 'this page';
let maybeinbody = false;
if(do_not_run_string!='%'+'DO_NOT_RUN_DOMAINS%'){
do_not_run = do_not_run_string.split(',');
do_not_run = do_not_run.map(s => s.trim());
if(do_not_run.some(s => u.hostname.includes(s)))
{
done(that.createResult("HEAD", "Link-Rel-Alternate-Hreflang Multipage check disabled on "+u.hostname, "warning"));return;
return;
}
}
let references_tested = 0;
let is_done = false;
//collect all rel=alternate
let hreflangs = sdom.querySelectorAll('head > link[rel=alternate][hreflang]');
let canonicals = sdom.querySelectorAll('head > link[rel=canonical]');
if(hreflangs.length<1)
{
//collect all rel=alternate
hreflangs = sdom.querySelectorAll('link[rel=alternate][hreflang]');
canonicals = sdom.querySelectorAll('link[rel=canonical]');
maybeinbody=true;
}
let upgradType = (new_type,type) =>
{
let Status_table = {
'info': 0,
'warning': 1,
'error': 2
}
if(Status_table[new_type]>Status_table[type])
{
return new_type;
}
return type;
}
let getHrefs = (nl) =>
{
if (!nl) { return []; }
let nla = Array.from(nl);
let hrefs = [];
for (var a of nl)
{
if(a.href)
{
hrefs.push(nl.href)
}
}
return hrefs;
}
if(!(hreflangs) || hreflangs.length === 0)
{
/*//check if the hreflangs are not in the head
if(sdom.querySelectorAll('link[rel=alternate][hreflang]').length!=0)
{
done(that.createResult('HEAD', "Link-Rel-Alternate-Hreflang markup in <body> not in <head>! (Maybe <noscript> in <head>?)"+that.partialCodeLink(canonicals, hreflangs), "error", 'static')); return;
}*/
done();return;
}
if(maybeinbody===true)
{
msg_partial = msg_partial + " Markup maybe in the <body> or there is a DOM parsing issue.<br>";
type = upgradType("warning",type);
}
//collect canonical
if(canonicals.length === 1){
canonical = canonicals[0].href;
page_vs_canonical = "<a href='"+canonical+"'>canonical URL</a>"
}
else { //warning of no canonical
msg_partial = msg_partial+"No valid canonical found. ";
if(url.indexOf('?')===-1)
{
msg_partial = msg_partial + "Rel=alternate invalid if page <a href='"+url+"?foo=bar'>referenced using parameters</a>. ";
}
else
{
msg_partial = msg_partial + "Rel=alternate invalid if page referenced using parameters. ";
}
type = upgradType("warning",type);
canonical = url;
}
//onpage check for self reference
let onpage_self_reference_relalts = sdom.querySelectorAll('head > link[rel="alternate"][hreflang][href="'+canonical+'"]');
if(onpage_self_reference_relalts.length > 0)
{
self_reference = true;
//we just choose the first self reference even though there could be multiple...
onpage_hreflang = onpage_self_reference_relalts[0].hreflang;
} else {
msg_partial = msg_partial+"<b>No onpage self reference found!</b> ";
type = upgradType("error",type);
}
//get all URLs
for (var relalt of hreflangs)
{
if(relalt.href && relalt.href === canonical)
{
//self_reference = true;
//no need to check, pointing to itself
//msg_partial = msg_partial + ' (self reference) running test nr'+references_tested;
references_tested++
}
else if(relalt.href)
{
let analyzer = (relalt, origin_url) => {
this.fetch(relalt.href, { responseFormat: 'text' }, (response) =>
{
//msg_partial = msg_partial + ' running test nr'+references_tested;
let is_200 = true;
//check reference URLs for redirects
if(response.redirected === true)
{
msg_partial = msg_partial+"<br>'<a href='"+relalt.href+"'>"+relalt.hreflang+"</a>' URL triggers redirect!"+that.partialCodeLink(relalt)+" ";
type = upgradType("warning",type);
}
if(response.status!=200)
{
msg_partial = msg_partial+"<br>'<a href='"+relalt.href+"'>"+relalt.hreflang+"</a>' returns HTTP "+response.status+"!"+that.partialCodeLink(relalt)+" ";
type = upgradType("error",type);
is_200 = false;
}
if(is_200)
{
let relalt_self_reference = false;
let relalt_back_reference = false;
if (response.body)
{
//now create new DOMs
let parser = new DOMParser();
let dom = parser.parseFromString(response.body, "text/html");
let selfies_selector = 'head > link[rel=alternate][hreflang="'+relalt.hreflang+'"][href="'+relalt.href+'"]';
let backreferences_selector = 'head > link[rel="alternate"][hreflang][href="'+canonical+'"]';
if(self_reference===true)
{
backreferences_selector = 'head > link[rel="alternate"][hreflang="'+onpage_hreflang+'"][href="'+canonical+'"]';
}
let selfies = dom.querySelectorAll(selfies_selector);
if(selfies.length>0){ relalt_self_reference = true; }
let backreferences = dom.querySelectorAll(backreferences_selector);
if(backreferences.length>0) { relalt_back_reference = true; }
}
if(!relalt_self_reference )
{
msg_partial = msg_partial+"<br>'<a href='"+relalt.href+"'>"+relalt.hreflang+"</a>' no self reference found!"+that.partialCodeLink(relalt)+"";
type = upgradType("error",type);
}
if(!relalt_back_reference )
{
msg_partial = msg_partial+"<br>'<a href='"+relalt.href+"'>"+relalt.hreflang+"</a>' no back reference to "+page_vs_canonical+" found!"+that.partialCodeLink(relalt)+"";
type = upgradType("error",type);
}
}
references_tested++
if(references_tested === hreflangs.length)
{
endgame();
}
});
}
analyzer(relalt, canonical);
}
}
var endgame = () =>
{
if(msg_partial!='')
{
done(that.createResult('HEAD', "Link-Rel-Alternate-Hreflang"+that.partialCodeLink(canonicals, hreflangs)+": "+msg_partial, type, 'static'));
is_done = true;
return;
}
done(that.createResult('HEAD', "Link-Rel-Alternate-Hreflang was checked succesfully and is correct!", 'info', 'static', 709));
is_done = true;
return;
}
setTimeout(function(){if(!is_done){endgame();}},max_wait_time);
} |
// @ts-nocheck
import { Modifier } from '../types';
/**
* @param modifiers
*/
/**
 * Collapses the modifier list so that each `name` appears once.
 * Later entries win on plain fields, while `options` and `data` are
 * shallow-merged so earlier settings survive unless overridden.
 *
 * @param modifiers list of (possibly duplicate-named) partial modifiers
 * @returns deduplicated list, one entry per modifier name, in first-seen order
 */
export default function mergeByName(modifiers: Partial<Modifier<any, any>>[]): Partial<Modifier<any, any>>[] {
  const byName = {};
  for (const modifier of modifiers) {
    const existing = byName[modifier.name];
    byName[modifier.name] = existing
      ? {
          ...existing,
          ...modifier,
          options: { ...existing.options, ...modifier.options },
          data: { ...existing.data, ...modifier.data }
        }
      : modifier;
  }
  // IE11 does not support Object.values
  return Object.keys(byName).map(name => byName[name]);
}
|
import random
def check_palindrome(num):
    """Return True if ``num`` reads the same forwards and backwards.

    Args:
        num: Value whose ``str()`` form is checked (typically an int).

    Returns:
        bool: True when the string form equals its reverse.
    """
    num_str = str(num)
    # Return the comparison directly instead of an if/else that returns
    # the literals True/False.
    return num_str == num_str[::-1]
# Generate a random 6-digit number (inclusive bounds: 100000..999999)
num = random.randint(100000, 999999)

# Check if it is a palindrome and report the result
if check_palindrome(num):
    print(f"The number {num} is a palindrome")
else:
    print(f"The number {num} is not a palindrome")
<reponame>Ren1336621051/2020FSD_Milestone5_backend
package com.carson.cloud.business.controller;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.DeleteMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.PutMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import com.carson.cloud.business.entity.UserEntity;
import com.carson.cloud.business.service.UserService;
@RestController
@RequestMapping("/user")
/**
 * REST controller exposing user-account endpoints under {@code /user}:
 * registration and password update. All persistence work is delegated
 * to {@link UserService}.
 */
@RestController
@RequestMapping("/user")
public class UserController {

	@Autowired
	private UserService userService;

	/**
	 *
	 * @return
	 */
//	@GetMapping("/findUserById/{id}")
//	public UserEntity findUserById(@PathVariable Integer id){
//
//		return userService.findUserById(id);
//	}

	// password encode
	/**
	 * Registers a new user.
	 *
	 * NOTE(review): the bound entity (including its password field) is
	 * echoed back in the 201 response body — confirm this is intended.
	 *
	 * @param user user data bound from the request
	 * @return HTTP 201 (Created) with the registered user
	 */
	@PostMapping("/regist")
	public ResponseEntity<UserEntity> regist(UserEntity user){
		userService.registUser(user);
		return ResponseEntity.status(HttpStatus.CREATED).body(user);
	}

	/**
	 * Updates the password of the user identified by {@code username}.
	 *
	 * @param password the new password
	 * @param username login name of the user to update
	 * @return HTTP 200 with the updated user entity
	 */
	@PutMapping("/updatePassword")
	public ResponseEntity<UserEntity> updatePassword(String password,String username){
		UserEntity userEntity = userService.updatePassword(password,username);
		return ResponseEntity.ok(userEntity);
	}
}
|
#include <iostream>
#include <string>
// Define the ComponentInfo data structure
// Plain value type describing one installed component.
struct ComponentInfo {
    std::string component_name;  // component identifier
    std::string version;         // version string, e.g. "1.0"
    std::string description;     // human-readable summary
};
// Define the findLocalComponent function
// Repository that resolves component metadata installed on this system.
class ComponentInfoRepository {
public:
    // Looks up component_name locally. On success fills `ci` and returns
    // true; otherwise leaves `ci` untouched and returns false.
    // (Example implementation: only "example_component" is known.)
    bool findLocalComponent(const std::string& component_name, ComponentInfo& ci) {
        if (component_name != "example_component") {
            return false;
        }
        ci.component_name = "example_component";
        ci.version = "1.0";
        ci.description = "This is an example component";
        return true;
    }
};
// Implement the findComponentInfo function
// Resolves component metadata through the given repository. When the
// component is unknown, an all-empty ComponentInfo is returned as the
// "not found" sentinel (callers test component_name.empty()).
ComponentInfo findComponentInfo(const std::string& component_name, ComponentInfoRepository* cir_) {
    ComponentInfo ci;
    if (!cir_->findLocalComponent(component_name, ci)) {
        ci = ComponentInfo{"", "", ""};
    }
    return ci;
}
// Demo entry point: look up "example_component" and print its metadata,
// or a not-found message when the lookup fails.
int main() {
    ComponentInfoRepository cir;
    ComponentInfo found_component = findComponentInfo("example_component", &cir);
    // An empty component_name is the "not found" sentinel returned by
    // findComponentInfo.
    if (found_component.component_name.empty()) {
        std::cout << "Component not found" << std::endl;
    } else {
        std::cout << "Component Name: " << found_component.component_name << std::endl;
        std::cout << "Version: " << found_component.version << std::endl;
        std::cout << "Description: " << found_component.description << std::endl;
    }
    return 0;
}
#!/bin/sh
# --quiet (-q)
#   Suppress extra output; report only error information.
# --verbose (-v)
#   Make valgrind print detailed information about its own operation.
# --log-file
#   Write the report to the named file. The name may contain templates
#   that expand to runtime values, e.g. %p for the process ID.
# --log-socket
#   Send the report to the given network address and port.
# --log-fd
#   Write the report to the given file descriptor (default 2, stderr).
# --track-fds
#   (yes|no, default no) List file descriptors still open at exit.
# --trace-children
#   (yes|no, default no) Also trace processes the analyzed program
#   starts via the exec system call.
# --time-stamp
#   (yes|no, default no) Prefix report lines with elapsed time,
#   measured from program start.
# --leak-check
#   Enable (yes|summary|full) or disable (no) memory-leak detection.
#   With "summary" memcheck prints only totals; the other modes also
#   report the allocation site of each leak.
# --leak-resolution
#   (low|med|high) How call stacks are compared when grouping leak
#   reports: two frames, four frames, or the full stack. Affects only
#   how results are presented.
# --undef-value-errors
#   (yes|no) Whether uses of uninitialised values are reported.
# Run the target binary under valgrind's memcheck:
#   --track-origins=yes  : report where uninitialised values came from
#   --leak-check=full    : report every leak with its allocation site
#   --show-reachable=yes : also list blocks still reachable at exit
#   --leak-resolution=med: match leak stacks on up to 4 call frames
valgrind\
    --track-origins=yes\
    --leak-check=full\
    --show-reachable=yes\
    --leak-resolution=med\
    ./coffee-break
|
<gh_stars>0
import React, { Component } from 'react';
import reactAutobind from 'react-autobind';
import { Card, Col, Row, CardBody, CardHeader, Button, Modal, ModalHeader, ModalBody, ModalFooter, Table } from "reactstrap"
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'
import { faMedal, faStar, faTrophy } from '@fortawesome/free-solid-svg-icons'
import { connect } from "react-redux"
import InfoIcon from '@material-ui/icons/Info';
import Ice from "../assets/ice.png"
import MenuIcon from '@material-ui/icons/Menu';
import FastfoodIcon from '@material-ui/icons/Fastfood';
import VideogameAssetIcon from '@material-ui/icons/VideogameAsset';
import VideocamIcon from '@material-ui/icons/Videocam';
import SportsEsportsIcon from '@material-ui/icons/SportsEsports';
import LocalPlayIcon from '@material-ui/icons/LocalPlay';
function prizef(balance) {
if (parseInt(balance) >= 500) {
return ({
"next": "soon",
"pointer": ["gold", <faTrophy />],
"coupons": [
{
"name": "Uber EATS",
"desc": "$10 off on your next UBER EATS purchase",
"code": "001",
"icon": <MenuIcon />,
"logoprop": "icon"
},
{
"name": "Subway",
"desc": "50% off if you purchase at Subway",
"code": "002",
"icon": <FastfoodIcon />,
"logoprop": "icon"
}
]
})
}
else if (parseInt(balance) >= 100) {
return ({
"next": 500,
"pointer": ["silver", <faMedal />],
"coupons": [
{
"name": "Riot Points",
"desc": "10% off on your next purchase",
"code": "001",
"icon": <VideogameAssetIcon />,
"logoprop": "icon"
},
{
"name": "Steam",
"desc": "20% off on your next purchase",
"code": "002",
"icon": <VideocamIcon />,
"logoprop": "icon"
}
]
})
}
else {
return ({
"next": 100,
"pointer": ["#FF5733", <faStar />],
"coupons": [
{
"name": "GameStop",
"desc": "$5 off any purchase",
"code": "001",
"icon": <SportsEsportsIcon />,
"logoprop": "icon"
},
{
"name": "<NAME>",
"desc": "Save 10$ when you spend 50 or more ",
"code": "002",
"icon": <LocalPlayIcon />,
"logoprop": "icon"
}
]
})
}
}
class Coupons extends Component {
constructor(props) {
super(props);
this.state = {
coupons: [],
couponsState: [],
medal: <FontAwesomeIcon icon={faMedal} />,
level: [],
next: "",
memory: "",
infoModal: false
}
reactAutobind(this)
}
sync() {
let prize = prizef(this.props.account_reducer.result.data.accounts[0].balance)
let temp = prize["coupons"]
for (let i = 0; i < temp.length; i++) {
temp["stat"] = false
}
this.setState({
coupons: temp,
level: prize["pointer"],
next: prize["next"]
})
}
componentDidMount() {
this.sync()
console.log(this.props.account_reducer)
}
componentWillUnmount() {
}
componentDidUpdate(preProps, preState) {
if (!this.props.account_reducer.loading) {
if (JSON.stringify(this.props.account_reducer) !== JSON.stringify(this.state.memory))
this.setState({
memory: this.props.account_reducer
}, this.sync())
}
}
activate(event, code) {
let temp = this.state.coupons
for (let i = 0; i < temp.length; i++) {
if (temp[i]["code"] === event) {
temp[i]["stat"] = true
break
}
}
this.setState({
coupons: temp
})
window.open(code)
}
render() {
return (
<div>
{
this.props.account_reducer.result.data.status === "ACT" ?
<>
<div style={{ WebkitTextStroke: "0.7px black", fontSize: "2rem", color: this.state.level[0] }} >
Rank {this.state.medal}
</div>
<>
(next rank
{
this.state.next === "soon" ?
<>
{" soon"}
</>
:
<>
{" "}{this.state.next - this.props.account_reducer.result.data.accounts[0].balance
}{this.props.account_reducer.result.data.accounts[0].currency}{" more"}
</>
}
)
{
<InfoIcon onClick={() => {
this.setState({
infoModal: true
})
}} />
}
{
<Modal isOpen={this.state.infoModal} backdrop={"static"} >
<ModalHeader >Ranking Information</ModalHeader>
<ModalBody>
<Table style={{ fontSize: "0.8rem" }}>
<thead>
<tr>
<th>#</th>
<th>Bronze</th>
<th>Silver</th>
<th>Gold</th>
</tr>
</thead>
<tbody>
<tr>
<th scope="row">Rewards membership
</th>
<td>YES</td>
<td>YES</td>
<td>YES</td>
</tr>
<tr>
<th scope="row">online gaming discounts</th>
<td>NO</td>
<td>YES</td>
<td>YES</td>
</tr>
<tr>
<th scope="row">Money
</th>
<td>0 - 100</td>
<td>100 - 500</td>
<td>+500</td>
</tr>
</tbody>
</Table>
</ModalBody>
<ModalFooter>
<Button style={{ borderRadius: "25px", background: "#2461fb", borderColor: "#2461fb" }} onClick={() => {
this.setState({
infoModal: false
})
}}>Close</Button>
</ModalFooter>
</Modal>
}
</>
<div style={{ paddingBottom: "20px" }} />
{
this.props.account_reducer.result.data.metadata.gifts !== undefined &&
<>
{
this.props.account_reducer.result.data.metadata.gifts.map((element, index) => (
<div key={index}>
<Card style={{ borderRadius: "25px" }} >
<CardHeader>
{element.caption}
</CardHeader>
<CardBody>
<div className="image-container">
<img style={{ borderRadius: "10px" }} alt="icon" width="100%" src={element.logo} />
<div className="image-bottom-left" style={{ fontSize: "1.5rem", WebkitTextStroke: "0.5px black" }}>
{element.amount + " " + element.currency}
</div>
</div>
</CardBody>
<Button disabled={element.stat} style={{ borderRadius: "25px", background: "#2461fb", borderColor: "#2461fb" }} onClick={() => this.activate(element.code, element.reference)}>
{
!element.stat ?
"Redeem" :
"Redeemed"
}
</Button>
</Card>
<br />
</div>
))
}
</>
}
</>
:
<>
<img alt="ice" src={Ice} width="90%"></img>
<br />
<br />
<div>
<h1>Account frozen</h1>
</div>
</>
}
</div>
);
}
}
// Select the three reducer slices this screen consumes from the store.
const mapStateToProps = (state) => ({
    account_reducer: state.account_reducer,
    sol_reducer: state.sol_reducer,
    login_reducer: state.login_reducer,
});
export default connect(mapStateToProps, null)(Coupons); |
#! @shell@
# nixos-install — overview of the work this script performs:
# - make Nix store etc.
# - copy closure of Nix to target device
# - register validity
# - with a chroot to the target device:
#   * nix-env -p /nix/var/nix/profiles/system -i <nix-expr for the configuration>
#   * install the boot loader

# Ensure a consistent umask.
umask 0022

# Re-exec ourselves in a private mount namespace so that our bind
# mounts get cleaned up automatically.
if [ "$(id -u)" = 0 ]; then
    if [ -z "$NIXOS_INSTALL_REEXEC" ]; then
        # First pass (as root): re-run under unshare(1) with private
        # mount/UTS namespaces; the child sees NIXOS_INSTALL_REEXEC set.
        export NIXOS_INSTALL_REEXEC=1
        exec unshare --mount --uts -- "$0" "$@"
    else
        # Second pass (inside the namespace): keep our mounts from
        # propagating back to the host.
        mount --make-rprivate /
    fi
fi
# Parse the command line for the -I flag
extraBuildFlags=()
chrootCommand=(/run/current-system/sw/bin/bash)
buildUsersGroup="nixbld"

while [ "$#" -gt 0 ]; do
    i="$1"; shift 1
    case "$i" in
        --max-jobs|-j|--cores|-I)
            # Options taking one value: forwarded verbatim to nix-build.
            j="$1"; shift 1
            extraBuildFlags+=("$i" "$j")
            ;;
        --option)
            # --option takes two values: a settings name and its value.
            j="$1"; shift 1
            k="$1"; shift 1
            extraBuildFlags+=("$i" "$j" "$k")
            ;;
        --root)
            mountPoint="$1"; shift 1
            ;;
        --closure)
            # Install a pre-built system closure; no build users needed then.
            closure="$1"; shift 1
            buildUsersGroup=""
            ;;
        --no-channel-copy)
            noChannelCopy=1
            ;;
        --no-root-passwd)
            noRootPasswd=1
            ;;
        --no-bootloader)
            noBootLoader=1
            ;;
        --show-trace)
            extraBuildFlags+=("$i")
            ;;
        --chroot)
            # Everything remaining on the command line is the command to run
            # inside the chroot (defaults to a bash shell).
            runChroot=1
            if [[ "$@" != "" ]]; then
                chrootCommand=("$@")
            fi
            break
            ;;
        --help)
            exec man nixos-install
            exit 1
            ;;
        *)
            echo "$0: unknown option \`$i'"
            exit 1
            ;;
    esac
done
set -e
shopt -s nullglob

# Default the installation target to /mnt.
if test -z "$mountPoint"; then
    mountPoint=/mnt
fi

if ! test -e "$mountPoint"; then
    echo "mount point $mountPoint doesn't exist"
    exit 1
fi

# Get the path of the NixOS configuration file.
if test -z "$NIXOS_CONFIG"; then
    NIXOS_CONFIG=/etc/nixos/configuration.nix
fi

# A configuration file is only required when we must build the system
# ourselves, i.e. when no pre-built --closure was supplied.
if [ ! -e "$mountPoint/$NIXOS_CONFIG" ] && [ -z "$closure" ]; then
    echo "configuration file $mountPoint/$NIXOS_CONFIG doesn't exist"
    exit 1
fi

# Builds will use users that are members of this group
extraBuildFlags+=(--option "build-users-group" "$buildUsersGroup")

# Inherit binary caches from the host
# TODO: will this still work with Nix 1.12 now that it has no perl? Probably not...
binary_caches="$(@perl@/bin/perl -I @nix@/lib/perl5/site_perl/*/* -e 'use Nix::Config; Nix::Config::readConfig; print $Nix::Config::config{"binary-caches"};')"
extraBuildFlags+=(--option "binary-caches" "$binary_caches")

# We only need nixpkgs in the path if we don't already have a system closure to install
if [[ -z "$closure" ]]; then
    nixpkgs="$(readlink -f "$(nix-instantiate --find-file nixpkgs)")"
    export NIX_PATH="nixpkgs=$nixpkgs:nixos-config=$mountPoint/$NIXOS_CONFIG"
fi
unset NIXOS_CONFIG
# TODO: do I need to set NIX_SUBSTITUTERS here or is the --option binary-caches above enough?

# A place to drop temporary closures.
# Create the directory BEFORE installing the trap, and single-quote the trap
# command so $tmpdir is expanded when the trap FIRES rather than when it is
# set. (The old order/quoting expanded an empty $tmpdir into the trap string,
# so the temporary directory was never removed.)
tmpdir="$(mktemp -d)"
trap 'rm -rf "$tmpdir"' EXIT
# Build a closure (on the host; we then copy it into the guest)
function closure() {
    nix-build "${extraBuildFlags[@]}" --no-out-link -E "with import <nixpkgs> {}; runCommand \"closure\" { exportReferencesGraph = [ \"x\" (buildEnv { name = \"env\"; paths = [ ($1) stdenv ]; }) ]; } \"cp x \$out\""
}

system_closure="$tmpdir/system.closure"

# Use a FIFO for piping nix-store --export into nix-store --import, saving disk
# I/O and space. nix-store --import is run by nixos-prepare-root.
mkfifo $system_closure

if [ -z "$closure" ]; then
    expr="(import <nixpkgs/nixos> {}).system"
    system_root="$(nix-build -E "$expr")"
    # NOTE(review): this reassignment replaces the FIFO path above with the
    # store path returned by closure(); the FIFO is then only used in the
    # else branch — confirm that leaving the unused FIFO behind is intended.
    system_closure="$(closure "$expr")"
else
    system_root=$closure
    # Create a temporary file ending in .closure (so nixos-prepare-root knows to --import it) to transport the store closure
    # to the filesytem we're preparing. Also delete it on exit!
    # Run in background to avoid blocking while trying to write to the FIFO
    # $system_closure refers to
    nix-store --export $(nix-store -qR $closure) > $system_closure &
fi
# Export the host's nixos channel (if any) so it can be imported into the
# target; empty when no channel profile exists.
channel_root="$(nix-env -p /nix/var/nix/profiles/per-user/root/channels -q nixos --no-name --out-path 2>/dev/null || echo -n "")"
channel_closure="$tmpdir/channel.closure"
nix-store --export $channel_root > $channel_closure

# Populate the target root directory with the basics
@prepare_root@/bin/nixos-prepare-root "$mountPoint" "$channel_root" "$system_root" @nixClosure@ "$system_closure" "$channel_closure"

# nixos-prepare-root doesn't currently do anything with file ownership, so we set it up here instead
chown @root_uid@:@nixbld_gid@ $mountPoint/nix/store

# Bind-mount the host's /dev, /proc and /sys so that the chrooted
# activation/bootloader steps below can function.
mount --rbind /dev $mountPoint/dev
mount --rbind /proc $mountPoint/proc
mount --rbind /sys $mountPoint/sys

# Grub needs an mtab.
ln -sfn /proc/mounts $mountPoint/etc/mtab

# Switch to the new system configuration. This will install Grub with
# a menu default pointing at the kernel/initrd/etc of the new
# configuration.
echo "finalising the installation..."
if [ -z "$noBootLoader" ]; then
    NIXOS_INSTALL_BOOTLOADER=1 chroot $mountPoint \
        /nix/var/nix/profiles/system/bin/switch-to-configuration boot
fi

# Run the activation script.
chroot $mountPoint /nix/var/nix/profiles/system/activate

# Ask the user to set a root password (only when interactive and the
# passwd wrapper exists in the target).
if [ -z "$noRootPasswd" ] && chroot $mountPoint [ -x /run/wrappers/bin/passwd ] && [ -t 0 ]; then
    echo "setting root password..."
    chroot $mountPoint /run/wrappers/bin/passwd
fi

echo "installation finished!"
|
package com.leetcode;
import org.testng.annotations.Test;
public class Solution_125Test {
@Test
public void testIsPalindrome() {
Solution_125 solution_125 = new Solution_125();
solution_125.isPalindrome("A man, a plan, a canal: Panama");
}
} |
package com.java.study.offset;
import com.java.study.util.KafkaUtil;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import java.time.Duration;
import java.util.Arrays;
import java.util.List;
/**
 * Consumes partition 0 of {@code KafkaUtil.topic} until a poll returns no
 * records, committing synchronously after each batch, then prints the last
 * consumed offset, the committed offset, and the next fetch position.
 *
 * Fixes: the consumer was never closed (leaked sockets/heartbeat thread);
 * typos in the log messages ("comsumed"/"commited") and in the
 * {@code posititon} variable name.
 */
public class OffSetConsumer implements Runnable {
    @Override
    public void run() {
        org.apache.kafka.clients.consumer.KafkaConsumer<String,String> consumer = KafkaUtil.consumerNoTopic();
        try {
            TopicPartition tp = new TopicPartition(KafkaUtil.topic, 0);
            // Manual assignment (no consumer-group rebalancing).
            consumer.assign(Arrays.asList(tp));
            long lastConsumedOffset = -1;
            while (true) {
                ConsumerRecords<String,String> records = consumer.poll(Duration.ofMillis(1000));
                if (records.isEmpty()) {
                    // No more data within the poll timeout: stop consuming.
                    break;
                }
                List<ConsumerRecord<String,String>> partitionRecords = records.records(tp);
                lastConsumedOffset = partitionRecords.get(partitionRecords.size() - 1).offset();
                // Commit the offsets of the batch we just processed.
                consumer.commitSync();
            }
            System.out.println("consumed offset is " + lastConsumedOffset);
            OffsetAndMetadata offsetAndMetadata = consumer.committed(tp);
            System.out.println("committed offset is " + offsetAndMetadata.offset());
            // Committed offset is "last + 1", which is also the next position.
            long position = consumer.position(tp);
            System.out.println("the offset of the next record is " + position);
        } finally {
            // Always release network resources, even if poll/commit throws.
            consumer.close();
        }
    }
}
|
<filename>test/e2e/ingress_default.go
package e2e
import (
"net/http"
"time"
api "github.com/appscode/voyager/apis/voyager/v1beta1"
"github.com/appscode/voyager/test/framework"
"github.com/appscode/voyager/test/test-server/client"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
extensions "k8s.io/api/extensions/v1beta1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/intstr"
)
// End-to-end CRUD tests for Voyager driving a plain extensions/v1beta1
// Ingress (annotated "kubernetes.io/ingress.class: voyager") and verifying
// the generated Engress behaves equivalently.
var _ = Describe("IngressCoreOperations", func() {
	var (
		f   *framework.Invocation
		ing *api.Ingress        // Voyager view of the same object
		ext *extensions.Ingress // the raw Kubernetes Ingress we create
	)
	// Build a fresh Ingress routing /testpath -> test server :80.
	BeforeEach(func() {
		f = root.Invoke()
		ext = &extensions.Ingress{
			ObjectMeta: metav1.ObjectMeta{
				Name:      f.Ingress.UniqueName(),
				Namespace: f.Namespace(),
				Annotations: map[string]string{
					"kubernetes.io/ingress.class": "voyager",
				},
			},
			Spec: extensions.IngressSpec{
				Rules: []extensions.IngressRule{
					{
						IngressRuleValue: extensions.IngressRuleValue{
							HTTP: &extensions.HTTPIngressRuleValue{
								Paths: []extensions.HTTPIngressPath{
									{
										Path: "/testpath",
										Backend: extensions.IngressBackend{
											ServiceName: f.Ingress.TestServerName(),
											ServicePort: intstr.FromInt(80),
										},
									},
								},
							},
						},
					},
				},
			},
		}
		// This ensures compatibility with extensions.Ingress
		var err error
		ing, err = api.NewEngressFromIngress(ext)
		Expect(err).NotTo(HaveOccurred())
	})
	// Create the Ingress in the cluster and wait until Voyager materialises it.
	JustBeforeEach(func() {
		_, err := f.KubeClient.ExtensionsV1beta1().Ingresses(ext.Namespace).Create(ext)
		Expect(err).NotTo(HaveOccurred())
		f.Ingress.EventuallyStarted(ing).Should(BeTrue())
		By("Checking generated resource")
		Expect(f.Ingress.IsExistsEventually(ing)).Should(BeTrue())
	})
	// Best-effort cleanup; deletion errors are intentionally ignored.
	AfterEach(func() {
		if options.Cleanup {
			f.KubeClient.ExtensionsV1beta1().Ingresses(ext.Namespace).Delete(ext.Name, &metav1.DeleteOptions{})
		}
	})
	Describe("Create", func() {
		It("Should response HTTP", func() {
			By("Getting HTTP endpoints")
			eps, err := f.Ingress.GetHTTPEndpoints(ing)
			Expect(err).NotTo(HaveOccurred())
			Expect(len(eps)).Should(BeNumerically(">=", 1))
			// The test server echoes method/path; verify routing works.
			err = f.Ingress.DoHTTP(framework.MaxRetry, "", ing, eps, "GET", "/testpath/ok", func(r *client.Response) bool {
				return Expect(r.Status).Should(Equal(http.StatusOK)) &&
					Expect(r.Method).Should(Equal("GET")) &&
					Expect(r.Path).Should(Equal("/testpath/ok"))
			})
			Expect(err).NotTo(HaveOccurred())
		})
	})
	Describe("Delete", func() {
		It("Should delete Ingress resource", func() {
			By("Deleting Ingress resource")
			err := f.KubeClient.ExtensionsV1beta1().Ingresses(ext.Namespace).Delete(ext.Name, &metav1.DeleteOptions{})
			Expect(err).NotTo(HaveOccurred())
			// Generated resources should disappear within 5 minutes.
			Eventually(func() bool {
				return f.Ingress.IsExists(ing) == nil
			}, "5m", "10s").Should(BeFalse())
		})
	})
	Describe("Update", func() {
		It("Should update Loadbalancer", func() {
			By("Updating Ingress resource")
			uing, err := f.KubeClient.ExtensionsV1beta1().Ingresses(ext.Namespace).Get(ext.Name, metav1.GetOptions{})
			Expect(err).NotTo(HaveOccurred())
			uing.Spec.Rules[0].HTTP.Paths[0].Path = "/newTestPath"
			_, err = f.KubeClient.ExtensionsV1beta1().Ingresses(ext.Namespace).Update(uing)
			Expect(err).NotTo(HaveOccurred())
			By("Waiting some time for update to be applied")
			time.Sleep(time.Second * 10)
			By("Getting HTTP endpoints")
			eps, err := f.Ingress.GetHTTPEndpoints(ing)
			Expect(err).NotTo(HaveOccurred())
			Expect(len(eps)).Should(BeNumerically(">=", 1))
			By("Calling new HTTP path")
			err = f.Ingress.DoHTTP(framework.MaxRetry, "", ing, eps, "GET", "/newTestPath/ok", func(r *client.Response) bool {
				return Expect(r.Status).Should(Equal(http.StatusOK)) &&
					Expect(r.Method).Should(Equal("GET")) &&
					Expect(r.Path).Should(Equal("/newTestPath/ok"))
			})
			Expect(err).NotTo(HaveOccurred())
			By("Checking old path")
			// The old route must now fail (no retry: a single attempt).
			err = f.Ingress.DoHTTP(framework.NoRetry, "", ing, eps, "GET", "/testpath/ok", func(r *client.Response) bool {
				return true
			})
			Expect(err).To(HaveOccurred())
		})
	})
})
|
#!/usr/bin/env bash
# 8-device Ascend NPU training launcher: EfficientNet-B0 on ImageNet.
source npu_set_env.sh

# Lower NPU driver log verbosity to errors only on devices 0 and 4
# (presumably one per device group — confirm for your hardware topology).
/usr/local/Ascend/driver/tools/msnpureport -d 0 -g error
/usr/local/Ascend/driver/tools/msnpureport -d 4 -g error

# Create a timestamped log directory next to this script and run from it.
currentDir=$(cd "$(dirname "$0")";pwd)
currtime=`date +%Y%m%d%H%M%S`
train_log_dir=${currentDir}/result/training_8p_job_${currtime}
mkdir -p ${train_log_dir}
cd ${train_log_dir}
echo "train log path is ${train_log_dir}"

# Launch distributed training (HCCL backend, all 8 devices) in the
# background; stdout+stderr go to train_8p.log.
python3.7 ${currentDir}/examples/imagenet/main.py \
    --data=/data/imagenet \
    --arch=efficientnet-b0 \
    --batch-size=4096 \
    --lr=1.6 \
    --momentum=0.9 \
    --epochs=100 \
    --autoaug \
    --amp \
    --pm=O1 \
    --loss_scale=32 \
    --val_feq=10 \
    --addr=$(hostname -I |awk '{print $1}') \
    --dist-backend=hccl \
    --multiprocessing-distributed \
    --world-size 1 \
    --rank 0 \
    --device_list '0,1,2,3,4,5,6,7' > ${train_log_dir}/train_8p.log 2>&1 &
# Output file for the combined figure.
FIGURE=figure_complex_analysis.png
# Tile the six labelled panels (A-F) into a 2x3 montage, each scaled to
# 400px height, using ImageMagick's montage.
montage -pointsize 24 -geometry 'x400' -tile 2x3 -font Liberation-Sans-Bold -label 'A' rmsd_histo_c_apdtrpap.png -label 'B' rmsd_histo_c_apdtrpap_Tn.png -label 'C' endtoend_c_apdtrpap.png -label 'D' endtoend_c_apdtrpap_Tn.png -label 'E' ramachandran_c_apdtrpap.png -label 'F' ramachandran_c_apdtrpap_Tn.png $FIGURE
|
# This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rake db:seed (or created alongside the db with db:setup).
#
# Examples:
#
# cities = City.create([{ name: 'Chicago' }, { name: 'Copenhagen' }])
# Mayor.create(name: 'Emanuel', city: cities.first)
# NOTE(review): the "<PASSWORD>" strings look like redacted seed data —
# replace with real values (or ENV lookups) before running this seed file.
wd = ["<PASSWORD>"]
ids = [*1..33]
# (translated, original comment garbled) the password appears to be a
# hashed value — verify before relying on it.
ids.each do |i|
  # Users are named "1".."33"; every user shares the same word list.
  User.create(name:"#{i}",
              password:"<PASSWORD>",
              word:wd.join(" ")
  )
end
wd.each do |w|
  # One Point per word; user_id stores ALL ids space-joined — presumably
  # parsed by the consumer; confirm against the reading code.
  Point.create(word:w,user_id:ids.join(" "))
end
|
<gh_stars>1-10
// Re-export every helper from ./utils under the `G` namespace.
export * as G from './utils'
|
package com.serchinastico.mechrunner.schedule.service;
import android.content.Context;
import android.media.MediaPlayer;
import com.serchinastico.mechrunner.R;
import com.serchinastico.mechrunner.schedule.domain.model.Schedule;
import com.serchinastico.mechrunner.schedule.domain.model.Step;
import java.util.Iterator;
import java.util.Timer;
import java.util.TimerTask;
/**
 * Plays a beep at the end of each Step of a Schedule, walking the steps
 * one after another on a background {@link Timer}.
 *
 * Fixes: calling start() while already running leaked the previous Timer
 * and MediaPlayer (now start() stops first); scheduleNextStep() could NPE
 * if a pending TimerTask fired after stop() nulled the timer (now guarded);
 * the delay multiplication uses 1000L to avoid int overflow for very long
 * steps.
 */
public class ScheduleTimer {
    private final Context context;
    private Iterator<Step> stepsIterator;
    private Timer timer;
    private MediaPlayer player;

    public ScheduleTimer(Context context) {
        this.context = context;
    }

    /** Begins (or restarts) playback of the given schedule's steps. */
    public synchronized void start(Schedule schedule) {
        // Release any previous run so repeated start() calls don't leak.
        stop();
        stepsIterator = schedule.iterator();
        timer = new Timer();
        scheduleNextStep();
    }

    /** Cancels any pending beeps and releases the media player. */
    public synchronized void stop() {
        releasePlayer();
        if (timer != null) {
            timer.cancel();
            timer.purge();
            timer = null;
        }
    }

    /** Schedules a beep after the next step's duration, if any steps remain. */
    private synchronized void scheduleNextStep() {
        if (timer == null) {
            // stop() was called; a late-firing task must not reschedule.
            return;
        }
        if (stepsIterator.hasNext()) {
            Step step = stepsIterator.next();
            timer.schedule(createTimerTask(), step.getDurationSeconds() * 1000L);
        }
    }

    private TimerTask createTimerTask() {
        return new TimerTask() {
            @Override
            public void run() {
                // Beep, then chain the following step.
                releasePlayer();
                player = MediaPlayer.create(context, R.raw.beep);
                player.start();
                scheduleNextStep();
            }
        };
    }

    private void releasePlayer() {
        if (player != null) {
            player.stop();
            player.release();
            player = null;
        }
    }
}
|
#pragma once
#include <cpp11/external_pointer.hpp>
#include <cpp11/strings.hpp> // required to avoid link error only
#include <cpp11/list.hpp>
#include <cpp11/integers.hpp>
#include <cpp11/doubles.hpp>
#include <cpp11/protect.hpp>
#include <dust/random/random.hpp>
#include <dust/r/random.hpp>
#include <dust/utils.hpp>
#include <mode/mode.hpp>
#include <mode/r/helpers.hpp>
// R-facing wrappers around a mode::container<T>. Each function unwraps the
// cpp11::external_pointer created by mode_alloc and forwards to the
// container, converting between R objects and C++ types.
namespace mode {
namespace r {

// Allocate a container<T> from R-side parameters; returns list(ptr, info).
// The external_pointer owns the container (deleted when the R object is GC'd).
template <typename T>
cpp11::list mode_alloc(cpp11::list r_pars, double time, size_t n_particles,
                       size_t n_threads,
                       cpp11::sexp control, cpp11::sexp r_seed) {
  auto pars = mode::mode_pars<T>(r_pars);
  auto seed = dust::random::r::as_rng_seed<typename T::rng_state_type>(r_seed);
  auto ctl = mode::r::validate_control(control);
  cpp11::sexp info = mode_info(pars);
  mode::r::validate_positive(n_threads, "n_threads");
  container<T> *d = new mode::container<T>(pars, time, n_particles,
                                           n_threads, ctl, seed);
  cpp11::external_pointer<container<T>> ptr(d, true, false);
  return cpp11::writable::list({ptr, info});
}

// Change the OpenMP-style thread count used by the container.
template <typename T>
void mode_set_n_threads(SEXP ptr, size_t n_threads) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  mode::r::validate_positive(n_threads, "n_threads");
  obj->set_n_threads(n_threads);
}

// Return the solver control settings as an R object.
template <typename T>
cpp11::sexp mode_control(SEXP ptr) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  auto ctl = obj->ctl();
  return mode::r::control(ctl);
}

// Current model time.
template <typename T>
double mode_time(SEXP ptr) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  return obj->time();
}

// Set (or reset, when NULL) the subset of state returned by mode_run.
template <typename T>
void mode_set_index(SEXP ptr, cpp11::sexp r_index) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  if (r_index == R_NilValue) {
    obj->initialise_index();
  } else {
    const size_t index_max = obj->n_state_full();
    const std::vector <size_t> index =
        mode::r::r_index_to_index(r_index, index_max);
    obj->set_index(index);
  }
}

// Set the times at which stochastic updates run; must be strictly
// increasing (validated here; NULL clears the schedule).
template <typename T>
void mode_set_stochastic_schedule(SEXP ptr, cpp11::sexp r_time) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  std::vector<double> time;
  if (r_time != R_NilValue) {
    time = cpp11::as_cpp<std::vector<double>>(cpp11::as_doubles(r_time));
    for (size_t i = 1; i < time.size(); ++i) {
      if (time[i] <= time[i - 1]) {
        // Report 1-based positions to match R conventions.
        cpp11::stop("schedule must be strictly increasing; see time[%d]",
                    i + 1);
      }
    }
  }
  obj->set_stochastic_schedule(time);
}

// Advance all particles to end_time and return the indexed state as a
// (state x particle) matrix.
template <typename T>
cpp11::sexp mode_run(SEXP ptr, double end_time) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  auto time = obj->time();
  if (end_time < time) {
    cpp11::stop("'end_time' (%f) must be greater than current time (%f)",
                end_time, time);
  }
  obj->run(end_time);
  std::vector<double> dat(obj->n_state_run() * obj->n_particles());
  obj->state_run(dat);
  return mode::r::state_array(dat, obj->n_state_run(), obj->n_particles());
}

// Full (unindexed) state of every particle.
template <typename T>
cpp11::sexp mode_state_full(SEXP ptr) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  std::vector<double> dat(obj->n_state_full() * obj->n_particles());
  obj->state_full(dat);
  return mode::r::state_array(dat, obj->n_state_full(), obj->n_particles());
}

// State restricted to a caller-supplied index (1-based from R).
template <typename T>
cpp11::sexp mode_state(SEXP ptr, SEXP r_index) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  const size_t index_max = obj->n_state_full();
  const std::vector <size_t> index =
      mode::r::r_index_to_index(r_index, index_max);
  size_t n = index.size();
  std::vector<double> dat(n * obj->n_particles());
  obj->state(dat, index);
  return mode::r::state_array(dat, n, obj->n_particles());
}

// Solver statistics, 3 values per particle.
template <typename T>
cpp11::sexp mode_stats(SEXP ptr) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  std::vector<size_t> dat(3 * obj->n_particles());
  obj->statistics(dat);
  return mode::r::stats_array(dat, obj->n_particles());
}

// Update any combination of pars/time/state (each may be NULL). Returns the
// new model info when pars were replaced, otherwise NULL. The index (or all
// variables, when NULL) selects which variables r_state refers to.
template <typename T>
cpp11::sexp mode_update_state(SEXP ptr, SEXP r_pars, SEXP r_time, SEXP r_state,
                              SEXP r_index,
                              SEXP r_set_initial_state,
                              SEXP r_reset_step_size) {
  mode::container<T> *obj =
      cpp11::as_cpp < cpp11::external_pointer<mode::container<T>>>(ptr).get();
  std::vector<size_t> index;
  const size_t index_max = obj->n_variables();
  if (r_index != R_NilValue) {
    index = mode::r::r_index_to_index(r_index, index_max);
  } else {
    // Default: identity index over every variable.
    index.clear();
    index.reserve(index_max);
    for (size_t i = 0; i < index_max; ++i) {
      index.push_back(i);
    }
  }
  const size_t n_state_full = obj->n_state_full();
  auto set_initial_state = mode::r::validate_set_initial_state(r_state,
                                                               r_pars,
                                                               r_time,
                                                               r_set_initial_state);
  auto reset_step_size = mode::r::validate_reset_step_size(r_time,
                                                           r_pars,
                                                           r_reset_step_size);
  auto time = mode::r::validate_time(r_time);
  auto state = mode::r::validate_state(r_state,
                                       index.size(),
                                       n_state_full,
                                       static_cast<int>(obj->n_particles()));
  cpp11::sexp ret = R_NilValue;
  if (r_pars != R_NilValue) {
    auto pars = mode::mode_pars<T>(r_pars);
    obj->set_pars(pars);
    ret = mode_info<T>(pars);
  }
  obj->update_state(time, state, index, set_initial_state, reset_step_size);
  return ret;
}

// Size accessors.
template <typename T>
size_t mode_n_state_full(SEXP ptr) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  return obj->n_state_full();
}

template <typename T>
size_t mode_n_state_run(SEXP ptr) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  return obj->n_state_run();
}

template <typename T>
size_t mode_n_variables(SEXP ptr) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  return obj->n_variables();
}

// Reorder/resample particles; the index must be a full permutation-length
// vector (one entry per particle).
template <typename T>
void mode_reorder(SEXP ptr, cpp11::sexp r_index) {
  T *obj = cpp11::as_cpp<cpp11::external_pointer<T>>(ptr).get();
  const size_t index_max = obj->n_particles();
  const std::vector <size_t> index =
      mode::r::r_index_to_index(r_index, index_max);
  if (index.size() != index_max) {
    cpp11::stop("'index' must be a vector of length %d",
                index_max);
  }
  obj->reorder(index);
}

}
}
|
<filename>controllers/location.ts
import { plainToClass } from 'class-transformer';
import { getSquareup } from './http-requests';
import { ISquareupListLocationsResponse } from '../models/endpoints/location/i-squareup-list-locations';
import { ISquareupLocation, ISquareupLocationCapability } from '../models/i-squareup-location'
/**
 * Fetches all Squareup locations.
 *
 * Fixes: the `.then` parameter was mistyped as a single ISquareupLocation —
 * the endpoint returns the list envelope; and the redundant
 * `Promise.resolve(...)` wrapper is dropped (returning inside `.then`
 * already produces a resolved promise).
 *
 * @returns the deserialized locations array.
 */
export function listLocations(): Promise<ISquareupLocation[]> {
  return getSquareup(['locations']).then((response: ISquareupListLocationsResponse) => {
    const resp = plainToClass(ISquareupListLocationsResponse, response);
    return resp.locations;
  });
}
/**
 * Returns the LAST location in the list that advertises the given
 * capability (scanning from the end preserves the original precedence),
 * or undefined when none matches.
 */
export function findLocationCapability(locations: ISquareupLocation[], capability: ISquareupLocationCapability) {
  for (let idx = locations.length - 1; idx >= 0; idx--) {
    const candidate = locations[idx];
    if (candidate.capabilities.indexOf(capability) > -1) {
      return candidate;
    }
  }
}
|
#!/bin/bash
#
# This script is just to test the IRIS image outside of Kubernetes
#
source ./buildtools.sh

# Run the image interactively (auto-removed on exit), exposing the IRIS
# superserver (1972) and management portal (52773) ports; the license key
# is bind-mounted from ./license and passed via --key.
docker run --rm -it --name sds_iris_template \
    -p 1972:1972 -p 52773:52773 \
    -v $PWD/license:/external/license \
    $LOCAL_IMAGE_NAME --key /external/license/iris.key
<gh_stars>0
package org.firstinspires.ftc.teamcode.hardware.devices;
import android.support.annotation.NonNull;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.hardware.RobotHardwareDevice;
import org.firstinspires.ftc.teamcode.hardware.controllers.RobotMotorController;
import org.firstinspires.ftc.teamcode.subsystem.drive.drivecontroller.PID.PID;
/**
 * Wrapper around a DcMotor that adds an optional controller reference,
 * encoder helpers and PID input support.
 *
 * Improvement: the repeated `((DcMotor) getHardwareDevice())` casts are
 * consolidated into one private accessor, {@link #motor()}.
 */
public class RobotMotor extends RobotHardwareDevice implements PID.PidInput {
    private RobotMotorController motorController = null;

    public RobotMotor(@NonNull DcMotor motor) {
        super(motor, "");
    }

    public RobotMotor(@NonNull DcMotor motor, @NonNull String name) {
        super(motor, name);
        //Maybe use .getDeviceName from DcMotor class instead?
    }

    public RobotMotor(@NonNull RobotMotorController motorController, @NonNull DcMotor motor, @NonNull String name) {
        super(motor, name);
        //Maybe use .getDeviceName from DcMotor class instead?
        this.motorController = motorController;
    }

    /** Single cast point: the wrapped hardware device is always a DcMotor. */
    private DcMotor motor() {
        return (DcMotor) getHardwareDevice();
    }

    public RobotMotorController getMotorController() {
        return motorController;
    }

    public void setMotorController(@NonNull RobotMotorController motorController) {
        this.motorController = motorController;
    }

    //TODO: Implement
    public boolean isStalling() {
        return false;
    }

    /** PID feedback value: the raw encoder count. */
    @Override
    public double getInput(PID pid) {
        return getEncoderCount(); //TODO:ticks or inches?
    }

    public int getEncoderCount() {
        return motor().getCurrentPosition();
    }

    /** Resets the encoder to zero, restoring the previous run mode after. */
    public void resetEncoderCount() {
        DcMotor.RunMode previousRunMode = motor().getMode();
        setRunMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        if (Robot.getOpMode() instanceof LinearOpMode) {
            // Yield one loop cycle so the reset can take effect.
            ((LinearOpMode) Robot.getOpMode()).idle();
        }
        setRunMode(previousRunMode);
    }

    public void setDirection(DcMotorSimple.Direction direction) {
        motor().setDirection(direction);
    }

    public void setPower(double power) {
        motor().setPower(power);
    }

    public void setRunMode(DcMotor.RunMode runMode) {
        motor().setMode(runMode);
    }

    public void setZeroPowerBehavior(DcMotor.ZeroPowerBehavior zeroPowerBehavior) {
        motor().setZeroPowerBehavior(zeroPowerBehavior);
    }
}
|
<filename>LeetCodeSolutions/152 - Maximum Product Subarray(medium) - Java.java
class Solution {
    /**
     * Returns the largest product of any contiguous subarray of nums
     * (nums is non-empty per the problem contract).
     *
     * Replaces the original O(n^2) all-subarrays scan with the standard
     * O(n)/O(1) DP: track both the maximum and minimum product ending at
     * each position, because a negative element turns the smallest product
     * into the largest.
     */
    public int maxProduct(int[] nums) {
        int best = nums[0];
        int maxEnding = nums[0]; // max product of a subarray ending here
        int minEnding = nums[0]; // min product of a subarray ending here
        for (int i = 1; i < nums.length; i++) {
            int x = nums[i];
            if (x < 0) {
                // A negative factor swaps the roles of max and min.
                int tmp = maxEnding;
                maxEnding = minEnding;
                minEnding = tmp;
            }
            // Either extend the previous subarray or start fresh at x.
            maxEnding = Math.max(x, maxEnding * x);
            minEnding = Math.min(x, minEnding * x);
            best = Math.max(best, maxEnding);
        }
        return best;
    }
}
class ArgumentParser:
    """Minimal command-line parser.

    Options are registered with add_argument(); parse_args() returns a dict
    with one ``arg_opt_<name>`` entry per registered option (a counter for
    ``store_true`` options, a list of collected values otherwise) plus an
    ``arg_pos`` list of positional arguments.

    Fix: ``add_argument`` previously had no default for ``nargs``, so calls
    like ``add_argument('--verbose', action='store_true')`` (used by this
    very file's example) raised TypeError. ``nargs`` now defaults to 0,
    which is backward compatible.
    """

    def __init__(self):
        # name -> {'action': str, 'nargs': int, 'values': list}
        self.arguments = {}

    def add_argument(self, name, action, nargs=0):
        """Register an option.

        name: option string including the leading ``--``.
        action: ``'store_true'`` for flags, anything else collects values.
        nargs: number of values the option consumes (ignored for flags).
        """
        self.arguments[name] = {'action': action, 'nargs': nargs, 'values': []}

    def parse_args(self, args):
        """Parse ``args`` (a list of strings) and return the result dict.

        Raises ValueError for unknown options or when an option does not
        have enough values following it.
        """
        # Flags start at 0 (they count occurrences); value options at [].
        parsed_args = {'arg_opt_' + k: 0 if v['action'] == 'store_true' else [] for k, v in self.arguments.items()}
        parsed_args['arg_pos'] = []
        i = 0
        while i < len(args):
            if args[i].startswith('--'):
                arg_name = args[i]
                if arg_name not in self.arguments:
                    raise ValueError(f"Unrecognized argument: {arg_name}")
                arg_info = self.arguments[arg_name]
                if arg_info['action'] == 'store_true':
                    parsed_args['arg_opt_' + arg_name] += 1
                else:
                    # The option's values are args[i+1 .. i+nargs].
                    if i + arg_info['nargs'] >= len(args):
                        raise ValueError(f"Option {arg_name} requires {arg_info['nargs']} value(s)")
                    parsed_args['arg_opt_' + arg_name].extend(args[i + 1:i + 1 + arg_info['nargs']])
                    i += arg_info['nargs']
            else:
                parsed_args['arg_pos'].append(args[i])
            i += 1
        return parsed_args
# Example usage
parser = ArgumentParser()
parser.add_argument('--input', action='store', nargs=1)
parser.add_argument('--output', action='store', nargs=1)
# NOTE(review): this call passes no nargs, but add_argument requires it —
# as written this line raises TypeError; confirm the intended signature.
parser.add_argument('--verbose', action='store_true')
args = parser.parse_args(['--input', 'file1.txt', '--output', 'file2.txt', '--verbose', 'extra_arg'])
print(args)
<gh_stars>10-100
import React from 'react'
import { shallow } from 'enzyme'
import AddLocation from './AddLocation'
// Snapshot test: a shallow render of <AddLocation /> must match the stored
// snapshot (regenerate with `jest -u` after intentional UI changes).
describe('AddLocation', () => {
  it('renders correctly', () => {
    const wrapper = shallow(<AddLocation />)
    expect(wrapper).toMatchSnapshot()
  })
})
|
#!/bin/bash
# Builds documentation for each release in both HTML and PDF versions
sh utils/build.sh

# Release/doc names are scraped from the YAML data files: take the value
# part of each "release:"/"name:" line, deduplicated (releases newest-first).
myreleases=$(cat website/_data/releases.yml | grep release | cut -d ":" -f 2- | sort -u -r)
mydocs=$(cat website/_data/releases.yml | grep name | cut -d ":" -f 2- | sort -u)
mystaticreleases=$(cat website/_data/static.yml | grep release | cut -d ":" -f 2- | sort -u -r)
mystaticdocs=$(cat website/_data/static.yml | grep name | cut -d ":" -f 2- | sort -u)

# Empty index
>website/index.html

# Append one <ul> of release/doc links to the index. The arguments are
# whitespace-separated lists, expanded unquoted on purpose so each entry
# becomes its own loop iteration (same duplication the script previously
# had inline for versioned and static documents).
write_index_section() {
    local releases="$1"
    local docs="$2"
    echo "<ul>" >>website/index.html
    for release in $releases; do
        for doc in $docs; do
            echo "<li><a href=\"${release}/${doc}\">${release}-${doc}</a></li>" >>website/index.html
        done
    done
    echo "</ul>" >>website/index.html
}

# Versioned documents
write_index_section "$myreleases" "$mydocs"
# Static documents
write_index_section "$mystaticreleases" "$mystaticdocs"
|
<reponame>JKusiak/isshu
import axios from 'axios';
import { useContext, useState } from 'react';
import { useParams } from 'react-router-dom';
import AddColumnButton from '../../components/Column/AddColumnButton';
import { ActionTypes } from '../../reducers/BoardReducer';
import { BoardReducerContext } from '../Board/GetBoard';
/**
 * Renders the "add column" button for the current board; on submit it
 * POSTs the new column and dispatches AddColumn (with an empty issue list)
 * into the board reducer.
 *
 * Fix: the axios promise had no rejection handler, so a failed request
 * produced an unhandled rejection and no feedback; errors are now logged.
 */
const AddColumn = () => {
	const { boardId } = useParams<{ boardId: string }>();
	const [columnName, setColumnName] = useState<string>('');
	const { dispatch } = useContext(BoardReducerContext);

	function onSubmit(e: React.SyntheticEvent) {
		// e.preventDefault();
		// NOTE(review): preventDefault is deliberately(?) commented out —
		// confirm the button is not inside a <form>, otherwise submitting
		// reloads the page.
		const requestBody = {
			columnName: columnName,
			boardId: boardId,
		};

		axios.post('/columns/add', requestBody)
			.then((resp) => {
				setColumnName('');
				// New columns start with no issues.
				const payload = {
					...resp.data,
					issues: [],
				}
				dispatch({ type: ActionTypes.AddColumn, payload: payload })
			})
			.catch((err) => {
				console.error('Failed to add column', err);
			})
	}

	return (
		<>
			<AddColumnButton
				onSubmit={onSubmit}
				setColumnName={setColumnName}
			/>
		</>
	);
}
export default AddColumn; |
<filename>src/main/java/chylex/hee/entity/projectile/EntityProjectilePotion.java
package chylex.hee.entity.projectile;
import net.minecraft.entity.Entity;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.projectile.EntityPotion;
import net.minecraft.init.Items;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.util.MovingObjectPosition;
import net.minecraft.world.World;
import chylex.hee.item.base.ItemAbstractPotion;
import chylex.hee.system.abstractions.entity.EntityDataWatcher;
import chylex.hee.system.abstractions.entity.EntitySelector;
/**
 * Thrown potion projectile that syncs its potion item's numeric ID through
 * the entity data watcher so client and server resolve the same item.
 */
public class EntityProjectilePotion extends EntityPotion{
	/** Data-watcher slots owned by this entity; ITEM_ID holds the item's numeric ID. */
	private enum Data{ ITEM_ID }
	private EntityDataWatcher entityData;
	private Item potionItem;
	public EntityProjectilePotion(World world){
		super(world);
	}
	public EntityProjectilePotion(World world, EntityLivingBase thrower, ItemAbstractPotion potion){
		super(world, thrower, new ItemStack(potion, 1, 1));
		this.potionItem = potion;
	}
	/**
	 * Returns the potion item. When the field is unset or still the vanilla
	 * placeholder, re-resolves it from the watched ITEM_ID, falling back to
	 * the vanilla potion item if the ID does not resolve.
	 */
	public Item getPotionItem(){
		if (potionItem == null || potionItem == Items.potionitem){
			if ((potionItem = Item.getItemById(entityData.getShort(Data.ITEM_ID))) == null)potionItem = Items.potionitem;
		}
		return potionItem;
	}
	@Override
	protected void entityInit(){
		super.entityInit();
		// Register the watched short that carries the potion item ID.
		entityData = new EntityDataWatcher(this);
		entityData.addShort(Data.ITEM_ID);
	}
	@Override
	public void onUpdate(){
		super.onUpdate();
		// Publish the item ID once, on the first tick after spawn.
		if (ticksExisted == 1)entityData.setShort(Data.ITEM_ID, Item.getIdFromItem(potionItem));
	}
	@Override
	protected void onImpact(MovingObjectPosition mop){
		// Server side only: apply the thrown effect to entities inside the
		// expanded box that are within 4 blocks (16 = 4^2, squared distance).
		if (!worldObj.isRemote && potionItem instanceof ItemAbstractPotion){
			for(Entity entity:EntitySelector.any(worldObj, boundingBox.expand(4D, 2D, 4D))){
				double dist = getDistanceSqToEntity(entity);
				// A direct hit (mop.entityHit) gets maximum effect strength.
				if (dist < 16D)((ItemAbstractPotion)potionItem).applyEffectThrown(entity, mop.entityHit == entity ? Double.MAX_VALUE : dist);
			}
			worldObj.playAuxSFX(2002, (int)Math.round(posX), (int)Math.round(posY), (int)Math.round(posZ), 0);
			setDead();
		}
	}
	@Override
	public void writeEntityToNBT(NBTTagCompound nbt){
		super.writeEntityToNBT(nbt);
		nbt.setShort("potionItemID", (short)Item.getIdFromItem(potionItem));
	}
	@Override
	public void readEntityFromNBT(NBTTagCompound nbt){
		super.readEntityFromNBT(nbt);
		potionItem = Item.getItemById(nbt.getShort("potionItemID"));
	}
}
|
def linear_search(input_arr, search_term):
    """Return the index of the first element equal to search_term, or -1."""
    for index, value in enumerate(input_arr):
        if value == search_term:
            return index
    return -1
#!/bin/bash
# Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Model-repository version: taken from the container env var, overridable by
# the first CLI argument; the test cannot run without it.
REPO_VERSION=${NVIDIA_TENSORRT_SERVER_VERSION}
if [ "$#" -ge 1 ]; then
  REPO_VERSION=$1
fi
if [ -z "$REPO_VERSION" ]; then
  echo -e "Repository version must be specified"
  echo -e "\n***\n*** Test Failed\n***"
  exit 1
fi

export CUDA_VISIBLE_DEVICES=0

CLIENT_LOG="./client.log"
PERF_CLIENT=../clients/perf_client
TRT_OP_TEST=trt_dynamic_shape_test.py

# Fresh local model repository populated from the shared QA data dir.
DATADIR="./models"
rm -rf ${DATADIR}
mkdir -p ${DATADIR}
cp -r /data/inferenceserver/${REPO_VERSION}/qa_variable_model_repository/plan_float32_float32_float32-4-32 ${DATADIR}/

SERVER=/opt/tritonserver/bin/trtserver
SERVER_ARGS=--model-repository=$DATADIR
SERVER_LOG="./inference_server.log"
# util.sh provides run_server / run_server_nowait (sets SERVER_PID).
source ../common/util.sh

rm -f *.log*

RET=0

run_server
if [ "$SERVER_PID" == "0" ]; then
  echo -e "\n***\n*** Failed to start $SERVER\n***"
  cat $SERVER_LOG
  exit 1
fi

# Shape beyond the limits of optimization profile
set +e
# Dimension 33 exceeds the profile's max (32): this client run must FAIL.
$PERF_CLIENT -v -i grpc -u localhost:8001 -m plan_float32_float32_float32-4-32 --shape INPUT0:33 --shape INPUT1:33 -t 1 -p2000 -b 1 > ${CLIENT_LOG}_max 2>&1
if [ $? -eq 0 ]; then
  cat ${CLIENT_LOG}_max
  echo -e "\n***\n*** Test Failed\n***"
  RET=1
fi
# The failure must be for the expected reason (out-of-range dimension).
EXPECTED_MESSAGE="model expected the shape of dimension 1 to be between 4 and 32 but received"
if [ $(cat ${CLIENT_LOG}_max | grep "${EXPECTED_MESSAGE} 33" | wc -l) -eq 0 ]; then
  cat ${CLIENT_LOG}_max
  echo -e "\n***\n*** Test Failed\n***"
  RET=1
fi

# Dimension 3 is below the profile's min (4): this run must also fail.
$PERF_CLIENT -v -i grpc -u localhost:8001 -m plan_float32_float32_float32-4-32 --shape INPUT0:3 --shape INPUT1:3 -t 1 -p2000 -b 1 > ${CLIENT_LOG}_min 2>&1
if [ $? -eq 0 ]; then
  cat ${CLIENT_LOG}_min
  echo -e "\n***\n*** Test Failed\n***"
  RET=1
fi
if [ $(cat ${CLIENT_LOG}_min | grep "${EXPECTED_MESSAGE} 3" | wc -l) -eq 0 ]; then
  cat ${CLIENT_LOG}_min
  echo -e "\n***\n*** Test Failed\n***"
  RET=1
fi
set -e

kill $SERVER_PID
wait $SERVER_PID

# Tests with multiple optimization profiles
# plan_float32_float32_float32 models with dynamic shapes has 6 profiles
# min, opt, max, idx
# [1, 1], [1, 16], [8, 33], 0 (*)
# [1, 1], [2, 16], [7, 32], 1
# [1, 1], [3, 16], [6, 32], 2
# [1, 1], [4, 16], [5, 32], 3
# [5, 1], [6, 16], [8, 32], 4 (*)
# [6, 1], [6, 16], [8, 32], 5 (*)
# [1, 1], [1, 16], [8, 32], 6
rm -rf ${DATADIR} && rm -f config.pbtxt && mkdir -p ${DATADIR}
cp -r /data/inferenceserver/${REPO_VERSION}/qa_variable_model_repository/plan_float32_float32_float32 ${DATADIR}/
# Keep a copy of original model config for different modifications
cp -r /data/inferenceserver/${REPO_VERSION}/qa_variable_model_repository/plan_float32_float32_float32/config.pbtxt .

# TrtDynamicShapeTest.test_load_specific_optimization_profile
CLIENT_LOG="./test_load_specific_optimization_profile.client.log"
SERVER_LOG="./test_load_specific_optimization_profile.inference_server.log"
cp config.pbtxt ${DATADIR}/plan_float32_float32_float32/config.pbtxt && \
sed -i "s/profile:.*/profile: [\"5\"]/" ${DATADIR}/plan_float32_float32_float32/config.pbtxt
run_server
if [ "$SERVER_PID" == "0" ]; then
  echo -e "\n***\n*** Failed to start $SERVER\n***"
  cat $SERVER_LOG
  exit 1
fi
set +e
python $TRT_OP_TEST TrtDynamicShapeTest.test_load_specific_optimization_profile >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
  echo -e "\n***\n*** Test Failed\n***"
  cat $CLIENT_LOG
  RET=1
fi
set -e
kill $SERVER_PID
wait $SERVER_PID

# TrtDynamicShapeTest.test_load_default_optimization_profile
# Removing the profile line entirely makes the server use the default profile.
CLIENT_LOG="./test_load_default_optimization_profile.client.log"
SERVER_LOG="./test_load_default_optimization_profile.inference_server.log"
cp config.pbtxt ${DATADIR}/plan_float32_float32_float32/config.pbtxt && \
sed -i "s/profile:.*//" ${DATADIR}/plan_float32_float32_float32/config.pbtxt
run_server
if [ "$SERVER_PID" == "0" ]; then
  echo -e "\n***\n*** Failed to start $SERVER\n***"
  cat $SERVER_LOG
  exit 1
fi
set +e
python $TRT_OP_TEST TrtDynamicShapeTest.test_load_default_optimization_profile >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
  echo -e "\n***\n*** Test Failed\n***"
  cat $CLIENT_LOG
  RET=1
fi
set -e
kill $SERVER_PID
wait $SERVER_PID

# TrtDynamicShapeTest.test_select_optimization_profile
# Note that this test needs to check server log for which OP is used
#
# finding OP that best fit the input shape:
# load OP 0, 1, 2, 3, send [4 16] and 3 should be used
SERVER_ARGS="--model-repository=$DATADIR --log-verbose=1"
CLIENT_LOG="./test_select_optimization_profile.client.best.log"
SERVER_LOG="./test_select_optimization_profile.inference_server.best.log"
(cp config.pbtxt ${DATADIR}/plan_float32_float32_float32/config.pbtxt && \
sed -i "s/max_batch_size:.*/max_batch_size: 5/" ${DATADIR}/plan_float32_float32_float32/config.pbtxt && \
sed -i "s/profile:.*/profile: [\"0\", \"1\", \"2\", \"3\"]/" ${DATADIR}/plan_float32_float32_float32/config.pbtxt)
run_server
if [ "$SERVER_PID" == "0" ]; then
  echo -e "\n***\n*** Failed to start $SERVER\n***"
  cat $SERVER_LOG
  exit 1
fi
set +e
python $TRT_OP_TEST TrtDynamicShapeTest.test_select_optimization_profile >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
  echo -e "\n***\n*** Test Failed\n***"
  RET=1
fi
set -e

# Verify from the verbose server log that profile 3 was actually selected.
set +e
grep "Context with profile 3 \[3\] is being executed for " test_select_optimization_profile.inference_server.best.log
if [ $? -ne 0 ]; then
  echo -e "\n***\n*** Failed. Expected profile 3 is used\n***"
  RET=1
fi
set -e
kill $SERVER_PID
wait $SERVER_PID

# finding OP that best fit the input shape while the input shape is allowed:
# load OP 0, 5, send [4 16] and 0 should be used
# (OP 5 is the best in terms of OPT dims, but it requires min dims [6, 1])
CLIENT_LOG="./test_select_optimization_profile.client.allow.log"
SERVER_LOG="./test_select_optimization_profile.inference_server.allow.log"
cp config.pbtxt ${DATADIR}/plan_float32_float32_float32/config.pbtxt && \
sed -i "s/profile:.*/profile: [\"0\", \"5\"]/" ${DATADIR}/plan_float32_float32_float32/config.pbtxt
run_server
if [ "$SERVER_PID" == "0" ]; then
  echo -e "\n***\n*** Failed to start $SERVER\n***"
  cat $SERVER_LOG
  exit 1
fi
set +e
python $TRT_OP_TEST TrtDynamicShapeTest.test_select_optimization_profile >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
  echo -e "\n***\n*** Test Failed\n***"
  cat $CLIENT_LOG
  RET=1
fi
set -e
set +e
grep "Context with profile 0 \[0\] is being executed for " test_select_optimization_profile.inference_server.allow.log
if [ $? -ne 0 ]; then
  echo -e "\n***\n*** Failed. Expected profile 0 is used\n***"
  RET=1
fi
set -e
kill $SERVER_PID
wait $SERVER_PID

# TrtDynamicShapeTest.test_load_wrong_optimization_profile
# Profile "7" does not exist; --exit-on-error=false keeps the server alive so
# the model-load failure can be observed by the test.
SERVER_ARGS="--model-repository=$DATADIR --exit-on-error=false"
CLIENT_LOG="./test_load_wrong_optimization_profile.client.log"
SERVER_LOG="./test_load_wrong_optimization_profile.inference_server.log"
cp config.pbtxt ${DATADIR}/plan_float32_float32_float32/config.pbtxt && \
sed -i "s/profile:.*/profile: [\"7\"]/" ${DATADIR}/plan_float32_float32_float32/config.pbtxt
run_server_nowait
if [ "$SERVER_PID" == "0" ]; then
  echo -e "\n***\n*** Failed to start $SERVER\n***"
  cat $SERVER_LOG
  exit 1
fi
# Give the server time to attempt (and fail) the model load before testing.
sleep 10
set +e
python $TRT_OP_TEST TrtDynamicShapeTest.test_load_wrong_optimization_profile >>$CLIENT_LOG 2>&1
if [ $? -ne 0 ]; then
  echo -e "\n***\n*** Test Failed\n***"
  cat $CLIENT_LOG
  RET=1
fi
set -e
kill $SERVER_PID
wait $SERVER_PID

if [ $RET -eq 0 ]; then
  echo -e "\n***\n*** Test Passed\n***"
else
  echo -e "\n***\n*** Test Failed\n***"
fi

exit $RET
|
package org.brapi.test.BrAPITestServer.service.geno;
import java.util.List;
import org.brapi.test.BrAPITestServer.model.entity.geno.MarkerPositionEntity;
import org.brapi.test.BrAPITestServer.repository.geno.MarkerPositionRepository;
import org.brapi.test.BrAPITestServer.service.PagingUtility;
import org.brapi.test.BrAPITestServer.service.SearchQueryBuilder;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import io.swagger.model.Metadata;
import io.swagger.model.geno.MarkerPositionSearchRequest;
import io.swagger.model.geno.MarkerPosition;
@Service
public class MarkerPositionService {
	private final MarkerPositionRepository markerPositionRepository;

	public MarkerPositionService(MarkerPositionRepository markerPositionRepository) {
		this.markerPositionRepository = markerPositionRepository;
	}

	/**
	 * Convenience overload: wraps the individual filter values (ignoring
	 * nulls) in a MarkerPositionSearchRequest and delegates to the
	 * request-based search.
	 */
	public List<MarkerPosition> findMarkerPositions(String mapDbId, String linkageGroupName, String variantDbId,
			Integer maxPosition, Integer minPosition, Metadata metadata) {
		MarkerPositionSearchRequest searchRequest = new MarkerPositionSearchRequest();
		if (mapDbId != null) {
			searchRequest.addMapDbIdsItem(mapDbId);
		}
		if (linkageGroupName != null) {
			searchRequest.addLinkageGroupNamesItem(linkageGroupName);
		}
		if (variantDbId != null) {
			searchRequest.addVariantDbIdsItem(variantDbId);
		}
		searchRequest.setMinPosition(minPosition);
		searchRequest.setMaxPosition(maxPosition);
		return findMarkerPositions(searchRequest, metadata);
	}

	/**
	 * Runs a paged search for marker positions and converts each entity hit
	 * to its API model; paging info is written back into {@code metadata}.
	 */
	public List<MarkerPosition> findMarkerPositions(MarkerPositionSearchRequest request, Metadata metadata) {
		Pageable pageRequest = PagingUtility.getPageRequest(metadata);
		SearchQueryBuilder<MarkerPositionEntity> query = new SearchQueryBuilder<MarkerPositionEntity>(
				MarkerPositionEntity.class).appendList(request.getLinkageGroupNames(), "linkageGroup.linkageGroupName")
						.appendList(request.getVariantDbIds(), "variant.id")
						.appendList(request.getMapDbIds(), "linkageGroup.genomeMap.id")
						.appendNumberRange(request.getMinPosition(), request.getMaxPosition(), "position");
		Page<MarkerPositionEntity> resultPage = markerPositionRepository.findAllBySearch(query, pageRequest);
		PagingUtility.calculateMetaData(metadata, resultPage);
		return resultPage.map(this::convertFromEntity).getContent();
	}

	/** Maps a persistence entity onto the generated API model. */
	private MarkerPosition convertFromEntity(MarkerPositionEntity entity) {
		MarkerPosition position = new MarkerPosition();
		position.setAdditionalInfo(entity.getAdditionalInfoMap());
		position.setPosition(entity.getPosition());
		if (entity.getLinkageGroup() != null) {
			position.setLinkageGroupName(entity.getLinkageGroup().getLinkageGroupName());
			if (entity.getLinkageGroup().getGenomeMap() != null) {
				position.setMapDbId(entity.getLinkageGroup().getGenomeMap().getId());
				position.setMapName(entity.getLinkageGroup().getGenomeMap().getMapName());
			}
		}
		if (entity.getVariant() != null) {
			position.setVariantDbId(entity.getVariant().getId());
			position.setVariantName(entity.getVariant().getVariantName());
		}
		return position;
	}
}
|
import { Component } from '@angular/core';
/**
 * Static registration form: first name, last name, age, email and a submit
 * button. The template is not bound to any model or submit handler yet
 * (plain inputs, no ngModel/formGroup), so submitting performs the browser
 * default action.
 */
@Component({
  selector: 'app-registration-form',
  template: `
    <form>
      <div>
        <label>First Name:</label>
        <input type="text" name="firstName">
      </div>
      <div>
        <label>Last Name:</label>
        <input type="text" name="lastName">
      </div>
      <div>
        <label>Age:</label>
        <input type="number" name="age">
      </div>
      <div>
        <label>Email:</label>
        <input type="email" name="email">
      </div>
      <button type="submit">Submit</button>
    </form>
  `
})
export class RegistrationFormComponent {
}
#!/bin/sh
set -eu
# Run the Python tool given on the command line ("$@") with -D parameter
# definitions for this configuration: prime q = 2**414 - 17, a 32-byte
# modulus, and curve constant a24 = 121665.
/usr/bin/env python3 "$@" -Dq='2**414 - 17' -Dmodulus_bytes='32' -Da24='121665'
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.sample.tri.pojo;
import org.apache.dubbo.common.constants.CommonConstants;
import org.apache.dubbo.common.stream.StreamObserver;
import org.apache.dubbo.config.ApplicationConfig;
import org.apache.dubbo.config.ReferenceConfig;
import org.apache.dubbo.config.RegistryConfig;
import org.apache.dubbo.config.bootstrap.DubboBootstrap;
import org.apache.dubbo.sample.tri.api.PojoGreeter;
import org.apache.dubbo.sample.tri.util.StdoutStreamObserver;
import org.apache.dubbo.sample.tri.util.TriSampleConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
/**
 * Sample Dubbo Triple-protocol client that exercises the {@link PojoGreeter}
 * service with unary, bi-directional-stream, and server-stream calls.
 */
class TriPojoClient {
	private static final Logger LOGGER = LoggerFactory.getLogger(TriPojoClient.class);
	private final PojoGreeter delegate;
	private final String clientName = "tri-pojo";
	/**
	 * Bootstraps Dubbo against the sample ZooKeeper registry and resolves a
	 * {@link PojoGreeter} reference over the Triple protocol (3s timeout).
	 */
	public TriPojoClient() {
		ReferenceConfig<PojoGreeter> ref = new ReferenceConfig<>();
		ref.setInterface(PojoGreeter.class);
		ref.setTimeout(3000);
		ref.setProtocol(CommonConstants.TRIPLE);
		DubboBootstrap bootstrap = DubboBootstrap.getInstance();
		bootstrap.application(new ApplicationConfig("tri-pojo-client"))
				.registry(new RegistryConfig(TriSampleConstants.ZK_ADDRESS))
				.reference(ref)
				.start();
		this.delegate = ref.get();
	}
	public static void main(String[] args) throws IOException {
		final TriPojoClient consumer = new TriPojoClient();
		consumer.greetUnary();
		consumer.greetStream();
		consumer.greetServerStream();
		// Block on stdin so asynchronous stream replies can still arrive.
		System.in.read();
	}
	/** Single request / single reply call. */
	public void greetUnary() {
		LOGGER.info("{} Start unary", clientName);
		String reply = delegate.greet("unary");
		LOGGER.info("{} Unary reply <-{}", clientName, reply);
	}
	/** One request; replies are streamed to a stdout-logging observer. */
	public void greetServerStream() {
		LOGGER.info("{} Start server streaming", clientName);
		delegate.greetServerStream("server stream", new StdoutStreamObserver<>("greetServerStream"));
		LOGGER.info("{} Server stream done", clientName);
	}
	/** Bi-directional stream: sends 10 messages, then completes the stream. */
	public void greetStream() {
		LOGGER.info("{} Start bi streaming", clientName);
		final StreamObserver<String> request = delegate.greetStream(new StdoutStreamObserver<>("greetStream"));
		for (int i = 0; i < 10; i++) {
			request.onNext("stream request");
		}
		request.onCompleted();
		LOGGER.info("{} Bi stream done", clientName);
	}
}
|
#!/bin/sh
# Fetch the AdoptOpenJDK 11 and Hadoop 3.3.2 tarballs from the TUNA mirror
# into data/.
# Stop on the first failed download instead of continuing silently.
set -e
# wget -O does not create parent directories; make sure data/ exists first.
mkdir -p data
wget -O data/OpenJDK11U-jdk_x64_linux_hotspot_11.0.14.1_1.tar.gz https://mirrors.tuna.tsinghua.edu.cn/AdoptOpenJDK/11/jdk/x64/linux/OpenJDK11U-jdk_x64_linux_hotspot_11.0.14.1_1.tar.gz
wget -O data/hadoop-3.3.2.tar.gz https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/hadoop-3.3.2/hadoop-3.3.2.tar.gz
#!/usr/bin/env bash
set -e

# Launches the "trio" dev stack (three server-nodes: carol/dave/roger, plus a
# router and observability tools) as a docker swarm stack, generating the
# compose file on the fly.
# NOTE(review): the dump this was recovered from stripped leading whitespace;
# the indentation inside the compose-fragment strings and the heredoc below is
# significant YAML structure and has been reconstructed -- verify against the
# original before relying on it.

stack="trio"

root=$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." >/dev/null 2>&1 && pwd )
project=$(grep -m 1 '"name":' "$root/package.json" | cut -d '"' -f 4)

# make sure a network for this project has been created
docker swarm init 2> /dev/null || true
docker network create --attachable --driver overlay "$project" 2> /dev/null || true

if grep -qs "$stack" <<<"$(docker stack ls --format '{{.Name}}')"
then echo "A $stack stack is already running" && exit 0;
fi

####################
## Load Config

# Seed the user-editable config files from the defaults on first run.
if [[ ! -f "$root/node.config.json" ]]
then cp "$root/ops/config/node.default.json" "$root/node.config.json"
fi
if [[ ! -f "$root/router.config.json" ]]
then cp "$root/ops/config/router.default.json" "$root/router.config.json"
fi

# Merge defaults with user overrides; later objects win key-by-key.
config=$(
  cat "$root/ops/config/node.default.json" "$root/ops/config/router.default.json" \
  | cat - "$root/node.config.json" "$root/router.config.json" \
  | jq -s '.[0] + .[1] + .[2] + .[3]'
)

# getConfig KEY: print the merged config value for KEY, or "" when null.
function getConfig {
  value=$(echo "$config" | jq ".$1" | tr -d '"')
  if [[ "$value" == "null" ]]
  then echo ""
  else echo "$value"
  fi
}

messaging_url=$(getConfig messagingUrl)

chain_providers=$(echo "$config" | jq '.chainProviders' | tr -d '\n\r ')
default_providers=$(jq '.chainProviders' "$root/ops/config/node.default.json" | tr -d '\n\r ')

# Compose fragment shared by every service (network + log rotation).
common="networks:
      - '$project'
    logging:
      driver: 'json-file'
      options:
          max-size: '100m'"

####################
## Start dependency stacks

# Only start local chains when the user hasn't pointed at external providers.
if [[ "$chain_providers" == "$default_providers" ]]
then
  bash "$root/ops/start-chains.sh"
  config=$(
    echo "$config" '{"chainAddresses":'"$(cat "$root/.chaindata/chain-addresses.json")"'}' \
    | jq -s '.[0] + .[1]'
  )
fi

if [[ -z "$messaging_url" ]]
then bash "$root/ops/start-messaging.sh"
fi

echo
echo "Preparing to launch $stack stack"

########################################
## Node config

internal_node_port="8000"
internal_prisma_port="5555"

carol_node_port="8005"
carol_prisma="5555"
carol_mnemonic="owner warrior discover outer physical intact secret goose all photo napkin fall"
echo "$stack.carol will be exposed on *:$carol_node_port"

dave_node_port="8006"
dave_prisma="5556"
dave_mnemonic="woman benefit lawn ignore glove marriage crumble roast tool area cool payment"
echo "$stack.dave will be exposed on *:$dave_node_port"

roger_node_port="8007"
roger_prisma="5557"
roger_mnemonic="spice notable wealth rail voyage depth barely thumb skill rug panel blush"
echo "$stack.roger will be exposed on *:$roger_node_port"

# The router talks to roger; wire its URL into the shared config.
config=$(echo "$config" '{"nodeUrl":"http://roger:'$internal_node_port'"}' | jq -s '.[0] + .[1]')

public_url="http://localhost:$roger_node_port"

node_image="image: '${project}_builder'
    entrypoint: 'bash modules/server-node/ops/entry.sh'
    volumes:
      - '$root:/root'
    tmpfs: /tmp"

node_env="environment:
      VECTOR_CONFIG: '$config'"

########################################
## Router config

router_port="8000"
router_public_port="8009"
echo "$stack.router will be exposed on *:$router_public_port"

router_image="image: '${project}_builder'
    entrypoint: 'bash modules/router/ops/entry.sh'
    volumes:
      - '$root:/root'
    ports:
      - '$router_public_port:$router_port'"

####################
# Observability tools config

grafana_image="grafana/grafana:latest"
bash "$root/ops/pull-images.sh" "$grafana_image" > /dev/null

prometheus_image="prom/prometheus:latest"
bash "$root/ops/pull-images.sh" "$prometheus_image" > /dev/null

cadvisor_image="gcr.io/google-containers/cadvisor:latest"
bash "$root/ops/pull-images.sh" "$cadvisor_image" > /dev/null

prometheus_services="prometheus:
    image: $prometheus_image
    $common
    ports:
      - 9090:9090
    command:
      - --config.file=/etc/prometheus/prometheus.yml
    volumes:
      - $root/ops/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro

  cadvisor:
    $common
    image: $cadvisor_image
    ports:
      - 8081:8080
    volumes:
      - /:/rootfs:ro
      - /var/run:/var/run:rw
      - /sys:/sys:ro
      - /var/lib/docker/:/var/lib/docker:ro"

grafana_service="grafana:
    image: '$grafana_image'
    $common
    networks:
      - '$project'
    ports:
      - '3008:3000'
    volumes:
      - '$root/ops/grafana/grafana:/etc/grafana'
      - '$root/ops/grafana/dashboards:/etc/dashboards'"

observability_services="$prometheus_services

  $grafana_service"

####################
# Launch stack

docker_compose=$root/.${stack}.docker-compose.yml
rm -f "$docker_compose"
cat - > "$docker_compose" <<EOF
version: '3.4'

networks:
  $project:
    external: true

services:

  carol:
    $common
    $node_image
    $node_env
      VECTOR_MNEMONIC: '$carol_mnemonic'
    ports:
      - '$carol_node_port:$internal_node_port'
      - '$carol_prisma:$internal_prisma_port'

  dave:
    $common
    $node_image
    $node_env
      VECTOR_MNEMONIC: '$dave_mnemonic'
    ports:
      - '$dave_node_port:$internal_node_port'
      - '$dave_prisma:$internal_prisma_port'

  roger:
    $common
    $node_image
    $node_env
      VECTOR_MNEMONIC: '$roger_mnemonic'
    ports:
      - '$roger_node_port:$internal_node_port'
      - '$roger_prisma:$internal_prisma_port'

  router:
    $common
    $router_image
    environment:
      VECTOR_CONFIG: '$config'
      VECTOR_NODE_URL: 'http://roger:$internal_node_port'
      VECTOR_PORT: '$router_port'

  $observability_services

EOF

docker stack deploy -c "$docker_compose" "$stack"

echo "The $stack stack has been deployed, waiting for $public_url to start responding.."
# Poll roger's public endpoint for up to 5 minutes.
timeout=$(( $(date +%s) + 300 ))
while true
do
  res=$(curl -k -m 5 -s $public_url || true)
  if [[ -z "$res" || "$res" == "Waiting for proxy to wake up" ]]
  then
    if [[ "$(date +%s)" -gt "$timeout" ]]
    then echo "Timed out waiting for proxy to respond.." && exit
    else sleep 2
    fi
  else echo "Good Morning!" && exit;
  fi
done
|
# Antigen plugin configuration: load oh-my-zsh as the base framework,
# then the individual bundles.
antigen use oh-my-zsh
antigen bundle urbainvaes/fzf-marks
antigen bundle git
antigen bundle lein
antigen bundle command-not-found
# Fish-style inline suggestions and command-line syntax colouring.
antigen bundle zsh-users/zsh-autosuggestions
antigen bundle zsh-users/zsh-syntax-highlighting
antigen bundle asdf
antigen theme romkatv/powerlevel10k
# Apply everything configured above.
antigen apply
|
# Termux build recipe for Kona, an open-source K interpreter.
TERMUX_PKG_HOMEPAGE=https://github.com/kevinlawler/kona
TERMUX_PKG_DESCRIPTION="Open-source implementation of the APL-like K programming language"
TERMUX_PKG_LICENSE="ISC"
TERMUX_PKG_MAINTAINER="@termux"
# Upstream versions releases by date (YYYYMMDD); matched by the regexp below.
TERMUX_PKG_VERSION=20211225
TERMUX_PKG_SRCURL=https://github.com/kevinlawler/kona/archive/Win64-${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=cd5dcc03394af275f0416b3cb2914574bf51ec60d1c857020fbd34b5427c5faf
TERMUX_PKG_AUTO_UPDATE=true
TERMUX_PKG_UPDATE_VERSION_REGEXP="\d{8}"
# Build in the source tree; no separate build directory.
TERMUX_PKG_BUILD_IN_SRC=true
|
#!/bin/sh
### BEGIN INIT INFO
# description: Starts the python based OT scan handler as background process
# processname: startotscan
### END INIT INFO

# Init script for the OT device scanner: runs the Python scan script in the
# background and tracks it via a pidfile.
DAEMON_PATH="/usr/bin/"
DAEMON=python3
DAEMONOPTS=/opt/scan/scan-ot-devices-to-iot-hub.py
NAME=startotscan
DESC="startotscan"
PIDFILE=/var/run/$NAME.pid
SCRIPTNAME=/etc/init.d/$NAME

case "$1" in
start)
    printf "%-50s" "Starting $NAME..."
    cd $DAEMON_PATH
    # Launch detached and capture the background PID.
    PID=`$DAEMON $DAEMONOPTS > /dev/null 2>&1 & echo $!`
    #echo "Saving PID" $PID " to " $PIDFILE
    if [ -z "$PID" ]; then
        printf "%s\n" "Fail"
    else
        echo $PID > $PIDFILE
        printf "%s\n" "Ok"
    fi
;;
status)
    printf "%-50s" "Checking $NAME..."
    if [ -f $PIDFILE ]; then
        PID=`cat $PIDFILE`
        if [ -z "`ps axf | grep ${PID} | grep -v grep`" ]; then
            printf "%s\n" "Process dead but pidfile exists"
        else
            echo "Running"
        fi
    else
        printf "%s\n" "Service not running"
    fi
;;
stop)
    printf "%-50s" "Stopping $NAME"
    cd $DAEMON_PATH
    if [ -f $PIDFILE ]; then
        # Read the PID only after confirming the pidfile exists; the original
        # ran `cat $PIDFILE` unconditionally, erroring when the file was absent.
        PID=`cat $PIDFILE`
        kill -HUP $PID
        printf "%s\n" "Ok"
        rm -f $PIDFILE
    else
        printf "%s\n" "pidfile not found"
    fi
;;
restart)
    $0 stop
    $0 start
;;
*)
    echo "Usage: $0 {status|start|stop|restart}"
    exit 1
esac
# Always exit successfully so init doesn't flag usage-only invocations.
:
|
<reponame>OpenHFT/Chronicle-Values
/*
* Copyright 2016-2021 chronicle.software
*
* https://chronicle.software
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.openhft.chronicle.values;
import com.squareup.javapoet.MethodSpec;
import java.util.Date;
import static java.lang.String.format;
import static net.openhft.chronicle.values.IntegerFieldModel.NORMAL_ACCESS_TYPE;
/**
 * Field model for {@link Date}-typed value fields. The date is backed by a
 * {@code long} (epoch millis from {@link Date#getTime()}); the generated
 * accessors convert between the stored long and Date at the call boundary.
 */
class DateFieldModel extends IntegerBackedFieldModel {
	// Generator for native (integer-backed) values: wraps the backing long in
	// a new Date on get, and unwraps via getTime() on set.
	final MemberGenerator nativeGenerator = new IntegerBackedNativeMemberGenerator(this, backend) {
		@Override
		void generateArrayElementFields(
				ArrayFieldModel arrayFieldModel, ValueBuilder valueBuilder) {
			// no fields
		}
		@Override
		void finishGet(ValueBuilder valueBuilder, MethodSpec.Builder methodBuilder, String value) {
			// Wrap the raw epoch-millis value in a fresh Date instance.
			methodBuilder.addStatement(format("return new $T(%s)", value), Date.class);
		}
		@Override
		String startSet(MethodSpec.Builder methodBuilder) {
			// Store the Date argument as its epoch-millis long.
			return varName() + ".getTime()";
		}
		@Override
		void generateEquals(ValueBuilder valueBuilder, MethodSpec.Builder methodBuilder) {
			// Compare the backing long against the other side's Date.getTime().
			String time = backingFieldModel.genGet(valueBuilder, NORMAL_ACCESS_TYPE);
			methodBuilder.addStatement("if ($N != other.$N().getTime()) return false;\n",
					time, getOrGetVolatile().getName());
		}
		@Override
		void generateArrayElementEquals(
				ArrayFieldModel arrayFieldModel, ValueBuilder valueBuilder,
				MethodSpec.Builder methodBuilder) {
			String time = backingFieldModel.genArrayElementGet(
					arrayFieldModel, valueBuilder, methodBuilder, NORMAL_ACCESS_TYPE);
			methodBuilder.addStatement("if ($N != other.$N(index).getTime()) return false;\n",
					time, arrayFieldModel.getOrGetVolatile().getName());
		}
		@Override
		String generateHashCode(ValueBuilder valueBuilder, MethodSpec.Builder methodBuilder) {
			// Hash the backing long directly; consistent with equals above.
			String time = backingFieldModel.genGet(valueBuilder, NORMAL_ACCESS_TYPE);
			return format("java.lang.Long.hashCode(%s)", time);
		}
		@Override
		String generateArrayElementHashCode(
				ArrayFieldModel arrayFieldModel, ValueBuilder valueBuilder,
				MethodSpec.Builder methodBuilder) {
			String time = backingFieldModel.genArrayElementGet(
					arrayFieldModel, valueBuilder, methodBuilder, NORMAL_ACCESS_TYPE);
			return format("java.lang.Long.hashCode(%s)", time);
		}
	};
	@Override
	void postProcess() {
		super.postProcess();
		// Dates are epoch millis, so the backing field is a full-range long.
		backend.type = long.class;
		backend.range = RangeImpl.DEFAULT_LONG_RANGE;
		backend.postProcess();
	}
	@Override
	MemberGenerator nativeGenerator() {
		return nativeGenerator;
	}
	@Override
	MemberGenerator createHeapGenerator() {
		// Heap variant keeps an actual Date object and (de)serializes it as a long.
		return new ObjectHeapMemberGenerator(this) {
			@Override
			void generateWriteMarshallable(
					ValueBuilder valueBuilder, MethodSpec.Builder methodBuilder) {
				methodBuilder.addStatement("bytes.writeLong($N.getTime())", fieldName());
			}
			@Override
			void generateArrayElementWriteMarshallable(
					ArrayFieldModel arrayFieldModel, ValueBuilder valueBuilder,
					MethodSpec.Builder methodBuilder) {
				methodBuilder.addStatement("bytes.writeLong($N[index].getTime())", fieldName());
			}
			@Override
			void generateReadMarshallable(
					ValueBuilder valueBuilder, MethodSpec.Builder methodBuilder) {
				methodBuilder.addStatement("$N = new Date(bytes.readLong())", fieldName());
			}
			@Override
			void generateArrayElementReadMarshallable(
					ArrayFieldModel arrayFieldModel, ValueBuilder valueBuilder,
					MethodSpec.Builder methodBuilder) {
				methodBuilder.addStatement("$N[index] = new Date(bytes.readLong())", fieldName());
			}
		};
	}
}
|
#!/usr/bin/env bash
set -e

# If the tmux plugin manager (tpm) is present, run its non-interactive
# install script so all plugins declared in the tmux config get installed.
if [ -f "$TMUX_PLUGIN_MANAGER_PATH"/tpm/bindings/install_plugins ]; then
	# tpm
	"$TMUX_PLUGIN_MANAGER_PATH"/tpm/bindings/install_plugins
fi
|
package com.project.godseye.app;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class LogUtil {
	private static final int CALL_STACK_INDEX = 1;
	private static final Pattern ANONYMOUS_CLASS = Pattern.compile("(\\$\\d+)+$");

	/**
	 * In dev mode, prefixes {@code message} with the caller's class name and
	 * line number (e.g. {@code .(Foo.java:42) - msg}); otherwise returns the
	 * message unchanged.
	 */
	public static String prependCallLocation(String message) {
		// DO NOT switch this to Thread.getCurrentThread().getStackTrace(). The test will pass
		// because Robolectric runs them on the JVM but on Android the elements are different.
		if (!MainActivity.DEV_MODE) {
			return message;
		}
		final StackTraceElement[] trace = new Throwable().getStackTrace();
		if (trace.length <= CALL_STACK_INDEX) {
			throw new IllegalStateException(
					"Synthetic stacktrace didn't have enough elements: are you using proguard?");
		}
		final StackTraceElement caller = trace[CALL_STACK_INDEX];
		return ".(" + extractClassName(caller) + ".java:" + caller.getLineNumber() + ") - " + message;
	}

	/**
	 * Extract the class name without any anonymous class suffixes (e.g., {@code Foo$1}
	 * becomes {@code Foo}).
	 */
	private static String extractClassName(StackTraceElement element) {
		String simpleName = element.getClassName();
		Matcher matcher = ANONYMOUS_CLASS.matcher(simpleName);
		if (matcher.find()) {
			simpleName = matcher.replaceAll("");
		}
		return simpleName.substring(simpleName.lastIndexOf('.') + 1);
	}
}
|
#Copyright (c) 2015 Intel Corporation
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
# Run the Maven license-header check from the repository root and propagate
# its exit status (non-zero when any file is missing the required header).
cd ..
mvn license:check
exit $?
# Interactive release helper: commits everything, tags the version, and
# pushes the podspec (named after the current directory) to the pod trunk.
echo "Enter version"
read version
echo "Enter next submit comment"
read comment
echo "Allow warning? (y/[n])"
read alw

if [ "$version" == '' ] ;
then
    echo "nothing"
else
    if [ "$comment" == '' ] ;
    then
        # Quote the message: an unquoted $version containing spaces would
        # split into multiple git arguments.
        git add -A && git commit -m "$version"
    else
        # The original passed "'$comment'", which put literal single quotes
        # into the commit message itself; pass the comment verbatim.
        git add -A && git commit -m "$comment"
    fi
    git push origin master
    git tag "$version"
    git push --tag
    # The podspec is assumed to be named after the current directory.
    folder=${PWD##*/}
    if [ "$alw" == 'y' ] ;
    then
        pod trunk push "$folder.podspec" --allow-warnings
    else
        pod trunk push "$folder.podspec"
    fi
fi
|
// This file is to show how a library package may provide JavaScript interop features
// wrapped in a .NET API
namespace JSInteropWithTypeScript {
class ExampleJsFunctions {
public showPrompt(message: string): string {
return prompt(message, 'Type anything here');
}
}
export function Load(): void {
window['exampleJsFunctions'] = new ExampleJsFunctions();
}
}
JSInteropWithTypeScript.Load(); |
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
from lxml import etree
from requests import Session
class Services:
    """Signs XML payloads with an RSA private key and posts them to a web service."""

    def __init__(self, certificateContent, rsaKeyContent, privateKeyContent):
        # NOTE(review): load_pem_public_key is applied to the contents of a
        # .crt file; real X.509 certificates usually need
        # x509.load_pem_x509_certificate -- confirm against the input files.
        self.certificate = serialization.load_pem_public_key(certificateContent)
        self.rsa_key = serialization.load_pem_private_key(rsaKeyContent, password=None)
        self.private_key = serialization.load_pem_private_key(privateKeyContent, password=None)

    def sign_xml(self, xml_data):
        """Return a PKCS#1 v1.5 / SHA-256 RSA signature over the UTF-8 bytes of xml_data."""
        signature = self.rsa_key.sign(
            xml_data.encode('utf-8'),
            padding.PKCS1v15(),
            hashes.SHA256()
        )
        return signature

    def consultTaxIdSubscription(self, taxId, taxPayerId):
        """Build, sign and POST the consultation XML; return the raw response body."""
        xml_data = f"<tax><id>{taxId}</id><payerId>{taxPayerId}</payerId></tax>"
        signed_xml = self.sign_xml(xml_data)
        # Post the signed XML to the service. A context manager closes the
        # HTTP session on every path (the original leaked the Session).
        with Session() as session:
            response = session.post('https://example.com/consultTaxIdSubscription', data=signed_xml)
        return response.text
# Load certificate and key files. Context managers guarantee the file handles
# are closed (the originals were opened and never closed).
with open("./certfiles/converted.crt", "rb") as cert_file:
    certificate_content = cert_file.read()
with open("./certfiles/privRSAkey.pem", "rb") as rsa_file:
    rsa_key_content = rsa_file.read()
with open("./certfiles/privateKey.key", "rb") as key_file:
    private_key_content = key_file.read()
# Create service object
objServ = Services(certificate_content, rsa_key_content, private_key_content)
# Consult tax ID subscription
taxId = "00623904000173"
taxPayerId = "00623904000173"
result = objServ.consultTaxIdSubscription(taxId=taxId, taxPayerId=taxPayerId)
print(result)
#!/bin/bash
# Run a throwaway ("--rm") Piper container in the foreground (-it), linked to
# an already-running container named "postgres". Both worker and coordinator
# roles are enabled in this single instance; pipeline YAML files are loaded
# from the current directory (mounted at /pipelines) and the HTTP API is
# published on port 8080.
docker run \
--name=piper \
--link postgres:postgres \
--rm \
-it \
-e spring.datasource.url=jdbc:postgresql://postgres:5432/piper \
-e spring.datasource.initialization-mode=always \
-e logging.level.com.creactiviti=INFO \
-e piper.worker.enabled=true \
-e piper.coordinator.enabled=true \
-e piper.worker.subscriptions.tasks=1 \
-e piper.pipeline-repository.filesystem.enabled=true \
-e piper.pipeline-repository.filesystem.location-pattern=/pipelines/**/*.yaml \
-v $PWD:/pipelines \
-p 8080:8080 \
creactiviti/piper
/**
 * Reverse a string, treating Unicode code points as single units.
 *
 * The original walked UTF-16 code units backwards, which split surrogate
 * pairs and mangled characters outside the BMP (e.g. emoji). Spreading the
 * string iterates by code point, so those stay intact.
 *
 * @param {string} name - The string to reverse.
 * @returns {string} The reversed string.
 */
const reverseName = (name) => [...name].reverse().join('');
// Ask the user for their name (browser-only: prompt) and print it reversed.
console.log(reverseName(prompt('What is your name?')));
#!/bin/bash
local="0" # Example value for the variable local
stack_name="example-stack" # Example value for the stack name
aws_profile="example-profile" # Example value for the AWS profile
if [ "$local" == "0" ]; then
temp_file=$(mktemp)
eval aws lambda invoke \
--invocation-type RequestResponse \
--function-name "${stack_name}-reinitialize-containers" \
--region us-east-1 \
--payload '{}' \
--profile $aws_profile \
$temp_file
fi |
#!/usr/bin/env bash
# CI gate hook: runs the Fuxi fullstack test suite under tox and copies the
# result artifacts to /opt/stack/logs. -x traces commands, -e aborts on error.
set -xe
# Devstack layout: the fuxi and tempest checkouts live under $BASE/new.
FUXI_DIR="$BASE/new/fuxi"
TEMPEST_DIR="$BASE/new/tempest"
SCRIPTS_DIR="/usr/os-testr-env/bin/"
# Tox environment to run; defaults to "fullstack" when no argument is given.
venv=${1:-"fullstack"}
# Compress all log files under $1 and move them to /opt/stack/logs. Files
# with a .log suffix are renamed to .txt first (so browsers will know to open
# the compressed files and not download them).
function generate_test_logs {
    local path="$1"
    if [[ -d "$path" ]] ; then
        # "$path" is now quoted so paths with spaces or glob characters do
        # not word-split or expand (it was unquoted before).
        sudo find "$path" -iname "*.log" -type f -exec mv {} {}.txt \; -exec gzip -9 {}.txt \;
        sudo mv "$path"/* /opt/stack/logs/
    fi
}
# Convert the .testrepository subunit stream into an HTML report, gzip both,
# and stash them (plus any fullstack logs) under /opt/stack/logs.
# Relies on globals: $owner, $venv, $SCRIPTS_DIR.
function generate_testr_results {
    # Give job user rights to access tox logs. Variables are quoted to avoid
    # word splitting (they were unquoted before).
    sudo -H -u "$owner" chmod o+rw .
    sudo -H -u "$owner" chmod o+rw -R .testrepository
    if [[ -f ".testrepository/0" ]] ; then
        ".tox/$venv/bin/subunit-1to2" < .testrepository/0 > ./testrepository.subunit
        "$SCRIPTS_DIR/subunit2html" ./testrepository.subunit testr_results.html
        gzip -9 ./testrepository.subunit
        gzip -9 ./testr_results.html
        sudo mv ./*.gz /opt/stack/logs/
    fi
    if [[ "$venv" == fullstack* ]] ; then
        generate_test_logs "/tmp/${venv}-logs"
    fi
}
owner=stack
# Set owner permissions according to job's requirements. Paths and variables
# are quoted against word splitting (they were unquoted before).
cd "$FUXI_DIR"
sudo chown -R "$owner":stack "$FUXI_DIR"
# Run tests; temporarily drop -e so a test failure doesn't skip log collection.
echo "Running Fuxi $venv fullstack tests"
set +e
sudo -H -u "$owner" tox -e "$venv"
testr_exit_code=$?
set -e
# Collect and parse results, then report the original test exit status.
generate_testr_results
exit $testr_exit_code
<reponame>alphagov/locations-api
require "spec_helper"
RSpec.describe "Locations V1 API" do
before do
ENV["OS_PLACES_API_KEY"] = "some_key"
ENV["OS_PLACES_API_SECRET"] = "some_secret"
end
context "Successful call" do
let(:postcode) { "E1 8QS" }
let(:locations) do
[
Location.new(postcode: "E1 8QS",
address: "1, WHITECHAPEL HIGH STREET, LONDON, E1 8QS",
latitude: 51.5144547,
longitude: -0.0729933,
local_custodian_code: 5900),
Location.new(postcode: "E1 8QS",
address: "2, WHITECHAPEL HIGH STREET, LONDON, E1 8QS",
latitude: 51.5144548,
longitude: -0.0729934,
local_custodian_code: 5900),
]
end
before do
client = double("client")
allow(OsPlacesApi::Client).to receive(:new).and_return(client)
expect(client).to receive(:locations_for_postcode).with(postcode).and_return(locations)
end
it "Should return proper body" do
get "/v1/locations?postcode=#{postcode}"
expect(response.body).to eq locations.to_json
end
end
context "Client request validation" do
let(:expected_validation_response) do
{ errors: { postcode: ["This postcode is invalid"] } }.to_json
end
context "Postcode is incorrect" do
let(:postcode) { "AAA 1AA" }
it "Should return error when postcode is incorrect" do
get "/v1/locations?postcode=#{postcode}"
expect(response.body).to eq expected_validation_response
end
end
context "Postcode is not provided" do
it "Should return error when postcode is not provided" do
get "/v1/locations"
expect(response.body).to eq expected_validation_response
end
end
end
end
|
/* Copyright 2007-2015 QReal Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#include <QtWidgets/QApplication>
#include "visualDebuggerPlugin.h"
#include "plugins/toolPluginInterface/systemEvents.h"
using namespace qReal;
using namespace visualDebugger;
using namespace utils;
// Diagram type name the debugger supports; the plugin's actions are enabled
// only while a tab with this root diagram is active (see activeTabChanged()).
QString const blockDiagram = "BlockDiagram";
/// Constructs the plugin with all collaborators null; they are created later
/// in init() once the PluginConfigurator is available. Only the preferences
/// page is created eagerly so preferencesPage() works before init() runs.
VisualDebuggerPlugin::VisualDebuggerPlugin()
	: mVisualDebugger(nullptr)
	, mDebuggerConnector(nullptr)
	, mErrorReporter(nullptr)
	, mWatchListWindow(nullptr)
	, mParser(nullptr)
	, mPreferencesPage(new VisualDebuggerPreferencesPage())
{
}
/// NOTE(review): mVisualDebugger and mParser are new'ed in init() but not
/// deleted here; mDebuggerConnector is Qt-parented to this plugin so Qt
/// frees it, but the ownership of the other two should be verified to rule
/// out leaks.
VisualDebuggerPlugin::~VisualDebuggerPlugin()
{
}
/// Second-stage construction: wires the plugin into the application through
/// the configurator. Creates the expression parser, the visual debugger that
/// drives diagram interpretation, and the gdb connector (Qt-parented to this
/// plugin), then subscribes to tab changes to toggle action availability.
void VisualDebuggerPlugin::init(PluginConfigurator const &configurator)
{
	mErrorReporter = configurator.mainWindowInterpretersInterface().errorReporter();
	mParser = new BlockParser(mErrorReporter);
	mVisualDebugger = new VisualDebugger(configurator.logicalModelApi()
			, configurator.graphicalModelApi()
			, configurator.mainWindowInterpretersInterface()
			, mParser
			);
	mDebuggerConnector = new DebuggerConnector(this);
	connect(&configurator.systemEvents(), &SystemEvents::activeTabChanged
			, this, &VisualDebuggerPlugin::activeTabChanged);
}
// Exposes the plugin's settings page under the "Block Diagram Debug" tab.
QPair<QString, gui::PreferencesPage *> VisualDebuggerPlugin::preferencesPage()
{
	gui::PreferencesPage * const page = static_cast<gui::PreferencesPage *>(mPreferencesPage);
	return qMakePair(tr("Block Diagram Debug"), page);
}
/// Builds the plugin's two "tools" menus -- interpreter-based debugging and
/// gdb-based debugging -- wires every action to its slot, and returns the
/// ActionInfo descriptors the tool-plugin framework consumes.
/// NOTE(review): the two connects on mDebuggerConnector near the bottom
/// assume init() already ran (init() creates the connector) -- confirm the
/// framework's call order.
QList<qReal::ActionInfo> VisualDebuggerPlugin::actions()
{
	// Menu 1: debugging with the built-in interpreter.
	mVisualDebugMenu = new QMenu(tr("Visual debug"));
	ActionInfo visualDebugMenuInfo(mVisualDebugMenu, "tools");
	mDebugAction = new QAction(tr("Interpret (automatic)"), nullptr);
	mDebugAction->setShortcut(QKeySequence(Qt::Key_F5));
	connect(mDebugAction, SIGNAL(triggered()), this, SLOT(debug()));
	mVisualDebugMenu->addAction(mDebugAction);
	mDebugSingleStepAction = new QAction(tr("Interpret (one step)"), nullptr);
	mDebugSingleStepAction->setShortcut(QKeySequence(Qt::Key_F6));
	connect(mDebugSingleStepAction, SIGNAL(triggered()), this, SLOT(debugSingleStep()));
	mVisualDebugMenu->addAction(mDebugSingleStepAction);
	mWatchListAction = new QAction(tr("Show watch list"), nullptr);
	mWatchListAction->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_I));
	connect(mWatchListAction, SIGNAL(triggered()), this, SLOT(showWatchList()));
	mVisualDebugMenu->addAction(mWatchListAction);
	// Menu 2: debugging through an external gdb process.
	mVisualDebugWithGdbMenu = new QMenu(tr("Visual debug (with gdb)"));
	ActionInfo visualDebugWithGdbMenuInfo(mVisualDebugWithGdbMenu, "tools");
	mGenerateAndBuildAction = new QAction(tr("Generate and build"), nullptr);
	mGenerateAndBuildAction->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_F5));
	connect(mGenerateAndBuildAction, SIGNAL(triggered()), this, SLOT(generateAndBuild()));
	mVisualDebugWithGdbMenu->addAction(mGenerateAndBuildAction);
	mStartDebuggerAction = new QAction(tr("Start debugger (gdb)"), nullptr);
	mStartDebuggerAction->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_F6));
	connect(mStartDebuggerAction, SIGNAL(triggered()), this, SLOT(startDebugger()));
	mVisualDebugWithGdbMenu->addAction(mStartDebuggerAction);
	mConfigureAction = new QAction(tr("Configure"), nullptr);
	mConfigureAction->setShortcut(QKeySequence(Qt::CTRL + Qt::SHIFT + Qt::Key_F6));
	connect(mConfigureAction, SIGNAL(triggered()), this, SLOT(configureDebugger()));
	mVisualDebugWithGdbMenu->addAction(mConfigureAction);
	mBreakMainAction = new QAction(tr("Set breakpoint at start"), nullptr);
	mBreakMainAction->setShortcut(QKeySequence(Qt::Key_F9));
	connect(mBreakMainAction, SIGNAL(triggered()), this, SLOT(setBreakpointAtStart()));
	mVisualDebugWithGdbMenu->addAction(mBreakMainAction);
	mSetBreakpointsAction = new QAction(tr("Set breakpoints on each element"), nullptr);
	mSetBreakpointsAction->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_F9));
	connect(mSetBreakpointsAction, SIGNAL(triggered()), this, SLOT(placeBreakpointsInDebugger()));
	mVisualDebugWithGdbMenu->addAction(mSetBreakpointsAction);
	// The next three actions mirror the raw gdb commands run/next/cont.
	mRunAction = new QAction(tr("run"), nullptr);
	mRunAction->setShortcut(QKeySequence(Qt::Key_F8));
	connect(mRunAction, SIGNAL(triggered()), this, SLOT(runProgramWithDebugger()));
	mVisualDebugWithGdbMenu->addAction(mRunAction);
	mNextAction = new QAction(tr("next"), nullptr);
	mNextAction->setShortcut(QKeySequence(Qt::Key_F10));
	connect(mNextAction, SIGNAL(triggered()), this, SLOT(goToNextInstruction()));
	mVisualDebugWithGdbMenu->addAction(mNextAction);
	mContAction = new QAction(tr("cont"), nullptr);
	mContAction->setShortcut(QKeySequence(Qt::Key_F11));
	connect(mContAction, SIGNAL(triggered()), this, SLOT(goToNextBreakpoint()));
	mVisualDebugWithGdbMenu->addAction(mContAction);
	mKillAction = new QAction(tr("kill"), nullptr);
	mKillAction->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_K));
	connect(mKillAction, SIGNAL(triggered()), this, SLOT(killProgramWithDebugger()));
	mVisualDebugWithGdbMenu->addAction(mKillAction);
	mStartDebuggingAction = new QAction(tr("Start debug (automatic)"), nullptr);
	mStartDebuggingAction->setShortcut(QKeySequence(Qt::Key_F7));
	connect(mStartDebuggingAction, SIGNAL(triggered()), this, SLOT(startDebugging()));
	mVisualDebugWithGdbMenu->addAction(mStartDebuggingAction);
	mCloseAllAction = new QAction(tr("Cancel debug"), nullptr);
	mCloseAllAction->setShortcut(QKeySequence(Qt::CTRL + Qt::Key_F12));
	connect(mCloseAllAction, SIGNAL(triggered()), this, SLOT(closeDebuggerProcessAndThread()));
	mVisualDebugWithGdbMenu->addAction(mCloseAllAction);
	// Surface gdb's stdout/stderr through the plugin's drawing slots.
	connect(mDebuggerConnector, SIGNAL(readyReadStdOutput(QString)), this, SLOT(drawDebuggerStdOutput(QString)));
	connect(mDebuggerConnector, SIGNAL(readyReadErrOutput(QString)), this, SLOT(drawDebuggerErrOutput(QString)));
	mActionInfos << visualDebugMenuInfo << visualDebugWithGdbMenuInfo;
	return mActionInfos;
}
// Enables the debug actions and menus only while a Block Diagram tab is the
// active one; disables them for every other diagram type.
void VisualDebuggerPlugin::activeTabChanged(TabInfo const &info)
{
	bool const isBlockDiagram = info.rootDiagramId().diagram() == blockDiagram;
	for (ActionInfo const &entry : mActionInfos) {
		if (entry.isAction()) {
			entry.action()->setEnabled(isBlockDiagram);
		} else {
			entry.menu()->setEnabled(isBlockDiagram);
		}
	}
}
/// Opens the watch-list window bound to the expression parser; any previously
/// opened window is closed first so at most one is visible.
void VisualDebuggerPlugin::showWatchList()
{
	if (mWatchListWindow) {
		// NOTE(review): the old window is closed but never deleted here;
		// presumably it deletes itself on close (Qt::WA_DeleteOnClose) --
		// confirm, otherwise each invocation leaks a window.
		mWatchListWindow->close();
	}
	mWatchListWindow = new WatchListWindow(mParser);
	mWatchListWindow->show();
}
// Slot for "Interpret (automatic)" (F5): interprets the whole current diagram.
void VisualDebuggerPlugin::debug()
{
	mErrorReporter->clear();
	mVisualDebugger->setCurrentDiagram();
	if (!mVisualDebugger->canDebug(VisualDebugger::fullDebug)) {
		return;
	}
	mVisualDebugger->debug();
}
// Slot for "Interpret (one step)" (F6): advances interpretation by one step.
void VisualDebuggerPlugin::debugSingleStep()
{
	mErrorReporter->clear();
	mVisualDebugger->setCurrentDiagram();
	if (!mVisualDebugger->canDebug(VisualDebugger::singleStepDebug)) {
		return;
	}
	mVisualDebugger->debugSingleStep();
}
// Generates code for the current diagram and builds it, reporting success to
// the user via the error reporter.
void VisualDebuggerPlugin::generateAndBuild()
{
	mErrorReporter->clear();
	mVisualDebugger->setCurrentDiagram();
	if (mVisualDebugger->canDebug(VisualDebugger::debugWithDebugger)) {
		mVisualDebugger->generateCode();
		if (mVisualDebugger->canBuild()) {
			// NOTE(review): run() is invoked before build(); verify this
			// ordering is intentional in DebuggerConnector.
			mDebuggerConnector->run();
			mDebuggerConnector->build();
			if (!mDebuggerConnector->hasBuildError()) {
				// Fixed grammar in the user-facing message ("builded" -> "built").
				mErrorReporter->addInformation(tr("Code generated and built successfully"));
			}
		}
	}
}
// Launches the built program under gdb unless a gdb session is already live.
void VisualDebuggerPlugin::startDebugger()
{
	mErrorReporter->clear();
	if (!mVisualDebugger->canDebug(VisualDebugger::debugWithDebugger)
			|| mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mVisualDebugger->setDebugType(VisualDebugger::debugWithDebugger);
	mDebuggerConnector->run();
	mDebuggerConnector->startDebugger();
}
// Sends the configuration step to gdb; a no-op unless gdb is running.
void VisualDebuggerPlugin::configureDebugger()
{
	if (!mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mDebuggerConnector->configure();
}
// Asks gdb to break on main, refreshing the diagram-to-line mapping first.
void VisualDebuggerPlugin::setBreakpointAtStart()
{
	if (!mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mVisualDebugger->setCurrentDiagram();
	mVisualDebugger->createIdByLineCorrelation();
	mDebuggerConnector->sendCommand("break main\n");
}
// Forwards gdb's "run" command to start the debugged program.
void VisualDebuggerPlugin::runProgramWithDebugger()
{
	if (!mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mDebuggerConnector->sendCommand("run\n");
}
// Forwards gdb's "kill" command to terminate the debugged program.
void VisualDebuggerPlugin::killProgramWithDebugger()
{
	if (!mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mDebuggerConnector->sendCommand("kill\n");
}
// Sets a gdb breakpoint on the generated-source line of every diagram element.
void VisualDebuggerPlugin::placeBreakpointsInDebugger()
{
	if (!mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mVisualDebugger->setCurrentDiagram();
	mVisualDebugger->createIdByLineCorrelation();
	if (!mVisualDebugger->canComputeBreakpoints()) {
		return;
	}
	// computeBreakpoints() transfers ownership of the list to the caller.
	QList<int> *breakpoints = mVisualDebugger->computeBreakpoints();
	for (int const &line : *breakpoints) {
		mDebuggerConnector->sendCommand("break " + QString::number(line) + "\n");
	}
	delete breakpoints;
}
// Forwards gdb's "cont" command: resume until the next breakpoint.
void VisualDebuggerPlugin::goToNextBreakpoint()
{
	if (!mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mDebuggerConnector->sendCommand("cont\n");
}
// Forwards gdb's "next" command: step over one source line.
void VisualDebuggerPlugin::goToNextInstruction()
{
	if (!mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mDebuggerConnector->sendCommand("next\n");
}
// Cancels an active gdb session: clears diagram highlighting, resets the
// debug mode, and terminates the debugger process.
void VisualDebuggerPlugin::closeDebuggerProcessAndThread()
{
	if (!mDebuggerConnector->isDebuggerRunning()) {
		return;
	}
	mVisualDebugger->dehighlight();
	mVisualDebugger->setDebugType(VisualDebugger::noDebug);
	mDebuggerConnector->finishProcess();
}
// One-click gdb debug: generate, build, launch gdb, configure it, break at
// main and run. Each delegated step re-checks the connector state itself.
void VisualDebuggerPlugin::startDebugging()
{
	mErrorReporter->clear();
	if (!mVisualDebugger->canDebug(VisualDebugger::debugWithDebugger)) {
		return;
	}
	generateAndBuild();
	startDebugger();
	configureDebugger();
	setBreakpointAtStart();
	runProgramWithDebugger();
}
// Parses gdb stdout and highlights the diagram element matching the reported
// source line. Two formats are recognised: output beginning with a line
// number ("<n>\t...") and output containing "<codeFileName>:<n>".
void VisualDebuggerPlugin::drawDebuggerStdOutput(QString const &output)
{
	mErrorReporter->addInformation(output);
	if (output.isEmpty()) {
		// Guard added: the original called output.at(0) unconditionally,
		// which is out-of-range on an empty chunk of debugger output.
		return;
	}
	if ('1' <= output.at(0) && output.at(0) <= '9') {
		int const index = output.indexOf("\t");
		// (Typo fix: local was previously named "idToLigth".)
		Id const idToLight = mVisualDebugger->getIdByLine(output.mid(0, index).toInt());
		mVisualDebugger->highlight(idToLight);
	} else {
		QString const fileName = SettingsManager::value("codeFileName").toString();
		int index = output.indexOf(fileName + ":");
		if (index > -1) {
			index += (fileName.length() + 1);
			int boundaryIndex = index;
			// Bounds check added: the original loop could index one past the
			// end when the line number ran to the end of the string.
			while (boundaryIndex < output.length()
					&& '0' <= output.at(boundaryIndex) && output.at(boundaryIndex) <= '9') {
				boundaryIndex++;
			}
			Id const idToLight = mVisualDebugger->getIdByLine(output.mid(index, boundaryIndex - index).toInt());
			mVisualDebugger->highlight(idToLight);
		}
	}
}
/// Handles gdb stderr: any error output aborts the visual debug session --
/// highlighting is cleared, the debug mode reset, and the text surfaced to
/// the user as a critical error.
void VisualDebuggerPlugin::drawDebuggerErrOutput(QString const &output)
{
	mVisualDebugger->dehighlight();
	mVisualDebugger->setDebugType(VisualDebugger::noDebug);
	mErrorReporter->addCritical(output);
}
|
<gh_stars>10-100
package Chapter2_5High;
import java.io.File;
import java.util.Arrays;
public class FileSorter {
    // Exercise 2.5.28: accept a directory name on the command line and print
    // the names of all files it contains, sorted alphabetically.
    // (Original comment was garbled Chinese mojibake; rewritten in English.)
    public static void main(String[] args) {
        // Guard added: the original indexed args[0] unconditionally and threw
        // ArrayIndexOutOfBoundsException when run with no arguments.
        if (args.length == 0) {
            System.out.println("usage: FileSorter <directory>");
            return;
        }
        File directory = new File(args[0]);
        if (!directory.exists()) {
            System.out.println(args[0] + " does not exist");
            return;
        }
        if (!directory.isDirectory()) {
            System.out.println(args[0] + " is not a directory");
            return;
        }
        String[] files = directory.list();
        if (files == null) {
            // list() can return null on an I/O error even for a directory.
            System.out.println("could not read files");
            return;
        }
        Arrays.sort(files);
        // Enhanced for loop replaces the index-based iteration.
        for (String file : files) {
            System.out.println(file);
        }
    }
}
# Upload each per-species JBrowse data directory of release WS275 to S3.
# The 22 copy-pasted upload_to_S3.pl invocations differed only in the
# species/project directory name, so they are factored into one loop.
DATA_ROOT=/home/scain/scain/275_build/jbrowse_build/jbrowse/tools/genome/jbrowse/data
for species in \
    b_malayi_PRJNA10729 \
    c_angaria_PRJNA51225 \
    c_brenneri_PRJNA20035 \
    c_briggsae_PRJNA10731 \
    c_elegans_PRJEB28388 \
    c_elegans_PRJNA13758 \
    c_elegans_PRJNA275000 \
    c_inopinata_PRJDB5687 \
    c_japonica_PRJNA12591 \
    c_latens_PRJNA248912 \
    c_nigoni_PRJNA384657 \
    c_remanei_PRJNA248909 \
    c_remanei_PRJNA248911 \
    c_remanei_PRJNA53967 \
    c_sinica_PRJNA194557 \
    c_tropicalis_PRJNA53597 \
    o_tipulae_PRJEB15512 \
    o_volvulus_PRJEB513 \
    p_pacificus_PRJNA12644 \
    p_redivivus_PRJNA186477 \
    s_ratti_PRJEB125 \
    t_muris_PRJEB126
do
    ./upload_to_S3.pl --profile wormbase --bucket wormbase-modencode \
        --local "$DATA_ROOT/$species/" \
        --remote "docker/WS275/$species"
done
#skip c_elegans_simple upload--it's just the N2 data again
|
import * as BB from "bbcode-tags";
import { SearchEngine, searchURL } from "~src/search-engines";
import * as SITE from "~src/site";
import { selectedTextIn, wrapIn } from "../logic/textarea";
/**
 * Creates a textarea command that wraps the current selection in a BBCode
 * [url] tag pointing at a search-engine query for the selected text.
 */
export default (engine: SearchEngine) => (textarea: HTMLTextAreaElement, _: boolean) => {
    const tagName = SITE.TAG.url;
    const selection = selectedTextIn(textarea);
    const openingTag = BB.start(tagName, searchURL(engine, selection));
    const closingTag = BB.end(tagName);
    // With no selection, land the cursor just inside the `"]` that closes the
    // start tag; otherwise keep the user's selection.
    const cursor = selection === "" ? openingTag.length - 2 : "KEEP_SELECTION";
    wrapIn(textarea, { before: openingTag, after: closingTag, cursor });
};
|
package com.boot;
import com.boot.config.GenerateProperties;
import com.boot.config.ScanClassProperties;
import com.boot.config.SwaggerConfig;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
import org.springframework.context.annotation.Import;
import springfox.documentation.swagger2.annotations.EnableSwagger2;
/**
* @author ๆธธๆฟๆฐ
* 2021/9/5 12:40
*/
@SpringBootApplication
@Import(SwaggerConfig.class) //ๅฏผๅ
ฅswaggerConfig็้
็ฝฎ็ฑป
@EnableSwagger2 //ๅผๅฏSwagger2
@EnableConfigurationProperties(
{
ScanClassProperties.class,
GenerateProperties.class
}
)
@EnableFeignClients
@EnableDiscoveryClient
public class GenerateApplication3991 {
public static void main(String[] args) {
SpringApplication.run(GenerateApplication3991.class,args);
}
} |
<filename>src/brdf.hpp
/*#ifndef __BRDF_HPP__
#define __BRDF_HPP__
#include "glm.hpp"
#include "radiance.hpp"
#include <iostream>
#include <tuple>
class BRDF{
public:
virtual float PdfSpec(glm::vec3 N, glm::vec3 Vi, glm::vec3 Vr, bool debug = false) const = 0;
virtual float PdfDiff() const = 0;
Spectrum Apply(Spectrum kD, Spectrum kS, glm::vec3 N, glm::vec3 Vi, glm::vec3 Vr, bool debug = false) const{
float d = PdfDiff();
float s = PdfSpec(N,Vi,Vr, debug);
IFDEBUG std::cout << "d = " << d << ", s = " << s << std::endl;
return kD*d + kS*s;
}
virtual std::tuple<glm::vec3, Spectrum> GetRay(glm::vec3 normal, glm::vec3 inc, Spectrum diffuse, Spectrum specular, glm::vec2 sample, bool debug = false) const;
};
class BRDFDiffuseCosine : public BRDF{
public:
virtual float PdfSpec(glm::vec3 N, glm::vec3 Vi, glm::vec3 Vr, bool debug = false) const override;
virtual float PdfDiff() const override;
};
class BRDFPhongEnergy : public BRDF{
public:
BRDFPhongEnergy(float exponent) : exponent(exponent) {}
virtual float PdfSpec(glm::vec3 N, glm::vec3 Vi, glm::vec3 Vr, bool debug = false) const override;
virtual float PdfDiff() const override;
private:
float exponent;
};
class BRDFCookTorr : public BRDF{
public:
BRDFCookTorr(float phong_exponent, float ior);
virtual float PdfSpec(glm::vec3 N, glm::vec3 Vi, glm::vec3 Vr, bool debug = false) const override;
virtual float PdfDiff() const override;
private:
float roughness;
float F0;
};
class BRDFLTCBeckmann : public BRDF{
public:
BRDFLTCBeckmann(float phong_exponent);
virtual float PdfSpec(glm::vec3 N, glm::vec3 Vi, glm::vec3 Vr, bool debug = false) const override;
virtual float PdfDiff() const override;
virtual std::tuple<glm::vec3, Spectrum> GetRay(glm::vec3 normal, glm::vec3 inc, Spectrum diffuse, Spectrum specular, glm::vec2 sample, bool debug = false) const override;
private:
float roughness;
};
class BRDFLTCGGX : public BRDF{
public:
BRDFLTCGGX(float phong_exponent);
virtual float PdfSpec(glm::vec3 N, glm::vec3 Vi, glm::vec3 Vr, bool debug = false) const override;
virtual float PdfDiff() const override;
virtual std::tuple<glm::vec3, Spectrum> GetRay(glm::vec3 normal, glm::vec3 inc, Spectrum diffuse, Spectrum specular, glm::vec2 sample, bool debug = false) const override;
private:
float roughness;
};
#endif // __BRDF_HPP__
*/
|
#if !defined(RESPONSE_HEADER_H)
#define RESPONSE_HEADER_H
#include "utils.h"
#include <map>
#include <string>
#include <sys/stat.h>
using std::map;
using std::string;
using std::to_string;
// Builds and serializes HTTP/1.1 status lines and headers for three payload
// kinds: an HTML file, an in-memory string, and an arbitrary file whose
// Content-Type is chosen by file suffix.
class response_header {
public:
	// Maps status codes to reason phrases (e.g. 200 -> "OK").
	// NOTE(review): defined empty below (L4548); unless it is populated
	// elsewhere, `phrase` lookups yield "" -- confirm the init site.
	static map<int, string> code2phrase;
	static string STR_VERSION_HTTP_1_1;
	static const int CODE_INTERNAL_SERVER_ERROR;
	static const int CODE_NOT_FOUND;
	static const int CODE_OK;
	// Maps a file suffix (".css", ".bmp", ...) to its Content-Type value.
	static map<string, string> mp_file_suffix_2_content_type;

	// Status-line components plus the general header fields.
	string version;
	int status_code;
	string phrase;
	map<string, string> mp_gene_headers;

	response_header() {}
	~response_header() {}

	// header with content-type:html, content-length:size of file of [path].
	// Returns 0 on success, -1 (after logging) when the file cannot be stat'ed.
	static int htmlHeader(string path_file, response_header &header, int status_code = 200) {
		int ret = 0;
		struct stat file_info;
		ret = stat(path_file.c_str(), &file_info);
		if (ret == -1) {
			logger::fail({"in ", __func__, ": call to stat file: ", path_file, " failed"}, true);
			return -1;
		}
		header.version = STR_VERSION_HTTP_1_1;
		header.status_code = status_code;
		header.phrase = code2phrase[status_code];
		header.mp_gene_headers["Content-Type"] = "text/html;charset=UTF-8";
		header.mp_gene_headers["Content-Length"] = to_string(file_info.st_size);
		return 0;
	}

	// Header for an in-memory plain-text body; Content-Length is data.size().
	static int strHeader(const string &data, response_header &header, int status_code = 200) {
		header.version = STR_VERSION_HTTP_1_1;
		header.status_code = status_code;
		header.phrase = code2phrase[status_code];
		header.mp_gene_headers["Content-Type"] = "text/plain;charset=UTF-8";
		header.mp_gene_headers["Content-Length"] = to_string(data.size());
		return 0;
	}

	// Header for serving an arbitrary file: Content-Length from stat(), and
	// Content-Type resolved from the file suffix (falling back to text/plain).
	static int fileHeader(const string path_file, response_header &header, int status_code = 200) {
		int ret = 0;
		struct stat file_info;
		ret = stat(path_file.c_str(), &file_info);
		if (ret == -1) {
			logger::fail({"in ", __func__, ": call to stat file: ", path_file, " failed"}, true);
			return -1;
		}
		header.version = STR_VERSION_HTTP_1_1;
		header.status_code = status_code;
		header.phrase = code2phrase[status_code];
		header.mp_gene_headers["Content-Type"] = "text/plain;charset=UTF-8";
		header.mp_gene_headers["Content-Length"] = to_string(file_info.st_size);
		// set content-type according to suffix
		size_t last_pos_of_dot = path_file.find_last_of('.');
		if (last_pos_of_dot != string::npos) {
			const string file_type = path_file.substr(last_pos_of_dot);
			if (mp_file_suffix_2_content_type.find(file_type) != mp_file_suffix_2_content_type.end())
				header.mp_gene_headers["Content-Type"] = mp_file_suffix_2_content_type[file_type];
		}
		return 0;
	}

	// Serializes "VERSION CODE PHRASE\r\n" + "Name:Value\r\n"... + "\r\n".
	string toString() {
		stringstream ss;
		const char sp = ' ';
		const string crlf = "\r\n";
		const char colon = ':';
		ss << version << sp << status_code << sp << phrase << crlf;
		for (auto &&p : mp_gene_headers)
			ss << p.first << colon << p.second << crlf;
		ss << crlf;
		return ss.str();
	}
};
// Out-of-class definitions for response_header's static members.
const int response_header::CODE_OK = 200;
const int response_header::CODE_NOT_FOUND = 404;
const int response_header::CODE_INTERNAL_SERVER_ERROR = 500;
string response_header::STR_VERSION_HTTP_1_1 = "HTTP/1.1";
// NOTE(review): code2phrase is default-constructed empty here; unless it is
// populated elsewhere, every phrase lookup returns "" -- verify.
map<int, string> response_header::code2phrase;
// Maps a file suffix (including the leading dot, case-sensitive) to the HTTP
// Content-Type emitted for that file.
// NOTE: the original initializer listed several suffixes twice (.dwf, .eps,
// .ico, .jpe, .jpg, .mdb, .pdf, .png, .ppt, .ps, .rtf, .tif, .vsd, .vst,
// .xls). std::map keeps at most one entry per key, and for construction from
// a range containing duplicate keys the standard leaves it unspecified which
// element is inserted. The later duplicates have been removed, keeping the
// first occurrence of each key -- this matches the behavior of common
// implementations and makes the mapping deterministic and portable.
map<string, string> response_header::mp_file_suffix_2_content_type = {
    {".tif", "image/tiff"},
    {".asf", "video/x-ms-asf"},
    {".asp", "text/asp"},
    {".asx", "video/x-ms-asf"},
    {".au", "audio/basic"},
    {".avi", "video/avi"},
    {".awf", "application/vnd.adobe.workflow"},
    {".biz", "text/xml"},
    {".bmp", "application/x-bmp"},
    {".bot", "application/x-bot"},
    {".cdr", "application/x-cdr"},
    {".cel", "application/x-cel"},
    {".cer", "application/x-x509-ca-cert"},
    {".cg4", "application/x-g4"},
    {".cgm", "application/x-cgm"},
    {".cit", "application/x-cit"},
    {".class", "java/*"},
    {".cml", "text/xml"},
    {".cmp", "application/x-cmp"},
    {".cmx", "application/x-cmx"},
    {".cot", "application/x-cot"},
    {".crl", "application/pkix-crl"},
    {".crt", "application/x-x509-ca-cert"},
    {".csi", "application/x-csi"},
    {".css", "text/css"},
    {".cut", "application/x-cut"},
    {".dbf", "application/x-dbf"},
    {".dbm", "application/x-dbm"},
    {".dbx", "application/x-dbx"},
    {".dcd", "text/xml"},
    {".dcx", "application/x-dcx"},
    {".der", "application/x-x509-ca-cert"},
    {".dgn", "application/x-dgn"},
    {".dib", "application/x-dib"},
    {".dll", "application/x-msdownload"},
    {".doc", "application/msword"},
    {".dot", "application/msword"},
    {".drw", "application/x-drw"},
    {".dtd", "text/xml"},
    {".dwf", "Model/vnd.dwf"},
    {".dwg", "application/x-dwg"},
    {".dxb", "application/x-dxb"},
    {".dxf", "application/x-dxf"},
    {".edn", "application/vnd.adobe.edn"},
    {".emf", "application/x-emf"},
    {".eml", "message/rfc822"},
    {".ent", "text/xml"},
    {".epi", "application/x-epi"},
    {".eps", "application/x-ps"},
    {".etd", "application/x-ebx"},
    {".exe", "application/x-msdownload"},
    {".fax", "image/fax"},
    {".fdf", "application/vnd.fdf"},
    {".fif", "application/fractals"},
    {".fo", "text/xml"},
    {".frm", "application/x-frm"},
    {".g4", "application/x-g4"},
    {".gbr", "application/x-gbr"},
    {".", "application/x-"},  // NOTE(review): looks like a truncated entry from the source list -- verify
    {".gif", "image/gif"},
    {".gl2", "application/x-gl2"},
    {".gp4", "application/x-gp4"},
    {".hgl", "application/x-hgl"},
    {".hmr", "application/x-hmr"},
    {".hpg", "application/x-hpgl"},
    {".hpl", "application/x-hpl"},
    {".hqx", "application/mac-binhex40"},
    {".hrf", "application/x-hrf"},
    {".hta", "application/hta"},
    {".htc", "text/x-component"},
    {".htm", "text/html"},
    {".html", "text/html"},
    {".htt", "text/webviewhtml"},
    {".htx", "text/html"},
    {".icb", "application/x-icb"},
    {".ico", "image/x-icon"},
    {".iff", "application/x-iff"},
    {".ig4", "application/x-g4"},
    {".igs", "application/x-igs"},
    {".iii", "application/x-iphone"},
    {".img", "application/x-img"},
    {".ins", "application/x-internet-signup"},
    {".isp", "application/x-internet-signup"},
    {".IVF", "video/x-ivf"},  // NOTE(review): only upper-case key; lookup is case-sensitive
    {".java", "java/*"},
    {".jfif", "image/jpeg"},
    {".jpe", "image/jpeg"},
    {".jpeg", "image/jpeg"},
    {".jpg", "image/jpeg"},
    {".js", "application/x-javascript"},
    {".jsp", "text/html"},
    {".la1", "audio/x-liquid-file"},
    {".lar", "application/x-laplayer-reg"},
    {".latex", "application/x-latex"},
    {".lavs", "audio/x-liquid-secure"},
    {".lbm", "application/x-lbm"},
    {".lmsff", "audio/x-la-lms"},
    {".ls", "application/x-javascript"},
    {".ltr", "application/x-ltr"},
    {".m1v", "video/x-mpeg"},
    {".m2v", "video/x-mpeg"},
    {".m3u", "audio/mpegurl"},
    {".m4e", "video/mpeg4"},
    {".mac", "application/x-mac"},
    {".man", "application/x-troff-man"},
    {".math", "text/xml"},
    {".mdb", "application/msaccess"},
    {".mfp", "application/x-shockwave-flash"},
    {".mht", "message/rfc822"},
    {".mhtml", "message/rfc822"},
    {".mp1", "audio/mp1"},
    {".mp2", "audio/mp2"},
    {".mp2v", "video/mpeg"},
    {".mp3", "audio/mp3"},
    {".mp4", "video/mpeg4"},
    {".mpa", "video/x-mpg"},
    {".mpd", "application/vnd.ms-project"},
    {".mpe", "video/x-mpeg"},
    {".mpeg", "video/mpg"},
    {".mpg", "video/mpg"},
    {".mpga", "audio/rn-mpeg"},
    {".mpp", "application/vnd.ms-project"},
    {".mps", "video/x-mpeg"},
    {".mpt", "application/vnd.ms-project"},
    {".mpv", "video/mpg"},
    {".mpv2", "video/mpeg"},
    {".mpw", "application/vnd.ms-project"},
    {".mpx", "application/vnd.ms-project"},
    {".mtx", "text/xml"},
    {".mxp", "application/x-mmxp"},
    {".net", "image/pnetvue"},
    {".nrf", "application/x-nrf"},
    {".nws", "message/rfc822"},
    {".odc", "text/x-ms-odc"},
    {".out", "application/x-out"},
    {".pdf", "application/pdf"},
    {".plt", "application/x-plt"},
    {".png", "image/png"},
    {".pot", "application/vnd.ms-powerpoint"},
    {".ppa", "application/vnd.ms-powerpoint"},
    {".ppm", "application/x-ppm"},
    {".pps", "application/vnd.ms-powerpoint"},
    {".ppt", "application/vnd.ms-powerpoint"},
    {".pr", "application/x-pr"},
    {".prf", "application/pics-rules"},
    {".prn", "application/x-prn"},
    {".prt", "application/x-prt"},
    {".ps", "application/x-ps"},
    {".ptn", "application/x-ptn"},
    {".rec", "application/vnd.rn-recording"},
    {".red", "application/x-red"},
    {".rgb", "application/x-rgb"},
    {".rm", "application/vnd.rn-realmedia"},
    {".rmf", "application/vnd.adobe.rmf"},
    {".rmi", "audio/mid"},
    {".rmj", "application/vnd.rn-realsystem-rmj"},
    {".rmm", "audio/x-pn-realaudio"},
    {".rmp", "application/vnd.rn-rn_music_package"},
    {".rms", "application/vnd.rn-realmedia-secure"},
    {".rmvb", "application/vnd.rn-realmedia-vbr"},
    {".rmx", "application/vnd.rn-realsystem-rmx"},
    {".rnx", "application/vnd.rn-realplayer"},
    {".rp", "image/vnd.rn-realpix"},
    {".rpm", "audio/x-pn-realaudio-plugin"},
    {".rsml", "application/vnd.rn-rsml"},
    {".rt", "text/vnd.rn-realtext"},
    {".rtf", "application/msword"},
    {".rv", "video/vnd.rn-realvideo"},
    {".sam", "application/x-sam"},
    {".sat", "application/x-sat"},
    {".sdp", "application/sdp"},
    {".spc", "application/x-pkcs7-certificates"},
    {".spl", "application/futuresplash"},
    {".spp", "text/xml"},
    {".ssm", "application/streamingmedia"},
    {".sst", "application/vnd.ms-pki.certstore"},
    {".stl", "application/vnd.ms-pki.stl"},
    {".stm", "text/html"},
    {".sty", "application/x-sty"},
    {".svg", "text/xml"},
    {".swf", "application/x-shockwave-flash"},
    {".tdf", "application/x-tdf"},
    {".tg4", "application/x-tg4"},
    {".tga", "application/x-tga"},
    {".tiff", "image/tiff"},
    {".tld", "text/xml"},
    {".top", "drawing/x-top"},
    {".torrent", "application/x-bittorrent"},
    {".tsd", "text/xml"},
    {".txt", "text/plain"},
    {".uin", "application/x-icq"},
    {".uls", "text/iuls"},
    {".vcf", "text/x-vcard"},
    {".vda", "application/x-vda"},
    {".vdx", "application/vnd.visio"},
    {".vml", "text/xml"},
    {".vpg", "application/x-vpeg005"},
    {".vsd", "application/vnd.visio"},
    {".vss", "application/vnd.visio"},
    {".vst", "application/vnd.visio"},
    {".vsw", "application/vnd.visio"},
    {".vsx", "application/vnd.visio"},
    {".vtx", "application/vnd.visio"},
    {".vxml", "text/xml"},
    {".wav", "audio/wav"},
    {".wax", "audio/x-ms-wax"},
    {".wb1", "application/x-wb1"},
    {".wb2", "application/x-wb2"},
    {".wb3", "application/x-wb3"},
    {".wbmp", "image/vnd.wap.wbmp"},
    {".wiz", "application/msword"},
    {".wk3", "application/x-wk3"},
    {".wk4", "application/x-wk4"},
    {".wkq", "application/x-wkq"},
    {".wks", "application/x-wks"},
    {".wm", "video/x-ms-wm"},
    {".wma", "audio/x-ms-wma"},
    {".wmd", "application/x-ms-wmd"},
    {".wmf", "application/x-wmf"},
    {".wml", "text/vnd.wap.wml"},
    {".wmv", "video/x-ms-wmv"},
    {".wmx", "video/x-ms-wmx"},
    {".wmz", "application/x-ms-wmz"},
    {".wp6", "application/x-wp6"},
    {".wpd", "application/x-wpd"},
    {".wpg", "application/x-wpg"},
    {".wpl", "application/vnd.ms-wpl"},
    {".wq1", "application/x-wq1"},
    {".wr1", "application/x-wr1"},
    {".wri", "application/x-wri"},
    {".wrk", "application/x-wrk"},
    {".ws", "application/x-ws"},
    {".ws2", "application/x-ws"},
    {".wsc", "text/scriptlet"},
    {".wsdl", "text/xml"},
    {".wvx", "video/x-ms-wvx"},
    {".xdp", "application/vnd.adobe.xdp"},
    {".xdr", "text/xml"},
    {".xfd", "application/vnd.adobe.xfd"},
    {".xfdf", "application/vnd.adobe.xfdf"},
    {".xhtml", "text/html"},
    {".xls", "application/vnd.ms-excel"},
    {".xlw", "application/x-xlw"},
    {".xml", "text/xml"},
    {".xpl", "audio/scpls"},
    {".xq", "text/xml"},
    {".xql", "text/xml"},
    {".xquery", "text/xml"},
    {".ipa", "application/vnd.iphone"},
    {".apk", "application/vnd.android.package-archive"},
};
#endif // RESPONSE_HEADER_H
|
<filename>packages/bus-rdf-resolve-quad-pattern/lib/ActorRdfResolveQuadPatternSource.ts
import type { ActionContext, IActorArgs, IActorTest } from '@comunica/core';
import type * as RDF from '@rdfjs/types';
import type { AsyncIterator } from 'asynciterator';
import type { Algebra } from 'sparqlalgebrajs';
import type { IActionRdfResolveQuadPattern,
IActorRdfResolveQuadPatternOutput } from './ActorRdfResolveQuadPattern';
import {
ActorRdfResolveQuadPattern,
} from './ActorRdfResolveQuadPattern';
/**
 * A base implementation for rdf-resolve-quad-pattern events
 * that wraps around an {@link IQuadSource}.
 *
 * Subclasses only need to supply a source via {@link getSource};
 * run() then delegates pattern resolution to that source.
 *
 * @see IQuadSource
 */
export abstract class ActorRdfResolveQuadPatternSource extends ActorRdfResolveQuadPattern {
  public constructor(args: IActorArgs<IActionRdfResolveQuadPattern, IActorTest, IActorRdfResolveQuadPatternOutput>) {
    super(args);
  }

  // Always passes: any resolve-quad-pattern action may be attempted.
  public async test(action: IActionRdfResolveQuadPattern): Promise<IActorTest> {
    return true;
  }

  public async run(action: IActionRdfResolveQuadPattern): Promise<IActorRdfResolveQuadPatternOutput> {
    const source = await this.getSource(action.context, action.pattern);
    return await this.getOutput(source, action.pattern, action.context);
  }

  /**
   * Get the output of the given action on a source.
   * @param {IQuadSource} source A quad source, possibly lazy.
   * @param {RDF.BaseQuad} pattern The quad pattern to match.
   * @param {ActionContext} context Optional context data.
   * @return {Promise<IActorRdfResolveQuadPatternOutput>} A promise that resolves to a hash containing
   * a data RDFJS stream.
   */
  protected async getOutput(source: IQuadSource, pattern: RDF.BaseQuad, context?: ActionContext):
  Promise<IActorRdfResolveQuadPatternOutput> {
    // Create data stream
    const data = source.match(pattern.subject, pattern.predicate, pattern.object, pattern.graph);
    return { data };
  }

  /**
   * Get a source instance for the given context.
   * @param {ActionContext} context Optional context data.
   * @param {Algebra.Pattern} operation The operation to apply.
   * @return {Promise<IQuadSource>} A promise that resolves to a source.
   */
  protected abstract getSource(context: ActionContext | undefined, operation: Algebra.Pattern): Promise<IQuadSource>;
}
/**
 * A lazy quad source.
 */
export interface IQuadSource {
  /**
   * Returns a (possibly lazy) stream that processes all quads matching the pattern.
   *
   * The returned stream MUST expose the property 'metadata'.
   * The implementor is responsible for handling cases where 'metadata'
   * is being called without the stream being in flow-mode.
   *
   * @param {RDF.Term} subject The exact subject to match, variable is wildcard.
   * @param {RDF.Term} predicate The exact predicate to match, variable is wildcard.
   * @param {RDF.Term} object The exact object to match, variable is wildcard.
   * @param {RDF.Term} graph The exact graph to match, variable is wildcard.
   * @return {AsyncIterator<RDF.Quad>} The resulting quad stream.
   */
  match: (subject: RDF.Term, predicate: RDF.Term, object: RDF.Term, graph: RDF.Term) => AsyncIterator<RDF.Quad>;
}
|
<gh_stars>0
from django import forms
from .models import Vocabularies
class CreateVocForm(forms.Form):
    """Form for creating a vocabulary: a title plus an optional word list."""
    # Vocabulary title, capped at 20 characters (labels are user-facing French).
    voc_name = forms.CharField(label='Titre', max_length=20, widget=forms.TextInput(attrs={'class': 'form-control'}))
    # Free-text block containing the words; may be left empty.
    words = forms.CharField(label='Vocabulaire',
                            widget=forms.Textarea(attrs={'class': 'form-control'}),
                            required=False)
class SettingsVocForm(forms.Form):
    """Game-settings form: allowed error count, max word count, and the
    vocabulary to play with (labels are user-facing French)."""
    # Range slider: number of wrong guesses allowed before losing (0-12, default 5).
    nb_errors = forms.IntegerField(label='Nombre d\'erreurs permises avant de perdre',
                                   widget=forms.TextInput(attrs={"class": "slider",
                                                                 "type": "range",
                                                                 "min": "0",
                                                                 "max": "12",
                                                                 "value": "5"}))
    # Range slider: maximum number of words to find (1-30, default 10).
    # Fixed mojibake in the label: "mots ร trouver" -> "mots à trouver".
    nb_words = forms.IntegerField(label='Nombre maximum de mots à trouver',
                                  widget=forms.TextInput(attrs={"class": "slider",
                                                                "type": "range",
                                                                "min": "1",
                                                                "max": "30",
                                                                "value": "10"}))

    def __init__(self, *args, **kwargs):
        """Add the vocabulary-selection field at instantiation time so the
        choices reflect the vocabularies that currently exist."""
        # Python 3 zero-argument super() (was the legacy two-argument form).
        super().__init__(*args, **kwargs)
        self.fields['voc_selected'] = forms.ChoiceField(label='Vocabulaire choisi',
                                                        choices=[(voc, voc) for voc in Vocabularies.get_vocs_list()],
                                                        widget=forms.Select(attrs={'class': 'form-control'}))
|
<gh_stars>0
package frc.robot.subsystems;
import edu.wpi.first.wpilibj.VictorSP;
import edu.wpi.first.wpilibj.command.Subsystem;
import frc.robot.commands.Feed;
/**
 * Subsystem wrapping the feeder motor. Exposed as a lazily-created singleton;
 * runs the {@link Feed} command by default.
 */
public class Feeder extends Subsystem {

    /** Lazily-created singleton instance. */
    private static Feeder instance;

    /** Motor controller driving the feeder (PWM channel 4). */
    private VictorSP feedVictor;

    /** Returns the singleton, creating it on first use. */
    public static Feeder getInstance() {
        if (instance == null) {
            instance = new Feeder();
        }
        return instance;
    }

    private Feeder() {
        feedVictor = new VictorSP(4);
    }

    /** Sets the feeder motor output. */
    public void setSpeed(double up) {
        feedVictor.setSpeed(up);
    }

    @Override
    protected void initDefaultCommand() {
        setDefaultCommand(new Feed());
    }
}
|
#include <cppunit/extensions/HelperMacros.h>
// CppUnit fixture skeleton: each registered case drives tArticle->initialize()
// with deliberately invalid config data and expects a TsInitializationException.
// NOTE(review): tConfigData / tInputData / tArticle / tLinks / tPort0 / tPort1
// are referenced but not declared in this snippet -- presumably fixture members
// of the full class; confirm setUp() populates them before each test case runs.
class YourTestClass : public CppUnit::TestFixture {
    CPPUNIT_TEST_SUITE(YourTestClass);
    CPPUNIT_TEST(testInitializationExceptionOnInvalidLiquidMass);
    CPPUNIT_TEST(testInitializationExceptionOnInvalidReferenceSpeed);
    CPPUNIT_TEST_SUITE_END();

public:
    // Forces the max liquid mass down to DBL_EPSILON (per the test name, an
    // invalid value) and expects initialize() to reject it.
    void testInitializationExceptionOnInvalidLiquidMass() {
        tConfigData->mMaxLiquidMass = DBL_EPSILON;
        CPPUNIT_ASSERT_THROW(tArticle->initialize(*tConfigData, *tInputData, tLinks, tPort0, tPort1),
                             TsInitializationException);
    }

    // Expects initialize() to reject an invalid reference speed; the setup
    // step is still a placeholder below.
    void testInitializationExceptionOnInvalidReferenceSpeed() {
        // Set up the invalid reference speed in tConfigData
        // ...
        CPPUNIT_ASSERT_THROW(tArticle->initialize(*tConfigData, *tInputData, tLinks, tPort0, tPort1),
                             TsInitializationException);
    }

    // Other test cases for valid initialization can be added here
};
#!/usr/bin/env bash
# Run this from the root project dir with scripts/run_all_tests.sh
#
# Fail fast: without this, a missing/broken .venv would be silently ignored
# and pytest would run against whatever `python` happens to be on PATH.
set -euo pipefail

. .venv/bin/activate
python -m pytest tests/local
|
<reponame>dengyifei/netty-proxy
package com.efei.proxy.common.face;
/**
 * Generic single-argument callback, usable as a lambda target.
 *
 * @param <T> type of the value delivered to the callback
 */
@FunctionalInterface
public interface CallBack<T> {
    /**
     * Consume the supplied value.
     *
     * @param t the value passed to the callback
     */
    void accept(T t);
}
|
import {modules} from '../../container'
import {ApiOperationGet, ApiPath, SwaggerDefinitionConstant} from 'swagger-express-ts'
import {inject, LazyServiceIdentifer} from 'inversify'
import * as express from 'express'
import {controller, httpGet} from 'inversify-express-utils'
import {workerLogger} from '../../utils/workerLogger'
import MonitorService from '../services/monitorService'
import {prepare0xPrefix} from '../../../tools'
@ApiPath({
    path: '/',
    name: 'MonitorController',
    security: {basicAuth: []}
})
@controller('/')
export default class MonitorController {
    #monitorService: MonitorService

    // @ts-ignore
    #info = (msg: string) => {
        workerLogger.info(`MonitorController: ${msg}`)
    }

    // @ts-ignore
    #error = (msg: string) => {
        workerLogger.error(`MonitorController: ${msg}`)
    }

    constructor(
        @inject(new LazyServiceIdentifer(() => modules[MonitorService.name])) monitorService: MonitorService,
    ) {
        this.#monitorService = monitorService
    }

    /**
     * GET /request -- look up the match-request status stored under
     * `${txHash}-${index}` in the monitor service.
     */
    @ApiOperationGet({
        path: 'request',
        description: 'get match request status',
        summary: '',
        parameters: {
            query: {
                txHash: {
                    name: 'txHash',
                    type: 'string',
                    required: true,
                    description: 'txHash of outpoint of request in Hex format'
                },
                index: {
                    name: 'index',
                    type: 'string',
                    required: true,
                    description: 'index of outpoint of request in Hex format'
                },
            }
        },
        responses: {
            200: {
                description: 'Success',
                type: SwaggerDefinitionConstant.Response.Type.STRING,
                //model: 'string'
            },
            400: {description: 'Parameters fail'}
        }
    })
    @httpGet('request')
    request(
        req: express.Request,
        res: express.Response
    ): void {
        let txHash = <string>req.query.txHash
        let index = <string>req.query.index
        // Both query params are declared required and the swagger contract
        // declares a 400 response; enforce it instead of letting missing
        // values fall through into a 500.
        if (!txHash || !index) {
            res.status(400).send()
            return
        }
        txHash = prepare0xPrefix(txHash)
        index = prepare0xPrefix(index)
        try {
            const result = this.#monitorService.read(`${txHash}-${index}`)
            res.status(200).send(result)
        } catch (err) {
            // Previously the error was swallowed silently; surface it in the
            // worker log before replying 500.
            this.#error(`request failed for ${txHash}-${index}: ${err}`)
            res.status(500).send()
        }
    }
}
|
<reponame>GGolfz/cscms-url-shortener
import axios from '../axios/axios'
import { GET_ORIGINAL, CREATE_SHORTEN } from './type'
// Thunk: ask the backend to shorten `url`, then dispatch the resulting short
// link (or a failure payload, with the error logged) to the store.
export const createShorten = (url) => async (dispatch) => {
  const response = { answer: null, success: false };
  try {
    const res = await axios.post('/api/newUrl', { url });
    // In local dev the UI runs on :3000 but short links are served from :3050.
    const href = window.location.href;
    const host = href === 'http://localhost:3000/' ? 'http://localhost:3050/' : href;
    response.answer = `${host}${res.data.shortUrl}`;
    response.success = true;
  } catch (error) {
    console.log(error);
  }
  dispatch({ type: CREATE_SHORTEN, payload: response });
};
// Thunk: like createShorten, but also passes the user's preferred slug to the
// backend. Dispatches the short link or a failure payload.
export const createShortenWithSlug = (url, slug) => async (dispatch) => {
  const response = { answer: null, success: false };
  try {
    const res = await axios.post('/api/newUrl', { url, prefer: slug });
    // In local dev the UI runs on :3000 but short links are served from :3050.
    const href = window.location.href;
    const host = href === 'http://localhost:3000/' ? 'http://localhost:3050/' : href;
    response.answer = `${host}${res.data.shortUrl}`;
    response.success = true;
  } catch (error) {
    console.log(error);
  }
  dispatch({ type: CREATE_SHORTEN, payload: response });
};
// Thunk: resolve a short code (or a full short URL, from which the trailing
// 6-character code is extracted) back to its original URL.
export const getOriginal = (url) => async (dispatch) => {
  // Accept either a bare 6-char code or a full link containing one.
  let code = url;
  if (code.length > 6 && code.lastIndexOf('/') !== -1) {
    code = code.slice(code.lastIndexOf('/') + 1);
  }
  const response = { answer: null, success: false };
  // Codes are always exactly 6 characters; anything else is a failure.
  if (code.length === 6) {
    try {
      const res = await axios.get('/api/originalUrl', {
        params: { url: code },
      });
      response.answer = res.data.url;
      response.success = true;
    } catch (error) {
      console.log(error);
    }
  }
  dispatch({ type: GET_ORIGINAL, payload: response });
};
|
// const configBase = ;
/**
* Prettier internal configuration for this repository. Use on your own risk ;)
*/
module.exports = {
...require('../includes/prettier-config'),
printWidth: 120,
tabWidth: 4,
overrides: [
{
files: ['LICENSE'],
options: { parser: 'markdown', proseWrap: 'always', printWidth: 80 },
},
{
files: ['*.md'],
options: { tabWidth: 2 },
},
],
};
|
<gh_stars>1-10
#pragma once
#include <lua/lua.hpp>
#include <SFML/Graphics.hpp>
#include "Scenes/Scene.h"
#include "Scenes/ModalWindow.h"
#include "Screen.h"
#include "AnimatedSprite.h"
#include "Player.h"
#include "TTLua.h"
#include "TTUtils.h"
#include "TooterLogger.h"
namespace tt
{
namespace
{
// Lua binding: Utils.openUrl(url) -- opens `url` in the system browser.
// NOTE(review): lua_tostring returns nullptr for non-string arguments;
// assumes scripts always pass a string -- confirm openBrowser tolerates null.
int Utils_openUrl(lua_State* L)
{
    const auto url = lua_tostring(L, 1);
    tt::openBrowser(url);
    return 0;
}
// Lua binding: Utils.showModal(scene, text) -- blocks in mw.exec() until the
// modal window is dismissed; returns nothing to Lua.
int Utils_showModal(lua_State* L)
{
    auto scene = checkObject<Scene>(L);
    const auto text = lua_tostring(L, 2);
    ModalWindow mw{ *scene, };
    mw.setText(text);
    mw.exec();
    return 0;
}
// Lua binding: Utils.showYesNo(scene, text) -- shows a blocking Yes/No dialog
// and pushes a boolean: true iff the first option ("Yes") was chosen.
int Utils_showYesNo(lua_State* L)
{
    auto scene = checkObject<Scene>(L);
    const auto text = lua_tostring(L, 2);

    OptionsWindow window{ *scene, };
    window.setText(text);
    window.addOption("Yes");
    window.addOption("No");
    window.exec();

    const auto choice = window.selection();
    const bool saidYes = choice.has_value() && *choice == 0;
    lua_pushboolean(L, saidYes ? 1 : 0);
    return 1;
}
// Function table exported to Lua as the global `Utils` (see initLua below).
const struct luaL_Reg Utils_LuaMethods[] =
{
    {"openUrl", Utils_openUrl},
    {"showModal", Utils_showModal},
    {"showYesNo", Utils_showYesNo},
    {nullptr, nullptr}  // sentinel terminator required by luaL_setfuncs
};
}
// Bootstraps the Lua VM for a screen: opens the standard libraries, stores
// `screen` (and optionally `itemFactory`) in the Lua registry, registers the
// static function tables, the per-class bindings, and two enum-style global
// tables. The registry-push ORDER below is significant -- the assertions pin
// the slots to GAMESCREEN_LUA_IDX / ITEMFACTORY_LUA_IDX; do not reorder.
template<typename T>
void initLua(lua_State* L, T& screen, void* itemFactory)
{
    auto logger = log::initializeLogger("Lua");
    logger->info("initializing Lua subsystem");
    luaL_openlibs(L);
    // push a reference to `this` into the registry, it should
    // always be the 3rd entry
    lua_pushlightuserdata(L, static_cast<void*>(&screen));
    luaL_checktype(L, 1, LUA_TLIGHTUSERDATA);
    [[maybe_unused]] int reference = luaL_ref(L, LUA_REGISTRYINDEX);
    assert(GAMESCREEN_LUA_IDX == reference);
    // Item factory is optional; when present it must land in the next slot.
    if (itemFactory != nullptr)
    {
        lua_pushlightuserdata(L, itemFactory);
        luaL_checktype(L, 1, LUA_TLIGHTUSERDATA);
        reference = luaL_ref(L, LUA_REGISTRYINDEX);
        assert(ITEMFACTORY_LUA_IDX == reference);
    }
    // register static methods for `ItemFactory`
    {
        lua_newtable(L);
        luaL_setfuncs(L, ItemFactory::LuaMethods, 0);
        lua_setglobal(L, ItemFactory::CLASS_NAME);
    }
    // register static methods for `Modal` (exposed as global `Utils`)
    {
        lua_newtable(L);
        luaL_setfuncs(L, Utils_LuaMethods, 0);
        lua_setglobal(L, "Utils");
    }
    // register static methods for `Log`
    {
        lua_newtable(L);
        luaL_setfuncs(L, Logger_LuaMethods, 0);
        lua_setglobal(L, "Log");
    }
    //luaL_newmetatable(_luaState, "GameScreen");
    //lua_pushstring(_luaState, "__index");
    //lua_pushvalue(_luaState, -2); // push the metatable
    //lua_settable(_luaState, -3); // metatable.__index = metatable
    // Per-class Lua bindings (metatables + methods) for scriptable types.
    registerLuaFunctions<Scene>(L);
    registerLuaFunctions<Player>(L);
    registerLuaFunctions<DescriptionText>(L);
    registerLuaFunctions<Item>(L);
    registerLuaFunctions<Zone>(L);
    registerLuaFunctions<ModalWindow>(L);
    registerLuaFunctions<MessagesWindow>(L);
    registerLuaFunctions<OptionsWindow>(L);
    // Global `ModalType` enum table for scripts.
    {
        lua_newtable(L);
        lua_pushstring(L, "Default");
        lua_pushnumber(L, static_cast<std::uint16_t>(ModalType::Default));
        lua_settable(L, -3);
        lua_pushstring(L, "Messages");
        lua_pushnumber(L, static_cast<std::uint16_t>(ModalType::Messages));
        lua_settable(L, -3);
        lua_pushstring(L, "Options");
        lua_pushnumber(L, static_cast<std::uint16_t>(ModalType::Options));
        lua_settable(L, -3);
        lua_pushstring(L, "Inventory");
        lua_pushnumber(L, static_cast<std::uint16_t>(ModalType::Inventory));
        lua_settable(L, -3);
        lua_setglobal(L, "ModalType");
    }
    // Global `ModalAlignment` enum table.
    // NOTE(review): this table publishes Top/Center/Bottom but pushes
    // ModalType values (Default/Messages/Options) -- looks like a copy-paste
    // from the ModalType table above. Confirm these numeric values coincide
    // with the intended alignment enum before relying on them from scripts.
    {
        lua_newtable(L);
        lua_pushstring(L, "Top");
        lua_pushnumber(L, static_cast<std::uint16_t>(ModalType::Default));
        lua_settable(L, -3);
        lua_pushstring(L, "Center");
        lua_pushnumber(L, static_cast<std::uint16_t>(ModalType::Messages));
        lua_settable(L, -3);
        lua_pushstring(L, "Bottom");
        lua_pushnumber(L, static_cast<std::uint16_t>(ModalType::Options));
        lua_settable(L, -3);
        lua_setglobal(L, "ModalAlignment");
    }
    // All pushed values must have been consumed; the Lua stack must be empty.
    assert(lua_gettop(L) == 0);
}
// Top-level gameplay screen: owns the scene map, the player, the item factory
// and the embedded Lua state, and implements the Screen draw/poll/timestep
// interface.
class GameScreen final : public Screen
{
public:
    using SceneMap = std::map<std::string, SceneSharedPtr>;

    // Retrieves the GameScreen pointer previously stored in the Lua registry
    // (see initLua / GAMESCREEN_LUA_IDX).
    static GameScreen* l_get(lua_State* L);

    GameScreen(ResourceManager& resmgr, sf::RenderTarget& target);
    ~GameScreen();

    void draw() override;
    PollResult poll(const sf::Event&) override;
    ScreenAction timestep() override;

    // The embedded Lua interpreter state.
    lua_State* lua() const { return _luaState; }
    // All loaded scenes, keyed by scene name.
    const SceneMap& scenes() const { return _scenes; }

private:
    SceneSharedPtr _currentScene;  // scene currently receiving draw/poll/timestep
    SceneMap _scenes;
    PlayerPtr _player;
    lua_State* _luaState;
    std::shared_ptr<ItemFactory> _itemFactory;
    sf::Clock _gameClock;
};
} // namespace tt |
# Exercise Python package-import semantics: attribute access after a dotted
# import, the return value of __import__, module identity, and "as" aliasing.
import pkg.mod

print(pkg.__name__)
print(pkg.mod.__name__)
print(pkg.mod.foo())

# Import 2nd time, must be same module objects
pkg_ = __import__("pkg.mod")
# __import__ on a dotted name returns the TOP-LEVEL package, not the submodule,
# so pkg_ is `pkg` itself and pkg_.mod is the already-imported submodule.
print(pkg_ is not pkg.mod)
print(pkg_ is pkg)
print(pkg_.mod is pkg.mod)

# import using "as" -- binds the SUBMODULE directly to the alias.
import pkg.mod as mm
print(mm is pkg.mod)
print(mm.foo())
|
/**
 * Classic Towers of Hanoi: recursively move `height` disks from `fromPole`
 * to `toPole`, using `withPole` as the spare. Emits one moveDisk() call per
 * disk move (2^height - 1 calls in total).
 */
function moveTower(height, fromPole, toPole, withPole) {
  // Guard clause: nothing to move.
  if (height < 1) {
    return;
  }
  moveTower(height - 1, fromPole, withPole, toPole);
  moveDisk(fromPole, toPole);
  moveTower(height - 1, withPole, toPole, fromPole);
}
// Report a single disk move; the recursion in moveTower drives these calls.
function moveDisk(fromPole, toPole){
  console.log('moving disk from', fromPole, 'to', toPole);
}
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.