text stringlengths 1 1.05M |
|---|
def calculateLabelHeight(verticalAlignment=None):
    """Return the pixel height for a label widget.

    Bug fix: the original tested the enum member `VerticalAlignment.Center`
    itself (always truthy), so the else-branch was unreachable and, had it
    ever run, would have returned an unbound `labelHeight`.

    Args:
        verticalAlignment: A ``GafferUI.Label.VerticalAlignment`` value.
            Defaults to ``Center``, preserving the original behavior.

    Returns:
        int: The label height in pixels.

    Raises:
        NotImplementedError: For alignments other than Center, whose sizing
            has not been specified yet.
    """
    # Height of the line edit widget the label is paired with.
    lineEditHeight = 30
    if verticalAlignment is None:
        verticalAlignment = GafferUI.Label.VerticalAlignment.Center
    # Center-aligned labels match the line edit height exactly.
    if verticalAlignment == GafferUI.Label.VerticalAlignment.Center:
        return lineEditHeight
    raise NotImplementedError(
        "calculateLabelHeight only supports center vertical alignment"
    )
#pragma once
#include "vector.hpp"
namespace frea {
namespace random {
// Generates a random plane of type P using the random source `rd`.
// The plane is represented as a random unit normal vector plus a distance
// from the origin (P is constructed as P(normal, distance)).
template <class P, class RD>
P GenPlane(RD&& rd) {
	// Random unit vector of the plane's vector type serves as the normal.
	const auto nml = GenVecUnit<typename P::vec_t>(rd);
	// Distance from the origin, drawn directly from the random source.
	const auto dist = rd();
	return P(nml, dist);
}
}
}
|
<reponame>altraman12/TeslaTSA
// WARNING: This code runs on ALL pages; if it shouldn't, move it elsewhere.
$(document).ready(function () {
    $('.angled-border').angledBorder();

    // Pin the #nav element once the page has scrolled past the element
    // immediately above it, and unpin it again when scrolling back up.
    var stickScroll = function () {
        var scrolled = $(window).scrollTop();
        var threshold = $('#nav').prev().height();
        $('#nav').toggleClass('affix', scrolled > threshold);
    };

    $(window).scroll(stickScroll);
    // Apply the correct state immediately on load, not only after scrolling.
    stickScroll();
});
|
def extract_columns_data(column_names, file_paths):
    """Parse tab-separated files into per-file lists of row dicts.

    Blank lines and comment lines (starting with '#', leading whitespace
    allowed) are skipped; data lines are kept only when their field count
    matches ``len(column_names)``.

    Args:
        column_names: Ordered column labels expected in each data row.
        file_paths: Paths of the files to read ('/'-separated; the final
            path component becomes the result key).

    Returns:
        dict: Maps file name -> list of ``{column_name: value}`` dicts,
        one per accepted data line.
    """
    data_dict = {}
    for file_path in file_paths:
        file_name = file_path.split('/')[-1]
        data_list = []
        with open(file_path, 'r', encoding='utf-8') as file:
            # Stream line by line instead of loading the whole file with
            # readlines(); also strip before the '#' check so indented
            # comment lines are not mistaken for data.
            for line in file:
                stripped = line.strip()
                if not stripped or stripped.startswith('#'):
                    continue
                fields = stripped.split('\t')
                if len(fields) == len(column_names):
                    data_list.append(dict(zip(column_names, fields)))
        data_dict[file_name] = data_list
    return data_dict
#!/bin/bash
set -e
# This script is a central controller that manages git commands via project specific commands.
# Each project will have a different branching strategy, so the creation of new branches will be configured
# by project specific commands.
# Project specific commands can be found in git command specific directories.
GIT_COMMAND_DIR=.git_cmd
# From the directory where we execute the command, we search recursively in the parent directories for a git command directory.
# If one is found, then we will delegate the command to the equivalent one in the directory
# e.g. executing this: gbf will result in the execution of $GIT_COMMAND_DIR/gbf, which can be specific for projects.
# e.g. $GIT_COMMAND_DIR/gbf branch_name can be git checkout -b feature/branch_name origin/main
WORKING_DIR=$PWD
# Walk up the directory tree; abort at the filesystem root instead of
# looping forever when no $GIT_COMMAND_DIR exists on the path.
while [ ! -d "$GIT_COMMAND_DIR" ]
do
    if [ "$PWD" = "/" ]; then
        echo "Error: no $GIT_COMMAND_DIR directory found in $WORKING_DIR or any parent" >&2
        exit 1
    fi
    cd ..
done
PATH_TO_CMD="$(pwd)/$GIT_COMMAND_DIR"
CMD="$PATH_TO_CMD/$1"
# Fail with a clear message when the project does not define this command.
if [ ! -f "$CMD" ]; then
    echo "Error: unknown command '$1' (expected at $CMD)" >&2
    exit 1
fi
echo "----------The full path to the command is--------------"
echo "$CMD"
echo "-------------------------------------------------------"
cd "$WORKING_DIR"
/bin/bash "$CMD" "$2"
# Show tracking info for the affected branch; skip when no branch name was
# given (grep with an empty pattern argument would otherwise fail).
if [ -n "$2" ]; then
    git branch -vv | grep "$2"
fi
|
#!/bin/bash
set -euo pipefail
# Variables:
##### NOTE: THE BELOW VARIABLES MUST BE SET! ######
# Name of an S3 bucket to be created -- MUST BE GLOBALLY UNIQUE!
S3_BUCKET=MY-UNIQUE-BUCKETNAME
# AWS Account IDs
# NOTE(review): the values below are placeholders; replace each with a real
# 12-digit account ID (dev and prod may legitimately be the same account).
DEV_ACCOUNT_ID=123456789123
PROD_ACCOUNT_ID=123456789123
###### THESE VARIABLES CAN BE OPTIONALLY ADJUSTED ######
# Prefix for files in S3 bucket. Default is fine for most scenarios.
S3_PREFIX=AWS-CICD-Quickstart
# A name for your CloudFormation stack
STACK_NAME=cicd-serverless
# Relative path to local folder (that does not exist) to store git project
LOCAL_REPO_FOLDER=../sample-project-codecommit
|
def send_error_msg(error_type):
    """Send an error message to the user interface.

    Currently a stub that only prints a notice; the disabled network
    implementation is kept below for reference. (Fix: the docstring was
    previously a stray module-level string above the def.)

    Args:
        error_type: Status/error code that would be reported upstream.
    """
    # TODO: Add encryption and HMAC before enabling the network path.
    print("Error reporting currently not supported.")
    # Disabled network implementation (kept for reference):
    # network = connect_network(flash_light=False)  # Connect to network
    # s = create_and_connect_socket(UPDATE_URL, UPDATE_PORT, UPDATE_HTTPS)
    # content_length = len("sensor_id={}&sensor_key={}&status={}".format(SENSOR_ID, SENSOR_KEY, error_type))
    # data = """POST /report_failure HTTP/1.1\r\nHost: {}\r\nContent-Type: application/x-www-form-urlencoded\r\nContent-Length: {}\r\n\r\nsensor_id={}&sensor_key={}&status={}\r\n\r\n""".format(UPDATE_URL, content_length, SENSOR_ID, SENSOR_KEY, error_type)
    # s.send(bytes(data, 'utf8'))
    # utime.sleep(2)
    # print("Status sent to user interface")
|
<filename>x-pack/plugins/ml/public/application/data_frame_analytics/pages/analytics_exploration/components/outlier_exploration/outlier_exploration.tsx
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import React, { FC } from 'react';
import { i18n } from '@kbn/i18n';
import {
EuiCallOut,
EuiFlexGroup,
EuiFlexItem,
EuiHorizontalRule,
EuiPanel,
EuiSpacer,
EuiTitle,
} from '@elastic/eui';
import {
useColorRange,
ColorRangeLegend,
COLOR_RANGE,
COLOR_RANGE_SCALE,
} from '../../../../../components/color_range_legend';
import { sortColumns, INDEX_STATUS, defaultSearchQuery } from '../../../../common';
import { DATA_FRAME_TASK_STATE } from '../../../analytics_management/components/analytics_list/common';
import { getTaskStateBadge } from '../../../analytics_management/components/analytics_list/columns';
import { useExploreData, TableItem } from '../../hooks/use_explore_data';
import { ExplorationDataGrid } from '../exploration_data_grid';
import { ExplorationQueryBar } from '../exploration_query_bar';
const FEATURE_INFLUENCE = 'feature_influence';
// Panel title displaying the outlier detection job ID.
const ExplorationTitle: FC<{ jobId: string }> = ({ jobId }) => {
  const title = i18n.translate('xpack.ml.dataframe.analytics.exploration.jobIdTitle', {
    defaultMessage: 'Outlier detection job ID {jobId}',
    values: { jobId },
  });
  return (
    <EuiTitle size="xs">
      <span>{title}</span>
    </EuiTitle>
  );
};
// Props for the outlier exploration panel.
interface ExplorationProps {
  // Data frame analytics job ID whose results are explored.
  jobId: string;
  // Current task state of the job, rendered as a status badge.
  jobStatus: DATA_FRAME_TASK_STATE;
}
// Counts the feature influence columns (`<resultsField>.feature_influence.*`)
// present on the first table row; returns 0 when there are no rows.
const getFeatureCount = (resultsField: string, tableItems: TableItem[] = []) => {
  if (tableItems.length === 0) {
    return 0;
  }
  const featureKeyFragment = `${resultsField}.${FEATURE_INFLUENCE}.`;
  let count = 0;
  for (const key of Object.keys(tableItems[0])) {
    if (key.includes(featureKeyFragment)) {
      count++;
    }
  }
  return count;
};
// Exploration panel for outlier detection job results: query bar, feature
// influence color legend and the results data grid.
export const OutlierExploration: FC<ExplorationProps> = React.memo(({ jobId, jobStatus }) => {
  const {
    errorMessage,
    indexPattern,
    jobConfig,
    pagination,
    searchQuery,
    selectedFields,
    setPagination,
    setSearchQuery,
    setSelectedFields,
    setSortingColumns,
    sortingColumns,
    rowCount,
    status,
    tableFields,
    tableItems,
  } = useExploreData(jobId);
  const columns = [];
  if (
    jobConfig !== undefined &&
    indexPattern !== undefined &&
    selectedFields.length > 0 &&
    tableItems.length > 0
  ) {
    const resultsField = jobConfig.dest.results_field;
    // Bug fix: inside a template literal `\.` is just `.`, so the dots were
    // previously matching any character. `\\.` produces a literal-dot match.
    const removePrefix = new RegExp(`^${resultsField}\\.${FEATURE_INFLUENCE}\\.`, 'g');
    columns.push(
      ...tableFields.sort(sortColumns(tableItems[0], resultsField)).map(id => {
        const idWithoutPrefix = id.replace(removePrefix, '');
        const field = indexPattern.fields.getByName(idWithoutPrefix);
        // Built-in values are ['boolean', 'currency', 'datetime', 'numeric', 'json']
        // To fall back to the default string schema it needs to be undefined.
        let schema;
        switch (field?.type) {
          case 'date':
            schema = 'datetime';
            break;
          case 'geo_point':
            schema = 'json';
            break;
          case 'number':
            schema = 'numeric';
            break;
        }
        if (id === `${resultsField}.outlier_score`) {
          schema = 'numeric';
        }
        return { id, schema };
      })
    );
  }
  const colorRange = useColorRange(
    COLOR_RANGE.BLUE,
    COLOR_RANGE_SCALE.INFLUENCER,
    jobConfig !== undefined ? getFeatureCount(jobConfig.dest.results_field, tableItems) : 1
  );
  if (jobConfig === undefined || indexPattern === undefined) {
    return null;
  }
  // if it's a searchBar syntax error leave the table visible so they can try again
  if (status === INDEX_STATUS.ERROR && !errorMessage.includes('parsing_exception')) {
    return (
      <EuiPanel grow={false}>
        <ExplorationTitle jobId={jobConfig.id} />
        <EuiCallOut
          title={i18n.translate('xpack.ml.dataframe.analytics.exploration.indexError', {
            defaultMessage: 'An error occurred loading the index data.',
          })}
          color="danger"
          iconType="cross"
        >
          <p>{errorMessage}</p>
        </EuiCallOut>
      </EuiPanel>
    );
  }
  let tableError =
    status === INDEX_STATUS.ERROR && errorMessage.includes('parsing_exception')
      ? errorMessage
      : undefined;
  if (status === INDEX_STATUS.LOADED && tableItems.length === 0 && tableError === undefined) {
    tableError = i18n.translate('xpack.ml.dataframe.analytics.exploration.noDataCalloutBody', {
      defaultMessage:
        'The query for the index returned no results. Please make sure the index contains documents and your query is not too restrictive.',
    });
  }
  return (
    <EuiPanel data-test-subj="mlDFAnalyticsOutlierExplorationTablePanel">
      <EuiFlexGroup
        alignItems="center"
        justifyContent="spaceBetween"
        responsive={false}
        gutterSize="s"
      >
        <EuiFlexItem grow={false}>
          <ExplorationTitle jobId={jobConfig.id} />
        </EuiFlexItem>
        <EuiFlexItem grow={false}>
          <span>{getTaskStateBadge(jobStatus)}</span>
        </EuiFlexItem>
      </EuiFlexGroup>
      <EuiHorizontalRule margin="xs" />
      {(columns.length > 0 || searchQuery !== defaultSearchQuery) && (
        <>
          <EuiFlexGroup justifyContent="spaceBetween">
            <EuiFlexItem>
              <ExplorationQueryBar indexPattern={indexPattern} setSearchQuery={setSearchQuery} />
            </EuiFlexItem>
            <EuiFlexItem grow={false}>
              <EuiSpacer size="s" />
              <ColorRangeLegend
                colorRange={colorRange}
                title={i18n.translate(
                  'xpack.ml.dataframe.analytics.exploration.colorRangeLegendTitle',
                  {
                    defaultMessage: 'Feature influence score',
                  }
                )}
              />
            </EuiFlexItem>
          </EuiFlexGroup>
          <EuiSpacer size="s" />
          {columns.length > 0 && tableItems.length > 0 && (
            <ExplorationDataGrid
              colorRange={colorRange}
              columns={columns}
              pagination={pagination}
              resultsField={jobConfig.dest.results_field}
              rowCount={rowCount}
              selectedFields={selectedFields}
              setPagination={setPagination}
              setSelectedFields={setSelectedFields}
              setSortingColumns={setSortingColumns}
              sortingColumns={sortingColumns}
              tableItems={tableItems}
            />
          )}
        </>
      )}
    </EuiPanel>
  );
});
|
import functools
import json
from jwtAuthenticator.schemas.schema_user import validate_user
from jwtAuthenticator.models import db
from jwtAuthenticator.models import User
from flask.views import MethodView
from flask_jwt_extended import (
JWTManager,
create_access_token,
create_refresh_token,
jwt_required,
jwt_refresh_token_required,
get_jwt_identity,
fresh_jwt_required,
set_access_cookies,
set_refresh_cookies,
unset_jwt_cookies
)
from flask import (
request, jsonify
)
jwt = JWTManager()
# registration endpoint
class RegisterAPI(MethodView):
    """User registration endpoint backed by the User model."""

    def get(self):
        # Registration is POST-only.
        return jsonify({'ok': False, 'message': 'forbidden'}), 403

    def post(self):
        """Validate the request body, reject duplicates, create the user."""
        data = validate_user(request.get_json())
        # Guard clauses for failed validation.
        if not data['ok']:
            if data['error'] == 'validation':
                return jsonify({'ok': False, 'message': "Invalid Credentials"}), 400
            return jsonify({'ok': False, 'message': "Bad Credentials"}), 400
        user_data = data['user_data']
        # Reject the request when the username is already taken.
        if User.query.filter_by(username=user_data['username']).first():
            return jsonify({'ok': False, 'message': 'duplicate_username'}), 400
        # Persist the new user record.
        new_user = User(username=user_data['username'], password=user_data['password'])
        db.session.add(new_user)
        db.session.commit()
        return jsonify({'ok': True, 'message': 'User Created'}), 200
# authentication endpoint
class AuthenticateAPI(MethodView):
    """Authenticates a user and sets JWT access/refresh cookies on success."""

    def get(self):
        # Authentication is POST-only.
        return jsonify({'ok': False, 'message': 'forbidden'}), 403

    def post(self):
        ''' user authentication endpoint '''
        # validate the request data
        data = validate_user(request.get_json())
        # if validation was successful
        if data['ok']:
            # get the user data from the validated data
            user_data = data['user_data']
            # get the user from the database if it exists
            user = User.query.filter_by(username=user_data['username']).first()
            # if the user with the given username exists and the password is valid
            if user and user.verify_password(user_data['password']):
                # remove the password from the user data before echoing it back
                del user_data['password']
                # create a fresh access token and a refresh token
                access_token = create_access_token(identity=user_data, fresh=True)
                refresh_token = create_refresh_token(identity=user_data)
                #user_data['access_token'] = access_token
                #user_data['refresh_token'] = refresh_token
                user_data['login'] = True
                resp = jsonify(user_data)
                # store the tokens in cookies on the response
                set_access_cookies(resp, access_token)
                set_refresh_cookies(resp, refresh_token)
                return resp, 200
            else:
                # the user does not exist or the password is not valid, return invalid credentials
                return jsonify({'ok': False, 'message': 'Invalid Credentials'}), 400
        else:
            if data['error'] == 'validation':
                return jsonify({'ok': False, 'message': 'Invalid Credentials'}), 400
            # malformed request body
            return jsonify({'ok': False, 'message': 'Bad Request'}), 400
# recreate accessToken
class RefreshAPI(MethodView):
    ''' view for refreshing jwt tokens '''

    # get not allowed
    def get(self):
        return jsonify({'ok': False, 'message': 'forbidden'}), 403

    # the refresh token is required to access this url
    @jwt_refresh_token_required
    def post(self):
        ''' access token refresh endpoint '''
        # get the current user identity from the refresh token
        current_user = get_jwt_identity()
        # create a new non-fresh token (fresh tokens require re-authentication)
        access_token = create_access_token(identity=current_user, fresh=False)
        # response
        resp = jsonify({'refresh': True})
        set_access_cookies(resp, access_token)
        # return the new access token as a cookie on the response
        return resp, 200
# fresh login
class FreshLogin(MethodView):
    ''' view to create fresh access tokens '''

    def get(self):
        # Fresh login is POST-only.
        return jsonify({'ok': False, 'message': 'forbidden'}), 403

    def post(self):
        ''' user authentication endpoint '''
        # validate the request data
        data = validate_user(request.get_json())
        # if validation was successful
        if data['ok']:
            # get the user data from the validated data
            user_data = data['user_data']
            # get the user from the database if it exists
            user = User.query.filter_by(username=user_data['username']).first()
            # if the user with the given username exists and the password is valid
            if user and user.verify_password(user_data['password']):
                # remove the password from the user data before echoing it back
                del user_data['password']
                user_data['fresh_login'] = True
                # create a fresh access token (required by @fresh_jwt_required views)
                access_token = create_access_token(identity=user_data, fresh=True)
                # create a response with the token stored in a cookie
                resp = jsonify(user_data)
                set_access_cookies(resp, access_token)
                return resp, 200
            else:
                # the user does not exist or the password is not valid
                return jsonify({'ok': False, 'message': 'Invalid Credentials'}), 400
        else:
            # Bug fix: a failed validation previously fell through and returned
            # None (a 500 in Flask). Mirror AuthenticateAPI's error handling.
            if data['error'] == 'validation':
                return jsonify({'ok': False, 'message': 'Invalid Credentials'}), 400
            return jsonify({'ok': False, 'message': 'Bad Request'}), 400
class ValidateToken(MethodView):
    ''' token validation endpoint '''

    # validate the token; reaching the body means the JWT in the request is valid
    @jwt_required
    def get(self):
        current_user = get_jwt_identity()
        return jsonify({"ok": True, 'is_valid': True, 'user': current_user}), 200

    # same behavior as get, kept for clients that POST
    @jwt_required
    def post(self):
        #current_user = get_jwt_identity()
        #return jsonify({"ok": True, 'message': 'The token is valid', 'user': current_user}), 200
        current_user = get_jwt_identity()
        return jsonify({"ok": True, 'is_valid': True, 'user': current_user}), 200
class ValidateFreshToken(MethodView):
    ''' fresh token validation '''

    # validate the token and verify it is fresh (issued by a direct login,
    # not by the refresh endpoint)
    @fresh_jwt_required
    def get(self):
        current_user = get_jwt_identity()
        return jsonify({"ok": True, 'is_valid': True, 'user': current_user}), 200

    # same behavior as get, kept for clients that POST
    @fresh_jwt_required
    def post(self):
        current_user = get_jwt_identity()
        return jsonify({"ok": True, 'is_valid': True, 'user': current_user}), 200
class LogoutAPI(MethodView):
    """Logs the user out by clearing the JWT cookies."""

    def get(self):
        # Logout is POST-only.
        return jsonify({'ok': False, 'message': 'forbidden'}), 403

    def post(self):
        response = jsonify({'logout': True})
        # Strip the JWT cookies from the client via the response headers.
        unset_jwt_cookies(response)
        return response, 200
class GetUsers(MethodView):
    """Lists all registered users."""

    def get(self):
        """Return every user, ordered by username, as JSON.

        Returns:
            (response, 200) where the body is {'ok': True, 'data': [...]}.
        """
        all_users = User.query.order_by(User.username).all()
        json_encodeable_users = [a_user.as_dict() for a_user in all_users]
        # Bug fix: the status code was previously passed INTO jsonify()
        # (`jsonify({...}, 200)`) instead of being returned as the second
        # element of the (body, status) tuple Flask expects.
        return jsonify({'ok': True, 'data': json_encodeable_users}), 200
# TODO: This is just a test
class Home(MethodView):
    ''' This is just to test frontend '''

    def get(self):
        return jsonify({'message': "this is home in get"}), 200

    def post(self):
        return jsonify({'message': "this is home in post"}), 200
|
# Launch the application via npm in a child bash shell.
bash -c "npm start"
module game {
    // Board map: precomputes a THREE.Vector3 position for every cell of the grid.
    export class ElsfkMap {
        // grid dimensions: rows x columns
        row = 18
        column = 10
        // cell size; width, height and depth are all the same
        width = 50
        private _map: Array<THREE.Vector3> = [] // vertex coordinates, filled row by row
        public initMap() {
            // assumes the bottom-left cell is the origin (0)
            // (earlier centered-origin variant kept for reference:)
            // let row = this.row / 2
            // let column = this.column / 2
            // for (let i = -row; i < row; i++) {
            //     for (let j = -column; j < column; j++) {
            //         let x = j * this.width
            //         let y = this.width
            //         let z = i * this.width
            //         let vec = new THREE.Vector3(x, y, z)
            //         this._map.push(vec)
            //     }
            // }
            for (let i = 0; i < this.row; i++) {
                for (let j = 0; j < this.column; j++) {
                    let x = j * this.width
                    // y is constant: every cell lies in the same horizontal plane
                    let y = this.width
                    let z = i * this.width
                    let vec = new THREE.Vector3(x, y, z)
                    this._map.push(vec)
                }
            }
        }
        // Read-only access to the precomputed cell positions.
        get map() {
            return this._map
        }
    }
}
base_url = $('#token').attr('base-url');//Extrae la base url del input token de la vista
function eliminarQuehacer(id,token) {
url = base_url.concat('/admin/pueblos_magicos/quehacer/eliminar');
$.ajax({
method: "POST",
url: url,
data:{
"id":id,
"_token":token
},
success: function() {
swal({
title: "Registro eliminado correctamente, esta página se recargará automáticamente ahora.",
type: "success",
showConfirmButton: false,
},
function() {
location.reload();
});
setTimeout("location.reload()",1200);
},
error: function(xhr, status, error) {
swal({
title: "<small>Error!</small>",
text: "Se encontró un problema eliminando este registro, por favor, trate nuevamente.<br><span style='color:#F8BB86'>\nError: " + xhr.status + " (" + error + ") "+"</span>",
html: true
});
}
});
}
function guardarDetalleQuehacer(id_empresa,id_quehacer,descripcion,token) {
url = base_url.concat('/admin/pueblos_magicos/quehacer/guardarDetalle');
$.ajax({
method: "POST",
url: url,
data:{
"id_empresa":id_empresa,
"id_quehacer":id_quehacer,
"descripcion":descripcion,
"_token":token
},
success: function(data) {
$('#formulario-detalle-quehacer select#empresa').val('0');
$('#descripcionDetalleEmpresa').val('');
if (data.length > 0) {
$("#detallesQuehacerContenido").children().remove();
data.forEach(function(res) {
if (res != "") {
$("#detallesQuehacerContenido").append(
"<tr class='' id="+res.idDetalle+">"+
"<td class='hide'>"+res.idDetalle+"</td>"+
"<td>"+res.nombreEmpresa+"</td>"+
"<td class='text'><span>"+res.descripcion+"</span></td>"+
"<td>"+
"<button type='button' class='btn btn-danger borrar-detalle-quehacer' status-pueblo='0'>Borrar</button>"+
"</td>"+
"</tr>"
);
}
})
}
swal({
title: "Registro guardado correctamente.",
type: "success",
showConfirmButton: true,
timer: 1200,
});
$('#guardar-detalle-quehacer').show();
},
error: function(xhr, status, error) {
swal({
title: "<small>Error!</small>",
text: "No se ha podido guardar el nuevo registro, por favor, trate nuevamente.<br><span style='color:#F8BB86'>\nError: " + xhr.status + " (" + error + ") "+"</span>",
html: true
});
$('#guardar-detalle-quehacer').show();
}
});
}
// Deletes one "quehacer" detail row via POST, removes its <tr> from the DOM
// and shows a placeholder row when the table becomes empty.
function borrarQuehacerDetalles(idDetalle, token) {
    url = base_url.concat('/admin/pueblos_magicos/quehacer/borrarDetalle');
    $.ajax({
        method: "POST",
        url: url,
        data: {
            "idDetalle": idDetalle,
            "_token": token // CSRF token required by the backend
        },
        success: function() {
            swal({
                title: "Registro eliminado correctamente.",
                type: "success",
                showConfirmButton: true,
                timer: 1200,
            });
            $('#guardar-detalle-quehacer').show();
            // Remove the deleted row from the table.
            $('tr#'+idDetalle).remove();
            // Show a placeholder when no detail rows remain.
            if ($('table#detallesQuehacer >tbody#detallesQuehacerContenido >tr').length == 0){
                $("#detallesQuehacerContenido").append(
                    '<td colspan="4">No hay registros disponibles</td>'
                );
            }
        },
        error: function(xhr, status, error) {
            $('#guardar-detalle-quehacer').show();
            swal({
                title: "<small>Error!</small>",
                text: "Se encontró un problema eliminando este registro, por favor, trate nuevamente.<br><span style='color:#F8BB86'>\nError: " + xhr.status + " (" + error + ") "+"</span>",
                html: true
            });
        }
    });
}
/**
* Copyright (c) 2008 <NAME>. All rights reserved.
*
* This file is part of XBee-API.
*
* XBee-API is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* XBee-API is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with XBee-API. If not, see <http://www.gnu.org/licenses/>.
*/
package com.rapplogic.xbee.api;
import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.log4j.Logger;
import com.rapplogic.xbee.RxTxSerialComm;
import com.rapplogic.xbee.XBeeConnection;
import com.rapplogic.xbee.api.HardwareVersion.RadioType;
import com.rapplogic.xbee.util.ByteUtils;
/**
* This is an API for communicating with Digi XBee 802.15.4 and ZigBee radios
* via the serial port
* <p/>
* @author <NAME> <<EMAIL>rew.rapp at gmail>
*
*/
public class XBee implements IXBee {
	private final static Logger log = Logger.getLogger(XBee.class);
	// object to synchronize on to protect access to sendPacket
	private Object sendPacketBlock = new Object();
	// Active connection to the radio; null until open()/initProviderConnection().
	private XBeeConnection xbeeConnection;
	// Background thread that reads and parses incoming frames into the response queue.
	private InputStreamThread parser;
	// Behavior configuration (queue size, startup checks, timeouts, ...).
	private XBeeConfiguration conf;
	// Radio series, when known; reset to null in open(). Used to reject
	// requests targeting the wrong series in sendRequest().
	private RadioType type;
	/** Creates an XBee with defaults: 100-entry response queue, startup checks on. */
	public XBee() {
		this(new XBeeConfiguration().withMaxQueueSize(100).withStartupChecks(true));
	}

	/**
	 * Creates an XBee with the supplied configuration. When the configuration
	 * enables it, registers a JVM shutdown hook that closes an open connection.
	 *
	 * @param conf behavior configuration for this instance
	 */
	public XBee(XBeeConfiguration conf) {
		this.conf = conf;
		if (this.conf.isShutdownHook()) {
			Runtime.getRuntime().addShutdownHook(new Thread() {
				public void run() {
					if (isConnected()) {
						log.info("ShutdownHook is closing connection");
						close();
					}
				}
			});
		}
	}
private void doStartupChecks() throws XBeeException {
// Perform startup checks
try {
AtCommandResponse ap = this.sendAtCommand(new AtCommand("AP"));
if (!ap.isOk()) {
throw new XBeeException("Attempt to query AP parameter failed");
}
if (ap.getValue()[0] != 2) {
log.warn("XBee radio is in API mode without escape characters (AP=1). The radio must be configured in API mode with escape bytes (AP=2) for use with this library.");
log.info("Attempting to set AP to 2");
ap = this.sendAtCommand(new AtCommand("AP", 2));
if (ap.isOk()) {
log.info("Successfully set AP mode to 2. This setting will not persist a power cycle without the WR (write) command");
} else {
throw new XBeeException("Attempt to set AP=2 failed");
}
} else {
log.info("Radio is in correct AP mode (AP=2)");
}
ap = this.sendAtCommand(new AtCommand("HV"));
RadioType radioType = HardwareVersion.parse(ap);
log.info("XBee radio is " + radioType);
if (radioType == RadioType.UNKNOWN) {
log.warn("Unknown radio type (HV): " + ap.getValue()[0]);
}
AtCommandResponse vr = this.sendAtCommand(new AtCommand("VR"));
if (vr.isOk()) {
log.info("Firmware version is " + ByteUtils.toBase16(vr.getValue()));
}
this.clearResponseQueue();
} catch (XBeeTimeoutException ex) {
throw new XBeeException("AT command timed-out while attempt to set/read in API mode. Check that the XBee radio is in API mode (AP=2); it will not function propertly in AP=1");
}
}
	/**
	 * Opens a serial (RXTX) connection to the radio at the given port and baud rate.
	 * If XBeeConnection.startUpChecks is set to true (default), this method will check if the AP parameter
	 * is set correctly and attempt to update if AP=1. If AP=0 (Transparent mode), an
	 * exception will be thrown.
	 *
	 * @param port serial port identifier (e.g. "/dev/ttyUSB0", "COM3")
	 * @param baudRate serial baud rate
	 * @throws XBeeException if the port cannot be opened or startup checks fail
	 * @throws IllegalStateException if a connection is already open
	 */
	public void open(String port, int baudRate) throws XBeeException {
		try {
			if (this.isConnected()) {
				throw new IllegalStateException("Cannot open new connection -- existing connection is still open. Please close first");
			}
			// Reset the detected radio series; it is re-established after connecting.
			this.type = null;
			RxTxSerialComm serial = new RxTxSerialComm();
			serial.openSerialPort(port, baudRate);
			this.initConnection(serial);
		} catch (XBeeException e) {
			throw e;
		} catch (Exception e) {
			// Wrap any non-XBee failure (e.g. serial I/O) in an XBeeException.
			throw new XBeeException(e);
		}
	}

	/** Registers a custom response class for the given API frame id. */
	public static void registerResponseHandler(int apiId, Class<? extends XBeeResponse> clazz) {
		PacketParser.registerResponseHandler(apiId, clazz);
	}

	/** Removes a previously registered response handler for the given API frame id. */
	public static void unRegisterResponseHandler(int apiId) {
		PacketParser.unRegisterResponseHandler(apiId);
	}

	/**
	 * Allows a protocol specific implementation of XBeeConnection to be used instead of the default RXTX connection.
	 * The connection must already be established as the interface has no means to do so.
	 *
	 * @param connection an already-connected transport
	 * @throws XBeeException if initialization or startup checks fail
	 * @throws IllegalStateException if a connection is already open
	 */
	public void initProviderConnection(XBeeConnection connection) throws XBeeException {
		if (this.isConnected()) {
			throw new IllegalStateException("Cannot open new connection -- existing connection is still open. Please close first");
		}
		initConnection(connection);
	}

	/**
	 * Stores the connection, starts the frame-parsing thread and, when
	 * configured, runs the AP/HV/VR startup checks.
	 */
	private void initConnection(XBeeConnection conn) throws XBeeException {
		try {
			this.xbeeConnection = conn;
			parser = new InputStreamThread(this.xbeeConnection, conf);
			// startup heuristics
			if (conf.isStartupChecks()) {
				this.doStartupChecks();
			}
		} catch (XBeeException e) {
			throw e;
		} catch (Exception e) {
			throw new XBeeException(e);
		}
	}
	/**
	 * Registers a listener that is notified of every parsed incoming packet.
	 * Synchronizes on the listener list shared with the parser thread.
	 *
	 * @throws IllegalStateException if no connection has been initialized
	 */
	public void addPacketListener(PacketListener packetListener) {
		if (parser == null) {
			throw new IllegalStateException("No connection");
		}
		synchronized (parser.getPacketListenerList()) {
			this.parser.getPacketListenerList().add(packetListener);
		}
	}

	/**
	 * Removes a previously registered packet listener.
	 *
	 * @throws IllegalStateException if no connection has been initialized
	 */
	public void removePacketListener(PacketListener packetListener) {
		if (parser == null) {
			throw new IllegalStateException("No connection");
		}
		synchronized (parser.getPacketListenerList()) {
			this.parser.getPacketListenerList().remove(packetListener);
		}
	}
public void sendRequest(XBeeRequest request) throws IOException {
if (this.type != null) {
// TODO use interface to mark series type
if (type == RadioType.SERIES1 && request.getClass().getPackage().getName().indexOf("api.zigbee") > -1) {
throw new IllegalArgumentException("You are connected to a Series 1 radio but attempting to send Series 2 requests");
} else if (type == RadioType.SERIES2 && request.getClass().getPackage().getName().indexOf("api.wpan") > -1) {
throw new IllegalArgumentException("You are connected to a Series 2 radio but attempting to send Series 1 requests");
}
}
log.info("Sending request to XBee: " + request);
this.sendPacket(request.getXBeePacket());
}
	/**
	 * It's possible for packets to get interspersed if multiple threads send simultaneously.
	 * This method is not thread-safe because doing so would introduce a synchronized performance penalty
	 * for the vast majority of users that will never need thread safety.
	 * That said, it is the responsibility of the user to provide synchronization if multiple threads are sending.
	 *
	 * Not thread safe.
	 *
	 * @param packet assembled packet whose bytes are written to the radio
	 * @throws IOException if the write to the connection fails
	 */
	public void sendPacket(XBeePacket packet) throws IOException {
		this.sendPacket(packet.getByteArray());
	}

	/**
	 * This exists solely for the XMPP project. Use sendRequest instead
	 *
	 * Not Thread Safe
	 *
	 * @param packet raw packet bytes, one int per byte
	 * @throws IOException if the write to the connection fails
	 * @throws RuntimeException when serial device is disconnected
	 */
	public void sendPacket(int[] packet) throws IOException {
		// TODO should we synchronize on read lock so we are sending/recv. simultaneously?
		// TODO call request listener with byte array
		if (!this.isConnected()) {
			throw new XBeeNotConnectedException();
		}
		if (log.isInfoEnabled()) {
			log.info("Sending packet to XBee " + ByteUtils.toBase16(packet));
		}
		// Write byte-by-byte, then flush once at the end.
		for (int packetByte : packet) {
			// if connection lost
			//Caused by: com.rapplogic.xbee.api.XBeeException
			//Caused by: java.io.IOException: Input/output error in writeArray
			xbeeConnection.getOutputStream().write(packetByte);
		}
		xbeeConnection.getOutputStream().flush();
	}

	/**
	 * Sends an XBeeRequest though the XBee interface in an asynchronous manner, such that
	 * it will return immediately, without waiting for a response.
	 * Refer to the getResponse method for obtaining a response
	 *
	 * Not thread safe
	 *
	 * @param request request to send
	 * @throws XBeeException wrapping any failure during the send
	 */
	public void sendAsynchronous(XBeeRequest request) throws XBeeException {
		try {
			this.sendRequest(request);
		} catch (Exception e) {
			throw new XBeeException(e);
		}
	}

	/**
	 * Uses sendSynchronous to send an AtCommand and collect the response
	 * <p/>
	 * Timeout value is fixed at 5 seconds
	 *
	 * @deprecated Use this.sendSynchronous(command, timeout);
	 * @param command AT command to send
	 * @return the matching AT command response
	 * @throws XBeeException if the send fails or the command times out
	 */
	public AtCommandResponse sendAtCommand(AtCommand command) throws XBeeException {
		return (AtCommandResponse) this.sendSynchronous(command, 5000);
	}
	/**
	 * Synchronous method for sending an XBeeRequest and obtaining the
	 * corresponding response (response that has same frame id).
	 * <p/>
	 * This method returns the first response object with a matching frame id, within the timeout
	 * period, so it is important to use a unique frame id (relative to previous subsequent requests).
	 * <p/>
	 * This method must only be called with requests that receive a response of
	 * type XBeeFrameIdResponse. All other request types will timeout.
	 * <p/>
	 * Keep in mind responses received here will also be available through the getResponse method
	 * and the packet listener. If you would prefer to not have these responses added to the response queue,
	 * you can add a ResponseQueueFilter via XBeeConfiguration to ignore packets that are sent in response to
	 * a request. Another alternative is to call clearResponseQueue prior to calling this method.
	 * <p/>
	 * It is recommended to use a timeout of at least 5 seconds, since some responses can take a few seconds or more
	 * (e.g. if remote radio is not powered on).
	 * <p/>
	 * This method is thread-safe
	 *
	 * @param xbeeRequest request to send; its frame id must not be NO_RESPONSE_FRAME_ID
	 * @param timeout milliseconds to wait for a response with a matching frame id
	 * @return the first response whose frame id matches the request
	 * @throws XBeeException on send failure or a zero frame id
	 * @throws XBeeTimeoutException thrown if no matching response is identified
	 */
	public XBeeResponse sendSynchronous(final XBeeRequest xbeeRequest, int timeout) throws XBeeTimeoutException, XBeeException {
		if (xbeeRequest.getFrameId() == XBeeRequest.NO_RESPONSE_FRAME_ID) {
			throw new XBeeException("Frame Id cannot be 0 for a synchronous call -- it will always timeout as there is no response!");
		}
		PacketListener pl = null;
		try {
			// Holder for the matched response; also serves as the wait/notify monitor.
			final List<XBeeResponse> container = new LinkedList<XBeeResponse>();
			// this makes it thread safe -- prevents multiple threads from writing to output stream simultaneously
			synchronized (sendPacketBlock) {
				this.sendRequest(xbeeRequest);
			}
			pl = new PacketListener() {
				// TODO handle error response as well
				public void processResponse(XBeeResponse response) {
					if (response instanceof XBeeFrameIdResponse && ((XBeeFrameIdResponse)response).getFrameId() == xbeeRequest.getFrameId()) {
						// frame id matches -- yay we found it
						container.add(response);
						synchronized(container) {
							// Wake the waiting thread below.
							container.notify();
						}
					}
				}
			};
			this.addPacketListener(pl);
			synchronized (container) {
				try {
					// Wait until the listener notifies us or the timeout elapses.
					container.wait(timeout);
				} catch (InterruptedException e) { }
			}
			if (container.size() == 0) {
				// we didn't find a matching packet
				throw new XBeeTimeoutException();
			}
			return (XBeeResponse) container.get(0);
		} catch (IOException io) {
			throw new XBeeException(io);
		} finally {
			// Always detach the temporary listener, even on timeout or error.
			if (pl != null) {
				this.removePacketListener(pl);
			}
		}
	}

	/**
	 * Uses sendSynchronous timeout defined in XBeeConfiguration (default is 5000ms)
	 */
	public XBeeResponse sendSynchronous(final XBeeRequest request) throws XBeeTimeoutException, XBeeException {
		return this.sendSynchronous(request, conf.getSendSynchronousTimeout());
	}
/**
 * Same as getResponse(int) but never times out: blocks until a packet is
 * available. It's highly recommended that you always use a timeout because
 * if the serial connection dies under certain conditions, you will end up
 * waiting forever! Consider using the PacketListener for asynchronous
 * (non-blocking) behavior.
 *
 * @return the next response from the queue (blocks indefinitely)
 * @throws XBeeException if interrupted while waiting
 */
public XBeeResponse getResponse() throws XBeeException {
	// null timeout selects the blocking take() path
	return this.getResponseTimeout(null);
}
/**
 * Returns an XBeeResponse from the queue, if available, or waits up to
 * "timeout" milliseconds for one.
 * <p/>
 * Three possible outcomes:
 * 1. A packet is returned within "timeout" milliseconds. <br/>
 * 2. An XBeeTimeoutException is thrown (queue stayed empty for the whole timeout). <br/>
 * 3. Null is returned if timeout is 0 and the queue is empty. <br/>
 *
 * @param timeout milliseconds to wait for a response; zero disables the timeout
 * @return a response, or null (see above)
 * @throws XBeeException if interrupted while waiting
 * @throws XBeeTimeoutException if the timeout elapses with no response
 */
public XBeeResponse getResponse(int timeout) throws XBeeException, XBeeTimeoutException {
	return getResponseTimeout(Integer.valueOf(timeout));
}
/**
 * Removes a response from the parser's queue, waiting up to
 * <code>timeout</code> ms when the queue is empty; a null timeout blocks
 * indefinitely via take().
 *
 * @param timeout milliseconds to wait; 0 polls without blocking; null blocks forever
 * @return the next response, or null when timeout is 0 and the queue is empty
 * @throws XBeeTimeoutException if a positive timeout elapses with no response
 * @throws XBeeException if interrupted while waiting
 */
private XBeeResponse getResponseTimeout(Integer timeout) throws XBeeException, XBeeTimeoutException {
	// seeing this with xmpp
	if (!this.isConnected()) {
		throw new XBeeNotConnectedException();
	}
	XBeeResponse response;
	try {
		if (timeout != null) {
			response = parser.getResponseQueue().poll(timeout, TimeUnit.MILLISECONDS);
		} else {
			response = parser.getResponseQueue().take();
		}
	} catch (InterruptedException e) {
		throw new XBeeException("Error while attempting to remove packet from queue", e);
	}
	// Guard timeout != null before unboxing: the original compared the boxed
	// Integer directly, which would NPE in the (response == null, timeout == null)
	// combination instead of failing cleanly.
	if (response == null && timeout != null && timeout > 0) {
		throw new XBeeTimeoutException();
	}
	return response;
}
// public List<? extends XBeeResponse> collectResponses(int wait, ResponseFilter filter, CollectTerminator terminator) throws XBeeException {
//
// }
/**
 * Collects responses until the timeout is reached or the CollectTerminator
 * returns true. A timeout while waiting is not an error: whatever has been
 * collected so far is returned.
 *
 * @param wait total milliseconds to spend collecting
 * @param terminator optional; when it returns true for a response, collection stops early
 * @return the responses received within the window (possibly empty)
 * @throws XBeeException on queue/connection errors other than timeout
 */
public List<? extends XBeeResponse> collectResponses(int wait, CollectTerminator terminator) throws XBeeException {
	// seeing this with xmpp
	if (!this.isConnected()) {
		throw new XBeeNotConnectedException();
	}
	long start = System.currentTimeMillis();
	long callStart = 0;
	int waitTime;
	List<XBeeResponse> responseList = new ArrayList<XBeeResponse>();
	XBeeResponse response = null;
	try {
		while (true) {
			// compute the remaining wait time
			waitTime = wait - (int) (System.currentTimeMillis() - start);
			if (waitTime <= 0) {
				break;
			}
			log.debug("calling getResponse with waitTime: " + waitTime);
			if (log.isDebugEnabled()) {
				callStart = System.currentTimeMillis();
			}
			response = this.getResponse(waitTime);
			if (log.isDebugEnabled()) {
				log.debug("Got response in " + (System.currentTimeMillis() - callStart));
			}
			responseList.add(response);
			if (terminator != null && terminator.stop(response)) {
				log.debug("Found terminating response.. exiting");
				break;
			}
		}
	} catch (XBeeTimeoutException e) {
		// ok, we'll just return whatever is in the list
	}
	// note: the original also caught XBeeException only to rethrow it
	// unchanged; that redundant clause was removed (same behavior).
	log.debug("Time is up.. returning list with " + responseList.size() + " packets");
	return responseList;
}
/**
 * Collects responses for <code>wait</code> milliseconds and returns them as
 * a List; equivalent to collectResponses(wait, null) with no terminator.
 *
 * @param wait milliseconds to spend collecting
 * @return the responses received within the window
 * @throws XBeeException on queue/connection errors other than timeout
 */
public List<? extends XBeeResponse> collectResponses(int wait) throws XBeeException {
	// no terminator: run until the time budget is exhausted
	return collectResponses(wait, null);
}
/**
 * Returns the number of packets available in the response queue for
 * immediate consumption (i.e. getResponse(0) would succeed this many times).
 *
 * @return current size of the parser's response queue
 * @throws XBeeNotConnectedException if the connection is not established
 */
public int getResponseQueueSize() {
	// seeing this with xmpp
	if (!this.isConnected()) {
		throw new XBeeNotConnectedException();
	}
	return parser.getResponseQueue().size();
}
/**
 * Shuts down RXTX and the packet parser thread, closing both streams of the
 * underlying connection and clearing the connection state.
 * <p/>
 * Fix: the original closed only the output stream, leaking the input stream.
 *
 * @throws IllegalStateException if not connected
 */
public void close() {
	if (!this.isConnected()) {
		throw new IllegalStateException("XBee is not connected");
	}
	// shutdown parser thread
	if (parser != null) {
		parser.setDone(true);
		// interrupts thread, if waiting. does not interrupt thread if blocking on read
		// serial port close will be closed prior to thread exit
		parser.interrupt();
	}
	// close both streams independently so a failure on one does not skip the other
	try {
		this.xbeeConnection.getInputStream().close();
	} catch (IOException e) {
		log.warn("Failed to close input stream", e);
	}
	try {
		this.xbeeConnection.getOutputStream().close();
	} catch (IOException e) {
		log.warn("Failed to close output stream", e);
	}
	this.type = null;
	parser = null;
	xbeeConnection = null;
}
/**
 * Indicates if a serial port connection has been established; the open
 * method may be called if this returns true.
 *
 * @return true when both streams of the parser's connection are non-null
 */
public boolean isConnected() {
	try {
		// a torn-down connection surfaces as a null stream or an exception
		return parser.getXBeeConnection().getInputStream() != null
				&& parser.getXBeeConnection().getOutputStream() != null;
	} catch (Exception e) {
		// parser/connection may be null mid-teardown; treat as disconnected
		return false;
	}
}
// TODO move to its own class
// Rolling frame-id counter; getNextFrameId() wraps it from 0xff back to 1
// (0 is reserved for "no response" requests).
private int sequentialFrameId = 0xff;
/**
 * Returns the frame id most recently handed out by getNextFrameId()
 * without advancing the counter.
 */
public int getCurrentFrameId() {
	// TODO move to separate class (e.g. FrameIdCounter)
	return sequentialFrameId;
}
/**
 * This is useful for obtaining a frame id when composing your XBeeRequest.
 * It returns frame ids sequentially until the maximum (0xff) is reached,
 * then flips to 1 and starts over (0 is reserved for "no response").
 *
 * Not Thread-safe
 *
 * @return the next frame id in the 1..0xff cycle
 */
public int getNextFrameId() {
	// advance with wrap-around: 0xff -> 1, otherwise increment
	sequentialFrameId = (sequentialFrameId == 0xff) ? 1 : sequentialFrameId + 1;
	return sequentialFrameId;
}
/**
 * Updates the frame id. Any value between 1 and ff is valid.
 *
 * @param val new frame id; must be in the range 1..0xff
 * @throws IllegalArgumentException if val is out of range
 */
public void updateFrameId(int val) {
	// valid ids are 1..0xff inclusive (0 is reserved)
	final boolean inRange = val > 0 && val <= 0xff;
	if (!inRange) {
		throw new IllegalArgumentException("invalid frame id");
	}
	this.sequentialFrameId = val;
}
/**
 * Removes all packets off of the response queue, discarding them.
 * Useful before a sendSynchronous call so stale packets are not matched.
 *
 * @throws XBeeNotConnectedException if the connection is not established
 */
public void clearResponseQueue() {
	// seeing this with xmpp
	if (!this.isConnected()) {
		throw new XBeeNotConnectedException();
	}
	parser.getResponseQueue().clear();
}
}
|
class Vector:
    """Simple 3D vector with basic arithmetic operations."""

    def __init__(self, x, y, z):
        # Components are stored as given; no numeric coercion is performed.
        self.x = x
        self.y = y
        self.z = z

    def magnitude(self):
        """Return the Euclidean length of the vector."""
        return (self.x ** 2 + self.y ** 2 + self.z ** 2) ** 0.5

    def normalize(self):
        """Return a new unit vector pointing in the same direction.

        Raises:
            ValueError: if the vector has zero magnitude (the original
                raised an opaque ZeroDivisionError here).
        """
        mag = self.magnitude()
        if mag == 0:
            raise ValueError("cannot normalize a zero-length vector")
        return Vector(self.x / mag, self.y / mag, self.z / mag)

    def __add__(self, other):
        """Component-wise vector addition."""
        return Vector(self.x + other.x, self.y + other.y, self.z + other.z)

    def __sub__(self, other):
        """Component-wise vector subtraction."""
        return Vector(self.x - other.x, self.y - other.y, self.z - other.z)

    def __mul__(self, scalar):
        """Scale each component by a scalar."""
        return Vector(self.x * scalar, self.y * scalar, self.z * scalar)

    # Generalization: support scalar * vector in addition to vector * scalar.
    __rmul__ = __mul__
#!/bin/sh
# Regenerates the Elm petstore sample client with openapi-generator,
# building the CLI jar first if it is missing.
SCRIPT="$0"
echo "# START SCRIPT: $SCRIPT"
# Resolve symlinks so relative paths work when the script is invoked via a link.
while [ -h "$SCRIPT" ] ; do
  ls=`ls -ld "$SCRIPT"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=`dirname "$SCRIPT"`/"$link"
  fi
done
# Default APP_DIR to the repository root (parent of this script's directory).
if [ ! -d "${APP_DIR}" ]; then
  APP_DIR=`dirname "$SCRIPT"`/..
  APP_DIR=`cd "${APP_DIR}"; pwd`
fi
executable="./modules/openapi-generator-cli/target/openapi-generator-cli.jar"
# Build the CLI jar on first run.
if [ ! -f "$executable" ]
then
  mvn clean package
fi
# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -Xmx1024M -DloggerPath=conf/log4j.properties"
# NOTE(review): "$@" inside a double-quoted string collapses arguments that
# contain spaces; works for simple extra flags only — confirm before passing
# quoted arguments through this script.
ags="generate -i modules/openapi-generator/src/test/resources/3_0/petstore.yaml -g elm -t modules/openapi-generator/src/main/resources/elm -o samples/openapi3/client/petstore/elm --additional-properties elmPrefixCustomTypeVariants=true $@"
java $JAVA_OPTS -jar $executable $ags
|
<reponame>hmrc/claim-tax-refund-frontend
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import config.FrontendAppConfig
import connectors.DataCacheConnector
import controllers.actions.{AuthAction, DataRequiredAction, DataRetrievalAction}
import javax.inject.Inject
import play.api.i18n.I18nSupport
import play.api.mvc.{Action, AnyContent, MessagesControllerComponents}
import uk.gov.hmrc.play.bootstrap.frontend.controller.FrontendController
import uk.gov.hmrc.play.partials.FormPartialRetriever
import views.html.sessionTimedout
import scala.concurrent.{ExecutionContext, Future}
/**
 * Handles session lifecycle endpoints: keep-alive pings, the timed-out
 * page, and clearing a user's cached answers on sign-out.
 */
class SessionManagementController @Inject()(val appConfig: FrontendAppConfig,
                                            sessionTimedout: sessionTimedout,
                                            cc: MessagesControllerComponents,
                                            authenticate: AuthAction,
                                            dataCacheConnector: DataCacheConnector,
                                            getData: DataRetrievalAction,
                                            requireData: DataRequiredAction,
                                            implicit val executionContext: ExecutionContext) extends FrontendController(cc) with I18nSupport {

  /** No-op endpoint polled by the frontend to extend the user's session. */
  def extendSession: Action[AnyContent] = Action.async {
    Future.successful(Ok("OK"))
  }

  /** Renders the session-timed-out page and discards the Play session. */
  def signedOut: Action[AnyContent] = Action {
    implicit request =>
      Ok(sessionTimedout(appConfig)).withNewSession
  }

  /** Removes the user's cached data, then redirects to the signed-out page. */
  def clearSessionData: Action[AnyContent] = authenticate.async {
    implicit request =>
      // The removal result is not needed; bind it to `_` (the original bound
      // an unused `cacheMap`, which triggers unused-variable warnings).
      dataCacheConnector.removeAll(request.externalId).map { _ =>
        Redirect(routes.SessionManagementController.signedOut)
      }
  }
}
|
<reponame>Tiltification/sonic-tilt
//
// DispatcherSampleAppDelegate.h
// DispatcherSample
//
// Copyright (c) 2011 <NAME> (<EMAIL>)
//
// For information on usage and redistribution, and for a DISCLAIMER OF ALL
// WARRANTIES, see the file, "LICENSE.txt," in this distribution.
//
#import <UIKit/UIKit.h>
#import "PdAudioController.h"
#import "PdDispatcher.h"
@class DispatcherSampleViewController;
// Application delegate: owns the main window, the root view controller,
// and the libpd audio/dispatcher objects used by the sample.
@interface DispatcherSampleAppDelegate : NSObject <UIApplicationDelegate> {
    UIWindow *window;
    DispatcherSampleViewController *viewController;
    PdAudioController *audioController;  // configures/starts the Pd audio engine
    PdDispatcher *dispatcher;            // routes Pd messages to registered listeners
}
@property (nonatomic, retain) IBOutlet UIWindow *window;
@property (nonatomic, retain) IBOutlet DispatcherSampleViewController *viewController;
@end
|
import React, {Component} from 'react';
import ListView from "./src/Components/ListView";
export default class App extends Component<Props> {
render() {
return (
<ListView/>
);
}
}
|
<gh_stars>0
package main
import (
"flag"
"fmt"
"io/ioutil"
"os"
"code.google.com/p/mahonia"
)
// Default values for the command-line flags registered in main.
var (
	infile  = "in.txt"  // -file: input file path
	outfile = "out.txt" // -out: output file path
	src     = "utf8"    // -src: source character encoding
	dst     = "tcvn3"   // -dst: destination character encoding
)
// main converts the text in -file from the -src charset to the -dst charset
// using mahonia, writing the result to -out.
func main() {
	flag.StringVar(&infile, "file", infile, "file")
	flag.StringVar(&src, "src", src, "src")
	flag.StringVar(&dst, "dst", dst, "dst")
	flag.StringVar(&outfile, "out", outfile, "out")
	flag.Parse()
	decoder := mahonia.NewDecoder(src)
	if decoder == nil {
		// fixed typo: "unknow" -> "unknown"
		fmt.Println("unknown src code ", src)
		return
	}
	encoder := mahonia.NewEncoder(dst)
	if encoder == nil {
		// fixed copy/paste bug: this message reported "src" for the dst charset
		fmt.Println("unknown dst code ", dst)
		return
	}
	f, err := os.Open(infile)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer f.Close()
	bytes, err := ioutil.ReadAll(f)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Decode the raw bytes into UTF-8 text.
	_, bytes2, err := decoder.Translate(bytes, false)
	if err != nil {
		fmt.Println(err)
		return
	}
	// Re-encode into the destination charset and write it out.
	strout := encoder.ConvertString(string(bytes2))
	f2, err := os.OpenFile(outfile, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, os.ModePerm)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer f2.Close()
	f2.Write([]byte(strout))
}
|
#Just to know the last time this was executed
import time
print time.ctime()
import h5py
from bisect import bisect_left
import matplotlib.pyplot as plt
from brianPlotter import *
from gupta_paper_further_formulas_brianversion19 import *
# *** ***
# Be sure to set correct brian main program import file
# *** ***
# NOTE(review): Python 2 script (print statements). Dumps simulation monitor
# data from the gupta_paper run into simulation.hdf5 via brianPlotter.
bp = brianPlotter('simulation.hdf5')
gupta_paper = gupta_paper()
# spikeVoltage and testSpikeVoltage used for reformatting to avoid formatting issue with plotting
spikeVoltage = []
for timeIndex in range(len(gupta_paper.M.i[:])):
    spikeVoltage.append(gupta_paper.M.i[timeIndex].tolist())
# Pair each spike index with its spike time before saving.
spikes = zip(spikeVoltage, gupta_paper.M.t)
#testSpikes = zip(testSpikeVoltage, gupta_paper.testM.t)
bp.saveData('spikes', np.asarray(spikes), downsample=False);
#bp.saveData('testSpikes', np.asarray(testSpikes), downsample=False);
# Membrane voltages in millivolts.
voltage = np.asarray(gupta_paper.UmM.v2.T/mV)
#testVoltage = np.asarray(gupta_paper.testUmM3.v2.T/mV)
#print 'voltage\t',voltage
bp.saveData('voltage',voltage)
#bp.saveData('testVoltage',testVoltage)
# One weight dataset per monitored synapse group.
for weightIndex in range(dictionaryLongitude):
    bp.saveData('weights'+str(weightIndex),np.asarray(gupta_paper.weightMonitors[weightIndex].w.T/volt),downsample=False)
'''weights0 = np.asarray(gupta_paper.WM0.w.T/volt)
weights1 = np.asarray(gupta_paper.WM0.w.T/volt)
weights2 = np.asarray(gupta_paper.WM0.w.T/volt)
weights3 = np.asarray(gupta_paper.WM0.w.T/volt)
bp.saveData('weights0',weights0,downsample=False)
bp.saveData('weights1',weights1,downsample=False)
bp.saveData('weights2',weights2,downsample=False)
bp.saveData('weights3',weights3,downsample=False)'''
del bp #you have to delete the object so the file is closed
print time.ctime()
#!/bin/sh
# shunit2 test suite for pyvenv_setup_venv_file_path: verifies setup-file
# creation (absolute/relative/default paths), permission checks on the setup
# file, and auto-activation behavior when changing directories.
# Disable source following.
# shellcheck disable=SC1090,SC1091
# Disable optional arguments.
# shellcheck disable=SC2120
TEST_SCRIPT="$0"
TEST_DIR="$(dirname -- "$TEST_SCRIPT")"
. "$TEST_DIR/test_helpers"
oneTimeSetUp() {
    th_oneTimeSetUp || return 1
    # Path of the setup file that env B's tests create and remove.
    TEST_ENVS_B_SETUP_FILE="$TEST_ENVS_VENV/B/$PYVENV_ACTIVATE_VENV_PATH_FILE_NAME"
}
th_register_test test_venv_setup_func
test_venv_setup_func() {
    rm -f -- "$TEST_ENVS_B_SETUP_FILE"
    # Create setup file in env B with absolute paths
    pyvenv_setup_venv_file_path "$TEST_ENVS_VENV/B/.pyvenv_venv" "$TEST_ENVS_VENV/B" || \
        fail "unable to create setup file in env B with absolute paths"
    test -r "$TEST_ENVS_B_SETUP_FILE" || \
        fail "setup file in env B was not created"
    rm -f -- "$TEST_ENVS_B_SETUP_FILE"
    # Create setup file in env B with relative paths
    cd -- "$TEST_ENVS_VENV/B" || fail "cd to env B"
    pyvenv_setup_venv_file_path ".pyvenv_venv" "." || \
        fail "unable to create setup file in env B with relative paths"
    test -r "$TEST_ENVS_B_SETUP_FILE" || \
        fail "setup file in env B was not created"
    rm -f -- "$TEST_ENVS_B_SETUP_FILE"
    # Unable to setup file in env B with default paths without activated
    # virtual environment
    pyvenv_setup_venv_file_path 2>/dev/null && \
        fail "should not be able to create setup file in env B with default
paths without activated virtual environment"
    test -r "$TEST_ENVS_B_SETUP_FILE" && \
        fail "setup file in env B should not have been created"
    # Create setup file in env B with default paths
    th_activate_venv "$TEST_ENVS_VENV/B" || fail "activate env B"
    pyvenv_setup_venv_file_path || \
        fail "unable to create setup file in env B with default paths"
    test -r "$TEST_ENVS_B_SETUP_FILE" || \
        fail "setup file in env B was not created"
    deactivate nondestructive
    unset -f deactivate
    rm -f -- "$TEST_ENVS_B_SETUP_FILE"
}
th_register_test test_venv_setup_file_activate_perms
test_venv_setup_file_activate_perms() {
    # Create setup file in env B
    cd -- "$TEST_ENVS_VENV/B" || fail "cd to env B"
    th_pyvenv_setup_venv_file_path || fail "setup in env B"
    # Set wrong perms to setup file
    chmod 644 "$TEST_ENVS_B_SETUP_FILE" || \
        fail "set perms of setup file in env B"
    # pyvenv_activate should fail with setup file with wrong permission
    pyvenv_activate 2>/dev/null && \
        fail "pyvenv_activate in env B with wrong perms on setup file should fail"
    # Reset valid perms to setup file
    chmod 400 "$TEST_ENVS_B_SETUP_FILE" || \
        fail "set perms of setup file in env B"
    # pyvenv_activate should be ok
    pyvenv_activate || fail "pyvenv_activate in env B"
    pyvenv_deactivate || fail "pyvenv_deactivate env B"
}
th_register_test test_venv_setup_file_activate_abs_path
test_venv_setup_file_activate_abs_path() {
    cd -- "$TEST_ENVS_VENV/B" || fail "cd to env B"
    # Set relative path in setup file
    rm -f -- "$TEST_ENVS_B_SETUP_FILE"
    echo ".pyvenv_venv" > "$TEST_ENVS_B_SETUP_FILE"
    chmod 400 "$TEST_ENVS_B_SETUP_FILE" || \
        fail "set perms of setup file in env B"
    # pyvenv_activate should fail with setup file with relative path
    pyvenv_activate 2>/dev/null && \
        fail "pyvenv_activate in env B with relative path in setup file should fail"
    # Set absolute path in setup file
    rm -f -- "$TEST_ENVS_B_SETUP_FILE"
    echo "$PWD/.pyvenv_venv" > "$TEST_ENVS_B_SETUP_FILE"
    chmod 400 "$TEST_ENVS_B_SETUP_FILE" || \
        fail "set perms of setup file in env B"
    # pyvenv_activate should be ok
    pyvenv_activate || fail "pyvenv_activate in env B"
    pyvenv_deactivate || fail "pyvenv_deactivate env B"
}
th_register_test test_venv_setup_in_virtual_env
test_venv_setup_in_virtual_env() {
    rm -f -- "$TEST_ENVS_B_SETUP_FILE"
    assertEquals "check host env" "$HOST_PYTHON_PATH" "$(th_get_python_path)"
    # Change directory to env B and manually activate
    cd -- "$TEST_ENVS_VENV/B" || fail "cd to env B"
    th_activate_venv || fail "activate env B"
    # Check env B python path
    env_b_python_path="$(th_get_python_path)"
    assertNotEquals "python path not equals to host in env B"\
        "$HOST_PYTHON_PATH" "$env_b_python_path"
    # pyvenv_activate should fail without setup file
    pyvenv_activate 2>/dev/null && \
        fail "pyvenv_activate in env B without setup file"
    # Setup pyvenv with activated virtual env in env B
    pyvenv_setup_venv_file_path || fail "pyvenv_setup in env B with activated virtual env"
    # pyvenv_activate should be ok and no op
    pyvenv_activate || fail "pyvenv_activate in env B"
    assertEquals "python path should not be changed in env B after pyvenv_activate"\
        "$env_b_python_path" "$(th_get_python_path)"
    # pyvenv_deactivate should disable the virtual env
    pyvenv_deactivate || fail "pyvenv_deactivate env B"
    assertEquals "python path should equals to host after pyvenv_deactivate"\
        "$HOST_PYTHON_PATH" "$(th_get_python_path)"
    # pyvenv_activate should be ok
    pyvenv_activate || fail "pyvenv_activate in env B"
    assertEquals "python path should be equals to env B after pyvenv_activate"\
        "$env_b_python_path" "$(th_get_python_path)"
    pyvenv_deactivate || fail "pyvenv_deactivate env B"
}
th_register_auto_activate_tests test_venv_setup_auto_activate
test_venv_setup_auto_activate() {
    # Parameterized over the enable/disable/cd commands supplied by the
    # auto-activate test registrar.
    enable_cmd="$1"
    disable_cmd="$2"
    cd_cmd="$3"
    rm -f -- "$TEST_ENVS_B_SETUP_FILE"
    $enable_cmd || fail "enable auto activate"
    assertEquals "check host env" "$HOST_PYTHON_PATH" "$(th_get_python_path)"
    # Change directory to env B
    $cd_cmd -- "$TEST_ENVS_VENV/B" || fail "cd to env B"
    # Virtual environment should not be activated
    assertEquals "python path should equals to host without setup file"\
        "$HOST_PYTHON_PATH" "$(th_get_python_path)"
    # Manual activate virtual environment
    th_activate_venv || fail "activate env B"
    env_b_python_path="$(th_get_python_path)"
    assertNotEquals "python path not equals to host in env B"\
        "$HOST_PYTHON_PATH" "$env_b_python_path"
    # Setup pyvenv with activated virtual env in env B
    pyvenv_setup_venv_file_path || fail "pyvenv_setup in env B with activated virtual env"
    assertEquals "python path not equals to env B"\
        "$env_b_python_path" "$(th_get_python_path)"
    # Change directory to envs tmpdir and check python path
    $cd_cmd -- "$TEST_ENVS_VENV" || fail "cd to envs tmpdir"
    assertEquals "python path equals to host in envs tmpdir after cd"\
        "$HOST_PYTHON_PATH" "$(th_get_python_path)"
    # Go back to env B and check python path
    $cd_cmd -- "$TEST_ENVS_VENV/B" || fail "cd to env B"
    assertEquals "python path not equals to env B"\
        "$env_b_python_path" "$(th_get_python_path)"
    $cd_cmd -- "$TEST_ENVS_VENV" || fail "cd to envs tmpdir"
    $disable_cmd || fail "disable auto activate"
}
. "$TEST_DIR/shunit2/shunit2"
import random

# Draw a pseudo-random integer in the inclusive range [0, 1000] and show it.
value = random.randint(0, 1000)
print(value)
#!/bin/sh
# Bootstraps dotfiles: installs chezmoi to ~/.local/bin if missing, then
# runs `chezmoi init --apply` with this repository as the source.
set -e # -e: exit on error
if [ ! "$(command -v chezmoi)" ]; then
    bin_dir="$HOME/.local/bin"
    chezmoi="$bin_dir/chezmoi"
    # Prefer curl, fall back to wget; abort if neither is available.
    if [ "$(command -v curl)" ]; then
        sh -c "$(curl -fsLS https://git.io/chezmoi)" -- -b "$bin_dir"
    elif [ "$(command -v wget)" ]; then
        sh -c "$(wget -qO- https://git.io/chezmoi)" -- -b "$bin_dir"
    else
        echo "To install chezmoi, you must have curl or wget installed." >&2
        exit 1
    fi
else
    chezmoi=chezmoi
fi
# POSIX way to get script's dir: https://stackoverflow.com/a/29834779/12156188
script_dir="$(cd -P -- "$(dirname -- "$(command -v -- "$0")")" && pwd -P)"
# exec: replace current process with chezmoi init
exec "$chezmoi" init --apply "--source=$script_dir"
|
import { ref } from "vue";
import { Octokit } from "@octokit/rest";
// Shared GitHub REST client; the token comes from the build environment.
// NOTE(review): "Accept" is not a documented Octokit constructor option —
// confirm whether this header is actually applied to requests.
const octokit = new Octokit({
  auth: process.env.OCTOKIT_API_KEY,
  userAgent: "brampijper",
  Accept: "application/vnd.github.16.28.4.raw",
});
/**
 * Composable that fetches the org's repositories and exposes them together
 * with a loading flag.
 *
 * Fixes over the original: Array.filter was called purely for its side
 * effects (its result was discarded and a parallel array built via push);
 * `loading` was never cleared when the request failed; and an inner
 * `repositories` shadowed the outer ref's backing array.
 *
 * @returns {{loading: import('vue').Ref, repositories: import('vue').Ref}}
 */
export default async function useGithubRepositories() {
  const loading = ref(null);

  async function fetchRepo() {
    loading.value = true;
    try {
      const res = await octokit.repos.listForOrg({
        org: "brampijper-gh-pages",
      });
      if (!res) {
        console.log("no response received", res);
      }
      // Keep only repositories that have a homepage configured.
      return res.data.filter((repo) => Boolean(repo.homepage));
    } catch (err) {
      throw new Error(err);
    } finally {
      // Clear the flag on both success and failure.
      loading.value = false;
    }
  }

  const repositories = ref(await fetchRepo());

  return {
    loading,
    repositories,
  };
}
|
#!/bin/bash -ex
# Starts the dev environment (MySQL container) if it is not already running,
# then runs the autobet stats job and evaluates three betting strategies.
if ! docker ps | grep mysql_autobet -q; then
    # Give the freshly started database time to accept connections.
    source bin/devenv_start.sh
    sleep 20
fi
java -jar target/autobet-0.1-SNAPSHOT-executable.jar stats -t PT10M
java -jar target/autobet-0.1-SNAPSHOT-executable.jar eval -t PT1M -s random
java -jar target/autobet-0.1-SNAPSHOT-executable.jar eval -t PT1M -s low_bet
java -jar target/autobet-0.1-SNAPSHOT-executable.jar eval -t PT1M -s chances_based_on_goal_based_rater
|
<gh_stars>1-10
#include <stdio.h>
/* Print an integer followed by a newline.
 * Returns printf's result: the number of characters written,
 * or a negative value on output error. */
int print(int n)
{
    int written = printf("%d\n", n);
    return written;
}
/* Read an integer from stdin.
 * Returns the parsed value, or 0 when scanf fails to match an integer
 * (the original returned an uninitialized int on failure — undefined
 * behavior).
 * NOTE(review): the name `read` shadows POSIX read(2); consider renaming
 * at the next interface-breaking opportunity. */
int read()
{
    int n = 0;
    if (scanf("%d", &n) != 1) {
        /* no integer available: fall back to 0 instead of garbage */
        return 0;
    }
    return n;
}
|
import assert from 'assert'
import { fetchSongs, fetchSong } from '../scraper.js'
import { describe, it } from 'mocha'
import fs from 'fs/promises'
import path from 'path'
// Fetch `song` via the scraper and deep-compare it against the expected
// JSON fixture stored at sources/wneen.com/test/<file>.
function assertSong (t, file, song) {
  t.timeout(20000)
  const fixturePath = path.resolve('sources', 'wneen.com', 'test', file)
  return fs.readFile(fixturePath)
    .then(raw => JSON.parse(raw))
    .then(expectedSong =>
      fetchSong(song).then(actualSong => {
        assert.deepStrictEqual(actualSong, expectedSong)
      })
    )
}
// Network-backed integration tests against wneen.com.
// Fix: assert.equal (deprecated, loose ==) replaced with assert.strictEqual;
// both operands are numbers, so behavior is unchanged for passing cases.
describe('scraper', function () {
  describe('singers', function () {
    this.timeout(20000)
    it('should return ابراهيم الحكمي 47 songs', function () {
      return fetchSongs({
        artist: 'ابراهيم الحكمي',
        url: 'https://www.wneen.com/singer/128'
      }).then(songs => {
        assert.strictEqual(songs.length, 47)
      })
    })
    it('should return محمد عبده 462 songs', function () {
      return fetchSongs({
        artist: 'محمد عبده',
        url: 'https://www.wneen.com/singer/1'
      }).then(songs => {
        assert.strictEqual(songs.length, 462)
      })
    })
  })
  describe('songs', function () {
    it('11121', function () {
      const song = {
        name: 'ابتسم',
        url: 'https://www.wneen.com/lyrics/11121'
      }
      return assertSong(this, '11121.json', song)
    })
  })
})
|
package com.modesteam.urutau.service.persistence;
/**
 * Sort direction (ascending/descending) used when building
 * persistence-layer queries.
 */
public enum OrderEnum {
	ASC, DESC
}
|
A simple way to identify all the palindrome numbers between 1 and 10000 is the following: for each number from 1 to 10000, convert it to a string and check whether the reversed string equals the original string. If it does, the number is a palindrome.
#!/bin/bash
#
# Copyright (c) 2015-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
#
set -ex
# Translates the CI environment variables (TARGET, DEST, ACTION, SDK,
# CODE_SIGN) into xcodebuild arguments: XC_TARGET, XC_DESTINATION,
# XC_ACTION, XC_SDK and the XC_MACROS build settings string.
function define_xc_macros() {
  XC_MACROS="CODE_SIGN_IDENTITY=\"\" CODE_SIGNING_REQUIRED=NO"
  # Scheme selection; unknown targets abort the build.
  case "$TARGET" in
    "lib" ) XC_TARGET="WebDriverAgentLib";;
    "runner" ) XC_TARGET="WebDriverAgentRunner";;
    "tv_lib" ) XC_TARGET="WebDriverAgentLib_tvOS";;
    "tv_runner" ) XC_TARGET="WebDriverAgentRunner_tvOS";;
    *) echo "Unknown TARGET"; exit 1 ;;
  esac
  # Optional simulator/device destination; DEST may be unset, in which case
  # XC_DESTINATION stays empty and xcbuild omits -destination.
  case "${DEST:-}" in
    "iphone" ) XC_DESTINATION="name=`echo $IPHONE_MODEL | tr -d "'"`,OS=$IOS_VERSION";;
    "ipad" ) XC_DESTINATION="name=`echo $IPAD_MODEL | tr -d "'"`,OS=$IOS_VERSION";;
    "tv" ) XC_DESTINATION="name=`echo $TV_MODEL | tr -d "'"`,OS=$TV_VERSION";;
    "generic" ) XC_DESTINATION="generic/platform=iOS";;
    "tv_generic" ) XC_DESTINATION="generic/platform=tvOS" XC_MACROS="${XC_MACROS} ARCHS=arm64";; # tvOS only supports arm64
  esac
  # Build action; analyze also redirects analyzer output to ./clang.
  case "$ACTION" in
    "build" ) XC_ACTION="build";;
    "analyze" )
      XC_ACTION="analyze"
      XC_MACROS="${XC_MACROS} CLANG_ANALYZER_OUTPUT=plist-html CLANG_ANALYZER_OUTPUT_DIR=\"$(pwd)/clang\""
      ;;
    "unit_test" ) XC_ACTION="test -only-testing:UnitTests";;
    "tv_unit_test" ) XC_ACTION="test -only-testing:UnitTests_tvOS";;
  esac
  # SDK selection; unknown SDKs abort the build.
  case "$SDK" in
    "sim" ) XC_SDK="iphonesimulator";;
    "device" ) XC_SDK="iphoneos";;
    "tv_sim" ) XC_SDK="appletvsimulator";;
    "tv_device" ) XC_SDK="appletvos";;
    *) echo "Unknown SDK"; exit 1 ;;
  esac
  # Optionally disable code signing entirely.
  case "${CODE_SIGN:-}" in
    "no" ) XC_MACROS="${XC_MACROS} CODE_SIGNING_ALLOWED=NO";;
  esac
}
# Runs the analyzer build and fails the job when analyzer HTML reports were
# produced under ./clang.
# NOTE(review): xcbuild ends in `... && exit ${PIPESTATUS[0]}`, so control may
# exit before the report check below runs — confirm the intended flow.
function analyze() {
  xcbuild
  if [[ -z $(find clang -name "*.html") ]]; then
    echo "Static Analyzer found no issues"
  else
    echo "Static Analyzer found some issues"
    exit 1
  fi
}
# Invokes xcodebuild with the scheme/SDK/action computed by define_xc_macros,
# piping the output through xcpretty when it is installed. Exits with
# xcodebuild's status (via PIPESTATUS) when the formatter succeeds.
# Fix: removed the unused `destination=""` variable from the original.
function xcbuild() {
  output_command=cat
  if [ $(which xcpretty) ] ; then
    output_command=xcpretty
  fi
  # Only pass -destination when define_xc_macros produced one.
  if [[ -n "$XC_DESTINATION" ]]; then
    xcodebuild \
      -project "WebDriverAgent.xcodeproj" \
      -scheme "$XC_TARGET" \
      -sdk "$XC_SDK" \
      -destination "$XC_DESTINATION" \
      $XC_ACTION \
      $XC_MACROS \
      | $output_command && exit ${PIPESTATUS[0]}
  else
    xcodebuild \
      -project "WebDriverAgent.xcodeproj" \
      -scheme "$XC_TARGET" \
      -sdk "$XC_SDK" \
      $XC_ACTION \
      $XC_MACROS \
      | $output_command && exit ${PIPESTATUS[0]}
  fi
}
# Runs the given test scheme through fastlane, forwarding the computed SDK
# and (when present) the simulator destination via environment variables.
function fastlane_test() {
  if [[ -n "$XC_DESTINATION" ]]; then
    SDK="$XC_SDK" DEST="$XC_DESTINATION" SCHEME="$1" bundle exec fastlane test
  else
    SDK="$XC_SDK" SCHEME="$1" bundle exec fastlane test
  fi
}
# Fetch dependencies (flags -dn — presumably skip-build/no-color; TODO confirm
# against Scripts/bootstrap.sh), compute xcodebuild arguments, then dispatch
# on the requested ACTION.
./Scripts/bootstrap.sh -dn
define_xc_macros
case "$ACTION" in
  "analyze" ) analyze ;;
  "int_test_1" ) fastlane_test IntegrationTests_1 ;;
  "int_test_2" ) fastlane_test IntegrationTests_2 ;;
  "int_test_3" ) fastlane_test IntegrationTests_3 ;;
  *) xcbuild ;;
esac
|
package m.co.rh.id.anavigator.example.dialog;
import android.app.Activity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import m.co.rh.id.anavigator.StatefulView;
import m.co.rh.id.anavigator.annotation.NavInject;
import m.co.rh.id.anavigator.component.INavigator;
import m.co.rh.id.anavigator.example.R;
/**
 * Full-screen dialog page demonstrating state retention: a tap counter that
 * survives view re-creation and a button that pushes Dialog2Page.
 */
public class Full1Page extends StatefulView<Activity> {
	@NavInject
	private transient INavigator mNavigator;
	// Number of button presses; kept in the StatefulView so it survives
	// view re-creation.
	private int mCount;

	@Override
	protected View createView(Activity activity, ViewGroup container) {
		final View root = activity.getLayoutInflater().inflate(R.layout.page_dialog_full_1, container, false);

		final TextView countText = root.findViewById(R.id.text_view_count);
		countText.setText("Current count: " + mCount);

		final Button countButton = root.findViewById(R.id.button_count);
		countButton.setOnClickListener(v -> {
			mCount++;
			countText.setText("Current count: " + mCount);
		});

		final Button showDialog2Button = root.findViewById(R.id.button_show_dialog_2);
		showDialog2Button.setOnClickListener(v ->
				mNavigator.push((args, activity1) -> new Dialog2Page()));

		Toast.makeText(activity, "Dialog Full page 1 createView", Toast.LENGTH_LONG).show();
		return root;
	}
}
|
var zIndexes = {'auto':'auto'};
for (let i = 0; i < 100; i++) {
zIndexes[i] = i;
}
module.exports = {
purge: [
'./resources/**/*.blade.php',
'./resources/**/*.js',
'./resources/**/*.vue',
],
darkMode: false, // or 'media' or 'class'
theme: {
extend: {},
zIndex: zIndexes
},
variants: {
extend: {},
},
plugins: [],
}
|
/**
 * Sum the values of a collection of numeric costs.
 *
 * @param {number[]|Object<string, number>} costs - array or plain object of cost values
 * @returns {number} the total of all costs (0 for an empty collection)
 */
function calculateTotalCost(costs) {
  // `let` fixes the original's accidental implicit global `totalCost`.
  let totalCost = 0;
  // for...in is kept (rather than for...of) so plain objects keep working.
  for (const key in costs) {
    totalCost += costs[key];
  }
  return totalCost;
}
<reponame>chnghia/gatsby_fresh_starter
import React from 'react'
// Site-wide footer: brand logo, three link columns (Product, Docs,
// Blogroll), social icons, and a "Made with Bulma" badge.
// NOTE(review): all link targets are "#" placeholders — fill in before launch.
const Footer = () => (
  <footer className="footer footer-dark">
    <div className="container">
      <div className="columns">
        {/* Brand logo column */}
        <div className="column">
          <div className="footer-logo">
            <img src="assets/images/logos/fresh-white-alt.svg" />
          </div>
        </div>
        {/* Product links */}
        <div className="column">
          <div className="footer-column">
            <div className="footer-header">
              <h3>Product</h3>
            </div>
            <ul className="link-list">
              <li>
                <a href="#">Discover features</a>
              </li>
              <li>
                <a href="#">Why choose our Product ?</a>
              </li>
              <li>
                <a href="#">Compare features</a>
              </li>
              <li>
                <a href="#">Our Roadmap</a>
              </li>
              <li>
                <a href="#">Request features</a>
              </li>
            </ul>
          </div>
        </div>
        {/* Documentation links */}
        <div className="column">
          <div className="footer-column">
            <div className="footer-header">
              <h3>Docs</h3>
            </div>
            <ul className="link-list">
              <li>
                <a href="#">Get Started</a>
              </li>
              <li>
                <a href="#">User guides</a>
              </li>
              <li>
                <a href="#">Admin guide</a>
              </li>
              <li>
                <a href="#">Developers</a>
              </li>
            </ul>
          </div>
        </div>
        {/* Blog links */}
        <div className="column">
          <div className="footer-column">
            <div className="footer-header">
              <h3>Blogroll</h3>
            </div>
            <ul className="link-list">
              <li>
                <a href="#">Latest News</a>
              </li>
              <li>
                <a href="#">Tech articles</a>
              </li>
              <li>
                <a href="#">Video Blog</a>
              </li>
            </ul>
          </div>
        </div>
        {/* Social icons + Bulma badge */}
        <div className="column">
          <div className="footer-column">
            <div className="footer-header">
              <h3>Follow Us</h3>
              <nav className="level is-mobile">
                <div className="level-left">
                  <a className="level-item" href="https://github.com/#">
                    <span className="icon">
                      <i className="fa fa-github" />
                    </span>
                  </a>
                  <a className="level-item" href="https://dribbble.com/#">
                    <span className="icon">
                      <i className="fa fa-dribbble" />
                    </span>
                  </a>
                  <a className="level-item" href="https://fb.com/#">
                    <span className="icon">
                      <i className="fa fa-facebook" />
                    </span>
                  </a>
                  <a className="level-item" href="https://twitter.com/#">
                    <span className="icon">
                      <i className="fa fa-twitter" />
                    </span>
                  </a>
                  <a className="level-item" href="https://bitbucket.org/#">
                    <span className="icon">
                      <i className="fa fa-bitbucket" />
                    </span>
                  </a>
                </div>
              </nav>
              <a href="https://bulma.io" target="_blank">
                <img
                  src="assets/images/logos/made-with-bulma.png"
                  alt="Made with Bulma"
                  width="128"
                  height="24"
                />
              </a>
            </div>
          </div>
        </div>
      </div>
    </div>
  </footer>
)
export default Footer
|
import React from 'react';
import { StyleSheet, Text, View, TextStyle } from 'react-native';
import { Button, Loading, primary } from '../../../../../packages/client/src/modules/common/components/native';
import { TranslateFunction } from '@gqlapp/i18n-client-react';
// Props for ServerCounterView: translation function, nested children,
// the counter query result, and the query's loading flag.
interface ViewProps {
  t: TranslateFunction;
  children: any;
  counter: any; // presumably { amount: number } — TODO confirm against GraphQL schema
  loading: boolean;
}
// Layout styles shared by the counter view.
const styles = StyleSheet.create({
  element: {
    paddingTop: 30
  },
  box: {
    textAlign: 'center',
    marginBottom: 5
  }
});
// Renders the counter amount (or a spinner while loading) plus any children.
export const ServerCounterView = ({ t, children, counter, loading }: ViewProps) => {
  // Early return: show a spinner until the counter query resolves.
  if (loading) {
    return <Loading text={t('loading')} />;
  }
  return (
    <View>
      <View style={styles.element}>
        <Text style={styles.box as TextStyle}>{t('text', { amount: counter.amount })}</Text>
      </View>
      {children}
    </View>
  );
};
// Props for ServerCounterButton: press handler and button label.
interface ButtonProps {
  onClick: () => any;
  text: string;
}
// Primary-styled button that invokes the supplied handler on press.
export const ServerCounterButton = ({ onClick, text }: ButtonProps) => (
  <Button type={primary} onPress={onClick}>
    {text}
  </Button>
);
|
// Entry point: bootstrap the application as soon as this module loads.
import { initialize } from ".";
initialize();
|
# Sign the debug APK with the project keystore, then zipalign it
# (4-byte alignment, -f overwrites any existing output).
jarsigner -verbose -sigalg SHA1withRSA -digestalg SHA1 -keystore /home/nico/d/d/wya/nr-commons.keystore commons/target/commons-1.0-SNAPSHOT.apk nrkeystorealias
zipalign -f -v 4 commons/target/commons-1.0-SNAPSHOT.apk commons/target/commons-1.0-SNAPSHOT_signed.apk
|
<gh_stars>0
import React, { useEffect, useState } from 'react'
import useStyles from "./Styles"
import {InputLabel ,Button ,FormControl , Input} from "@material-ui/core"
import Message from '../Message/Message'
import firebase from "firebase"
import {Link} from "react-router-dom"
function ChatHome({db}) {
const classes = useStyles()
const [input,setInput] = useState("")
const [messages,setMessages] = useState([{username:"",text:""}])
const [username,setUsername] = useState("")
useEffect(() =>
{
setUsername(prompt("Please Enter you username!!"))
},[])
useEffect(() =>
{
db.collection("messages")
.orderBy("timestamp")
.onSnapshot(snapshot =>
{
setMessages(snapshot.docs.map(doc => doc.data()))
})
},[])
const sendMessage = (e) =>
{
db.collection("messages").add({
text:input,
username:username,
timestamp:firebase.firestore.FieldValue.serverTimestamp()
})
setInput("")
}
return (
<div className={classes.root}>
<div style={{display:"flex"}}>
<h1 className={classes.title}>Welcome {username}</h1>
<Link to="/dashboard">
<Button className={classes.btn} variant="outlined">Leave Room</Button>
</Link>
</div>
<div className={classes.box}>
<div className={classes.messages}>
{
messages.map(message => (
<Message username={username} message={message}/>
))
}
</div>
<center>
<form className={classes.form}>
<FormControl className={classes.form}>
<InputLabel>Enter a message</InputLabel>
<Input value={input} onChange={(e) => setInput(e.target.value)} style={{width:"30rem"}}/>
<Button disabled={!input} type="submit" onClick={sendMessage} variant="outlined" color="#6b705c">Send Message</Button>
</FormControl>
</form>
</center>
</div>
</div>
)
}
export default ChatHome
|
#!/bin/bash
# run with sudo
# this file connects to wifi and handles the access point service
ROOT_DIR='/usr/local/mm-config'
SCRIPTS="$ROOT_DIR/bin"
# check if it has the right amount of commands
if [ $# != 1 ]; then
    echo "usage: sudo bash $0 <wifi-name>"
    exit 1
fi
# set the name of the wifi
WIFI_NAME=$1
# read in the password (silent prompt; echo a newline afterwards)
read -sp 'Password: ' PASSWORD
echo ''
# define all the access point handler scripts needed
APH_STATUS_SH="$SCRIPTS/access-point-handler/access-point-handler-status.sh"
APH_RUN_SH="$SCRIPTS/access-point-handler/run-access-point-handler.sh"
APH_STOP_SH="$SCRIPTS/access-point-handler/stop-access-point-handler.sh"
# define all the access point config scripts needed
AP_STATUS_SH="$SCRIPTS/access-point/access-point-service-status.sh"
AP_RUN_SH="$SCRIPTS/access-point/run-access-point-service.sh"
AP_STOP_SH="$SCRIPTS/access-point/stop-access-point-service.sh"
# define all the wifi scripts needed
WIFI_STATUS_SH="$SCRIPTS/wifi/wifi-status.sh"
WIFI_CONNECT_SH="$SCRIPTS/wifi/connect-to-wifi.sh"
# if the access point handler is active stop it
# (substitution quoted so an empty status cannot break the test expression)
if [ "$(bash "$APH_STATUS_SH")" == "active" ]; then
    bash "$APH_STOP_SH"
    # echo 'stopped access-point-handler.service'
fi
# stop access point service
bash "$AP_STOP_SH"
echo 'stopped access point'
# attempt to connect to wifi (password fed on stdin, not the command line)
echo 'attempting to connect to wifi'
bash "$WIFI_CONNECT_SH" "$WIFI_NAME" <<< "$PASSWORD"
# if not connected to wifi start up access point
if [ "$(bash "$WIFI_STATUS_SH")" == "disconnected" ]; then
    bash "$AP_RUN_SH"
    echo 'warning: unable to connect to wifi'
    echo '         started up access point'
fi
# start up the access point handler service
bash "$APH_RUN_SH"
# echo 'started the access-point-handler service'
|
#!/usr/bin/env bash
# Build the Cap'n Proto hash server and client for the imdb demo.
# Abort on the first failed compile so a broken server build is not
# silently followed by (and masked by) the client build.
set -e
g++ -std=gnu++1y -O2 -Wall apps/imdb/hash-server.c++ apps/imdb/hashprotocol.capnp.cc \
    -lcapnpc -lcapnp-rpc -lcapnp -lkj-async -lkj -o hash-server
g++ -std=gnu++1y -O2 -Wall -D_GLIBCXX_USE_CXX11_ABI=0 apps/imdb/hash-client.c++ apps/imdb/hashprotocol.capnp.cc \
    -lcapnpc -lcapnp-rpc -lcapnp -lkj-async -lkj -o hash-client
|
def classify(height, weight, age):
    """Predict a class label for the given measurements.

    A fresh ``Model`` instance receives [height, weight, age] (in that
    order) and its prediction is returned unchanged.
    """
    # Bundle the features in the order the model expects.
    features = [height, weight, age]
    return Model().predict(features)
#!/bin/bash
set -e
# CentOS provision cleanup: drop yum caches and temp files so the
# resulting image stays small.
echo "---- cleanup"
echo CentOS Provision Cleanup
sudo yum clean all
sudo rm -rf /var/lib/yum /var/cache/yum /tmp/*
|
package dao;
import conexao.Conexao;
import java.io.Serializable;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.logging.Level;
import java.util.logging.Logger;
import sessao.SessionUtil;
/**
 * DAO that records salaries through the PRC_REG_SALARIO stored procedure.
 */
public class SalarioDao implements Serializable
{
    private CallableStatement cs;
    private String sql;
    private Connection con;
    private ResultSet rs;
    private PreparedStatement ps;
    private String resultado;

    /**
     * Registers a salary for the employee identified by {@code nif}.
     * The logged-in user's NIF and agency id are taken from the session.
     *
     * @param nif   employee identifier passed to the procedure
     * @param valor salary amount as a decimal string
     */
    public void registrarSalario( String nif,String valor)
    {
        sql="{call PRC_REG_SALARIO(?,?,?,?)}";
        Conexao conexao = new Conexao();
        if(conexao.getCon()!=null)
        {
            try
            {
                cs = conexao.getCon().prepareCall(sql);
                cs.setString(1, SessionUtil.getUserlogado().getNif());
                cs.setString(2, nif);
                cs.setDouble(3, Double.valueOf(valor));
                cs.setInt(4, SessionUtil.getUserlogado().getIdAgencia());
                cs.execute();
            }
            catch (SQLException ex)
            {
                Logger.getLogger(SalarioDao.class.getName()).log(Level.SEVERE, null, ex);
                System.out.println("Erro a registrar salário "+ex.getMessage());
            }
            finally
            {
                // Always release the statement and the connection, even when
                // the call fails; the original closed the connection only on
                // the success path and leaked it on SQLException.
                try
                {
                    if (cs != null)
                    {
                        cs.close();
                    }
                }
                catch (SQLException ex)
                {
                    Logger.getLogger(SalarioDao.class.getName()).log(Level.SEVERE, null, ex);
                }
                conexao.desCon();
            }
        }
    }
}
|
package org.apache.tapestry5.integration.app1.pages;
import org.apache.tapestry5.ComponentAction;
import org.apache.tapestry5.annotations.Environmental;
import org.apache.tapestry5.annotations.Persist;
import org.apache.tapestry5.annotations.Property;
import org.apache.tapestry5.services.FormSupport;
import java.util.ArrayList;
import java.util.List;
/**
 * Demo page: registers a deferred action with the form so that cancelling
 * the form records a message, and also records one on the cancel event.
 */
public class FormCancelActionDemo
{
    @Property
    @Persist
    private List<String> messages;

    @Environmental
    private FormSupport formSupport;

    /** Deferred component action replayed when the form is cancelled. */
    static class AddMessage implements ComponentAction<FormCancelActionDemo>
    {
        public void execute(FormCancelActionDemo component)
        {
            component.addMessage("action trigger");
        }
    }

    private void addMessage(String message)
    {
        // Lazily create the persisted list on first use.
        if (messages == null)
            messages = new ArrayList<String>();

        messages.add(message);
    }

    void onBeginRenderFromForm()
    {
        // Register the deferred cancel action while the form renders.
        formSupport.storeCancel(this, new AddMessage());
    }

    void onCanceledFromForm()
    {
        addMessage("cancel event");
    }
}
|
<reponame>JsonMa/egg-city
'use strict';
// const request = require('supertest');
const mm = require('egg-mock');
const assert = require('assert');
// Integration tests for the city service: boots the egg app fixture once,
// then queries ctx.service.city.getCity by code and by name.
describe('test api through input type', () => {
  let app;
  // Boot the test app before any case runs; returning the promise makes
  // mocha wait for readiness.
  before(() => {
    app = mm.app({
      baseDir: 'apps/city-test',
    });
    return app.ready();
  });
  after(() => app.close());
  afterEach(mm.restore);
  it('should return city info through code', async () => {
    const ctx = app.mockContext();
    const cityInfo = await ctx.service.city.getCity('360000');
    assert(cityInfo[0].code === '360000');
  });
  it('should return city info through name', async () => {
    const ctx = app.mockContext();
    const cityInfo = await ctx.service.city.getCity('宿迁市');
    assert(cityInfo[0].name === '宿迁市');
  });
  // Same lookup exercised at the three administrative levels.
  describe('test api through city type', () => {
    it('should return province info', async () => {
      const ctx = app.mockContext();
      const province = await ctx.service.city.getCity('320000');
      assert(province[0].code === '320000');
    });
    it('should return city info', async () => {
      const ctx = app.mockContext();
      const city = await ctx.service.city.getCity('139000');
      assert(city[0].code === '139000');
    });
    it('should return distinct info', async () => {
      const ctx = app.mockContext();
      const distinct = await ctx.service.city.getCity('410922');
      assert(distinct[0].code === '410922');
    });
  });
});
|
from pioneer.das.api.samples.annotations import *
from pioneer.das.api.samples.echo import Echo
from pioneer.das.api.samples.echo_xyzit import EchoXYZIT
from pioneer.das.api.samples.fast_trace import FastTrace
from pioneer.das.api.samples.image import Image
from pioneer.das.api.samples.image_cylinder import ImageCylinder
from pioneer.das.api.samples.image_fisheye import ImageFisheye
from pioneer.das.api.samples.rpm import RPM
from pioneer.das.api.samples.sample import Sample
from pioneer.das.api.samples.trace import Trace
from pioneer.das.api.samples.xyzit import XYZIT
from pioneer.das.api.samples.xyzvcfar import XYZVCFAR
from pioneer.das.api.samples.xyzvi import XYZVI |
# Tear down the demo resources: pods first, then the services and
# deployments that manage them, and finally the TLS secret and nginx
# config maps they referenced.
kubectl delete pods healthy-monolith monolith secure-monolith
kubectl delete services monolith auth frontend hello
kubectl delete deployments auth frontend hello
kubectl delete secrets tls-certs
kubectl delete configmaps nginx-frontend-conf nginx-proxy-conf
#*******************************************************************************
# scripting.py
#
# <NAME> <<EMAIL>>
# 2012-07-19
#
# Blender addon development template.
#
#*******************************************************************************
# Addon metadata consumed by Blender's addon manager (targets 2.6x API).
bl_info = {
    'name' : 'Addon Template',
    'author' : '<NAME>',
    'version' : ( 0, 0 ),
    'blender' : ( 2, 6, 3 ),
    'location' : 'Add > Mesh',
    'description' : 'Creates a mesh, and adds it to the scene as an object.',
    'warning' : '',
    'wiki_url' : '',
    'tracker_url' : '',
    'category' : 'Add Mesh'
}
#-------------------------------------------------------------------------------
# IMPORTS
#-------------------------------------------------------------------------------
import bmesh
import bpy
from bpy.props import ( BoolProperty,
FloatProperty,
FloatVectorProperty,
StringProperty )
from bpy_extras import object_utils
#-------------------------------------------------------------------------------
# CLASSES
#-------------------------------------------------------------------------------
class PropertyPanel( bpy.types.Panel ):
    """Scene-level panel exposing the addon's user-definable properties."""

    # Panel placement: the Scene tab of the Properties editor.
    bl_label = "Example Property Panel"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_context = "scene"

    def draw( self, context ):
        """Populate the panel layout for the current scene."""
        layout = self.layout
        scene = context.scene

        # One control per registered user property.
        for key in scene.userProperties.keys():
            layout.prop( scene, key )

        # Button that invokes the mesh-creation operator.
        layout.operator( "example.add" )
class AddMeshOperator( bpy.types.Operator ):
    """ Create a mesh as an object in the scene """
    # Blender operator properties
    bl_idname = "example.add"
    bl_label = "Add Example Mesh"
    # Properties used for operator transforms (consumed by object_data_add).
    view_align = BoolProperty( name = "Align to View",
                               default = False )
    location = FloatVectorProperty( name = "Location",
                                    subtype = "TRANSLATION" )
    rotation = FloatVectorProperty( name = "Rotation",
                                    subtype = "EULER" )
    def addMesh( self, context ):
        """ Build the box mesh from the scene's propX/propY/propZ and add it """
        # Set a shortcut to the scene.
        s = context.scene
        # Create the mesh data from the user-definable dimensions.
        bm = self.createMesh( s.propX, s.propY, s.propZ )
        # Create the mesh instance.
        mesh = bpy.data.meshes.new( "Example Mesh" )
        # Import mesh data into mesh instance.
        bm.to_mesh( mesh )
        mesh.update()
        # Add the mesh to the scene as an object.
        self.addObject( context, mesh )
    def addObject( self, context, mesh ):
        """ Add a mesh to the scene as an object """
        # Use the object utilities method to add the object.
        object_utils.object_data_add( context, mesh, operator = self )
    def createMesh( self, w, h, d ):
        """ Build a w x d x h box as a bmesh (pure geometry, no scene access) """
        # Create a blender mesh.
        b = bmesh.new()
        # Add the bottom vertices, and the face.
        b.verts.new( ( 0, 0, 0 ) )
        b.verts.new( ( w, 0, 0 ) )
        b.verts.new( ( w, d, 0 ) )
        b.verts.new( ( 0, d, 0 ) )
        b.faces.new( [ b.verts[ i ] for i in range( 0, 4 ) ] )
        # Add the top vertices, and the face.
        b.verts.new( ( 0, 0, h ) )
        b.verts.new( ( w, 0, h ) )
        b.verts.new( ( w, d, h ) )
        b.verts.new( ( 0, d, h ) )
        b.faces.new( [ b.verts[ i ] for i in range( 4, 8 ) ] )
        # Add the side faces.
        b.faces.new( [ b.verts[ i ] for i in [ 0, 1, 5, 4 ] ] )
        b.faces.new( [ b.verts[ i ] for i in [ 1, 2, 6, 5 ] ] )
        b.faces.new( [ b.verts[ i ] for i in [ 2, 3, 7, 6 ] ] )
        b.faces.new( [ b.verts[ i ] for i in [ 3, 0, 4, 7 ] ] )
        # Return the constructed mesh.
        return b
    def execute( self, context ):
        """ Run the operator """
        # Add the mesh (removed an unused local scene shortcut here).
        self.addMesh( context )
        # Inform the caller the operator finished successfully.
        return { 'FINISHED' }
#-------------------------------------------------------------------------------
# FUNCTIONS
#-------------------------------------------------------------------------------
def main():
    """ Script development entry point.

    Fix: the original read ``def main()`` without the trailing colon,
    which is a SyntaxError and prevented the whole file from loading.
    """
    # Unregister the previous version of the addon.
    unregister()
    # Register the new version of the addon.
    register()
def menu_func( menu, context ):
    """ Addon menu function: appends the operator entry to the Add > Mesh menu """
    # Show the operator in the menu with a cube icon.
    menu.layout.operator( AddMeshOperator.bl_idname, icon = 'MESH_CUBE' )
def register():
    """ Register the addon: scene properties, panel, operator, menu entry """
    # Assign a shortcut to the scene class.
    s = bpy.types.Scene
    # Create a dictionary of user-definable properties.
    # The panel's draw() iterates this dict to build its controls.
    s.userProperties = {
        'propX': FloatProperty( name = "Property X",
                                description = "X-dimension property.",
                                min = 0.1,
                                max = 100.0,
                                default = 1.0 ),
        'propY': FloatProperty( name = "Property Y",
                                description = "Y-dimension property.",
                                min = 0.1,
                                max = 100.0,
                                default = 1.0 ),
        'propZ': FloatProperty( name = "Property Z",
                                description = "Z-dimension property.",
                                min = 0.1,
                                max = 100.0,
                                default = 1.0 )
    }
    # Add properties directly to the scene class.
    # NOTE(review): assigning property definitions via setattr is the
    # 2.6x-era registration style this addon targets — confirm against
    # the Blender version in use.
    for key, prop in s.userProperties.items():
        setattr( s, key, prop )
    # Register the operator and panel.
    bpy.utils.register_class( PropertyPanel )
    bpy.utils.register_class( AddMeshOperator )
    bpy.types.INFO_MT_mesh_add.append( menu_func )
def unregister():
    """ Unregister the addon: panel, operator, and menu entry """
    # Unregister the operator and panel.
    bpy.utils.unregister_class( PropertyPanel )
    bpy.utils.unregister_class( AddMeshOperator )
    bpy.types.INFO_MT_mesh_add.remove( menu_func )
#-------------------------------------------------------------------------------
# PROCEDURE
#-------------------------------------------------------------------------------
if __name__ == "__main__":
    # If executed directly (e.g. from Blender's text editor during
    # development), re-register the addon via the main entry point.
    main()
|
public class SecondMax {
    /**
     * Returns the second largest DISTINCT value in {@code nums}, or -1 when
     * fewer than two distinct values exist.
     *
     * Fix: the original treated a duplicate of the maximum as the second
     * maximum (e.g. {5, 5, 3} returned 5 instead of 3); duplicates of the
     * current maximum are now skipped.
     *
     * Note: -1 is also returned when the second maximum is genuinely
     * Integer.MIN_VALUE — callers needing that range must use a different
     * sentinel.
     *
     * @param nums input array (may be empty)
     * @return second largest distinct element, or -1 if none exists
     */
    public static int secondMaximum(int[] nums) {
        int firstMax = Integer.MIN_VALUE;
        int secondMax = Integer.MIN_VALUE;
        for (int i = 0; i < nums.length; i++) {
            if (nums[i] > firstMax) {
                // New overall maximum: the old maximum becomes the runner-up.
                secondMax = firstMax;
                firstMax = nums[i];
            } else if (nums[i] > secondMax && nums[i] != firstMax) {
                // Strictly between the two maxima; equal-to-max duplicates
                // are ignored so the result is the second DISTINCT value.
                secondMax = nums[i];
            }
        }
        // -1 signals "no second distinct maximum found".
        return (secondMax == Integer.MIN_VALUE) ? -1 : secondMax;
    }
    public static void main(String[] args) {
        int[] arr = {2,4,3,8,5,7,1};
        System.out.print("Second maximum element: " + secondMaximum(arr));
    }
}
var logger = require('morgan');
var bodyParser = require('body-parser');
var override = require('method-override');
// Installs the app-wide Express middleware.
// NOTE: registration order is significant — request logging first, then
// the body parsers (urlencoded + JSON), then method-override.
module.exports = function(app){
  app.use(logger('dev'));
  app.use(bodyParser.urlencoded({extended:true}));
  app.use(bodyParser.json());
  app.use(override());
};
def unique_values(pairs):
    """Return the unique second elements of the given pairs.

    Fix: the original returned ``list(set(...))``, whose ordering is
    arbitrary; the result now deterministically preserves first-occurrence
    order while still deduplicating.

    :param pairs: iterable of indexable pairs; the element at index 1 of
        each pair is collected (elements must be hashable).
    :return: list of unique values in first-occurrence order.
    """
    seen = set()
    result = []
    for pair in pairs:
        value = pair[1]
        if value not in seen:
            seen.add(value)
            result.append(value)
    return result
#!/bin/bash
# Copyright (c) 2021, Mathias Lüdtke
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# shellcheck source=industrial_ci/src/builders/catkin_tools.sh
# Reuse the base catkin_tools builder; this file only layers an
# isolated-devel configuration on top of it.
source "${ICI_SRC_PATH}/builders/catkin_tools.sh"
ici_warn "BUILDER=catkin_tools_isolated_devel should only be used in addition to the other non-devel builders"
# Map a workspace root ($1) to the devel space used by this builder.
function ici_extend_space {
    echo "$1/devel"
}
# Configure the workspace for an isolated devel space.
# $1: extend path, $2: workspace root.
function _catkin_config {
    local extend=$1; shift
    local ws=$1; shift
    ici_cmd ici_exec_in_workspace "$extend" "$ws" catkin config --isolate-devel
}
|
<filename>test/Simple.java<gh_stars>1-10
public class Simple {
    /**
     * Counts the digits of {@code v} in the given radix by repeated
     * division. Returns 0 for v == 0; for negative v the sign does not
     * add a digit (the loop divides toward zero).
     */
    public static int size(long v, int radix) {
        int digits = 0;
        long remaining = v;
        while (remaining != 0) {
            remaining /= radix;
            digits++;
        }
        return digits;
    }
    public static void main(String[] args) {
        size(42, 10);
    }
}
|
def find_common(a, b):
    """Return the elements of ``a`` that also occur in ``b``.

    Order and duplicates from ``a`` are preserved (same contract as the
    original). Performance fix: membership is tested against a set built
    from ``b`` (O(n + m) instead of the accidental O(n * m)); if ``b``
    holds unhashable elements we fall back to the original linear scan.

    :param a: iterable whose elements are filtered.
    :param b: collection defining membership.
    :return: list of elements of ``a`` present in ``b``.
    """
    try:
        lookup = set(b)
    except TypeError:
        # Unhashable elements in b: keep the original list-membership scan.
        lookup = b
    return [element for element in a if element in lookup]
<reponame>ahmetegesel/functional-mongo<gh_stars>1-10
import {
andThen, identity, ifElse, inc, isNil, pipe, uncurryN,
} from 'ramda';
import useCollection from './useCollection';
import dissolveFindParams from './internal/dissolveFindParams';
/**
* It can be either direct `predicate` as it is expected in corresponding `mongodb` operation,
* or it can be an `Array` in which the first element is the `predicate` and the second element is the `options`
* to be passed to the corresponding `mongodb` operation.
*
* @typedef { T | Array<T> } FindParams<T>
* */
/**
* Takes a {@link MongoClientLike}, a database name, a collection name, and a predicate, then
* returns `Promise` which resolves an `Array` of result that matches given predicate in specified `Collection`
* in MongoDB.
*
* `predicate` should be as documented at
* [here](http://mongodb.github.io/node-mongodb-native/3.5/reference/ecmascriptnext/crud/#read-methods)
*
* It is a curried function so it can be partially recomposed.
* Since [Ramda](https://ramdajs.com/) is used for currying, you can also use [R.__](https://ramdajs.com/docs/#__)
* placeholder to allow partial application of any combination of arguments of this particular function.
*
* @func findBy
* @since v0.1.0
* @param {MongoClientLike} client {@link MongoClient} instance
* @param {string} databaseName Database name to get the collection from.
* @param {string} collectionName Collection name to get find results in.
* @param {FindParams<object>} predicate FindParams object that represents the query.
* @return {Promise<Array>} Array of Document matching given predicate.
* @see {@link findAll}, {@link createClient}
* @example
*
* const client = createClient(...params);
* findBy(client, 'databaseName', 'collectionName', { name: 'some name' })
* .then(console.log);
*
* // partial re-usability
* const findCategoriesBy = findBy(someClient, 'someDb', 'categories');
* findCategoriesBy({ name: 'some name' }).then(console.log);
* findCategoriesBy({ approved: false }).then(console.log);
*
* const findApproved = findBy(someClient, 'someDb', R.__, {approved: true})
* findApproved('categories').then(console.log);
* findApproved('articles').then(console.log);
*
* // with additional options
* findCategoriesBy([{ approved: true }, {
* skip: 1, limit: 2, projection: { _id: 0 }, sort: { name: 1 }, includeCount: true,
* }]).then(console.log);
*/
// Uncurried pipeline: resolve the collection (client, db, name), then run
// the find with the dissolved predicate/options.
const findBy = uncurryN(
  // Arity = useCollection's parameters + 1 (the predicate argument).
  inc(useCollection.length),
  pipe(
    useCollection,
    uncurryN(
      2,
      (collectionPromise) => (predicate) => andThen(
        async (collection) => {
          // Split the FindParams into the query itself, the cursor
          // modifiers handled below, and the remaining driver options.
          const {
            query, skip, limit, sort, includeCount, ...options
          } = dissolveFindParams(predicate);
          const cursorResult = collection.find(query, options);
          // Apply each cursor modifier only when it was provided.
          const result = pipe(
            ifElse(() => isNil(skip), identity, (cursor) => cursor.skip(skip)),
            ifElse(() => isNil(limit), identity, (cursor) => cursor.limit(limit)),
            ifElse(() => isNil(sort), identity, (cursor) => cursor.sort(sort)),
            (cursor) => cursor.toArray(),
          )(cursorResult);
          if (!includeCount) {
            return result;
          }
          // includeCount: resolve the documents and the cursor count and
          // return them as a [data, count] pair.
          const data = await result;
          const count = await cursorResult.count();
          return [
            data,
            count,
          ];
        },
        collectionPromise,
      ),
    ),
  ),
);
export default findBy;
|
#!/usr/bin/env bash
# Build editorconfig-eclipse from source, copy its generated p2 repository
# into the offline mirror repo, and force-push the result to GitHub.
[[ -d editorconfig-eclipse ]] && rm -rf editorconfig-eclipse
git clone "https://github.com/ncjones/editorconfig-eclipse.git"
cd editorconfig-eclipse
git submodule init && git submodule update
mvn clean install
cd ../
# Refresh the offline mirror with the freshly built p2 repository.
[[ -d sk.eclipse.editorconfig.offline ]] && rm -rf sk.eclipse.editorconfig.offline
git clone "https://github.com/ShaneKingCommonweal/sk.eclipse.editorconfig.offline.git"
rm -rf sk.eclipse.editorconfig.offline/editorconfig-eclipse-p2
cp -r editorconfig-eclipse/editorconfig-eclipse-p2 sk.eclipse.editorconfig.offline
cd sk.eclipse.editorconfig.offline
git pull
#can not add target...
#git add -A
git add -f .
git add -u
git commit -m "sync at $(date +'%Y-%m-%d %H:%M')"
# Force-push using the CI token embedded in the remote URL.
git push -f "https://ShaneKing:${GH_TOKEN}@github.com/ShaneKingCommonweal/sk.eclipse.editorconfig.offline.git" master:master
|
<filename>src/project/java/reincarnation/Project.java<gh_stars>0
package reincarnation;
/*
* Copyright (C) 2020 Reincarnation Development Team
*
* Licensed under the MIT License (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://opensource.org/licenses/MIT
*/
/**
 * Bee build definition for Reincarnation: declares the product
 * coordinates, dependency scopes and the VCS location.
 */
public class Project extends bee.api.Project {
    {
        product("com.github.teletha", "Reincarnation", "0.6");
        // Compile-scope dependencies.
        require("org.ow2.asm", "asm");
        require("com.github.teletha", "sinobu");
        require("com.github.teletha", "psychopath");
        require("com.github.javaparser", "javaparser-core");
        require("org.benf", "cfr");
        // Annotation-processing-only dependency.
        require("com.github.teletha", "icymanipulator").atAnnotation();
        // Test-scope dependencies.
        require("com.github.teletha", "bee").atTest();
        require("com.github.teletha", "antibug").atTest();
        require("com.github.teletha", "viewtify").atTest();
        versionControlSystem("https://github.com/teletha/reincarnation");
    }
    /** Compile task override: use the Eclipse compiler (ECJ). */
    public static class Compile extends bee.task.Compile {
        {
            useECJ = true;
        }
    }
}
#include <algorithm>
#include <iostream>
#include <string>
using namespace std;
// Linear search: returns true iff x occurs in arr[0..n-1].
// Uses std::find from <algorithm> instead of a hand-rolled index loop;
// handles n == 0 naturally (empty range, returns false).
bool search(std::string arr[], int n, std::string x)
{
    return std::find(arr, arr + n, x) != arr + n;
}
int main()
{
    // Fixture: search for "mango" in a small array and report the result.
    string arr[] = {"apple", "mango", "grapes"};
    // Element count derived from the array's total size.
    int n = sizeof(arr) / sizeof(arr[0]);
    string x = "mango";
    if (search(arr, n, x))
        cout << "Element found in array";
    else
        cout << "Element not found in array";
    return 0;
}
#!/usr/bin/env bash
# Cause the script to exit if a single command fails
set -eo pipefail -v
# Install the runtime requirements plus the CV/NLP extras, the dev tools,
# and the docs toolchain.
pip install -r requirements/requirements.txt
pip install -r requirements/requirements-cv.txt
pip install -r requirements/requirements-nlp.txt
pip install -r requirements/requirements-dev.txt
pip install -r docs/requirements.txt
# Lint the codebase and the documentation build.
catalyst-check-codestyle
make check-docs
def process_input(input_data, write_fn):
    """Forward input_data to write_fn, coercing non-string values to str.

    Strings are passed through unchanged; anything else is converted with
    ``str()`` first.
    """
    payload = input_data if isinstance(input_data, str) else str(input_data)
    write_fn(payload)
<filename>dynamic-support/src/main/java/com/pranavpandey/android/dynamic/support/model/DynamicRemoteTheme.java
/*
* Copyright 2018-2020 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pranavpandey.android.dynamic.support.model;
import android.graphics.Color;
import androidx.annotation.ColorInt;
import androidx.annotation.NonNull;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonSyntaxException;
import com.pranavpandey.android.dynamic.support.model.adapter.DynamicThemeTypeAdapter;
import com.pranavpandey.android.dynamic.support.theme.DynamicTheme;
import com.pranavpandey.android.dynamic.theme.strategy.ExcludeStrategy;
import com.pranavpandey.android.dynamic.theme.utils.DynamicThemeUtils;
/**
* A remote theme to resolve system colors according to the API level.
*/
public class DynamicRemoteTheme extends DynamicWidgetTheme {
    /**
     * Constant for the default system color.
     */
    public static final int SYSTEM_COLOR = Color.WHITE;
    /**
     * Constant for the default system color at night.
     */
    public static final int SYSTEM_COLOR_NIGHT = Color.BLACK;
    /**
     * Constructor to initialize an object of this class.
     */
    public DynamicRemoteTheme() {
        super();
    }
    /**
     * Constructor to initialize an object of this class from the theme string.
     *
     * @param theme The theme string to initialize the instance.
     */
    public DynamicRemoteTheme(@NonNull String theme) throws JsonSyntaxException {
        // Parse via Gson using the custom type adapter, after normalizing
        // the raw string with formatTheme; delegates to the copy constructor.
        this(new GsonBuilder().setExclusionStrategies(new ExcludeStrategy())
                .registerTypeAdapter(DynamicRemoteTheme.class,
                        new DynamicThemeTypeAdapter<DynamicRemoteTheme>()).create()
                .fromJson(DynamicThemeUtils.formatTheme(theme), DynamicRemoteTheme.class));
    }
    /**
     * Constructor to initialize an object of this class.
     *
     * @param dynamicAppTheme The dynamic app theme to copy the theme.
     */
    public DynamicRemoteTheme(@NonNull DynamicAppTheme dynamicAppTheme) {
        super(dynamicAppTheme);
    }
    @Override
    public @ColorInt int getPrimaryColorDark(boolean resolve) {
        // When the stored dark primary is AUTO and resolution is requested,
        // derive the color from the system via DynamicTheme, passing the
        // current night-mode state.
        if (resolve && super.getPrimaryColorDark(false) == AUTO) {
            return DynamicTheme.getInstance().resolveSystemColor(
                    DynamicTheme.getInstance().isSystemNightMode());
        }
        return super.getPrimaryColorDark(resolve);
    }
}
|
<filename>toggle-boot-core/src/test/java/br/com/mballoni/autoconfigure/AutoConfigurationTest.java
package br.com.mballoni.autoconfigure;
import br.com.mballoni.autoconfigure.beans.NoOpFetcher;
import br.com.mballoni.autoconfigure.beans.NoOpStore;
import br.com.mballoni.toggleboot.Fetcher;
import br.com.mballoni.toggleboot.Store;
import br.com.mballoni.toggleboot.ToggleService;
import br.com.mballoni.toggleboot.impl.InMemoryStore;
import br.com.mballoni.toggleboot.impl.JDBCFetcher;
import br.com.mballoni.toggleboot.sync.Scheduler;
import org.junit.Test;
import org.springframework.boot.autoconfigure.AutoConfigurations;
import org.springframework.boot.test.autoconfigure.jdbc.AutoConfigureJdbc;
import org.springframework.boot.test.context.runner.ApplicationContextRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.ScheduledAnnotationBeanPostProcessor;
import static org.assertj.core.api.Assertions.assertThat;
/**
 * Tests for ToggleBootAutoConfiguration: verifies the default beans, the
 * scheduler toggle, and that user-defined beans suppress the defaults.
 */
public class AutoConfigurationTest {
    // Context runner preloaded with the auto-configuration under test.
    private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
            .withConfiguration(AutoConfigurations.of(ToggleBootAutoConfiguration.class));
    @Test
    public void setup_DEFAULT_beans() {
        // With only JDBC configured, the JDBC fetcher and in-memory store
        // defaults are created and no scheduler infrastructure appears.
        contextRunner.withUserConfiguration(JDBCConfiguration.class)
                .run((context) -> {
                            assertThat(context).hasSingleBean(JDBCFetcher.class);
                            assertThat(context).hasSingleBean(Fetcher.class);
                            assertThat(context).hasSingleBean(ToggleService.class);
                            assertThat(context).hasSingleBean(InMemoryStore.class);
                            assertThat(context).hasSingleBean(Store.class);
                            assertThat(context).doesNotHaveBean(Scheduler.class);
                            assertThat(context).doesNotHaveBean(ScheduledAnnotationBeanPostProcessor.class);
                        }
                );
    }
    @Test
    public void setup_SCHEDULER() {
        // Enabling the synchronizer property brings in the scheduler beans.
        contextRunner.withUserConfiguration(UserStoreConfiguration.class)
                .withPropertyValues("toggle-boot.synchronizer.enabled=true")
                .run((context) -> {
                            assertThat(context).hasSingleBean(Scheduler.class);
                            assertThat(context).hasSingleBean(ScheduledAnnotationBeanPostProcessor.class);
                        }
                );
    }
    @Test
    public void setup_USER_DEFINED_beans_ONLY() {
        // User-supplied Fetcher/Store beans back the single bean of each type.
        contextRunner.withUserConfiguration(UserStoreConfiguration.class)
                .run((context) -> {
                    assertThat(context).hasSingleBean(NoOpFetcher.class);
                    assertThat(context).hasSingleBean(Fetcher.class);
                    assertThat(context).hasSingleBean(NoOpStore.class);
                    assertThat(context).hasSingleBean(Store.class);
                });
    }
    /** Supplies no-op Fetcher and Store beans to override the defaults. */
    @Configuration
    static class UserStoreConfiguration {
        @Bean
        public Fetcher getFetcher() {
            return new NoOpFetcher();
        }
        @Bean
        public Store getStore() {
            return new NoOpStore();
        }
    }
    /** Empty configuration that only enables the JDBC test support. */
    @Configuration
    @AutoConfigureJdbc
    static class JDBCConfiguration {
    }
}
|
#!/bin/bash
set -o errexit
# Builds the BPX blockchain Linux installers: a CLI-only RPM (pyinstaller
# output) and, on x86_64, an Electron GUI RPM.
if [ ! "$1" ]; then
  echo "This script requires either amd64 of arm64 as an argument"
  exit 1
elif [ "$1" = "amd64" ]; then
  #PLATFORM="$1"
  REDHAT_PLATFORM="x86_64"
  DIR_NAME="bpx-blockchain-linux-x64"
else
  #PLATFORM="$1"
  DIR_NAME="bpx-blockchain-linux-arm64"
fi
# If the env variable NOTARIZE and the username and password variables are
# set, this will attempt to Notarize the signed DMG
if [ ! "$BPX_INSTALLER_VERSION" ]; then
  echo "WARNING: No environment variable BPX_INSTALLER_VERSION set. Using 0.0.0."
  BPX_INSTALLER_VERSION="0.0.0"
fi
echo "BPX Installer Version is: $BPX_INSTALLER_VERSION"
echo "Installing npm and electron packagers"
cd npm_linux_rpm || exit
npm ci
GLOBAL_NPM_ROOT=$(pwd)/node_modules
PATH=$(npm bin):$PATH
cd .. || exit
echo "Create dist/"
rm -rf dist
mkdir dist
echo "Create executables with pyinstaller"
# NOTE(review): the spec path is read from the `chia` package — presumably
# this fork keeps the upstream module name; confirm.
SPEC_FILE=$(python -c 'import chia; print(chia.PYINSTALLER_SPEC_PATH)')
pyinstaller --log-level=INFO "$SPEC_FILE"
# NOTE(review): with `set -o errexit` above, a failing pyinstaller exits the
# script before this check runs — these LAST_EXIT_CODE blocks appear to be
# dead code; confirm whether errexit or the explicit checks are intended.
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
	echo >&2 "pyinstaller failed!"
	exit $LAST_EXIT_CODE
fi
# Builds CLI only rpm
CLI_RPM_BASE="bpx-blockchain-cli-$BPX_INSTALLER_VERSION-1.$REDHAT_PLATFORM"
mkdir -p "dist/$CLI_RPM_BASE/opt/bpx"
mkdir -p "dist/$CLI_RPM_BASE/usr/bin"
cp -r dist/daemon/* "dist/$CLI_RPM_BASE/opt/bpx/"
ln -s ../../opt/bpx/bpx "dist/$CLI_RPM_BASE/usr/bin/bpx"
# This is built into the base build image
# shellcheck disable=SC1091
. /etc/profile.d/rvm.sh
rvm use ruby-3
# /usr/lib64/libcrypt.so.1 is marked as a dependency specifically because newer versions of fedora bundle
# libcrypt.so.2 by default, and the libxcrypt-compat package needs to be installed for the other version
# Marking as a dependency allows yum/dnf to automatically install the libxcrypt-compat package as well
fpm -s dir -t rpm \
  -C "dist/$CLI_RPM_BASE" \
  -p "dist/$CLI_RPM_BASE.rpm" \
  --name bpx-blockchain-cli \
  --license Apache-2.0 \
  --version "$BPX_INSTALLER_VERSION" \
  --architecture "$REDHAT_PLATFORM" \
  --description "BPX Blockchain" \
  --depends /usr/lib64/libcrypt.so.1 \
  .
# CLI only rpm done
# Bundle the pyinstaller daemon into the GUI tree and build the Electron app.
cp -r dist/daemon ../bpx-blockchain-gui/packages/gui
cd .. || exit
cd bpx-blockchain-gui || exit
echo "npm build"
lerna clean -y
npm ci
# Audit fix does not currently work with Lerna. See https://github.com/lerna/lerna/issues/1663
# npm audit fix
npm run build
LAST_EXIT_CODE=$?
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
	echo >&2 "npm run build failed!"
	exit $LAST_EXIT_CODE
fi
# Change to the gui package
cd packages/gui || exit
# sets the version for bpx-blockchain in package.json
cp package.json package.json.orig
jq --arg VER "$BPX_INSTALLER_VERSION" '.version=$VER' package.json > temp.json && mv temp.json package.json
electron-packager . bpx-blockchain --asar.unpack="**/daemon/**" --platform=linux \
--icon=src/assets/img/bpx.icns --overwrite --app-bundle-id=cc.bpxcoin.blockchain \
--appVersion=$BPX_INSTALLER_VERSION --executable-name=bpx-blockchain
LAST_EXIT_CODE=$?
# reset the package.json to the original
mv package.json.orig package.json
if [ "$LAST_EXIT_CODE" -ne 0 ]; then
	echo >&2 "electron-packager failed!"
	exit $LAST_EXIT_CODE
fi
mv $DIR_NAME ../../../build_scripts/dist/
cd ../../../build_scripts || exit
if [ "$REDHAT_PLATFORM" = "x86_64" ]; then
  echo "Create bpx-blockchain-$BPX_INSTALLER_VERSION.rpm"
  # Patch the electron-installer-redhat spec template in place before use.
  # Disables build links from the generated rpm so that we dont conflict with other packages. See https://github.com/Chia-Network/chia-blockchain/issues/3846
  # shellcheck disable=SC2086
  sed -i '1s/^/%define _build_id_links none\n%global _enable_debug_package 0\n%global debug_package %{nil}\n%global __os_install_post \/usr\/lib\/rpm\/brp-compress %{nil}\n/' "$GLOBAL_NPM_ROOT/electron-installer-redhat/resources/spec.ejs"
  # Use attr feature of RPM to set the chrome-sandbox permissions
  # adds a %attr line after the %files line
  # The location is based on the existing location inside spec.ej
  sed -i '/^%files/a %attr(4755, root, root) /usr/lib/<%= name %>/chrome-sandbox' "$GLOBAL_NPM_ROOT/electron-installer-redhat/resources/spec.ejs"
  # Updates the requirements for building an RPM on Centos 7 to allow older version of rpm-build and not use the boolean dependencies
  # See https://github.com/electron-userland/electron-installer-redhat/issues/157
  # shellcheck disable=SC2086
  sed -i "s#throw new Error('Please upgrade to RPM 4.13.*#console.warn('You are using RPM < 4.13')\n      return { requires: [ 'gtk3', 'libnotify', 'nss', 'libXScrnSaver', 'libXtst', 'xdg-utils', 'at-spi2-core', 'libdrm', 'mesa-libgbm', 'libxcb' ] }#g" $GLOBAL_NPM_ROOT/electron-installer-redhat/src/dependencies.js
  electron-installer-redhat --src dist/$DIR_NAME/ --dest final_installer/ \
  --arch "$REDHAT_PLATFORM" --options.version $BPX_INSTALLER_VERSION \
  --license ../LICENSE --options.bin bpx-blockchain --options.name bpx-blockchain
  LAST_EXIT_CODE=$?
  if [ "$LAST_EXIT_CODE" -ne 0 ]; then
	  echo >&2 "electron-installer-redhat failed!"
	  exit $LAST_EXIT_CODE
  fi
fi
# Move the cli only rpm into final installers as well, so it gets uploaded as an artifact
mv "dist/$CLI_RPM_BASE.rpm" final_installer/
ls final_installer/
|
<filename>python/regularExpression/reTest.py<gh_stars>1-10
str1 = 'test python'
# Searching without regular expressions
print(str1.find('1'))
print(str1.find('test'))
print(str1.startswith('test'))
# Searching with regular expressions
import re
# Compile the regular expression into a pattern object.
# r'test' is a raw string, so backslashes are not interpreted by Python.
pa = re.compile(r'test')  # pa is now a compiled Pattern instance
print(type(pa))
ma = pa.match(str1)  # on a successful match, ma is a Match object
print(ma)
print(ma.group())  # group() returns a str (or a tuple for multiple groups)
print(ma.span())  # start/end indices of the match in the string
print(ma.re)  # the Pattern instance that produced this match
# Another example
pa2 = re.compile(r'_')
ma2 = pa2.match('_value')
print(ma2.group())
# Example 3
pa3 = re.compile(r'_')
ma3 = pa3.match('value_')
# print(ma3.group()) # match fails: 'NoneType' object has no attribute 'group'
# Case-insensitive matching
pa = re.compile(r'test', re.I)  # re.I ignores case (I = ignore)
print(pa)
ma = pa.match('Test python')
print(ma.group())
# Same thing via the module-level re.match shortcut
ma = re.match(r'test', 'Test Python', re.I)
print(ma.group())
package de.ids_mannheim.korap.config;
import lombok.Getter;
/**
* @author hanl
* @date 15/07/15
*/
@Getter
public class URIParam extends ParamFields.Param {

    private final String uriFragment;
    private final Long uriExpiration;

    public URIParam (String uri, Long expire) {
        this.uriFragment = uri;
        this.uriExpiration = expire;
    }

    @Override
    public boolean hasValues () {
        // Valid only when the fragment is present and non-empty
        // and an expiration timestamp was supplied.
        if (this.uriFragment == null || this.uriFragment.isEmpty()) {
            return false;
        }
        return this.uriExpiration != null;
    }
}
|
sudo apt-get -y install python3-pip unzip

# Detect whether we run inside the Vagrant VM; the project then lives in /vagrant.
if [ "$(pwd)" = "/home/vagrant" ]; then
	VAGRANT=1
	cd /vagrant
	sudo apt-get -y install expect
else
	VAGRANT=0
fi
pwd

PIP="pip3"

# Recreate the virtualenv from scratch.
rm -fr "$PWD/env"
$PIP install virtualenv
# Bugfix: the env was always created at the hard-coded path /vagrant/env,
# even when running outside Vagrant, while every command below uses
# $PWD/env.  Create the env where it is actually used.
virtualenv --python=python3 "$PWD/env"
"$PWD/env/bin/pip" install -r requirements.txt

# Create local_settings.py if needed
if [ ! -f "$PWD/local_settings.py" ]; then
	cp "$PWD/local_settings.example" "$PWD/local_settings.py"
fi

# Perform migrations
"$PWD/env/bin/python" manage.py migrate

# Create superuser (if not already done)
if [ ! -f "$PWD/.userCreated" ]; then
	if [ $VAGRANT -eq 1 ]; then
		"$PWD/setup/test_user_vagrant.expect"
	else
		"$PWD/env/bin/python" manage.py createsuperuser
	fi
	touch "$PWD/.userCreated"
fi

# Stop a previously started dev server, if any.
if [ "$( ps aux | grep python | grep manage | wc -l)" == "2" ]; then
	for i in $(ps aux | grep python | grep manage | awk '{print $2}'); do
		kill "$i"
	done
fi

# Load country info
if [ ! -f countryInfo.txt ]; then
	wget http://download.geonames.org/export/dump/countryInfo.txt
	"$PWD/env/bin/python" manage.py load_countries countryInfo.txt
fi

# Load SPR data
if [ ! -f admin1CodesASCII.txt ]; then
	wget http://download.geonames.org/export/dump/admin1CodesASCII.txt
	"$PWD/env/bin/python" manage.py load_spr admin1CodesASCII.txt
fi

# Load cities data
# TODO: Let this be variable?
if [ ! -f cities15000.txt ]; then
	wget http://download.geonames.org/export/dump/cities15000.zip
	unzip cities15000.zip
	"$PWD/env/bin/python" manage.py load_cities cities15000.txt
fi

# Run the dev server (backgrounded under Vagrant so provisioning can finish).
if [ $VAGRANT -eq 1 ]; then
	nohup "$PWD/env/bin/python" manage.py runserver 192.168.42.42:8000 > output 2>&1 &
else
	"$PWD/env/bin/python" manage.py runserver
fi
|
<filename>node_modules/react-icons-kit/fa/mobilePhone.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.mobilePhone = void 0;
var mobilePhone = {
"viewBox": "0 0 768 1792",
"children": [{
"name": "path",
"attribs": {
"d": "M464 1408q0-33-23.5-56.5t-56.5-23.5-56.5 23.5-23.5 56.5 23.5 56.5 56.5 23.5 56.5-23.5 23.5-56.5zM672 1248v-704q0-13-9.5-22.5t-22.5-9.5h-512q-13 0-22.5 9.5t-9.5 22.5v704q0 13 9.5 22.5t22.5 9.5h512q13 0 22.5-9.5t9.5-22.5zM480 400q0-16-16-16h-160q-16 0-16 16t16 16h160q16 0 16-16zM768 384v1024q0 52-38 90t-90 38h-512q-52 0-90-38t-38-90v-1024q0-52 38-90t90-38h512q52 0 90 38t38 90z"
}
}]
};
exports.mobilePhone = mobilePhone; |
// Plain data holder for a triangle described by its three side lengths
// and three interior angles.  Values are stored as given; no geometric
// validation is performed.
class Triangle {
public:
    int sideA;
    int sideB;
    int sideC;
    int angleA;
    int angleB;
    int angleC;

    // Initialise all six members directly via the member-initializer list.
    Triangle(int a, int b, int c, int angleA, int angleB, int angleC)
        : sideA(a),
          sideB(b),
          sideC(c),
          angleA(angleA),
          angleB(angleB),
          angleC(angleC) {}
};
package com.mrh0.createaddition.blocks.creative_energy;
import com.mrh0.createaddition.index.CATileEntities;
import com.mrh0.createaddition.shapes.CAShapes;
import com.simibubi.create.content.logistics.block.inventories.CrateBlock;
import com.simibubi.create.foundation.block.ITE;
import net.minecraft.core.BlockPos;
import net.minecraft.world.level.BlockGetter;
import net.minecraft.world.level.Level;
import net.minecraft.world.level.block.Block;
import net.minecraft.world.level.block.entity.BlockEntity;
import net.minecraft.world.level.block.entity.BlockEntityType;
import net.minecraft.world.level.block.state.BlockState;
import net.minecraft.world.phys.shapes.CollisionContext;
import net.minecraft.world.phys.shapes.VoxelShape;
public class CreativeEnergyBlock extends CrateBlock implements ITE<CreativeEnergyTileEntity> {

    // Crate-like shape: slightly inset column with a wider middle band.
    public static final VoxelShape CREATIVE_ENERGY_SHAPE = CAShapes.shape(1,0,1,15,16,15).add(0,2,0,16,14,16).build();

    public CreativeEnergyBlock(Properties props) {
        super(props);
    }

    @Override
    public VoxelShape getShape(BlockState state, BlockGetter worldIn, BlockPos pos, CollisionContext context) {
        return CREATIVE_ENERGY_SHAPE;
    }

    @Override
    public Class<CreativeEnergyTileEntity> getTileEntityClass() {
        return CreativeEnergyTileEntity.class;
    }

    @Override
    public BlockEntity newBlockEntity(BlockPos pos, BlockState state) {
        return CATileEntities.CREATIVE_ENERGY.create(pos, state);
    }

    @Override
    public void neighborChanged(BlockState state, Level worldIn, BlockPos pos, Block blockIn, BlockPos fromPos, boolean isMoving) {
        // Refresh the tile entity's cached neighbour connections whenever an
        // adjacent block changes.
        if (!state.hasBlockEntity()) {
            return;
        }
        BlockEntity blockEntity = worldIn.getBlockEntity(pos);
        if (blockEntity instanceof CreativeEnergyTileEntity) {
            ((CreativeEnergyTileEntity) blockEntity).updateCache();
        }
    }

    @Override
    public BlockEntityType<? extends CreativeEnergyTileEntity> getTileEntityType() {
        return CATileEntities.CREATIVE_ENERGY.get();
    }
}
|
# Small fully-connected classifier: 4 input features -> 3-class softmax output.
# NOTE(review): assumes `tf` (TensorFlow) is imported elsewhere in this file.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(20, activation='relu', input_shape=(4,)),
    tf.keras.layers.Dense(10, activation='relu'),
    tf.keras.layers.Dense(5, activation='relu'),
    tf.keras.layers.Dense(3, activation='softmax')
])
import torch
import torch.nn as nn
import torch.nn.functional as F
class Net(nn.Module):
    """Three-layer fully-connected network with a sigmoid output.

    Architecture: input_dim -> 64 -> 64 -> output_dim, ReLU between the
    hidden layers, element-wise sigmoid on the final layer (values in [0, 1]).
    """

    def __init__(self, input_dim, output_dim):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(input_dim, 64)
        self.fc2 = nn.Linear(64, 64)
        self.fc3 = nn.Linear(64, output_dim)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        # F.sigmoid is deprecated (emits a warning since PyTorch 0.4.1);
        # torch.sigmoid is the supported, numerically identical replacement.
        return torch.sigmoid(x)
# NOTE(review): input_dim and output_dim are not defined in this snippet;
# they must be set by the surrounding code before this line runs.
model = Net(input_dim, output_dim)
package com.g4mesoft.net.packet.server;
import java.util.UUID;
import com.g4mesoft.net.NetworkManager;
import com.g4mesoft.net.PacketByteBuffer;
import com.g4mesoft.net.client.ClientNetworkManager;
import com.g4mesoft.net.packet.Packet;
import com.g4mesoft.world.entity.EntityFacing;
/**
 * Server-to-client packet carrying an entity's position and facing.
 * Wire layout: UUID (16 bytes), x (float), y (float), facing index (int).
 */
public class S01PositionPacket extends Packet {

	public UUID entityUUID;
	public float x;
	public float y;
	public EntityFacing facing;

	/** No-arg constructor used when the packet is decoded from the network. */
	public S01PositionPacket() {
	}

	public S01PositionPacket(UUID entityUUID, float x, float y, EntityFacing facing) {
		this.entityUUID = entityUUID;
		this.x = x;
		this.y = y;
		this.facing = facing;
	}

	/** Decodes the packet fields in the same order {@link #write} encodes them. */
	@Override
	public void read(PacketByteBuffer buffer) {
		entityUUID = buffer.getUUID();
		x = buffer.getFloat();
		y = buffer.getFloat();
		facing = EntityFacing.fromIndex(buffer.getInt());
	}

	/** Encodes the packet; a null facing is written as index -1. */
	@Override
	public void write(PacketByteBuffer buffer) {
		buffer.putUUID(entityUUID);
		buffer.putFloat(x);
		buffer.putFloat(y);
		buffer.putInt(facing == null ? -1 : facing.getIndex());
	}

	/** Only clients handle position updates; servers ignore this packet. */
	@Override
	public void processPacket(NetworkManager manager) {
		if (manager.isClient())
			((ClientNetworkManager)manager).handlePositionPacket(this);
	}

	/** Fixed-size packet: 16 (uuid) + 4 (float) + 4 (float) + 4 (int) = 28 bytes. */
	@Override
	public boolean checkSize(int bytesToRead) {
		// uuid float float int
		return bytesToRead == 28;
	}
}
|
package cmd
import (
"errors"
"github.com/ekalinin/pbvm/utils"
"github.com/spf13/cobra"
)
// deleteCmd implements the "delete" (alias "rm") subcommand: it removes an
// installed version, refusing to touch the currently active one.
var deleteCmd = &cobra.Command{
	Aliases: []string{"rm"},
	Use:     "delete <version>",
	Short:   "Delete version",
	Long:    `Delete version. Version should be installed.`,
	Args:    cobra.ExactArgs(1),
	RunE: func(cmd *cobra.Command, args []string) error {
		version := args[0]

		isInstalled, _, checkErr := utils.IsInstalledVersion(pbName, version)
		if checkErr != nil {
			return checkErr
		}
		if !isInstalled {
			// suppress help output
			// https://github.com/spf13/cobra/issues/340
			cmd.SilenceUsage = true
			cmd.SilenceErrors = true
			return errors.New("Version " + version + " is not installed")
		}

		isActive, activeErr := utils.IsActiveVersion(pbName, version)
		if activeErr != nil {
			return activeErr
		}
		if isActive {
			// suppress help output
			cmd.SilenceUsage = true
			cmd.SilenceErrors = true
			return errors.New("Version " + version + " is active at the moment")
		}

		return utils.DeleteVersion(pbName, version)
	},
}
// init registers the delete command on the root command.
func init() {
	rootCmd.AddCommand(deleteCmd)
}
|
<gh_stars>10-100
from __future__ import print_function, division, absolute_import, unicode_literals
from collections import OrderedDict
try:
from collections.abc import Mapping, Sequence, Hashable
except ImportError:
from collections import Mapping, Sequence, Hashable
import copy
import json
import six
class FrozenDictJSONEncoder(json.JSONEncoder):
    """JSON encoder that serializes FrozenDict via its backing dict."""

    def default(self, obj):
        # Anything that is not a FrozenDict is deferred to the base encoder.
        if not isinstance(obj, FrozenDict):
            return super(FrozenDictJSONEncoder, self).default(obj)
        return obj._dict
class FrozenDict(Mapping):
    """Immutable, hashable mapping backed by a plain dict.

    ``values()`` and ``items()`` yield deep copies so callers cannot mutate
    nested values held by the frozen mapping.  The hash is cached after the
    first computation, which is safe because the mapping never changes.
    """

    def __init__(self, *args, **kwargs):
        # Accepts the same construction arguments as dict().
        self._dict = dict(*args, **kwargs)

    def __getitem__(self, key):
        return self._dict[key]

    def __contains__(self, key):
        return key in self._dict

    def __iter__(self):
        return iter(self._dict)

    def __len__(self):
        return len(self._dict)

    def __repr__(self):
        class_name = type(self).__name__
        return "%s(%s)" % (class_name, repr(self._dict))

    def __str__(self):
        return str(self._dict)

    def __hash__(self):
        # Lazily computed and cached on first use.
        if hasattr(self, '_hash'):
            return self._hash
        h = 0
        # XOR of the (key, value) pair hashes: order independent, so equal
        # contents produce equal hashes regardless of insertion order.
        for key_value in six.viewitems(self._dict):
            h ^= hash(key_value)
        self._hash = h
        return h

    def keys(self):
        return six.viewkeys(self._dict)

    def values(self):
        # Deep copies: callers must not be able to mutate stored values.
        for val in six.viewvalues(self._dict):
            yield copy.deepcopy(val)

    def items(self):
        # Deep-copied values, same rationale as values().
        for key, val in six.viewitems(self._dict):
            yield key, copy.deepcopy(val)

    def copy(self):
        # Shallow copy is sufficient: the instance itself is immutable.
        return copy.copy(self)

    def replace(self, *args, **kwargs):
        # Returns a NEW frozen mapping with the given updates applied;
        # the original is left untouched.
        new_dict = copy.deepcopy(self._dict)
        new_dict.update(*args, **kwargs)
        return type(self)(new_dict)

    def to_json(self):
        return json.dumps(self, cls=FrozenDictJSONEncoder)

    # NOTE: ordering compares the str() forms lexicographically, not the
    # contents structurally.
    def __lt__(self, other):
        if isinstance(other, FrozenDict):
            return str(self) < str(other)
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, FrozenDict):
            return str(self) <= str(other)
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, FrozenDict):
            return str(self) > str(other)
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, FrozenDict):
            return str(self) >= str(other)
        return NotImplemented

    @classmethod
    def recursively_froze(cls, value):
        # Recursively convert unhashable mappings/sequences into frozen,
        # hashable forms; hashable values pass through unchanged.
        if (isinstance(value, Mapping)
            and not isinstance(value, Hashable)):
            value = cls._recursively_froze_mapping(value)
        elif (isinstance(value, Sequence)
            and not isinstance(value, Hashable)):
            value = cls._recursively_froze_sequence(value)
        return value

    @classmethod
    def _recursively_froze_sequence(cls, sequence):
        return tuple(cls.recursively_froze(val) for val in sequence)

    @classmethod
    def _recursively_froze_mapping(cls, mapping):
        # Sorted so that equal contents always yield the same frozen form.
        return SortedFrozenDict((key, cls.recursively_froze(val))
                                for key, val in six.viewitems(mapping))
class OrderedFrozenDict(FrozenDict):
    """FrozenDict variant that preserves insertion order."""

    def __init__(self, *args, **kwargs):
        self._dict = OrderedDict(*args, **kwargs)

    def __str__(self):
        # Render like a plain dict literal, in insertion order.
        pairs = ('%r: %r' % (key, value)
                 for key, value in six.viewitems(self._dict))
        return '{%s}' % ', '.join(pairs)
class SortedFrozenDict(OrderedFrozenDict):
    """OrderedFrozenDict whose entries are stored in sorted-key order."""

    def __init__(self, *args, **kwargs):
        source = dict(*args, **kwargs)
        ordered = OrderedDict()
        # Insert keys in sorted order so iteration and str() are canonical.
        for key in sorted(six.viewkeys(source)):
            ordered[key] = source[key]
        self._dict = ordered
|
//-------- js/HTMLForAvatarGUI.js --------
// Generated by CoffeeScript 1.12.2
// Machine-generated (CoffeeScript 1.12.2) helper that builds the HTML
// markup for a single avatar panel and registers it on the CWA environment.
(function () {
  var HTMLForAvatarGUI, console, cwaenv, document, log, setTimeout;

  // Host-page globals captured at load time.
  // NOTE(review): getCWAEnv is provided by the embedding page — not visible here.
  cwaenv = this.getCWAEnv();

  console = this.console;

  document = this.document;

  setTimeout = this.setTimeout;

  log = console.log.bind(console);

  HTMLForAvatarGUI = (function () {
    // ix: avatar index used in CSS class names; avSettings: size, background,
    // avatar list and feature flags copied onto the instance.
    function HTMLForAvatarGUI(ix, avSettings) {
      this.ix = ix;
      this.avw = avSettings.width;
      this.avh = avSettings.height;
      this.avBg = avSettings.background;
      this.avs = avSettings.avList;
      this.initav = avSettings.initAv;
      this.doFrameSteps = avSettings.allowFrameSteps;
      this.dostext = avSettings.allowSiGMLText;
      this.initsurl = avSettings.initSiGMLURL;
    }

    // Returns the complete markup; currently just the avatar div.
    // (avgui/table are leftover declarations from the CoffeeScript source.)
    HTMLForAvatarGUI.prototype.allHTML = function () {
      var avatar, avgui, table;
      return avatar = this.htmlForAv();
    };

    // Builds a div (optionally with a background style) wrapping the
    // avatar <canvas>; both carry the "av<ix>" class.
    HTMLForAvatarGUI.prototype.htmlForAv = function () {
      var html, sty;
      sty = this.avBg != null ? "style=\"background: " + this.avBg + ";\" " : "";
      return html = "<div class=\"divAv av" + this.ix + "\" " + sty + ">\n  <canvas class=\"canvasAv av" + this.ix + "\" ></canvas>\n</div> <!--class=\"divAv av" + this.ix + "\"-->";
    };

    return HTMLForAvatarGUI;

  })();

  cwaenv.add(HTMLForAvatarGUI, "HTMLForAvatarGUI");

}).call(this);
const BaseItem = require('../entities/base')
// Compiler extension that injects a distributed-tracing collector
// (Zipkin or Jaeger) into the 'sprt' cluster and points every explicit
// service at its trace-report endpoint.
module.exports = {
    name: "ExtendZipkin",

    // Enabled only when the 'distributed-tracing-provider' policy resolves
    // to 'zipkin' or 'jaeger'.
    canRunGlobal: ({compiler, registry}) => {
        // With no services declared, require an 'sprt' cluster to exist.
        if (registry.services.length == 0)
        {
            if (!registry.findByNaming('cluster', ['sprt']))
            {
                return false;
            }
        }
        var policy = registry.resolvePolicy('distributed-tracing-provider');
        if (policy.value == 'zipkin' || policy.value == 'jaeger') {
            return true;
        }
        return false;
    },

    globalAction: ({compiler, registry}) => {
        var policy = registry.resolvePolicy('distributed-tracing-provider');

        // Template for the implicit tracing service; port 9411 is the
        // Zipkin collector port (Jaeger also accepts it, see below).
        var dtraceCfg = {
            kind: 'service',
            cluster: 'sprt',
            sector: 'main',
            name: 'dtrace',
            code: {
                kind: 'docker',
                image: ''
            },
            provides: {
                client: {
                    port: 9411,
                    protocol: 'http'
                }
            },
            resources: {
                memory: {
                    min: 300
                }
            },
            environment: {
            }
        }
        compiler.markClusterImplicit(dtraceCfg.cluster);

        // Pick the collector image; Jaeger additionally exposes its web UI
        // on 16686 and enables its Zipkin-compatible collector on 9411.
        if (policy.value == 'zipkin') {
            dtraceCfg.code.image = 'openzipkin/zipkin';
        } else if (policy.value == 'jaeger') {
            dtraceCfg.code.image = 'jaegertracing/all-in-one';
            dtraceCfg.environment["COLLECTOR_ZIPKIN_HTTP_PORT"] = "9411";
            dtraceCfg.provides["web"] = {
                port: 16686,
                protocol: 'http'
            }
        }

        var dtraceSvcId = compiler._addImpicit(dtraceCfg);

        // Publish two cluster endpoints: 'dtrace' (public UI/API) and
        // 'dtrep' (internal trace-report sink consumed by services).
        if (policy.value == 'zipkin') {
            compiler.addClusterProvided(dtraceCfg.cluster, "dtrace", {
                sector: dtraceCfg.sector,
                service: dtraceCfg.name,
                endpoint: 'client',
                public: true
            });
            compiler.addClusterProvided(dtraceCfg.cluster, "dtrep", {
                sector: dtraceCfg.sector,
                service: dtraceCfg.name,
                endpoint: 'client'
            });
        } else if (policy.value == 'jaeger') {
            compiler.addClusterProvided(dtraceCfg.cluster, "dtrace", {
                sector: dtraceCfg.sector,
                service: dtraceCfg.name,
                endpoint: 'web',
                public: true
            })
            compiler.addClusterProvided(dtraceCfg.cluster, "dtrep", {
                sector: dtraceCfg.sector,
                service: dtraceCfg.name,
                endpoint: 'client'
            })
        }

        // Wire every explicit, not-yet-compiled service to the report sink.
        for(var service of registry.services)
        {
            if (service.isCompiled) {
                continue
            }
            if (service.isImplicit) {
                continue;
            }

            compiler.addConsumes(service.id, {
                cluster: dtraceCfg.cluster,
                endpoint: 'dtrep'
            });
        }
    }
}
<reponame>akhatua2/mern_stack<gh_stars>0
import React, {useState} from 'react';
import api from '../../services/api'
import { Container, Button, Form, FormGroup, Input, Label, Alert } from 'reactstrap';
export default function Register({history}) {
const [ email, setEmail] = useState("")
const [ password, setPassword] = useState("")
const [ firstName, setFirstName] = useState("")
const [ lastName, setLastName] = useState("")
const [errors, setError] = useState(false)
const [errorMessage, setErrorMessage] = useState(false)
const handleSubmit = async evt => {
evt.preventDefault();
try {
if(email !== "" && password !== "" && firstName !== "" && lastName !== "") {
const response = await api.post('/user/register', {email, password, firstName, lastName})
const user_id = response.data._id || false;
if(user_id) {
history.push('/login')
} else {
const {message} = response.data
setError(true)
setErrorMessage(message)
setTimeout(() => {
setErrorMessage(false)
setErrorMessage("")
}, 2000)
console.log(errors)
}
} else {
setError(true)
setErrorMessage("Please fill out all the details")
setTimeout(() => {
setErrorMessage(false)
setErrorMessage("")
}, 2000)
console.log(errors)
}
} catch (error) {
Promise.reject(error);
console.log(error);
}
}
return(
<Container>
<h2>Register</h2>
<p>Please <strong>create</strong> a new account</p>
<Form onSubmit = {handleSubmit}>
<FormGroup className="textField">
<Label>First Name: </Label>
<Input type="text" name="firstName" id="firstName" placeholder="Your first name" onChange={evt => setFirstName(evt.target.value)}/>
</FormGroup>
<FormGroup className="textField">
<Label>Last Name: </Label>
<Input type="text" name="lastName" id="lastName" placeholder="Your last name" onChange={evt => setLastName(evt.target.value)}/>
</FormGroup>
<FormGroup className="textField">
<Label>Email Address: </Label>
<Input type="email" name="email" id="exampleEmail" placeholder="Your email" onChange={evt => setEmail(evt.target.value)}/>
</FormGroup>
<FormGroup className="textField">
<Label>Passwoard: </Label>
<Input type="password" name="password" id="examplePassword" placeholder="Your password" onChange={evt => setPassword(evt.target.value)}/>
</FormGroup>
<Button className='submit-btn'>Submit</Button>
<FormGroup>
<Button className='secondary-btn' onClick={() => history.push("/login")}>Login</Button>
</FormGroup>
{errorMessage ? (
<Alert className="event-validation" color="danger"> {errorMessage} </Alert>
) : ""}
</Form>
</Container>
)
} |
{% extends '//mix/template/cmake.sh' %}
{% block fetch %}
https://github.com/google/googletest/archive/refs/tags/release-1.11.0.tar.gz
sha:b4870bf121ff7795ba20d20bcdd8627b8e088f2d1dab299a031c1034eddc93d5
{% endblock %}
{% block lib_deps %}
lib/c
lib/c++
{% endblock %}
|
module.exports = function getDefinePlugin(config) {
return {
__DEV__: JSON.stringify(JSON.parse(process.env.DEV || 'false')),
CONFIG: JSON.stringify(config),
}
} |
#!/bin/bash
# Configure, build and install the project against the installed LAMMPS
# python package.  Fail fast on any error instead of cascading.
set -e

PYTHON_EXECUTABLE=$(which python3)
# Resolve the site-packages directory containing the lammps package.
LAMMPS_ROOT="$(${PYTHON_EXECUTABLE} -c 'import site; print(site.getsitepackages()[0])')/lammps/"

# Quote expansions so paths with spaces survive.
cmake -S . -B build -DLAMMPS_ROOT="${LAMMPS_ROOT}"
cmake --build build --target install
package org.terracottamc.world.leveldb;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.PooledByteBufAllocator;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.Options;
import org.iq80.leveldb.impl.Iq80DBFactory;
import org.terracottamc.math.Location;
import org.terracottamc.server.Server;
import org.terracottamc.taglib.NBTBuilder;
import org.terracottamc.taglib.nbt.io.NBTReader;
import org.terracottamc.taglib.nbt.tag.NBTTagCompound;
import org.terracottamc.world.Difficulty;
import org.terracottamc.world.World;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteOrder;
/**
* Copyright (c) 2021, TerracottaMC
* All rights reserved.
*
* <p>
* This project is licensed under the BSD 3-Clause License which
* can be found in the root directory of this source tree
*
* @author Kaooot
* @version 1.0
*/
public class LevelDBProvider {
protected Location worldSpawnLocation;
protected Difficulty worldDifficulty;
private final File worldFolder;
private final File worldFile;
private DB database;
/**
* Creates a new {@link org.terracottamc.world.leveldb.LevelDBProvider}
*
* @param worldName which should be bound to this {@link org.terracottamc.world.leveldb.LevelDBProvider}
*/
public LevelDBProvider(final String worldName) {
this.worldFolder = new File(System.getProperty("user.dir") + "/worlds/" + worldName);
if (!this.worldFolder.exists()) {
this.worldFolder.mkdirs();
}
this.worldFile = new File(this.worldFolder.getPath(), "level.dat");
}
/**
* Loads the world file by this {@link org.terracottamc.world.leveldb.LevelDBProvider}
*
* @return whether the world file could be loaded successfully or not
*/
public boolean loadWorldFile() {
try (final FileInputStream fileInputStream = new FileInputStream(this.worldFile)) {
fileInputStream.skip(8);
final byte[] data = new byte[fileInputStream.available()];
fileInputStream.read(data);
final ByteBuf buffer = PooledByteBufAllocator.DEFAULT.directBuffer(data.length);
buffer.writeBytes(data);
final NBTReader nbtReader = new NBTBuilder()
.withIOBuffer(buffer)
.withByteOrder(ByteOrder.LITTLE_ENDIAN)
.buildReader();
final NBTTagCompound nbtTagCompound = nbtReader.createCompound();
final String worldName = this.worldFile.getName();
final int spawnX = nbtTagCompound.getInt("SpawnX");
final int spawnY = nbtTagCompound.getInt("SpawnY");
final int spawnZ = nbtTagCompound.getInt("SpawnZ");
final int difficultyId = nbtTagCompound.getInt("Difficulty");
final World world = Server.getInstance().getWorld(worldName);
this.worldSpawnLocation = new Location(world, spawnX, spawnY, spawnZ);
this.worldDifficulty = Difficulty.retrieveDifficultyById(difficultyId);
return true;
} catch (final IOException e) {
e.printStackTrace();
}
return false;
}
/**
* Initializes the {@link org.iq80.leveldb.DB} of this {@link org.terracottamc.world.leveldb.LevelDBProvider}
*
* @return whether the database could be initialized successfully
*/
public boolean initializeDataBase() {
try {
this.database = Iq80DBFactory.factory
.open(new File(this.worldFolder.getPath(), "db/"), new Options().createIfMissing(true));
return true;
} catch (final IOException e) {
e.printStackTrace();
}
return false;
}
/**
* Retrieves data from the database of this {@link org.terracottamc.world.leveldb.LevelDBProvider} with provided data
*
* @param data which is used to retrieve the data
*
* @return fresh retrieved data
*/
public byte[] retrieveDataFromDatabase(final byte[] data) {
return this.database.get(data);
}
} |
// "My" (profile) page: restores cached login info, handles the login
// button, registers new users in the cloud database, and navigates to
// the sub-pages of the user area.
Page({
  data: {
    username: '点击登录',
    defaultUrl: '/images/yuyin5.png',
    userTx: '',
    userInfo: {},
    gender: 1,
    province: '',
  },
  onLoad: function(){
    wx.setNavigationBarTitle({
      title: '我的'
    })
    // When this page is (re)loaded, restore any previously saved login info.
    // Bugfix: the avatar is persisted under the key 'avater' (see
    // getUserInfoHandler below) but was read back as 'avatar', so the cached
    // avatar was never restored.  Read the key that is actually written.
    let username = wx.getStorageSync('username'),
      avater = wx.getStorageSync('avater');
    if(username){
      this.setData({
        username: username,
        defaultUrl: avater
      })
    }
    wx.getSetting({
      success: res => {
        if(res.authSetting['scope.userInfo']){
          wx.getUserInfo({
            success: res => {
              this.setData({
                defaultUrl: res.userInfo.avatarUrl,
                userInfo: res.userInfo
              })
            }
          })
        }
      }
    })
  },
  // Handler for the login button's getUserInfo event: caches the profile
  // locally and makes sure a matching record exists in the cloud database.
  getUserInfoHandler: function(e){
    console.log(e)
    let d = e.detail.userInfo
    var gen = d.gender==1?'男':'女'
    this.setData({
      userTx: d.avatarUrl,
      username: d.nickName
    })
    wx.setStorageSync('avater', d.avatarUrl)
    wx.setStorageSync('username', d.nickName)
    wx.setStorageSync('gender', gen)
    wx.setStorageSync('province', d.province)
    // Get a reference to the cloud database.
    const db = wx.cloud.database()
    const _ = db.command
    // If no local user id exists yet, generate one.
    var userId = wx.getStorageSync('userId')
    if(!userId){
      userId = this.getUserId()
    }
    // Look the user up in the database.
    // NOTE(review): e.detail.userInfo normally carries no `openid` field —
    // confirm where d.openid is expected to come from.
    db.collection('users').where({
      _openid: d.openid
    }).get({
      success(res) {
        // res.data is the array of matching records.
        console.log('查询用户:',res)
        // If a record exists remember its openid, otherwise register the user.
        if(res.data && res.data.length > 0){
          wx.setStorageSync('openId', res.data[0]._openid)
        }else{
          // Short delay, then insert the new user record.
          setTimeout(() => {
            db.collection('users').add({
              data:{
                userId: userId,
                userTang: 10,
                yuyin: 0,
                baoyuyin: 0,
                iv: d.iv
              },
              success: function(){
                console.log('用户id新增成功')
                db.collection('users').where({
                  userId: userId
                }).get({
                  success: res => {
                    wx.setStorageSync('openId', res.data[0]._openid)
                  },
                  fail: err => {
                    console.log('用户_openId设置失败')
                  }
                })
              },
              fail: function(e){
                console.log('用户id新增失败')
              }
            })
          },100)
        }
      },
      fail: err=>{
      }
    })
  },
  // Generate a unique id: one random alphanumeric character + milliseconds
  // since 1970 + a random number below 100000.
  getUserId: function(){
    var w = "abcdefghijklmnopqrstuvwxyz0123456789",
      firstW = w[parseInt(Math.random() * (w.length))];
    var userId = firstW + (Date.now()) + (Math.random() * 100000).toFixed(0)
    console.log(userId)
    wx.setStorageSync('userId', userId)
    return userId;
  },
  // Open the records page; data-nth selects which record list to show.
  jilu: function(e) {
    var nth = e.currentTarget.dataset.nth
    wx.setStorageSync('nth', nth)
    wx.navigateTo({
      url: '../userjilu/userjilu'
    })
  },
  myfengcun: function() {
    wx.navigateTo({
      url: '../userfengcun/userfengcun'
    })
  },
  qiandao: function() {
    wx.navigateTo({
      url: '../tangguo/tangguo'
    })
  },
})
/* Count how many times each alpha-numeric character occurs in a string.
   Case insensitive. */

// True when the first character of `char` is 0-9, A-Z or a-z.
// (Matches the original: only the first character of the input is tested,
// and an empty string yields false.)
let isAlphaNumeric = ( char ) => {
  const first = String( char ).charAt( 0 );
  return /[0-9A-Za-z]/.test( first );
};

// Tally each alphanumeric character (lower-cased) into a plain object.
let countChars = ( string ) => {
  const tally = {};
  for ( const ch of string ) {
    if ( !isAlphaNumeric( ch ) ) {
      continue;
    }
    const key = ch.toLowerCase();
    tally[ key ] = ( tally[ key ] || 0 ) + 1;
  }
  return tally;
};
# Evaluate a fine-tuned language model on the WikiText-103 validation set,
# augmenting each example to keep only named entities in the first half-quarter
# and scoring the penultimate quarter (batch size 1, drop last partial batch).
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/512+0+512-STG/7-model --tokenizer_name model-configs/1024-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/512+0+512-STG/7-512+0+512-NER-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_named_entities_first_half_quarter --eval_function penultimate_quarter_eval
# Build both output formats of the slide deck from slides.md with pandoc.

# Make HTML notes version of slides
pandoc slides.md -o index.html -c css/notes.css \
	--template=template.html -H header.html

# Make revealjs version of slides
# (--section-divs wraps each section so reveal.js can paginate them)
pandoc --section-divs -t revealjs -s \
	--template template.revealjs \
	-o slides.html \
	-H header.html \
	slides.md

# # Automatically add and commit
# git add .
# git commit
# git push
#!/usr/bin/env bash
# collect meta info from images
# sadly no EXIF, but still great other info

# this is where the S3 bucket is mounted
IMAGES_LOCATION="./images"
OUT_LOCATION='./metadata'

# Each listing line ends with the image filename; take the last field and
# derive an id from the basename without its extension.
# Fixes: `read -r` keeps backslashes intact; "$id" is quoted so unusual
# filenames cannot be word-split.
while read -r i; do
	f=$(echo "$i" | awk '{ print $NF }')
	id="${f%.*}"
	echo "$id"
	exiftool -json -U -u "${IMAGES_LOCATION}/${f}" > "${OUT_LOCATION}/${id}.json"
done < ddosecrets-parler-images-listing.txt

echo "Compressing metadata"
tar -czf metadata-images.tgz "${OUT_LOCATION}"
import mbuild as mb

def perform_energy_minimization(molecule, residue_name, forcefield_file, minimization_steps):
    # Rename the compound, then run an in-place energy minimization with the
    # given force field file and step budget; returns the same object.
    molecule.name = residue_name
    print('molecule.name = ' + str(molecule.name))
    molecule.energy_minimize(forcefield=forcefield_file, steps=minimization_steps)
    return molecule

# Example usage
# NOTE(review): 'FF_file' is a placeholder path and 10**9 steps is extreme —
# confirm these values before running for real.
water = mb.load('O', smiles=True)
minimized_water = perform_energy_minimization(water, 'Water_res_name', 'FF_file', 10**9)
fake_water = mb.load('O', smiles=True)
import time
class LCG:
    """Linear congruential pseudo-random number generator.

    NOTE: the generator state lives in the module-level global ``rand``
    (preserved from the original design), so all LCG instances share one
    stream.
    """

    def __init__(self, seed=None):
        # Bugfix: the previous default (``seed=time.time()``) was evaluated
        # once at class-definition time, so every default-constructed
        # instance received the same seed.  Resolve the timestamp per call.
        if seed is None:
            seed = time.time()
        self.set_seed_lcg(seed)

    # Set a starting seed for LCG (timestamp by default, via __init__).
    def set_seed_lcg(self, seed):
        global rand
        rand = float(seed)

    # Parameters taken from https://www.wikiwand.com/en/Numerical_Recipes
    # "Numerical Recipes: The Art of Scientific Computing".
    def custom_uniform(self, low_lim=0.0, up_lim=100.0, precision=True):
        """Advance the shared LCG state and return the next value.

        When ``precision`` is true, a fractional part in [0, 1) is appended
        by one recursive draw.
        """
        # Linear Congruential Method: x1 = (x0 * a + c) % m
        a = 1664525
        c = 1013904223
        m = 2**32 - 1  # -1 is for precision.

        global rand
        rand = (((a * rand + c) % m) % up_lim) + low_lim

        # Append a fractional part between 0 and 1.
        if (precision):
            rand = rand + self.custom_uniform(0, 1000, False) / 1000

        # (The original's no-op ``else: rand`` branch has been removed.)
        if up_lim == 1:
            rand = rand % 1

        return rand
|
<gh_stars>0
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package hermes.command.requete.channel;
import hermes.chat.controleur.Chatter;
import hermes.client.Client;
import hermes.client.ClientStatus;
import hermes.command.requete.base.Requete;
import hermes.protocole.Protocole;
import hermes.protocole.ProtocoleSwinen;
/**
*
* @author salto
*/
public class JoinChannel extends Requete {

    public JoinChannel(Chatter chat) {
        super(chat);
    }

    /**
     * Parses a JOINCHANNEL message and, when it matches the protocol, makes
     * the named user join the named channel; otherwise flags the client as
     * having received a bad protocol message.
     */
    @Override
    public void execute() {
        // Guard clauses instead of nested conditionals.
        if (!verifierArguments(1)) {
            return;
        }
        Protocole protocole = chat.getProtocole();
        Client client = chat.getClient();
        String text = (String) args[0];

        protocole.prepare(ProtocoleSwinen.JOINCHANNEL);
        if (!protocole.check(text)) {
            client.setEtat(ClientStatus.BadProtocoleReceived);
            return;
        }

        String channel = protocole.get(ProtocoleSwinen.channel);
        String user = protocole.get(ProtocoleSwinen.user);
        chat.getChannels().rejoindre(channel, user);
    }
}
|
import subprocess
import os
def encrypt_file(input_file, output_file, iv, key=None):
    """Encrypt ``input_file`` to ``output_file`` with AES-256-CBC via openssl.

    Args:
        input_file: path of the plaintext file.
        output_file: path the ciphertext is written to.
        iv: initialisation vector as a hex string (AES-CBC needs 32 hex chars).
        key: optional AES key as a hex string (64 hex chars).  When given it is
            passed with ``-K``; without it openssl has no key for raw -iv mode
            and fails (the original silently ignored that failure).

    Raises:
        subprocess.CalledProcessError: if openssl exits with a non-zero status.
    """
    cmd = ['openssl', 'enc', '-aes-256-cbc', '-iv', iv, '-in', input_file, '-out', output_file]
    if key is not None:
        cmd += ['-K', key]
    # check=True: fail loudly instead of silently producing no/partial output.
    subprocess.run(cmd, check=True)
def decrypt_file(input_file, output_file, iv, key=None):
    """Decrypt an AES-256-CBC ``input_file`` to ``output_file`` via openssl.

    Args:
        input_file: path of the ciphertext file.
        output_file: path the plaintext is written to.
        iv: initialisation vector as a hex string (AES-CBC needs 32 hex chars).
        key: optional AES key as a hex string (64 hex chars), passed with
            ``-K``.  Without it openssl cannot decrypt in raw -iv mode.

    Raises:
        subprocess.CalledProcessError: if openssl exits with a non-zero status.
    """
    cmd = ['openssl', 'enc', '-d', '-aes-256-cbc', '-iv', iv, '-in', input_file, '-out', output_file]
    if key is not None:
        cmd += ['-K', key]
    # check=True surfaces wrong-key / corrupt-input failures to the caller.
    subprocess.run(cmd, check=True)
def set_file_permissions(file_path):
    # Restrict the file to owner read-only (mode 0400) so other local users
    # cannot read the potentially sensitive contents.
    os.chmod(file_path, 0o400)
# Example usage
# NOTE(review): this IV is only 12 hex characters; AES-CBC requires a
# 16-byte (32 hex char) IV, and a hard-coded IV defeats CBC's security.
# Generate a fresh random IV per encryption and store it with the ciphertext.
input_file = 'sensitive_data.txt'
encrypted_file = 'sensitive_data.enc'
decrypted_file = 'sensitive_data_decrypted.txt'
iv = 'b9bd228152b0'

encrypt_file(input_file, encrypted_file, iv)
set_file_permissions(encrypted_file)

decrypt_file(encrypted_file, decrypted_file, iv)
set_file_permissions(decrypted_file)
def word_frequency(text):
    """Print each word's frequency in *text*, most frequent first.

    Words are lower-cased; any character that is neither alphanumeric nor
    whitespace is treated as a word separator.
    """
    # Normalise: lower-case and blank out punctuation.
    cleaned = []
    for ch in text.lower():
        cleaned.append(ch if ch.isalnum() or ch.isspace() else ' ')
    tokens = ''.join(cleaned).split()

    # Tally occurrences.
    freq = {}
    for token in tokens:
        freq[token] = freq.get(token, 0) + 1

    # Sorted by count, descending; ties keep first-seen order (stable sort).
    for token, count in sorted(freq.items(), key=lambda item: item[1], reverse=True):
        print(f"{token}: {count}")

# Demo run on the example sentence.
input_text = "The quick brown fox jumps over the lazy dog. The dog barks, and the fox jumps."
word_frequency(input_text)
# Strip a vendored package tree down to a minimal pure-python subset.
# Usage: <script> <package-dir>

# Safety guard: with an empty $1 the `find ... -delete` commands below would
# default to the CURRENT directory and delete its files.
if [ -z "$1" ]; then
	echo "usage: $0 <package-dir>" >&2
	exit 1
fi

base_dir=$PWD
script_dir=$(dirname "$0")

# delete non-python files
find "$1" -type f ! -name '*.py' -delete
# delete tests
find "$1" -type d -name 'tests' -exec rm -rf {} +
# delete unnecessary
rm -rf "$1/testing"
rm -rf "$1/drawing"
rm -rf "$1/readwrite"
rm -rf "$1/linalg"
find "$1/generators" -type f ! -name 'random_graphs.py' ! -name 'degree_seq.py' ! -name 'classic.py' -delete
rm -rf "$1/algorithms"

# python fixes
cd "$script_dir"
python install.py "$1"
cd "$base_dir"

# patches
cp -a "$script_dir/patch/." "$1"
package somind.dtlab.ingest.mqtt
import akka.actor.{ActorRef, ActorSystem, Props}
import akka.http.scaladsl.model.{ContentType, ContentTypes}
import akka.util.Timeout
import com.typesafe.config.{Config, ConfigFactory}
import com.typesafe.scalalogging.LazyLogging
import somind.dtlab.ingest.mqtt.observe.Observer
import somind.dtlab.ingest.mqtt.utils.InitJavaLogging
import scala.concurrent.ExecutionContextExecutor
// Central configuration object: reads application.conf once at startup and
// exposes typed settings plus the shared actor system and observer actor.
object Conf extends LazyLogging {

  InitJavaLogging()

  implicit val actorSystem: ActorSystem = ActorSystem("DtlabIngestMqtt")
  implicit val executionContext: ExecutionContextExecutor =
    actorSystem.dispatcher
 // implicit val materializer: ActorMaterializer = ActorMaterializer(ActorMaterializerSettings(actorSystem))

  val conf: Config = ConfigFactory.load()
  // MQTT source connection settings.
  val port: Int = conf.getInt("main.port")
  val mqttUrl: String = conf.getString("main.mqttUrl")
  val mqttClientId: String = conf.getString("main.mqttClientId")
  val mqttTopic: String = conf.getString("main.mqttTopic")
  val keyStorePassword: String = conf.getString("main.keyStorePassword")
  val keyStorePath: String = conf.getString("main.keyStorePath")
  // Downstream DtLab ingest endpoints (space-separated in config).
  val dtlabIngestUris: List[String] =
    conf.getString("main.dtlabIngestUris").split(' ').toList

  import scala.concurrent.duration._
  val webhookTimeoutSeconds: Duration =
    conf.getInt("main.webhookTimeoutSeconds").seconds
  // Content type used when forwarding telemetry; defaults to JSON.
  val telemetryContentType: ContentType.NonBinary =
    conf.getString("main.telemetryContentType") match {
      case "csv"  => ContentTypes.`text/csv(UTF-8)`
      case "text" => ContentTypes.`text/plain(UTF-8)`
      case _      => ContentTypes.`application/json`
    }

  // Fixed 120s budget for outbound requests.
  def requestDuration: Duration = {
    val t = "120 seconds"
    Duration(t)
  }

  implicit def requestTimeout: Timeout = {
    val d = requestDuration
    FiniteDuration(d.length, d.unit)
  }

  val healthToleranceSeconds: Int =
    conf.getString("main.healthToleranceSeconds").toInt

  // Single observer actor handling incoming MQTT messages.
  val observer: ActorRef = actorSystem.actorOf(Props[Observer], "observer")
}
|
const fs = require('fs');
const path = require('path');
const checkDirectory = (src, dst, callback) => {
fs.access(dst, fs.constants.F_OK, err => {
if (err) {
fs.mkdirSync(dst);
callback(src, dst);
} else {
callback(src, dst);
}
});
};
// Recursively copy the contents of directory `src` into `dst`.
// Files are streamed; subdirectories recurse via checkDirectory.
const copy = (src, dst) => {
  const paths = fs.readdirSync(src); // synchronously list the source directory
  paths.forEach(pathname => {
    const _src = `${src}/${pathname}`;
    const _dst = `${dst}/${pathname}`;
    fs.stat(_src, (err, stats) => {
      // stats holds the file attributes
      if (err) throw err;
      if (stats.isFile()) {
        // Regular file: copy via piped streams.
        const readable = fs.createReadStream(_src); // read stream
        const writable = fs.createWriteStream(_dst); // write stream
        readable.pipe(writable);
      } else if (stats.isDirectory()) {
        // Directory: recurse.
        checkDirectory(_src, _dst, copy);
      }
    });
  });
};
// Build output that should be mirrored into the Sketch plugin bundle.
const SOURCES_DIRECTORY = path.resolve(__dirname, '../../src/dist'); // source directory
// Destination inside the plugin's webpack resources folder.
const DEST_DIRECTORY = path.resolve(
  __dirname,
  '../../SketchTemplate1.sketchplugin/Contents/Resources/_webpack_resources',
);
// Kick off the mirror: ensure the destination exists, then copy recursively.
checkDirectory(SOURCES_DIRECTORY, DEST_DIRECTORY, copy);
|
# Adds lookup indexes on config_modules for the columns used to find a
# module by owning smoke test and/or STI type (individually and combined).
class UpdateConfigModules < ActiveRecord::Migration
  # Single columns and one composite, shared by both migration directions.
  INDEXED_COLUMNS = [:smoke_test_id, :type, [:type, :smoke_test_id]].freeze

  def self.up
    INDEXED_COLUMNS.each { |columns| add_index :config_modules, columns }
  end

  def self.down
    INDEXED_COLUMNS.each { |columns| remove_index :config_modules, columns }
  end
end
|
#ifndef H_LINGO_PLATFORM_ARCHITECTURE
#define H_LINGO_PLATFORM_ARCHITECTURE
// This header detects the processor architecture
// Generates a compile error when the architecture is not detected
// Processor types
#define LINGO_ARCHITECTURE_X86 0x0001
#define LINGO_ARCHITECTURE_X64 0x0002
// Endianness
#define LINGO_ARCHITECTURE_BIG_ENDIAN 0x0001
#define LINGO_ARCHITECTURE_LITTLE_ENDIAN 0x0002
// Detect architecture
#ifndef LINGO_ARCHITECTURE
#if defined(__i386) || defined(__i386__) || defined(_M_IX86) || defined(_X86_)
#define LINGO_ARCHITECTURE LINGO_ARCHITECTURE_X86
#elif defined(__x86_64) || defined(__x86_64__) || defined(__amd64) || defined(__amd64__) || defined(_M_AMD64) || defined(_M_X64)
#define LINGO_ARCHITECTURE LINGO_ARCHITECTURE_X64
#endif
#endif
#if !defined(LINGO_ARCHITECTURE)
#error Unable to detect processor architecture
#endif
// Detect architecture bits
#ifndef LINGO_ARCHITECTURE_BITS
#if LINGO_ARCHITECTURE == LINGO_ARCHITECTURE_X86
#define LINGO_ARCHITECTURE_BITS 32
#endif
#if LINGO_ARCHITECTURE == LINGO_ARCHITECTURE_X64
#define LINGO_ARCHITECTURE_BITS 64
#endif
#endif
#ifndef LINGO_ARCHITECTURE_BYTES
#if LINGO_ARCHITECTURE_BITS == 8
#define LINGO_ARCHITECTURE_BYTES 1
#elif LINGO_ARCHITECTURE_BITS == 16
#define LINGO_ARCHITECTURE_BYTES 2
#elif LINGO_ARCHITECTURE_BITS == 32
#define LINGO_ARCHITECTURE_BYTES 4
#elif LINGO_ARCHITECTURE_BITS == 64
#define LINGO_ARCHITECTURE_BYTES 8
#endif
#endif
#if !defined(LINGO_ARCHITECTURE_BITS) || !defined(LINGO_ARCHITECTURE_BYTES)
#error Unable to detect processor architecture bits
#endif
// Detect architecture endianness
#ifndef LINGO_ARCHITECTURE_ENDIANNESS
#if LINGO_ARCHITECTURE == LINGO_ARCHITECTURE_X86 || LINGO_ARCHITECTURE == LINGO_ARCHITECTURE_X64
#define LINGO_ARCHITECTURE_ENDIANNESS LINGO_ARCHITECTURE_LITTLE_ENDIAN
#endif
#endif
#if !defined(LINGO_ARCHITECTURE_ENDIANNESS)
#error Unable to detect processor architecture endianness
#endif
#endif |
#! /bin/sh
# Install the "webrecord" koolshare softcenter module: copy its payload into
# the koolshare tree and register the module's metadata via dbus.
export KSROOT=/koolshare
source $KSROOT/scripts/base.sh
eval `dbus export webrecord_`

# Mirror each shipped directory into the matching koolshare directory.
for dir in init.d scripts webs; do
  cp -rf /tmp/webrecord/$dir/* $KSROOT/$dir/
done
# Install the uninstaller under the softcenter naming convention.
cp /tmp/webrecord/uninstall.sh $KSROOT/scripts/uninstall_webrecord.sh
chmod +x $KSROOT/scripts/webrecord_*

# Register module metadata with the softcenter.
dbus set softcenter_module_webrecord_description=查看网址和搜索记录
dbus set softcenter_module_webrecord_install=1
dbus set softcenter_module_webrecord_name=webrecord
dbus set softcenter_module_webrecord_title="上网记录"
dbus set softcenter_module_webrecord_version=0.1

sleep 1
# Remove the temporary payload; ignore errors if it is already gone.
rm -rf /tmp/webrecord >/dev/null 2>&1
|
package minimumcost_spanning_tree;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.PriorityQueue;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 10021번: Watering the Fields
*
* @see https://www.acmicpc.net/problem/10021
*
*/
/**
 * BOJ 10021: Watering the Fields.
 *
 * Builds a minimum spanning tree over the fields with Kruskal's algorithm and
 * a union-find forest. Only pipes whose squared-distance cost is at least C
 * may be used; prints the minimum total cost, or -1 when the fields cannot
 * all be connected.
 *
 * @see https://www.acmicpc.net/problem/10021
 */
public class Boj10021 {

    /** Disjoint-set forest; a negative entry marks a root and stores -(component size). */
    private static int[] roots;

    /** Candidate pipes, cheapest first. */
    private static PriorityQueue<Pipe> candidates = new PriorityQueue<>();

    /** A possible pipe between fields {@code a} and {@code b} with its squared-distance cost. */
    private static class Pipe implements Comparable<Pipe> {
        final int a;
        final int b;
        final int cost;

        Pipe(int a, int b, int cost) {
            this.a = a;
            this.b = b;
            this.cost = cost;
        }

        @Override
        public int compareTo(Pipe other) {
            return Integer.compare(this.cost, other.cost);
        }
    }

    public static void main(String[] args) throws Exception {
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer header = new StringTokenizer(in.readLine());
        final int n = Integer.parseInt(header.nextToken());
        final int minCost = Integer.parseInt(header.nextToken());

        int[] xs = new int[n];
        int[] ys = new int[n];
        for (int i = 0; i < n; i++) {
            StringTokenizer line = new StringTokenizer(in.readLine());
            xs[i] = Integer.parseInt(line.nextToken());
            ys[i] = Integer.parseInt(line.nextToken());
        }

        // Every field starts out as its own component of size 1.
        roots = new int[n];
        for (int i = 0; i < n; i++) {
            roots[i] = -1;
        }

        // Enumerate all field pairs, keeping only the pipes the problem allows (cost >= C).
        for (int i = 0; i < n; i++) {
            for (int j = i + 1; j < n; j++) {
                int dx = xs[i] - xs[j];
                int dy = ys[i] - ys[j];
                int cost = dx * dx + dy * dy;
                if (cost >= minCost) {
                    candidates.offer(new Pipe(i, j, cost));
                }
            }
        }

        System.out.println(kruskal());
    }

    /**
     * Consumes the candidate queue cheapest-first, joining components as edges connect them.
     *
     * @return the total cost of the spanning tree, or -1 when more than one
     *         component remains (the fields cannot all be connected)
     */
    private static int kruskal() {
        int total = 0;
        while (!candidates.isEmpty()) {
            Pipe pipe = candidates.poll();
            if (union(pipe.a, pipe.b)) {
                total += pipe.cost;
            }
        }
        // Each remaining negative entry is the root of a separate component.
        int components = 0;
        for (int entry : roots) {
            if (entry < 0) {
                components++;
            }
        }
        return components > 1 ? -1 : total;
    }

    /** Finds the root of {@code v}'s component, compressing the path along the way. */
    private static int find(int v) {
        int root = v;
        while (roots[root] >= 0) {
            root = roots[root];
        }
        // Second pass: point every node on the walked path directly at the root.
        while (roots[v] >= 0) {
            int next = roots[v];
            roots[v] = root;
            v = next;
        }
        return root;
    }

    /**
     * Unions the components containing {@code a} and {@code b}, smaller tree under larger.
     *
     * @return true when a merge happened, false when they were already joined
     */
    private static boolean union(int a, int b) {
        a = find(a);
        b = find(b);
        if (a == b) {
            return false;
        }
        if (roots[a] > roots[b]) {
            // Sizes are stored negated, so the smaller value is the larger tree.
            int swap = a;
            a = b;
            b = swap;
        }
        roots[a] += roots[b];
        roots[b] = a;
        return true;
    }
}
|
<gh_stars>0
# Creates the regulars join table linking a bartender ("tender") user to a
# customer user, with foreign keys from both reference columns back to users.
class CreateRegulars < ActiveRecord::Migration[5.2]
  def change
    create_table :regulars do |t|
      t.references :tender, index: true
      t.references :customer, index: true
      t.timestamps
    end

    # Both roles point at rows in the users table.
    %i[tender customer].each do |role|
      add_foreign_key :regulars, :users, column: :"#{role}_id", primary_key: :id
    end
  end
end
|
#!/bin/bash
# Round-trip every SPIR-V binary through the beehive toolkit's disassembler
# and assembler, then validate the reassembled binary with spirv-val.
#
# Usage: ./validate.sh [files]
#   files - optional list of .spv files; defaults to all .spv under ../rodinia
FILES=${1:-$(find ../rodinia -type f -name "*.spv")}

# Factor the jar path so it is written (and updated) in one place.
TOOLKIT=../../dist/spirv-beehive-toolkit.jar

# Exit status: 0 only if every file validates, so CI can consume this script.
STATUS=0

# Check all files
for FILE in $FILES; do
    # Disassemble to text, then reassemble back into a binary.
    # Quote expansions so paths survive word splitting inside the commands.
    java -jar "$TOOLKIT" -d "$FILE" -o proto/out.spvasm
    java -jar "$TOOLKIT" -d --tool asm -o proto/out.spv proto/out.spvasm
    # Validate the round-tripped binary.
    if spirv-val proto/out.spv; then
        echo "$FILE success"
    else
        echo "$FILE fail"
        STATUS=1
    fi
done

exit $STATUS
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.