text
stringlengths 1
1.05M
|
|---|
<reponame>tryond/test_artifact_20
package com.plugra.view;

/**
 * Listener notified by the view when the user selects a mouse type.
 */
public interface MouseTypeViewListener {
    /**
     * Called when a mouse type has been selected.
     *
     * @param mouseType identifier of the selected mouse type
     */
    public void mouseTypeSelected(String mouseType);
}
|
import style from './style.module.css'
const NotFound = () => {
return (
<div className={style.wrap}>404 NotFound</div>
)
}
export default NotFound
|
import re
def extract_author_info(code):
    """Pull author name and email out of setup-style source text.

    Matches author="<...>" and author_email="<...>" patterns. Returns a dict
    with 'author' and 'author_email' keys when BOTH are present; otherwise an
    empty dict.
    """
    name_match = re.search(r'author="<(.*?)>"', code)
    email_match = re.search(r'author_email="<(.*?)>"', code)
    if not (name_match and email_match):
        return {}
    return {
        'author': name_match.group(1),
        'author_email': email_match.group(1),
    }
|
#!/bin/bash

# Script can be ONLY included by "source" command.
[[ -z "$BASH" || (-n "$BASH_LINENO" && BASH_LINENO[0] -le 0) ]] && return

source '/bin/bash_tacklelib' || exit $?

tkl_include '__init__.sh' || tkl_abort_include
tkl_include "$TACKLELIB_BASH_ROOT/tacklelib/testlib.sh" || tkl_abort_include

# testlib hook: declare the sources/functions/variables this test module
# needs. TEST_VARIABLES is a flat key/value pair list.
function TestUserModuleInit()
{
  TEST_SOURCES=()
  TEST_FUNCTIONS=()
  TEST_VARIABLES=(CWD "$TESTS_PROJECT_ROOT/unit/03_load_config" TEST_DATA_DIR "$TESTS_PROJECT_ROOT/unit/03_load_config/data")
}

# testlib hook: module teardown (nothing to clean up).
function TestUserModuleExit() { :; }

# testlib hook: per-test setup — normalize CWD to the backend path format.
function TestUserInit()
{
  tkl_convert_native_path_to_backend "$CWD"
  CWD="$RETURN_VALUE"
}

# testlib hook: per-test teardown (no-op).
function TestUserExit() { :; }
|
import React, { useState, useEffect } from 'react';
import './info-local.css';
import MapaLocal from './mapa-local';
import { navigate } from 'hookrouter';
import Button from 'react-bootstrap/Button';
import axios from 'axios';
import config from './config';
import ModalDescricaoLocal from './modal-descricao-local';
import Galeria from './galeria';
// Detail page for a location: fetches its data once, then renders the map,
// the description modal and the image gallery.
function InfoLocal(props) {
  // API payload for this location; undefined until the fetch completes.
  const [dadosLocal, setDadosLocal] = useState();

  useEffect(() => {
    async function obterDadosLocal() {
      try {
        // props.latlng is used as the resource id in the API path —
        // presumably a "lat,lng" string; confirm against the router.
        const dados = await axios.get(
          `${config.API_URL_BASE}info-local/${props.latlng}?apiKey=${config.API_KEY}`);
        setDadosLocal(dados.data);
      } catch(error) {
        // Any fetch failure: warn the user and return to the home page.
        alert('Erro obtendo dados.');
        navigate('/');
      }
    }
    // Guard avoids refetching once the state is populated.
    if (!dadosLocal) {
      obterDadosLocal();
    }
  }, [dadosLocal, props.latlng]);

  return (
    <>
      <div className="div-botao-voltar">
        <Button
          variant="secondary"
          onClick={() => navigate('/')}>
          <span className="oi oi-arrow-thick-left"></span>
        </Button>
      </div>
      {dadosLocal && <MapaLocal latlng={props.latlng} nome={dadosLocal.nome} />}
      {dadosLocal && <ModalDescricaoLocal
        nome={dadosLocal.nome}
        descricao={dadosLocal.descricao} />}
      {dadosLocal && <Galeria imagens={dadosLocal.imagens} />}
    </>
  );
}
|
<reponame>liaoyinglong/kkt<filename>example/rematch/src/utils/request.js
import axios from 'axios';
import { splitUrl } from './utils.js';
// Get the current location.
// const location = history.location;
// HTTP status code -> human-readable (Chinese) message, used as a fallback
// when a failed response carries no statusText.
const codeMessage = {
  200: '服务器成功返回请求的数据。',
  201: '新建或修改数据成功。',
  202: '一个请求已经进入后台排队(异步任务)。',
  204: '删除数据成功。',
  400: '发出的请求有错误,服务器没有进行新建或修改数据的操作。',
  401: '用户没有权限(令牌、用户名、密码错误)。',
  403: '用户得到授权,但是访问是被禁止的。',
  404: '发出的请求针对的是不存在的记录,服务器没有进行操作。',
  406: '请求的格式不可得。',
  410: '请求的资源被永久删除,且不会再得到的。',
  422: '当创建一个对象时,发生一个验证错误。',
  500: '服务器发生错误,请检查服务器。',
  502: '网关错误。',
  503: '服务不可用,服务器暂时过载或维护。',
  504: '网关超时。',
};
/**
 * Requests a URL via axios, returning a promise.
 *
 * @param {string} url The URL we want to request
 * @param {object} [options] Request options: `method` (default GET) and `body`
 * @return {object} A promise resolving to the response payload
 */
export default function request(url, options = {}) {
  const method = options.method || 'GET';
  const newOptions = {
    url,
    method,
    data: options.body,
    headers: {
      'Content-Type': 'application/json; charset=utf-8',
      Accept: 'application/json',
    },
  };
  if (/(GET)/.test(method)) {
    // GET parameters belong in the query string, not the request body.
    newOptions.url = splitUrl(url, { ...options.body });
    // Fix: the payload is stored under `data` (axios convention); the old
    // `delete newOptions.body` removed a key that never existed, so GET
    // requests still carried a body.
    delete newOptions.data;
  }
  return axios.request(newOptions)
    .then((response) => {
      return response.data;
    })
    .catch((err) => {
      const response = err.response;
      // Fix: network-level failures (timeout, DNS, aborted) have no
      // `response`; rethrow instead of crashing on `response.status`.
      if (!response) {
        throw err;
      }
      if (response.status >= 200 && response.status < 300) {
        return response;
      }
      const errortext = codeMessage[response.status] || response.statusText;
      // Notification.error({
      //   message: '错误提示:',
      //   description: (response.data && response.data.info) || '没有错误提示',
      // });
      const error = new Error(errortext);
      error.name = response.status;
      error.response = response;
      // Prefer the server-provided error payload when there is one.
      if (response.data) {
        return response.data;
      }
      throw error;
    });
}
// /**
// * Requests a URL, returning a promise.
// *
// * @param {string} url The URL we want to request
// * @param {object} [options] The options we want to pass to "fetch"
// * @return {object} An object containing either "data" or "err"
// */
// export default function request(url, options) {
// const defaultOptions = {};
// const newOptions = { ...defaultOptions, ...options };
// newOptions.headers = {
// Accept: 'application/json',
// 'Content-Type': 'application/json',
// ...newOptions.headers,
// };
// newOptions.body = JSON.stringify(newOptions.body);
// return fetch(url, newOptions)
// .then(parseJSON)
// .then((data) => {
// return data;
// })
// .catch((err) => {
// // console.log('请求错误: ', err);
// return err;
// });
// // https://www.npmjs.com/package/axios
// }
|
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-only-pad/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-only-pad/7-1024+0+512-N-VB-ADJ-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function remove_all_but_nouns_verbs_and_adjectives_first_two_thirds_sixth --eval_function penultimate_sixth_eval
|
<gh_stars>0
import React from "react"
import Header from "../Header"
import { rhythm, scale } from "../../utils/typography"
// Page shell: renders the site header plus the routed page content.
// NOTE(review): `rhythm` and `scale` are imported above but unused in this
// file — confirm before removing that import.
type LayoutProps = {
  location?: unknown
  children?: React.ReactNode
}

// Fix: `props` was implicitly `any` (rejected under `strict`); give it an
// explicit shape matching how it is destructured below.
const Layout = (props: LayoutProps): JSX.Element => {
  const { location, children } = props
  return (
    <div>
      <Header location={location} />
      {children}
    </div>
  )
}

export default Layout
|
#!/bin/sh
# Download the composer installer, verify it against the published SHA-384
# signature, run it, and clean up.
EXPECTED_SIGNATURE="$(curl -L https://composer.github.io/installer.sig)"
# Original line (with wget):
# EXPECTED_SIGNATURE="$(wget -q -O - https://composer.github.io/installer.sig)"
php -r "copy('https://getcomposer.org/installer', 'composer-setup.php');"
ACTUAL_SIGNATURE="$(php -r "echo hash_file('SHA384', 'composer-setup.php');")"
# Fix: `[[ ]]` is a bash/ksh extension and a syntax error under a POSIX
# /bin/sh (e.g. dash); use the portable single-bracket test.
if [ "$EXPECTED_SIGNATURE" != "$ACTUAL_SIGNATURE" ]
then
    >&2 echo 'ERROR: Invalid installer signature'
    rm composer-setup.php
    exit 1
fi
php composer-setup.php --quiet
RESULT=$?
rm composer-setup.php
exit ${RESULT}
|
<filename>02-controller/routing/src/controller/03-restful-controller.ts<gh_stars>1-10
/**
* With combination of route configurations we can create restful route
*
* POST /users
* GET /users?limit&offset
* GET /users/:id
* PUT /users/:id
* PATCH /users/:id
* DELETE /users/:id
*/
import { meta, route } from "plumier";
/**
 * User DTO; `@meta.property()` exposes each field to Plumier's
 * binding/validation metadata.
 */
export class User {
    @meta.property()
    userName: string
    @meta.property()
    name: string
}
/**
 * Restful controller for the User resource. Plumier derives the HTTP verb
 * and path from the decorators; all handlers currently return stub objects.
 */
export class UsersController {
    // POST /users
    @route.post("")
    save(data: User) {
        return {}
    }
    // GET /users
    @route.get("")
    list(limit: number, offset: number) {
        return {}
    }
    // GET /users/:id
    @route.get(":id")
    get(id: string) {
        return {}
    }
    // PUT /users/:id
    @route.put(":id")
    replace(id: string) {
        return {}
    }
    // PATCH /users/:id
    @route.patch(":id")
    modify(id: string) {
        return {}
    }
    // DELETE /users/:id
    @route.delete(":id")
    delete(id: string) {
        return {}
    }
}
|
<reponame>blackpc/wanderer<filename>src/Wandering.cpp
/**
* Filename: Wandering.cpp
* Author: <NAME>
* Date: Nov 25, 2014
*
* The MIT License (MIT)
*
* Copyright (c) 2014
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <wanderer/Wandering.h>
/**
 * @param robotId     robot identifier, used to build the events topic name
 * @param baseFrameId TF frame id used when publishing paths
 * @param enabled     whether wandering starts active
 */
Wandering::Wandering(const string& robotId,
        const string& baseFrameId, bool enabled)
    : robotId_(robotId), baseFrameId_(baseFrameId),
      enabled_(enabled), publishStop_(false), preferRight_(true)
{
    randomSteer_ = false;
    randomSteerTime_ = ros::Time::now();
    // Owned raw pointer, released in the destructor.
    trajectoryMatcher_ = new SimpleTrajectoryMatcher();
    preferSideChangeTime_ = ros::Time::now();
}
Wandering::~Wandering() {
    // Releases the matcher allocated in the constructor.
    delete trajectoryMatcher_;
}
/**
 * Evaluates the pre-built trajectory sets against the current cost map and
 * returns the trajectory to follow this cycle.
 *
 * Selection outline:
 *  - front, best-left and best-right all blocked -> rotate in place (right);
 *  - all three clear -> go straight, unless random steering is active, in
 *    which case drift toward the currently preferred side;
 *  - only the front clear -> go straight;
 *  - otherwise pick the better side match, breaking exact score ties by the
 *    preferred side.
 */
TrajectoryMatch::Ptr Wandering::chooseBestTrajectory(CostMap& costMap) {
    TrajectoryMatch::Ptr frontMatch =
            trajectoryMatcher_->match(costMap, frontTrajectory_);
    TrajectoryMatch::SetPtr leftMatches =
            trajectoryMatcher_->match(costMap, leftTrajectories_);
    TrajectoryMatch::SetPtr rightMatches =
            trajectoryMatcher_->match(costMap, rightTrajectories_);
    TrajectoryMatch::Ptr rightInPlaceMatch =
            trajectoryMatcher_->match(costMap, rightInPlaceTrajectory_);
    TrajectoryMatch::Ptr leftInPlaceMatch =
            trajectoryMatcher_->match(costMap, leftInPlaceTrajectory_);

    TrajectoryMatch::SetPtr tempSet(new TrajectoryMatch::Set());

    // "Front blocked" means every forward option (straight plus the best of
    // each side) is blocked, not just the straight trajectory.
    bool frontBlocked = false;
    if (frontMatch->isBlocked() && leftMatches->begin()->get()->isBlocked() && rightMatches->begin()->get()->isBlocked())
        frontBlocked = true;

    // Flip the preferred side once a minute.
    if ( (ros::Time::now() - preferSideChangeTime_).toSec() > 60) {
        preferSideChangeTime_ = ros::Time::now();
        preferRight_ = !preferRight_;
    }

    // Periodically toggle the random-steer flag; the next toggle is
    // scheduled 40 s out when the flag was on, 15 s when it was off.
    if (randomSteerTime_ < ros::Time::now()) {
        ros::Time nextRandomSteerUpdate =
                ros::Time::now() + (randomSteer_ ? ros::Duration(40) : ros::Duration(15) );
        randomSteerTime_ = nextRandomSteerUpdate;
        randomSteer_ = !randomSteer_;
        ROS_INFO("Random steer = %s, Prefer right = %s", randomSteer_ ? "True" : "False", preferRight_ ? "True" : "False");
    }

    if (frontBlocked) {
        // Everything ahead is blocked: rotate in place. Always turns right;
        // leftInPlaceMatch is computed but unused here.
        return rightInPlaceMatch;
    }
    else if (!frontMatch->isBlocked() &&
            !leftMatches->begin()->get()->isBlocked() &&
            !rightMatches->begin()->get()->isBlocked())
    {
        // All front trajectories are clear
        if (randomSteer_)
            if (preferRight_)
                return *rightMatches->begin();
            else
                return *leftMatches->begin();

        return frontMatch;
    }
    else if (!frontMatch->isBlocked()) {
        /**
         * Front is free, choose it
         */
        return frontMatch;
    } else {
        /**
         * Front is blocked but at least one side remains open: prefer a side
         * on an exact score tie, otherwise take the first element of the
         * combined set (presumably score-ordered — confirm the comparator).
         */
        if (!leftMatches->begin()->get()->isBlocked() &&
            !rightMatches->begin()->get()->isBlocked() &&
            leftMatches->begin()->get()->getScore() ==
                rightMatches->begin()->get()->getScore())
        {
            if (preferRight_)
                return *rightMatches->begin();
            else
                return *leftMatches->begin();
        }

        tempSet->insert(*rightMatches->begin());
        tempSet->insert(*leftMatches->begin());
        return *tempSet->begin();
    }
}
/**
 * Main control loop: evaluates candidate trajectories against the local cost
 * map at 5 Hz and publishes the selected Ackermann command, plus the debug
 * paths and the cost map itself. Honors the enabled_/publishStop_ flags set
 * by stateCallback().
 */
void Wandering::spin() {
    ros::NodeHandle nodePrivate("~");

    // Seed the RNG used by the random-steer behavior.
    srand(time(0));

    // NOTE(review): these two heap allocations are never freed here —
    // presumably CostMap takes ownership; confirm, otherwise they leak.
    LaserScanDataSource* laserScanDataSource = new LaserScanDataSource(nodePrivate, "/scan");
    CostMap costMap(laserScanDataSource, new RosParametersProvider());

    /**
     * Publishers
     */
    ros::Publisher mapPublisher = nodePrivate.advertise<nav_msgs::OccupancyGrid>("costmap", 1, false);
    ros::Publisher pathPublisher = nodePrivate.advertise<nav_msgs::Path>("path", 1, false);
    ros::Publisher bestPathPublisher = nodePrivate.advertise<nav_msgs::Path>("path_best", 1, false);
    ros::Publisher ackermannPublisher = nodePrivate.advertise<ackermann_msgs::AckermannDriveStamped>("/ackermann_cmd", 1, false);
    ros::Subscriber stateSubscriber = nodePrivate.subscribe(string("/decision_making/" + robotId_ + "/events"), 1, &Wandering::stateCallback, this);

    createTrajectories(2.5, 0.2);

    ros::Rate rate(5);
    while (ros::ok()) {
        ros::spinOnce();

        if (!enabled_) {
            // Publish a single zero (stop) command when transitioning to the
            // paused state, then idle.
            if (publishStop_) {
                ackermannPublisher.publish(ackermann_msgs::AckermannDriveStamped());
                publishStop_ = false;  // fix: was `0` assigned to a bool flag
            }
            rate.sleep();
            continue;
        }

        /**
         * Evaluate trajectories
         */
        TrajectoryMatch::Ptr bestMatch = chooseBestTrajectory(costMap);

        /**
         * Publish all paths
         */
        pathPublisher.publish(frontTrajectory_->getPath(true, baseFrameId_));
        for (int i = 0; i < leftTrajectories_->size(); ++i) {
            pathPublisher.publish((*leftTrajectories_)[i]->getPath(true, baseFrameId_));
        }
        for (int i = 0; i < rightTrajectories_->size(); ++i) {
            pathPublisher.publish((*rightTrajectories_)[i]->getPath(true, baseFrameId_));
        }

        /**
         * Publish best matched trajectory
         */
        bestPathPublisher.publish(bestMatch->getTrajectory()->getPath(true, baseFrameId_));

        /**
         * Publish velocity command
         */
        ackermannPublisher.publish(bestMatch->getTrajectory()->getMotionModelAs<AckermannModel>()->getAckermannMessage());

        /**
         * Publish local cost map
         */
        mapPublisher.publish(costMap.getOccupancyGrid());

        rate.sleep();
    }
}
/**
 * Pre-computes the candidate trajectories by forward-simulating an
 * AckermannModel for `simulationTime` seconds at `granularity` resolution.
 * Higher-weight trajectories are favored during matching.
 *
 * NOTE(review): AckermannModel's constructor arguments are not visible in
 * this file — the third one is presumably a steering angle in radians
 * (±0.16 / ±0.26 ≈ ±9°/15°); confirm against the model's header.
 */
void Wandering::createTrajectories(double simulationTime, double granularity) {
    TrajectorySimulator trajectorySimulator(simulationTime, granularity);

    leftTrajectories_ = Trajectory::VectorPtr(new Trajectory::Vector());
    rightTrajectories_ = Trajectory::VectorPtr(new Trajectory::Vector());

    Trajectory::Ptr trajectory;

    /**
     * Front
     */
    frontTrajectory_ = trajectorySimulator.simulate(new AckermannModel(0.2, 0.5, 0));
    frontTrajectory_->setWeight(1.0);

    /**
     * Left (gentler turn weighted higher than the sharper one)
     */
    trajectory = trajectorySimulator.simulate(new AckermannModel(0.2, 0.5, 0.161799388));
    trajectory->setWeight(0.5);
    leftTrajectories_->push_back(trajectory);

    trajectory = trajectorySimulator.simulate(new AckermannModel(0.2, 0.5, 0.261799388));
    trajectory->setWeight(0.4);
    leftTrajectories_->push_back(trajectory);

    /**
     * Right (mirror of the left set)
     */
    trajectory = trajectorySimulator.simulate(new AckermannModel(0.2, 0.5, -0.261799388));
    trajectory->setWeight(0.4);
    rightTrajectories_->push_back(trajectory);

    trajectory = trajectorySimulator.simulate(new AckermannModel(0.2, 0.5, -0.161799388));
    trajectory->setWeight(0.5);
    rightTrajectories_->push_back(trajectory);

    /**
     * In place
     */
    rightInPlaceTrajectory_ = trajectorySimulator.simulate(new AckermannModel(0.2, 0.0, -1.0));
    rightInPlaceTrajectory_->setWeight(1.0);

    leftInPlaceTrajectory_ = trajectorySimulator.simulate(new AckermannModel(0.2, 0.0, 1.0));
    leftInPlaceTrajectory_->setWeight(1.0);
}
/**
 * Decision-making event handler: "RESUME" enables wandering; "PAUSE"
 * disables it and requests a single stop command from the spin loop.
 */
void Wandering::stateCallback(const std_msgs::String::Ptr& message) {
    if (message->data == "RESUME") {
        enabled_ = true;
        publishStop_ = false;
        ROS_INFO("Started!");
    } else if (message->data == "PAUSE") {
        // Only request a stop command if we were actually running.
        if (enabled_)
            publishStop_ = true;
        enabled_ = false;
        ROS_INFO("Stopped!");  // fix: log typo "Stoped!"
    }
}
|
<filename>typekit/typekit.py
import copy
from .request import make_request
from .exceptions import NoKitFoundException, NoFontFoundException
import pdb
class Typekit(object):
    """Thin client for the Typekit JSON API (kits and font families).

    NOTE(review): this module uses Python 2 syntax (print statements,
    `basestring`) and will not run under Python 3 as written.
    """

    # Class-level defaults; both can be overridden via constructor kwargs.
    use_ssl = True
    host = 'typekit.com/api/v1/json/'

    def __init__(self, **kwargs):
        # Accepted kwargs: use_ssl (bool), host (str), api_token (required).
        self.use_ssl = kwargs.get('use_ssl', self.use_ssl)
        self.host = kwargs.get('host', self.host)
        self.scheme = self.use_ssl and 'https://' or 'http://'
        self.default_domains = ['localhost']
        self.api_token = kwargs.get('api_token', None)
        if 'api_token' not in kwargs:
            raise TypeError('The Typekit API Token must be provided')

    def list_kits(self):
        """
        Returns a json representation of the kits associated with this
        api token
        """
        return make_request('GET', self.__build_url(method='list')).get('kits')

    def get_kit(self, kit_id):
        """
        Returns an existing kit of given id (including unpublished ones).
        Raises NoKitFoundException when the kit does not exist.
        """
        url = self.__build_url('get', kit_id=kit_id)
        kit = make_request('GET', url)
        if 'errors' in kit:
            raise NoKitFoundException(value='Kit with id "{}" does not exist'.format(kit_id))
        return kit

    def __modify_kit(self, kit_id=None, name=None, domains=None, families=None, badge=False):
        """
        Updates (kit_id given) or creates (kit_id is None) a kit.
        Parameters:
            name (string),
            domains (list/string),
            families
                list of dictionaries with key : values
                - 'id' : family id (string)
                - (optional) 'variations' : comma separated variations (string)
            badge: truthy -> 'true'; falsy (including None) -> 'false'
        Returns the decoded API response.
        """
        params = {}
        if name is not None:
            params['name'] = name
        if domains is not None:
            params['domains[]'] = self.__get_param_type_list(domains)
        else:
            # Fall back to localhost so the API always receives a domain list.
            params['domains[]'] = self.default_domains
        if families is not None:
            for idx, family in enumerate(families):
                if 'id' not in family:
                    raise TypeError('the "id" key is required for families')
                params['families[{}][id]'.format(idx)] = family.get('id')
                if 'variations' in family:
                    params['families[{}][variations]'.format(idx)] = family.get('variations')
        if not badge:
            params['badge'] = 'false'
        else:
            params['badge'] = 'true'
        if kit_id is None:
            url = self.__build_url('create')
        else:
            url = self.__build_url('update', kit_id=kit_id)
        return make_request('POST', url, params)

    def update_kit(self, kit_id, name=None, domains=None, families=None, badge=None):
        """
        Completely replaces the existing value with the new value during POST request (Typekit spec)

        NOTE(review): badge=None is treated as falsy by __modify_kit, so an
        update always sends badge='false' unless badge is explicitly truthy —
        confirm this is intended.
        """
        return self.__modify_kit(kit_id=kit_id, name=name, domains=domains, families=families, badge=badge)

    def create_kit(self, name, domains, families=None, badge=False):
        """
        Creates a new kit.
        """
        return self.__modify_kit(name=name, domains=domains, families=families, badge=badge)

    def remove_kit(self, kit_id):
        """
        Removes an existing kit.
        """
        url = self.__build_url('delete', kit_id=kit_id)
        return make_request('DELETE', url, {})

    def publish_kit(self, kit_id):
        """
        Publishes an existing kit.
        """
        url = self.__build_url('publish', kit_id=kit_id)
        return make_request('POST', url, {})

    def get_font_family(self, font):
        """
        Retrieves font information from Typekit.
        Can use either font_slug or font_id. The font slug must
        be a slug for it to work, so slugify your input before using it.
        Raises NoFontFoundException when the font does not exist.
        """
        url = self.__build_url('families', font=font)
        font_response = make_request('GET', url)
        if 'errors' in font_response:
            raise NoFontFoundException('Font "{}" does not exist'.format(font))
        return font_response

    def get_font_variations(self, font):
        """
        Retrieves all variations of the font family as a list of 'fvd' codes.
        Raises NoFontFoundException if the font does not exist (it does not
        return False, as an earlier docstring claimed).
        """
        font_json = self.get_font_family(font)
        variations = []
        for var in font_json.get('family').get('variations'):
            variations.append(var.get('fvd'))
        return variations

    def kit_contains_font(self, kit_id, font):
        """
        Checks to see if a font exists in a kit.
        Returns True if it does, False otherwise.
        Raises NoKitFoundException / NoFontFoundException when the kit or the
        font does not exist (it does not return None, as previously stated).
        """
        kit_fonts = self.get_kit_fonts(kit_id)
        if len(kit_fonts) == 0:
            return False
        font = self.get_font_family(font)
        if font.get('family').get('id') in kit_fonts:
            return True
        return False

    def kit_add_font(self, kit_id, font, variations=None):
        """
        Adds a font to a given kit.
        Font is a string.
        Variations is an optional tuple. Add only valid variations. If
        variations is not given, adds all variations (default behavior).
        If font exists in kit, returns without doing anything.
        Else, adds font to kit, returns.
        """
        if self.kit_contains_font(kit_id, font):
            print 'Font already in kit'
            return
        new_font_family = {'id' : font}
        # add only the valid variations
        if variations is not None:
            font_avail_vars = self.get_font_variations(font)
            new_vars = []
            for var in variations:
                if var in font_avail_vars:
                    new_vars.append(var)
            if len(new_vars) > 0:
                new_font_family['variations'] = ','.join(new_vars)
        # kit is [name, domains, badge, families]; families is kit[3].
        kit = self.get_kit_vals(kit_id)
        kit[3].append(new_font_family)
        self.update_kit(kit_id, name=kit[0], domains=kit[1], badge=kit[2], families=kit[3])

    def kit_remove_font(self, kit_id, font):
        """
        Removes a font from a given kit.
        Font is a string.
        If font does not exist in kit, returns without doing anything.
        Else, removes the font from the kit, returns.
        """
        if not self.kit_contains_font(kit_id, font):
            print 'Font not in kit. Nothing to remove.'
            return
        kit = self.get_kit_vals(kit_id)
        font_data = self.get_font_family(font)
        font_id = font_data.get('family').get('id')
        # NOTE(review): popping from kit[3] while enumerating it skips the
        # following element; harmless only if a font id appears at most once.
        for idx, family in enumerate(kit[3]):
            if font_id == family.get('id'):
                kit[3].pop(idx)
        self.update_kit(kit_id, name=kit[0], domains=kit[1], badge=kit[2], families=kit[3])

    def get_kit_vals(self, kit_id):
        """
        Retrieves kit vals in a list of format: [name, domains, badge, families]
        (an earlier docstring listed badge/families in the wrong order).
        """
        kit = self.get_kit(kit_id).get('kit')
        families = []
        for f in kit.get('families'):
            family_dict = {
                'id' : f.get('id'),
                'variations' : ','.join(f.get('variations'))
            }
            families.append(family_dict)
        return [kit.get('name'), kit.get('domains'), kit.get('badge'), families]

    def get_kit_fonts(self, kit_id):
        """
        Retrieves a list of font ids in a given kit.
        Returns an empty list if no fonts in kit.
        Raises NoKitFoundException if the kit does not exist (it does not
        return None, as previously stated).
        """
        kit = self.get_kit(kit_id)
        return [family.get('id') for family in kit.get('kit').get('families')]

    def __build_url(self, method, kit_id=None, font=None):
        # Maps a logical method name onto the API endpoint and appends the
        # auth token as a query parameter.
        url = self.scheme + self.host
        if method == 'list' or method == 'create':
            url += 'kits'
        if method == 'get' or method == 'update' or method == 'delete':
            url += 'kits/{}'.format(kit_id)
        if method == 'publish':
            url += 'kits/{}/publish'.format(kit_id)
        if method == 'families':
            url += 'families/{}'.format(font)
        url += '?token={}'.format(self.api_token)
        return url

    def __get_param_type_list(self, param, param_name=None):
        # Normalizes a str-or-list parameter to a list; raises TypeError for
        # anything else. param_name only customizes the error message.
        if isinstance(param, list):
            return param
        elif isinstance(param, basestring):
            return [param]
        else:
            if param_name is not None:
                error_message = '"{}" parameter must be of type list'.format(param_name)
            else:
                error_message = 'The parameter must be of type list'
            raise TypeError(error_message)
|
package com.bones.si.jdbc.load
import java.sql.{Connection, ResultSet}
import com.bones.si.jdbc.{CrossReference, Deferrability, UpdateDeleteRule}
object LoadCrossReference extends DefaultLoader[CrossReference] {

  /** Queries cross-reference (foreign key) metadata for each hierarchy query. */
  override protected def loadFromQuery(databaseQuery: DatabaseQuery, con: Connection): Stream[ResultSet] = {
    val queryParams = Retrieve.databaseQueryToHierarchyQuery(databaseQuery).toStream
    queryParams.map(param =>
      con.getMetaData.getCrossReference(
        param._1.orNull,
        param._2.orNull,
        param._3.orNull,
        param._1.orNull,
        param._2.orNull,
        param._3.orNull)
    )
  }

  /** Maps a single getCrossReference row to a CrossReference value. */
  protected override def extractRow(rs: ResultSet): CrossReference = {
    val updateRuleId = rs.getInt("UPDATE_RULE")
    val updateRule = UpdateDeleteRule
      .findById(updateRuleId)
      .getOrElse(
        throw new MissingDataException(s"could not find UpdateDeleteRule by id: ${updateRuleId}"))
    val deleteRuleId = rs.getInt("DELETE_RULE")
    // Fix: the delete rule was previously resolved with updateRuleId, so the
    // DELETE_RULE column value was silently ignored.
    val deleteRule = UpdateDeleteRule
      .findById(deleteRuleId)
      .getOrElse(
        throw new MissingDataException(s"could not find UpdateDeleteRule by id: ${deleteRuleId}"))
    val deferrabilityId = rs.getInt("DEFERRABILITY")
    val deferrability = Deferrability
      .findById(deferrabilityId)
      .getOrElse(
        throw new MissingDataException(s"could not find Deferrability by id: ${deferrabilityId}"))
    CrossReference(
      Option(rs.getString("PKTABLE_CAT")),
      Option(rs.getString("PKTABLE_SCHEM")),
      req(rs.getString("PKTABLE_NAME")),
      req(rs.getString("PKCOLUMN_NAME")),
      Option(rs.getString("FKTABLE_CAT")),
      Option(rs.getString("FKTABLE_SCHEM")),
      req(rs.getString("FKTABLE_NAME")),
      req(rs.getString("FKCOLUMN_NAME")),
      req(rs.getShort("KEY_SEQ")),
      updateRule,
      deleteRule,
      Option(rs.getString("FK_NAME")),
      Option(rs.getString("PK_NAME")),
      deferrability
    )
  }
}
|
#!/bin/bash
# Wait for the user to press enter before continuing.
function pause(){
	read -p "Presiona enter cuando acabe ese pedo"
}

# ERR trap handler: report the failing line number and exit code, then abort.
handle_error() {
	echo "FAIL: line $1, exit code $2"
	exit 1
}
trap 'handle_error $LINENO $?' ERR

echo "Vamos a settear madres..."
echo ""
function nombre(){
read -e -p "¿Cómo se llama esta compu? " COMPUTAR_NAME
COMPUTAR_SUBNET_NAME=`echo $COMPUTAR_NAME | iconv -f utf8 -t us-ascii//TRANSLIT//IGNORE | tr -cd '[[:alnum:]._-]' | awk '{print tolower($0)}'`
read -e -p "¿Y, de cariño? (${COMPUTAR_SUBNET_NAME}.local) " SUBNET_NAME
if [ -n "$SUBNET_NAME" ]; then
COMPUTAR_SUBNET_NAME=$SUBNET_NAME
fi
echo "$COMPUTAR_NAME ($COMPUTAR_SUBNET_NAME)"
sudo systemsetup -setcomputername $COMPUTAR_NAME
sudo systemsetup -setlocalsubnetname $COMPUTAR_SUBNET_NAME
}
nombre

echo "Autorizando a Jimi y a Rob descagar tu sistema via SSH"
sudo systemsetup -setremotelogin on

echo "Configurando valores de Energía"
# on AC power (pmset values are minutes; 0 = never)
sudo /usr/bin/pmset -c sleep 0
sudo /usr/bin/pmset -c displaysleep 60
# on battery
sudo /usr/bin/pmset -b sleep 60
sudo /usr/bin/pmset -b displaysleep 15
# auto-restart after power loss
sudo systemsetup -setrestartfreeze on
sudo systemsetup -setrestartpowerfailure on
sudo systemsetup -setwaitforstartupafterpowerfailure 0

echo "Prendiendo Firewall"
# Enable the application firewall and stealth mode by writing its plist.
sudo /usr/bin/defaults write /Library/Preferences/com.apple.alf globalstate -int 1
sudo /usr/bin/defaults write /Library/Preferences/com.apple.alf stealthenabled -int 1

echo "Creando directorios en /usr/local"
sudo mkdir -v /usr/local
sudo chown -R rob:staff /usr/local
mkdir -v /usr/local/bin
mkdir -v /usr/local/var
mkdir -v /usr/local/gems
mkdir -v /usr/local/npm
# Interactive Sublime Text 3 setup: download, license, Package Control,
# package list, preferences and the `subl` symlink.
function sublime_text() {
	echo "Descargando SublimeText 3"
	open "http://www.sublimetext.com/3"
	echo "Copiando licencia de ST al clipboard"
	pbcopy < private/sublime.st-license
	open /Applications/SublimeText.app
	pause
	echo "Copiando instalación de Package Manager a clipboard"
	echo "import urllib.request,os,hashlib; h = '7183a2d3e96f11eeadd761d777e62404' + 'e330c659d4bb41d3bdf022e94cab3cd0'; pf = 'Package Control.sublime-package'; ipp = sublime.installed_packages_path(); urllib.request.install_opener( urllib.request.build_opener( urllib.request.ProxyHandler()) ); by = urllib.request.urlopen( 'http://sublime.wbond.net/' + pf.replace(' ', '%20')).read(); dh = hashlib.sha256(by).hexdigest(); print('Error validating download (got %s instead of %s), please try manual install' % (dh, h)) if dh != h else open(os.path.join( ipp, pf), 'wb' ).write(by)
" | pbcopy
	pause
	echo "Instala estos paquetes:"
	# Fix: this here-doc was fed to `echo`, which ignores stdin, so the
	# package list was never shown. `cat` actually prints it.
	cat <<PACKAGES
markdownediting
rsub
nginx
ini
gitgutter
sublimelinter
sublimelinter-php
sublimelinter-jshint
sublimelinter-coffee
sublimelinter-ruby
Theme - Soda
PACKAGES
	# Fix: "~" inside double quotes is not expanded by the shell, so the old
	# value pointed at a literal ./~ directory. Use $HOME instead.
	ST_USER_PREFS="$HOME/Library/Application Support/Packages/User/Preferences.sublime-settings"
	echo "Copiando preferencias de ST"
	cp -v config/Preferences.sublime-settings "$ST_USER_PREFS"
	echo "Symlinkeando subl"
	ln -s "/Applications/Sublime Text.app/Contents/SharedSupport/bin/subl" /usr/local/bin/subl
}
echo "Descargando iTerm"
curl -O -L http://iterm2.com/downloads/stable/iTerm2_v1_0_0.zip
unzip iTerm2_v1_0_0.zip -d /Applications/
rm iTerm2_v1_0_0.zip

echo "Descargando DejaVu Mono Sans"
curl -L http://sourceforge.net/projects/dejavu/files/dejavu/2.34/dejavu-fonts-ttf-2.34.tar.bz2/download >> dejavu.tar.bz2
# Fix: the archive is bzip2-compressed; "xfz" (gzip) fails on it — use -j.
tar xjf dejavu.tar.bz2
cp dejavu-fonts-ttf-2.34/ttf/*.ttf ~/Library/Fonts
rm -rf dejavu.tar.bz2
rm -rf dejavu-fonts-ttf-2.34
# Dotfiles
echo "Clonando dotfiles"
git clone git@github.com:/unRob/dotfiles.git .dotfiles

echo "Instalando Oh My ZSH"
git clone https://github.com/robbyrussell/oh-my-zsh.git ~/.dotfiles/oh-my-zsh
# curl -L https://raw.github.com/robbyrussell/oh-my-zsh/master/tools/install.sh | ZSH=~/.dotfiles/oh-my-zsh sh

# ZSH
echo "Cambiando el shell a ZSH"
chsh -s `which zsh`

echo "Copiando .zshrc"
ln -s .dotfiles/zshrc.dotfile .zshrc
# NOTE(review): this script runs under bash, but .zshrc and `zmv` below are
# zsh-specific — sourcing/calling them here presumably fails; confirm.
source .zshrc

echo "Copiando dotfiles"
ln -s .dotfiles/*.dotfile .
zmv '(*).dotfile' '.$1'

echo "Instalando settings de iTerm"
cp .dotfiles/com.googlecode.iterm2.plist ~/Library/Preferences/
# Force the preferences daemon to pick up the copied plist.
defaults read com.googlecode.iterm2
killall cfprefsd
open /Applications/iTerm.app
rm -rf ./private

echo "Listo, ahora corre ./config.sh en iTerm"
read -p "Presiona enter para cerrar este pedo"
osascript -e 'tell application "Terminal" to quit'
|
// Build an array of `size` random integers, each uniform over [0, size).
const createArray = (size) =>
  Array.from({ length: size }, () => Math.floor(Math.random() * size));
// Test input: 8M random ints in [0, 8000000).
let arr = createArray(8000000);
//let arr = [8, 17, 125];
//console.log(arr);
// Module-level digit buckets, one per decimal digit 0-9.
let bucket = [];
for (let i = 0; i < 10; i++) {
    bucket[i] = []
}
/**
 * LSD radix sort over non-negative integers.
 *
 * @param {number[]} arr values to sort (not mutated)
 * @param {number} n number of decimal digits to process; must cover the
 *                   largest value for the result to be fully sorted
 * @returns {number[]} new sorted array
 *
 * Fix: buckets are now local, so the function no longer reads or corrupts
 * the module-level `bucket` array between calls.
 */
const radix = (arr, n) => {
  const buckets = Array.from({ length: 10 }, () => []);
  let out = arr;
  for (let mod = 0; mod < n; mod++) {
    // Distribute by the current decimal digit...
    for (let i = 0; i < out.length; i++) {
      const digit = Math.floor(out[i] / Math.pow(10, mod)) % 10;
      buckets[digit].push(out[i]);
    }
    // ...then collect the buckets back in order (stable).
    out = [];
    for (let d = 0; d < 10; d++) {
      out = out.concat(buckets[d]);
      buckets[d] = [];
    }
  }
  return out;
};
// Time the sort over the 8M-element array; n=7 digits covers values < 10^7.
console.time('radix');
const res = radix(arr, 7);
console.log(res);
console.timeEnd('radix');
|
import keras

# Minimal fully-connected classifier: 784 inputs (e.g. a flattened 28x28
# image) -> 128 ReLU units -> 10-way softmax.

# Input layer
input_layer = keras.layers.Input(shape=(784,))

# Hidden layer
hidden_layer = keras.layers.Dense(128,
    activation='relu')(input_layer)

# Output layer
output_layer = keras.layers.Dense(10,
    activation='softmax')(hidden_layer)

# Model
model = keras.Model(inputs=input_layer, outputs=output_layer)

# Compile model; sparse_categorical_crossentropy expects integer class
# labels rather than one-hot vectors.
model.compile(optimizer='adam',
    loss='sparse_categorical_crossentropy',
    metrics=['accuracy'])
|
<reponame>orthoticholdingsinc/comments
$(document).ready(function(){
    // Initialize the "story" carousel (slick.js): two centered slides with
    // FontAwesome caret arrows; drops to a single slide below 768px.
    $('.story-slider').slick({
        infinite: true,
        arrows: true,
        slidesToShow: 2,
        centerMode: true,
        centerPadding: '0px',
        prevArrow: '<i class="fa fa-caret-left slider-prev slick-prev"></i>',
        nextArrow: '<i class="fa fa-caret-right slider-next slick-next"></i>',
        responsive: [
            {
                breakpoint: 767,
                settings: {
                    slidesToShow: 1,
                }
            },
        ]
    });
});
|
<reponame>littlemole/MTL
#include "MTL/ole/cp.h"
|
#!/usr/bin/env bash
#
# vim:ft=sh:tabstop=4:shiftwidth=4:softtabstop=4:noexpandtab
#
# Copyright (c) 2010-2017 Wael Nasreddine <wael.nasreddine@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
# USA.
#
set -euo pipefail

# NOTE: @out_dir@, @i3-msg_bin@ and @jq_bin@ are build-time substitution
# placeholders (e.g. Nix substituteAll), replaced with real paths on install.
source @out_dir@/lib/list-workspaces.sh

if [[ -z "${*}" ]]; then
	# print the name of the current workspace
	@i3-msg_bin@ -t get_workspaces | @jq_bin@ -r '.[] | if .focused == true then .name else empty end'
else
	# NOTE(review): `rename workspace to` renames the CURRENT workspace
	# rather than switching to the named one — confirm this is the intent.
	@i3-msg_bin@ rename workspace to "${@}" >/dev/null
fi
|
<reponame>bedeljani/Backend_Test
const dotenv = require('dotenv')
dotenv.config()
module.exports = {
development: {
client: 'mysql',
connection: {
host : process.env.DB_HOST,
user : process.env.DB_USER,
password : <PASSWORD>,
database : process.env.DB_NAME
},
debug: true
},
staging: {
client: '',
connection: {
database: '',
user: '',
password: ''
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: 'knex_migrations'
}
},
production: {
client: '',
connection: {
database: '',
user: '',
password: ''
},
pool: {
min: 2,
max: 10
},
migrations: {
tableName: 'knex_migrations'
}
}
};
|
// file : xsde/cxx/qname.hxx
// author : <NAME> <<EMAIL>>
// copyright : Copyright (c) 2005-2011 Code Synthesis Tools CC
// license : GNU GPL v2 + exceptions; see accompanying LICENSE file
#ifndef XSDE_CXX_QNAME_HXX
#define XSDE_CXX_QNAME_HXX
#include <xsde/cxx/config.hxx>
namespace xsde
{
namespace cxx
{
// qname models an XML qualified name as an optional prefix plus a local
// name, each held as a heap-allocated C string (prefix_/name_ below).
class qname
{
public:
#ifndef XSDE_EXCEPTIONS
// Error codes used instead of exceptions when XSDE_EXCEPTIONS is off.
enum error
{
error_none,
error_no_memory
};
#endif
~qname ();
// The default c-tor creates an uninitialized qname. Use
// modifiers to initialize it.
//
qname ();
// NOTE(review): the char* c-tors and setters presumably assume ownership
// of the passed strings (the *_copy variants exist for copying) -- confirm
// against qname.ixx.
explicit
qname (char* name);
qname (char* prefix, char* name);
// Exchange contents with another qname.
void
swap (qname&);
private:
// Copying is disabled; use _clone()/_copy() below instead.
qname (const qname&);
qname& operator= (const qname&);
public:
// Prefix access: setter takes a char*, prefix_copy() duplicates the
// argument, prefix_detach() releases the stored string to the caller.
char*
prefix ();
const char*
prefix () const;
void
prefix (char*);
#ifndef XSDE_EXCEPTIONS
error
#else
void
#endif
prefix_copy (const char*);
char*
prefix_detach ();
public:
// Name access, mirroring the prefix interface above.
char*
name ();
const char*
name () const;
void
name (char*);
#ifndef XSDE_EXCEPTIONS
error
#else
void
#endif
name_copy (const char*);
char*
name_detach ();
// Deep-copy helpers; without exceptions, _copy() reports failure via
// its bool return value.
qname*
_clone () const;
#ifndef XSDE_EXCEPTIONS
bool
#else
void
#endif
_copy (qname&) const;
private:
char* prefix_;
char* name_;
};
// Equality compares both prefix and name.
bool
operator== (const qname&, const qname&);
bool
operator!= (const qname&, const qname&);
}
}
#include <xsde/cxx/qname.ixx>
#endif // XSDE_CXX_QNAME_HXX
|
// Minimal AWS S3 bootstrap: pins the SDK region and constructs a client.
// The commented blocks below are kept as usage examples for reading and
// setting a bucket's ACL.
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-2'});
// Declared with 'var': the original bare assignment created an implicit
// global (a ReferenceError under 'use strict').
var s3 = new AWS.S3({apiVersion: '2006-03-01'});
// var bucketParams = {Bucket: process.argv[2]};
// s3.getBucketAcl(bucketParams, (err, data) => {
//     if (err) {
//         console.log('Error', err);
//     } else {
//         console.log('Success', data.Grants);
//     }
// });
// var bucketParams = {
//     Bucket: process.argv[2],
//     ACL: 'public-read-write'
// }
// s3.putBucketAcl(bucketParams, (err, data) => {
//     if (err) {
//         console.log('Error', err);
//     } else {
//         console.log('Success', data);
//     }
// })
|
import random
def dice_roll(sides):
    """Simulate one roll of a fair die with `sides` faces.

    Returns a uniformly distributed integer between 1 and `sides`,
    inclusive, drawn from the module-level `random` generator.
    """
    return random.randint(1, sides)
|
<gh_stars>1-10
package ext
import (
context "golang.org/x/net/context"
mgo "gopkg.in/mgo.v2"
"github.com/go-redis/redis"
"github.com/gocql/gocql"
"github.com/rs/zerolog"
"google.golang.org/grpc"
)
// CassandraUnaryServerInterceptor creates a per-request Cassandra session,
// stores it in the request context under `name`, and closes it when the
// handler returns. On connection failure the request proceeds without a
// session and the error is logged.
func CassandraUnaryServerInterceptor(name string, cfg *gocql.ClusterConfig) grpc.UnaryServerInterceptor {
	return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
		s, err := gocql.NewSession(*cfg)
		if err != nil {
			// No session was created, so there is nothing to close.
			zerolog.Ctx(ctx).Error().Msg(err.Error())
			return handler(ctx, req)
		}
		// Deferred only after the error check: the original deferred
		// s.Close() before checking err, risking a nil-pointer panic
		// when session creation failed.
		defer s.Close()
		ctx = WithContextAny(ctx, name, s)
		return handler(ctx, req)
	}
}
// MongoDBUnaryServerInterceptor creates a per-request MongoDB session,
// stores it in the request context under `name`, and closes it when the
// handler returns. On dial failure the request proceeds without a session
// and the error is logged.
func MongoDBUnaryServerInterceptor(name string, cfg *mgo.DialInfo) grpc.UnaryServerInterceptor {
	return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
		s, err := mgo.DialWithInfo(cfg)
		if err != nil {
			// Dial failed, so s is nil and must not be closed.
			zerolog.Ctx(ctx).Error().Msg(err.Error())
			return handler(ctx, req)
		}
		// Deferred only after the error check: the original deferred
		// s.Close() before checking err, risking a nil-pointer panic
		// on a failed dial.
		defer s.Close()
		ctx = WithContextAny(ctx, name, s)
		return handler(ctx, req)
	}
}
// RedisUnaryServerInterceptor wrap redis client to grpc
// Note: Caller is responsible to close the redis connection when its done.
// It is rare to Close a Client, as the Client is meant to be
// long-lived and shared between many goroutines.
func RedisUnaryServerInterceptor(name string, clt *redis.Client) grpc.UnaryServerInterceptor {
	return func(ctx context.Context, req interface{}, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (interface{}, error) {
		// Unlike the Cassandra/Mongo interceptors, the shared client is
		// injected as-is on every request; no per-request setup/teardown.
		ctx = WithContextAny(ctx, name, clt)
		return handler(ctx, req)
	}
}
|
#!/bin/bash
# sudo apt install liblzma-dev
# Fetch the pretrained GLIDE checkpoints once (skipped if models/ exists),
# then install the glide-text2im package from GitHub.
if [ ! -d "models" ]
then
    mkdir -p models
    # GLIDE models, downloaded upfront for docker caching. -C - resumes
    # partial downloads.
    for checkpoint in base upsample clip_image_enc clip_text_enc
    do
        curl -C - "https://openaipublic.blob.core.windows.net/diffusion/dec-2021/${checkpoint}.pt" --output "models/${checkpoint}.pt"
    done
fi
pip install git+https://github.com/openai/glide-text2im
|
import re
from collections import Counter
def count_words_in_file(file_path, top_n):
    """Print and return the ``top_n`` most frequent non-stop words in a file.

    Args:
        file_path: Path of the text file to analyze.
        top_n: How many of the most frequent words to report.

    Returns:
        List of ``(word, count)`` tuples, most frequent first (ties keep
        first-seen order, per ``Counter.most_common``).
    """
    with open(file_path, 'r') as file:
        text = file.read().lower()  # case-insensitive counting
    words = re.findall(r'\b\w+\b', text)  # split on word boundaries
    # Common English stop words excluded from the ranking.
    stop_words = {'the', 'is', 'at', 'on', 'in', 'and', 'for', 'of', 'a', 'an'}
    filtered_words = [word for word in words if word not in stop_words]
    word_counts = Counter(filtered_words)
    top_words = word_counts.most_common(top_n)
    for word, count in top_words:
        print(f"{word}: {count}")
    # Generalization: also return the result (the original only printed it),
    # so callers can consume the counts programmatically.
    return top_words
# Example usage
# Reports the 3 most frequent non-stop words; requires a license.txt file
# in the current working directory.
count_words_in_file('license.txt', 3)
|
<reponame>zoho/Zoho-CRM-Field-Buddy
package com.zoho.crm_field_buddy;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.MenuItem;
import android.view.View;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import com.zoho.crm_field_buddy.listActivity.ListViewAdapter;
import com.zoho.crm.library.crud.ZCRMRecord;
import com.zoho.crm.sdk.android.zcrmandroid.common.SDKCommonUtil;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* Created by sruthi-4404 on 08/09/16.
*/
// Activity that creates a "Surveys" job-card record in Zoho CRM: the user
// enters details, attaches at least one image (camera/gallery/file), and
// saving uploads the record plus attachments and marks the related
// Appointment as Completed.
public class JobCardsCreation extends AppCompatActivity
{
    // The in-progress "Surveys" record being built by this form.
    private ZCRMRecord zcrmRecord;
    // Request codes for the three attachment sources in onActivityResult.
    private static final int PICK_FROM_FILE=3,PICK_FROM_CAMERA=1,PICK_FROM_GALLERY=2;
    // Local file paths of images chosen so far. (Field name is a typo for
    // "attachments", kept as-is.)
    private List<String> attachemnts = new ArrayList<>();
    // Path of the most recently picked/captured image.
    private Uri selectedPath;
    // Save button; disabled until at least one attachment is added.
    Button save;
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.survey_create);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setTitle(ListViewAdapter.nameClicked);
        loadForm();
    }
    // Wires up the form: save button triggers the async upload, the details
    // field mirrors its text into the record, and attachment picking is set up.
    public void loadForm() {
        zcrmRecord = new ZCRMRecord("Surveys");
        save = (Button) findViewById(R.id.button3);
        // Disabled until an attachment exists (enabled in onActivityResult).
        save.setEnabled(false);
        save.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                //save.setText("SAVING..."); //No I18N
                APImodeRunner runner = new APImodeRunner();
                runner.execute();
            }
        });
        EditText details = (EditText) findViewById(R.id.editText3);
        details.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }
            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }
            @Override
            public void afterTextChanged(Editable s) {
                // Keep the CRM record's Details field in sync as the user types.
                zcrmRecord.setFieldValue("Details", s.toString());
            }
        });
        addAttachment();
    }
    // Shows a chooser dialog (camera / gallery / file manager) when the
    // attachment button is pressed; results arrive in onActivityResult.
    private void addAttachment() {
        String[] items = new String[]{"From camera", "From Gallery", "From File Manager"};
        ArrayAdapter<String> arrayAdapter = new ArrayAdapter<String>(this, android.R.layout.select_dialog_item, items);
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setTitle("Upload from");
        builder.setAdapter(arrayAdapter, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                if (which == 0) {
                    Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
                    startActivityForResult(intent, PICK_FROM_CAMERA);
                    dialog.dismiss();
                } else if (which == 1) {
                    Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
                    intent.setType("image/*");
                    startActivityForResult(Intent.createChooser(intent, "Complete action using"), PICK_FROM_GALLERY);
                } else {
                    Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
                    intent.setType("file/*");
                    startActivityForResult(Intent.createChooser(intent, "Complete action using"), PICK_FROM_FILE);
                }
            }
        });
        final AlertDialog dialog = builder.create();
        Button addAttachment = (Button) findViewById(R.id.button5);
        addAttachment.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                dialog.show();
            }
        });
    }
    // Receives the picked/captured image. Camera results arrive as a thumbnail
    // Bitmap which is persisted to external storage; file/gallery picks are
    // resolved to a real filesystem path via ImageFilePath.
    public void onActivityResult(int reqCode, int resCode, Intent data)
    {
        if (resCode == RESULT_OK) {
            if (reqCode == PICK_FROM_FILE || reqCode == PICK_FROM_GALLERY) {
                if (null == data) return;
                selectedPath = data.getData();
                selectedPath = Uri.parse(ImageFilePath.getPath(getApplicationContext(), selectedPath));
                Log.i("Image File Path", "" + selectedPath.getPath());
            }else if(reqCode == PICK_FROM_CAMERA)
            {
                // NOTE(review): "data" is only the thumbnail-size capture;
                // full-resolution capture would need EXTRA_OUTPUT -- confirm
                // this is intentional.
                Bitmap thumbnail = (Bitmap) data.getExtras().get("data");
                ByteArrayOutputStream bytes = new ByteArrayOutputStream();
                thumbnail.compress(Bitmap.CompressFormat.JPEG, 90, bytes);
                File destination = new File(Environment.getExternalStorageDirectory(),"image" + String.valueOf(System.currentTimeMillis()) + ".jpg");//
                FileOutputStream fo;
                try {
                    fo = new FileOutputStream(destination);
                    fo.write(bytes.toByteArray());
                    fo.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                selectedPath = Uri.parse(destination.getPath());//Uri.parse("/storage/emulated/0/"+destination.getPath());
            }
            attachemnts.add(selectedPath.getPath());
            /*TextView attachment = (TextView) findViewById(R.id.textView23);
            String text = String.valueOf(attachment.getText());
            attachment.setText(text + "\n" + selectedPath.getPath());*/
            // Preview the chosen image and allow saving now that an
            // attachment exists.
            ImageView imageView = (ImageView) findViewById(R.id.imageView3);
            imageView.setImageURI(selectedPath);
            save.setEnabled(true);
        }
    }
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                // API 5+ solution
                onBackPressed();
                return true;
            default:
                return super.onOptionsItemSelected(item);
        }
    }
    public void onBackPressed()
    {
        finish();
    }
    // Background task that creates the Surveys record, uploads all queued
    // attachments, and marks the linked Appointment as Completed, with a
    // progress dialog shown while it runs.
    class APImodeRunner extends AsyncTask<String, String, String>
    {
        private String resp;
        private ProgressDialog dialog;
        @Override
        protected void onPreExecute()
        {
            super.onPreExecute();
            dialog = ProgressDialog.show(JobCardsCreation.this, "","Saving...", true, false); //No I18N
        }
        @Override
        protected String doInBackground(String... params)
        {
            try
            {
                if(attachemnts.size() > 0) {
                    zcrmRecord.setFieldValue("Name",ListViewAdapter.nameClicked); //No I18N
                    zcrmRecord.setFieldValue("Appointment", ListViewAdapter.idClicked);
                    zcrmRecord.setFieldValue("Visit_Time", SDKCommonUtil.millisecToISO(System.currentTimeMillis(),null)); //No I18N
                    zcrmRecord.create();
                    for (String path : attachemnts)
                    {
                        zcrmRecord.uploadAttachment(path);
                    }
                    // After a successful save, close out the appointment.
                    ZCRMRecord appointment = ZCRMRecord.getInstance("Appointments", ListViewAdapter.idClicked);
                    appointment.setFieldValue("Status","Completed");
                    appointment.update();
                }else {
                    // No attachments: surface an error toast on the UI thread.
                    runOnUiThread(new Runnable() {
                        public void run() {
                            Toast.makeText(getApplicationContext(),"Error! try uploading with Attachments!", Toast.LENGTH_LONG).show();
                        }
                    });
                }
                resp = "Success"; //no I18N
            } catch (Exception e)
            {
                dialog.dismiss();
                e.printStackTrace();
                resp = e.getMessage();
            }
            return resp;
        }
        @Override
        protected void onPostExecute(String result)
        {
            dialog.dismiss();
            // Return to the map/list screen regardless of outcome.
            Intent loadlist = new Intent(getApplicationContext(), MapsActivity.class);
            startActivity(loadlist);
        }
    }
}
|
import java.net.UnknownHostException;
// A router port: a thin wrapper that delegates all addressing state
// (IP/prefix, MAC, name) to an owned TFNetworkAddress and remembers which
// TFRouter it belongs to.
public class TFPort implements ITFNetworkAddress{
    // Port index on the owning router; set externally (used in toString).
    public int number;
    private TFNetworkAddress address;
    private TFRouter owner;
    // Creates a port for `owner`, naming its address after the router.
    public TFPort(TFRouter owner) {
        this.address = new TFNetworkAddress();
        this.setName(owner.name);
        this.owner = owner;
    }
    // All accessors below delegate straight to the wrapped TFNetworkAddress.
    public void setIPrefix(String ipprefix) throws UnknownHostException {
        address.setIPrefix(ipprefix);
    }
    public String getIP() {
        return address.getIP();
    }
    public String getIPPrefix() {
        return address.getIPPrefix();
    }
    public int getNetCIDR(){
        return address.getNetCIDR();
    }
    public String getNetwork() {
        return address.getNetwork();
    }
    public String getMAC() {
        return address.getMAC();
    }
    public void setMAC(String MAC) {
        address.setMAC(MAC);
    }
    public String getName() {
        return address.getName();
    }
    public void setName(String name) {
        address.setName(name);
    }
    public boolean isSameNetwork(String otherIP,int otherCIDR){
        return address.isSameNetwork(otherIP,otherCIDR);
    }
    // Human-readable summary, e.g. "[0]MAC: ... IP/Prefix:...".
    public String toString() {
        return "[" + number + "]MAC: " + getMAC() + " IP/Prefix:" + getIPPrefix();
    }
    public TFNetworkElement getOwner() {
        return owner;
    }
}
|
# Run the test driver against every *_test.py file one directory level deep.
python test.py */*_test.py
|
<filename>src/main/java/com/pipemaze/eggdropsoap/mod_pndc/Main.java
package com.pipemaze.eggdropsoap.mod_pndc;
import net.minecraftforge.common.MinecraftForge;
import cpw.mods.fml.common.Mod;
import cpw.mods.fml.common.Mod.EventHandler;
import cpw.mods.fml.common.event.FMLInitializationEvent;
// Forge mod entry point for "Peaceful Nights, Dangerous Caves".
@Mod(modid = Main.MODID, version = Main.VERSION, name = Main.NAME)
public class Main {
    // Mod metadata consumed by the @Mod annotation above.
    public static final String MODID = "mod_pndc";
    public static final String VERSION = "0.1";
    public static final String NAME = "Peaceful Nights, Dangerous Caves";
    // FML init hook: registers the spawn-check listener on the Forge event
    // bus so MyCheckSpawn can intercept mob spawn events.
    @EventHandler
    public void load(FMLInitializationEvent event)
    {
        MinecraftForge.EVENT_BUS.register(new MyCheckSpawn());
    }
}
|
# Download today's Cancer Gene Census TSV and normalize all line endings
# (CRLF/CR/LF) to LF, writing to a date-stamped file.
DATE=$(date "+%Y%m%d")
curl -s http://cancer.sanger.ac.uk/cancergenome/assets/cancer_gene_census.tsv \
  | perl -pe 's/\r\n|\n|\r/\n/g' \
  > "cancer_gene_census.${DATE}.tsv"
|
class MinHeapPriorityQueue:
    """Binary min-heap priority queue: the lowest priority value pops first.

    Items are stored as ``(value, priority)`` tuples; ordering compares only
    the priority. The original ``_sift_up``/``_sift_down`` had no bodies
    (a syntax error), so the heap invariant was never maintained -- they are
    implemented here.
    """

    def __init__(self):
        # Array-backed binary heap of (value, priority) tuples.
        self.heap = []

    def push(self, value, priority):
        """Insert ``value`` with the given ``priority`` (lower pops sooner)."""
        self.heap.append((value, priority))
        self._sift_up(len(self.heap) - 1)

    def pop(self):
        """Remove and return the value with the smallest priority.

        Returns ``None`` when the queue is empty.
        """
        if not self.heap:
            return None
        if len(self.heap) == 1:
            return self.heap.pop()[0]
        top = self.heap[0]
        # Move the last leaf to the root, then restore the heap invariant.
        self.heap[0] = self.heap.pop()
        self._sift_down(0)
        return top[0]

    def empty(self):
        """Return True when the queue holds no items."""
        return len(self.heap) == 0

    def _sift_up(self, index):
        # Bubble the item at `index` toward the root while it beats its parent.
        while index > 0:
            parent = (index - 1) // 2
            if self.heap[index][1] < self.heap[parent][1]:
                self.heap[index], self.heap[parent] = self.heap[parent], self.heap[index]
                index = parent
            else:
                break

    def _sift_down(self, index):
        # Push the item at `index` toward the leaves while a child beats it.
        size = len(self.heap)
        while True:
            smallest = index
            left = 2 * index + 1
            right = 2 * index + 2
            if left < size and self.heap[left][1] < self.heap[smallest][1]:
                smallest = left
            if right < size and self.heap[right][1] < self.heap[smallest][1]:
                smallest = right
            if smallest == index:
                break
            self.heap[index], self.heap[smallest] = self.heap[smallest], self.heap[index]
            index = smallest
|
<reponame>Sasha7b9Work/S8-53M2<filename>sources/common/_VS_GUI/USBD/usbd_ctlreq.cpp
#define _STL_COMPILER_PREPROCESSOR 0
#include "defines.h"
#include <usbd_ctlreq.h>
// Stub for the GUI/simulator build: the real USB descriptor-string
// conversion is not needed here, so all parameters are deliberately
// ignored (UNUSED suppresses unused-parameter warnings).
void USBD_GetString(uint8_t *desc, uint8_t *unicode, uint16_t *len)
{
    UNUSED(desc);
    UNUSED(unicode);
    UNUSED(len);
}
|
// Spacing scale design tokens, from none (0) up to xl (3rem).
// All values are rem strings except `none`, which is the bare number 0.
export default {
  none: 0,
  xxs: '.25rem',
  xs: '.5rem',
  s: '1rem',
  m: '1.5rem',
  l: '2rem',
  xl: '3rem',
};
|
'use strict'
import * as assert from 'assert';
// E2E smoke test (WebdriverIO sync API): searching for a common term should
// surface a results section containing at least two matching products.
describe('Item search', function(){
    it('should show results in case of multiple items matches', function(){
        browser.url('/');
        const searchItem = 'Duck';
        // Type the term into the search box and submit with Enter.
        $('[type=search]').setValue(searchItem);
        browser.keys('Enter');
        const searchResultsSection = $('#box-search-results');
        assert(searchResultsSection.isDisplayed(), `Search results section cannot be found`);
        // Each result is rendered as a direct child div of the .products container.
        const foundProducts = searchResultsSection.$$('div.products > div');
        assert(foundProducts.length >= 2, `Search results section contains less than 2 items`);
        // NOTE(review): fixed 2s pause looks like a debugging leftover -- confirm it can go.
        browser.pause(2000);
    })
})
|
#!/bin/bash
# Provisions a local Couchbase 6.5.0 container for tests: starts the image,
# waits for the REST API, initializes the node/cluster/admin account, creates
# the "testbucket" and "casbucket" buckets plus indexes, and seeds one
# document. Order matters: each step depends on the previous one.
# while sleep 9m; do echo -e '\n=====[ Gradle build is still running ]====='; done &
echo "Running Couchbase docker image..."
docker run -d --name couchbase -p 8091-8094:8091-8094 -p 11210:11210 couchbase/server:6.5.0
echo "Waiting for Couchbase server to come online..."
sleep 20
# Poll the admin port until it answers; prints a dot per attempt.
until $(curl --output /dev/null --silent --head --fail http://localhost:8091); do
    printf '.'
    sleep 1
done
echo -e "\n*************************************************************"
echo -e "Setting default memory quota for the pool"
echo -e "*************************************************************"
curl http://localhost:8091/pools/default -d memoryQuota=512
echo -e "\n*************************************************************"
echo -e "Initialize node..."
echo -e "*************************************************************"
curl http://localhost:8091/nodes/self/controller/settings -d path=/opt/couchbase/var/lib/couchbase/data -d \
index_path=/opt/couchbase/var/lib/couchbase/data
echo -e "*************************************************************"
echo -e "Rename node..."
echo -e "*************************************************************"
curl http://localhost:8091/node/controller/rename -d hostname=127.0.0.1
echo -e "\n*************************************************************"
echo -e "Setting cluster services..."
echo -e "*************************************************************"
# Enables data (kv), query (n1ql) and index services on the node.
curl http://localhost:8091/node/controller/setupServices -d 'services=kv%2Cn1ql%2Cindex'
echo -e "\n*************************************************************"
echo -e "Setup Administrator username and password..."
echo -e "*************************************************************"
curl http://localhost:8091/settings/web -d password=password -d username=admin -d port=8091 -d roles=full_admin
echo -e "\n*************************************************************"
echo -e "Creating Couchbase buckets..."
echo -e "*************************************************************"
curl -u 'admin:password' -d 'name=testbucket' -d 'bucketType=couchbase' -d 'ramQuotaMB=220' -d 'authType=sasl' -d \
'saslPassword=password' http://localhost:8091/pools/default/buckets
# RBAC user granting full access to testbucket.
curl -u 'admin:password' -X PUT --data "roles=bucket_full_access[testbucket]&password=password" \
-H "Content-Type: application/x-www-form-urlencoded" \
http://localhost:8091/settings/rbac/users/local/testbucket
curl -u 'admin:password' -d name=casbucket -d bucketType=couchbase -d 'ramQuotaMB=120' -d authType='none' http://localhost:8091/pools/default/buckets
echo -e "\n*************************************************************"
echo -e "Loading Couchbase buckets..."
echo -e "*************************************************************"
curl -u 'admin:password' http://localhost:8091/pools/default/buckets
echo -e "\n*************************************************************"
echo -e "Creating index settings..."
echo -e "*************************************************************"
curl -u 'admin:password' 'http://localhost:8091/settings/indexes' -d 'indexerThreads=0' -d 'logLevel=info' -d \
'maxRollbackPoints=5' -d 'memorySnapshotInterval=200' -d 'stableSnapshotInterval=5000' -d 'storageMode=memory_optimized'
sleep 2
echo -e "\n*************************************************************"
echo -e "Creating index..."
echo -e "*************************************************************"
curl -u 'admin:password' http://localhost:8093/query/service -d 'statement=CREATE INDEX accounts_idx ON testbucket(username)' \
-d 'namespace=default'
sleep 2
echo -e "\n*************************************************************"
echo -e "Creating primary index..."
echo -e "*************************************************************"
curl -u 'admin:password' http://localhost:8093/query/service -d \
'statement=CREATE PRIMARY INDEX `primary-idx` ON `testbucket` USING GSI;' \
-d 'namespace=default'
sleep 2
echo -e "\n*************************************************************"
echo -e "Creating document/accounts..."
echo -e "*************************************************************"
curl -u 'admin:password' http://localhost:8093/query/service \
-d 'statement=INSERT INTO `testbucket` (KEY,VALUE) VALUES("accounts", {"username": "casuser", "psw": "Mellon", "firstname": "CAS", "lastname":"User"})'
# Final sanity check: fail the script if the container is not running.
docker ps | grep "couchbase"
retVal=$?
if [ $retVal == 0 ]; then
    echo "Couchbase docker image is running."
else
    echo "Couchbase docker image failed to start."
    exit $retVal
fi
|
// Displays message in console when this file is loaded
// (DOMContentLoaded fires once the initial HTML document has been parsed).
document.addEventListener('DOMContentLoaded',
    () => {
        console.log('api.js loaded')
    });
// Api encapsulates all HTTP access to the backend: fetching projects and
// families. Each fetch* method returns a Promise whose response body has
// been parsed by the global parseJSON helper (defined in index.js).
class Api {
    // static baseUrl = 'http://localhost:3000'
    // Heroku deployment base URL. Fixed: no trailing slash -- the path
    // constants below prepend '/' themselves, and the old trailing slash
    // produced malformed double-slash URLs like 'https://host//projects'.
    static baseUrl = 'https://my-fixit-api.herokuapp.com'
    static PROJECTS_URL = `${Api.baseUrl}/projects`
    static FAMILIES_URL = `${Api.baseUrl}/families`
    // GET /projects -- resolves to the parsed JSON list of projects.
    // (Now uses PROJECTS_URL instead of rebuilding the URL inline, so the
    // endpoint is defined in exactly one place.)
    static fetchProjects() {
        return fetch(Api.PROJECTS_URL)
            .then(parseJSON)
    }
    // GET /families -- resolves to the parsed JSON list of families.
    static fetchFamilies(){
        return fetch(Api.FAMILIES_URL)
            .then(parseJSON)
    }
}
|
import java.util.Scanner;
public class HelloWorld {
    /**
     * Entry point: prompts for the user's name on standard input and prints
     * a personalized greeting. (The original comment claimed it printed
     * "Hello, World"; it actually greets the entered name.)
     *
     * @param args command-line arguments (unused)
     */
    public static void main(String[] args) {
        // try-with-resources closes the Scanner (and the underlying
        // System.in stream) on exit; the original leaked the Scanner.
        try (Scanner scanner = new Scanner(System.in)) {
            System.out.println("Please enter your name:");
            String name = scanner.nextLine();
            System.out.println("Hello, " + name + "!");
        }
    }
}
|
#!/usr/bin/env bash
BUILD_WEB="${BUILD_WEB:-true}"
if [ "$BUILD_WEB" != "true" ]; then
exit 0
fi
# Only run one at a time
# [ "${FLOCKER}" != "$0" ] && exec env FLOCKER="$0" flock -e "$0" "$0" "$@" || :
# Exit when any command fails
set -o errexit
# Exit when an undeclared variable is used
set -o nounset
# Exit when a piped command returns a non-zero exit code
set -o pipefail
PROJECT="${1:-skeleton}"
WATCH="${WATCH:-false}"
source ./env.sh 2>/dev/null
PROJECT_DIR="`realpath \"${PROJECT}\"`"
TP_DIR="`realpath third_party`"
NODE_BIN_DIR="${TP_DIR}/nodejs/bin"
WEB="${PROJECT_DIR}/web"
STATIC="${PROJECT_DIR}/static"
YARN="${WEB}/node_modules/yarn/bin/yarn"
if test -d "$WEB" && [ "$BUILD_WEB" == "true" ]; then
cd "$WEB"
export PATH="$NODE_BIN_DIR:$PATH"
if ! test -s "$YARN"; then
npm install yarn
rm package-lock.json
fi
npx yarn install
if [ "$WATCH" == "true" ]; then
npx yarn build:watch
else
npx yarn build
fi
cd ../..
fi
|
<reponame>bygui86/go-metrics
package rest
import "github.com/bygui86/go-metrics/monitoring"
const (
	// Custom metrics
	// .. general
	metricsNamespace = "echoserver"
	metricsSubsystem = "rest"
	// .. opsProcessed
	opsProcessedKey = "opsProcessed"
	opsProcessedName = "processed_ops_total"
	opsProcessedHelp = "Total number of processed operations"
)
// addCustomMetrics registers the REST-layer counters on the given registry;
// currently just the processed-operations counter under the
// echoserver/rest namespace.
func addCustomMetrics(customMetrics monitoring.ICustomMetrics) {
	customMetrics.AddCounter(metricsNamespace, metricsSubsystem, opsProcessedName, opsProcessedHelp, opsProcessedKey)
}
|
<filename>app/controllers/info_window_controller.rb
# http://oldwiki.rubyonrails.org/rails/pages/HowtoWorkWithSessions
class InfoWindowController < ApplicationController
def index
session[:thing] = Thing.find_by_id(params[:thing_id])
session[:thing] = Thing.find_by_id(session[:id]) if session[:thing].nil?
if session[:thing].adopted?
if user_signed_in? && current_user.id == session[:thing].user_id
if session[:conflict] == true
render('users/conflict')
else
render('users/thank_you')
end
else
render('users/profile')
end
else
if user_signed_in?
render('things/adopt')
else
render('users/sign_in')
end
end
end
end
|
<gh_stars>0
# frozen_string_literal: true
# Shared examples for any importer expected to seed the base work item types.
# Include with `it_behaves_like 'work item base types importer'` and define
# `subject` as the import call.
RSpec.shared_examples 'work item base types importer' do
  it 'creates all base work item types' do
    # Running the importer once should create exactly one record per base type.
    expect { subject }.to change(WorkItem::Type, :count).from(0).to(WorkItem::Type::BASE_TYPES.count)
  end
end
|
<filename>framework/common/src/org/ofbiz/common/preferences/PreferenceServices.java
/*******************************************************************************
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*******************************************************************************/
/* This file has been modified by Open Source Strategies, Inc. */
package org.ofbiz.common.preferences;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.ofbiz.base.util.Debug;
import org.ofbiz.base.util.GeneralException;
import org.ofbiz.base.util.ObjectType;
import static org.ofbiz.base.util.UtilGenerics.checkMap;
import org.ofbiz.base.util.UtilMisc;
import org.ofbiz.base.util.UtilProperties;
import org.ofbiz.base.util.UtilValidate;
import org.ofbiz.entity.Delegator;
import org.ofbiz.entity.GenericEntityException;
import org.ofbiz.entity.GenericValue;
import org.ofbiz.entity.util.EntityUtil;
import org.ofbiz.service.DispatchContext;
import org.ofbiz.service.ServiceUtil;
/**
* User preference services.<p>User preferences are stored as key-value pairs.
* <p>User preferences can be grouped - so that multiple preference pairs can be
* handled at once. Preference groups also allow a single userPrefTypeId to be
* used more than once - with each occurence having a unique userPrefGroupTypeId.</p>
* <p>User preference values are stored as Strings, so the easiest and most
* efficient way to handle user preference values is to keep them as strings.
* This class handles any data conversion needed.</p>
*/
public class PreferenceServices {
    // Logging module identifier and the UI-label resource bundle used for
    // localized error messages throughout this class.
    public static final String module = PreferenceServices.class.getName();
    public static final String resource = "PrefErrorUiLabels";
/**
* Retrieves a single user preference from persistent storage. Call with
* userPrefTypeId and optional userPrefLoginId. If userPrefLoginId isn't
* specified, then the currently logged-in user's userLoginId will be
* used. The retrieved preference is contained in the <b>userPrefMap</b> element.
* @param ctx The DispatchContext that this service is operating in.
* @param context Map containing the input arguments.
* @return Map with the result of the service, the output parameters.
*/
    public static Map<String, Object> getUserPreference(DispatchContext ctx, Map<String, ?> context) {
        Locale locale = (Locale) context.get("locale");
        // Caller must be permitted to read this user's preferences.
        if (!PreferenceWorker.isValidGetId(ctx, context)) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "getPreference.permissionError", locale));
        }
        Delegator delegator = ctx.getDelegator();
        // userPrefTypeId is required; without it the lookup is meaningless.
        String userPrefTypeId = (String) context.get("userPrefTypeId");
        if (UtilValidate.isEmpty(userPrefTypeId)) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "getPreference.invalidArgument", locale));
        }
        // Falls back to the logged-in user when no explicit login id is given.
        String userLoginId = PreferenceWorker.getUserLoginId(context, true);
        Map<String, String> fieldMap = UtilMisc.toMap("userLoginId", userLoginId, "userPrefTypeId", userPrefTypeId);
        String userPrefGroupTypeId = (String) context.get("userPrefGroupTypeId");
        if (UtilValidate.isNotEmpty(userPrefGroupTypeId)) {
            // Optional group filter narrows the lookup.
            fieldMap.put("userPrefGroupTypeId", userPrefGroupTypeId);
        }
        Map<String, Object> userPrefMap = null;
        try {
            // Only the first matching UserPreference row is considered.
            GenericValue preference = EntityUtil.getFirst(delegator.findByAnd("UserPreference", fieldMap));
            if (preference != null) {
                userPrefMap = PreferenceWorker.createUserPrefMap(preference);
            }
        } catch (GenericEntityException e) {
            Debug.logWarning(e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "getPreference.readFailure", new Object[] { e.getMessage() }, locale));
        } catch (GeneralException e) {
            Debug.logWarning(e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "getPreference.readFailure", new Object[] { e.getMessage() }, locale));
        }
        Map<String, Object> result = ServiceUtil.returnSuccess();
        result.put("userPrefMap", userPrefMap);
        if (userPrefMap != null) {
            // Put the value in the result Map too, makes access easier for calling methods.
            Object userPrefValue = userPrefMap.get(userPrefTypeId);
            if (userPrefValue != null) {
                result.put("userPrefValue", userPrefValue);
            }
        }
        return result;
    }
/**
* Retrieves a group of user preferences from persistent storage. Call with
* userPrefGroupTypeId and optional userPrefLoginId. If userPrefLoginId isn't
* specified, then the currently logged-in user's userLoginId will be
* used. The retrieved preferences group is contained in the <b>userPrefMap</b> element.
* @param ctx The DispatchContext that this service is operating in.
* @param context Map containing the input arguments.
* @return Map with the result of the service, the output parameters.
*/
    public static Map<String, Object> getUserPreferenceGroup(DispatchContext ctx, Map<String, ?> context) {
        Locale locale = (Locale) context.get("locale");
        // Caller must be permitted to read this user's preferences.
        if (!PreferenceWorker.isValidGetId(ctx, context)) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "getPreference.permissionError", locale));
        }
        Delegator delegator = ctx.getDelegator();
        // The group id is required for a group lookup.
        String userPrefGroupTypeId = (String) context.get("userPrefGroupTypeId");
        if (UtilValidate.isEmpty(userPrefGroupTypeId)) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "getPreference.invalidArgument", locale));
        }
        String userLoginId = PreferenceWorker.getUserLoginId(context, true);
        Map<String, Object> userPrefMap = null;
        try {
            // Load the system-wide defaults ("_NA_" login) first, then overlay
            // the specific user's own preferences on top of them.
            Map<String, String> fieldMap = UtilMisc.toMap("userLoginId", "_NA_", "userPrefGroupTypeId", userPrefGroupTypeId);
            userPrefMap = PreferenceWorker.createUserPrefMap(delegator.findByAnd("UserPreference", fieldMap));
            fieldMap.put("userLoginId", userLoginId);
            userPrefMap.putAll(PreferenceWorker.createUserPrefMap(delegator.findByAnd("UserPreference", fieldMap)));
        } catch (GenericEntityException e) {
            Debug.logWarning(e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "getPreference.readFailure", new Object[] { e.getMessage() }, locale));
        } catch (GeneralException e) {
            Debug.logWarning(e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "getPreference.readFailure", new Object[] { e.getMessage() }, locale));
        }
        // for the 'DEFAULT' values find the related values in general properties and if found use those.
        Iterator it = userPrefMap.entrySet().iterator();
        Map generalProperties = UtilProperties.getProperties("general");
        while (it.hasNext()) {
            Map.Entry pairs = (Map.Entry) it.next();
            if ("DEFAULT".equals(pairs.getValue())) {
                // Substitute the matching general.properties entry, if any.
                if (UtilValidate.isNotEmpty(generalProperties.get(pairs.getKey()))) {
                    userPrefMap.put((String) pairs.getKey(), generalProperties.get(pairs.getKey()));
                }
            }
        }
        Map<String, Object> result = ServiceUtil.returnSuccess();
        result.put("userPrefMap", userPrefMap);
        return result;
    }
/**
* Stores a single user preference in persistent storage. Call with
* userPrefTypeId, userPrefGroupTypeId, userPrefValue and optional userPrefLoginId.
* If userPrefLoginId isn't specified, then the currently logged-in user's
* userLoginId will be used.
* @param ctx The DispatchContext that this service is operating in.
* @param context Map containing the input arguments.
* @return Map with the result of the service, the output parameters.
*/
    public static Map<String, Object> setUserPreference(DispatchContext ctx, Map<String, ?> context) {
        Delegator delegator = ctx.getDelegator();
        Locale locale = (Locale) context.get("locale");
        String userLoginId = PreferenceWorker.getUserLoginId(context, false);
        String userPrefTypeId = (String) context.get("userPrefTypeId");
        Object userPrefValue = (String) context.get("userPrefValue");
        // All three of login id, type id and value are required.
        if (UtilValidate.isEmpty(userLoginId) || UtilValidate.isEmpty(userPrefTypeId) || userPrefValue == null) {
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "setPreference.invalidArgument", locale));
        }
        String userPrefGroupTypeId = (String) context.get("userPrefGroupTypeId");
        String userPrefDataType = (String) context.get("userPrefDataType");
        try {
            // Optionally convert the string value to the declared data type
            // before persisting.
            if (UtilValidate.isNotEmpty(userPrefDataType)) {
                userPrefValue = ObjectType.simpleTypeConvert(userPrefValue, userPrefDataType, null, null, false);
            }
            // createOrStore performs an upsert on the UserPreference row.
            GenericValue rec = delegator.makeValidValue("UserPreference", PreferenceWorker.toFieldMap(userLoginId, userPrefTypeId, userPrefGroupTypeId, userPrefValue));
            delegator.createOrStore(rec);
        } catch (GenericEntityException e) {
            Debug.logWarning(e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "setPreference.writeFailure", new Object[] { e.getMessage() }, locale));
        } catch (GeneralException e) {
            Debug.logWarning(e.getMessage(), module);
            return ServiceUtil.returnError(UtilProperties.getMessage(resource, "setPreference.writeFailure", new Object[] { e.getMessage() }, locale));
        }
        return ServiceUtil.returnSuccess();
    }
/**
* Stores a user preference group in persistent storage. Call with
* userPrefMap, userPrefGroupTypeId and optional userPrefLoginId. If userPrefLoginId
* isn't specified, then the currently logged-in user's userLoginId will be
* used.
* @param ctx The DispatchContext that this service is operating in.
* @param context Map containing the input arguments.
* @return Map with the result of the service, the output parameters.
*/
public static Map<String, Object> setUserPreferenceGroup(DispatchContext ctx, Map<String, ?> context) {
Delegator delegator = ctx.getDelegator();
Locale locale = (Locale) context.get("locale");
String userLoginId = PreferenceWorker.getUserLoginId(context, false);
Map<String, Object> userPrefMap = checkMap(context.get("userPrefMap"), String.class, Object.class);
String userPrefGroupTypeId = (String) context.get("userPrefGroupTypeId");
if (UtilValidate.isEmpty(userLoginId) || UtilValidate.isEmpty(userPrefGroupTypeId) || userPrefMap == null) {
return ServiceUtil.returnError(UtilProperties.getMessage(resource, "setPreference.invalidArgument", locale));
}
try {
for (Iterator i = userPrefMap.entrySet().iterator(); i.hasNext();) {
Map.Entry mapEntry = (Map.Entry) i.next();
GenericValue rec = delegator.makeValidValue("UserPreference", PreferenceWorker.toFieldMap(userLoginId, (String) mapEntry.getKey(), userPrefGroupTypeId, (String) mapEntry.getValue()));
delegator.createOrStore(rec);
}
} catch (GenericEntityException e) {
Debug.logWarning(e.getMessage(), module);
return ServiceUtil.returnError(UtilProperties.getMessage(resource, "setPreference.writeFailure", new Object[] { e.getMessage() }, locale));
} catch (GeneralException e) {
Debug.logWarning(e.getMessage(), module);
return ServiceUtil.returnError(UtilProperties.getMessage(resource, "setPreference.writeFailure", new Object[] { e.getMessage() }, locale));
}
return ServiceUtil.returnSuccess();
}
/**
* Copies a user preference group. Call with
* fromUserLoginId, userPrefGroupTypeId and optional userPrefLoginId. If userPrefLoginId
* isn't specified, then the currently logged-in user's userLoginId will be
* used.
* @param ctx The DispatchContext that this service is operating in.
* @param context Map containing the input arguments.
* @return Map with the result of the service, the output parameters.
*/
public static Map<String, Object> copyUserPreferenceGroup(DispatchContext ctx, Map<String, ?> context) {
Delegator delegator = ctx.getDelegator();
Locale locale = (Locale) context.get("locale");
String userLoginId = PreferenceWorker.getUserLoginId(context, false);
String fromUserLoginId = (String) context.get("fromUserLoginId");
String userPrefGroupTypeId = (String) context.get("userPrefGroupTypeId");
if (UtilValidate.isEmpty(userLoginId) || UtilValidate.isEmpty(userPrefGroupTypeId) || UtilValidate.isEmpty(fromUserLoginId)) {
return ServiceUtil.returnError(UtilProperties.getMessage(resource, "copyPreference.invalidArgument", locale));
}
try {
Map<String, String> fieldMap = UtilMisc.toMap("userLoginId", fromUserLoginId, "userPrefGroupTypeId", userPrefGroupTypeId);
List<GenericValue> resultList = delegator.findByAnd("UserPreference", fieldMap);
if (resultList != null) {
for (GenericValue preference: resultList) {
preference.set("userLoginId", userLoginId);
}
delegator.storeAll(resultList);
}
} catch (GenericEntityException e) {
Debug.logWarning(e.getMessage(), module);
return ServiceUtil.returnError(UtilProperties.getMessage(resource, "copyPreference.writeFailure", new Object[] { e.getMessage() }, locale));
}
return ServiceUtil.returnSuccess();
}
}
|
import React from 'react';
import styled, { css } from 'styled-components';
import PropTypes from 'prop-types';
export default function Container(props) {
const { footer, header, main, children } = props;
switch (true) {
case footer:
return (
<Footer>
<div>{children}</div>
</Footer>
);
case header:
return (
<Header>
<div>{children}</div>
</Header>
);
case main:
return (
<Main>
<div>{children}</div>
</Main>
);
default:
return (
<Section>
<div>{children}</div>
</Section>
);
}
}
// Shared base styles for every container variant. Currently empty, but kept
// as a single extension point so common rules can be added in one place.
const baseStyles = css``;
// Fixed-height footer with its content centered horizontally and vertically.
const Footer = styled.footer`
  ${baseStyles}
  height: 50px;
  display: flex;
  align-items: center;
  justify-content: center;
`;
// Header currently has no styling beyond the (empty) shared base.
const Header = styled.header`
  ${baseStyles};
`;
// Main content area: fills the viewport minus the 50px footer and centers
// its children in a padded column.
const Main = styled.main`
  ${baseStyles};
  width: 100%;
  height: calc(100vh - 50px);
  display: flex;
  justify-content: center;
  align-items: center;
  flex-direction: column;
  padding: 20px;
  box-sizing: border-box;
`;
// Default wrapper used when no flag prop is given to Container.
const Section = styled.section`
  ${baseStyles};
  display: flex;
  justify-content: center;
`;
// Container picks the first truthy flag in the order footer, header, main;
// callers are expected to set at most one of them.
Container.propTypes = {
  children: PropTypes.any,
  footer: PropTypes.bool,
  header: PropTypes.bool,
  main: PropTypes.bool,
};
|
// Marker base type for entities stored in an EntityCollection.
// NOTE(review): `{}` accepts almost any non-nullish value; kept as-is because
// existing callers may rely on the loose constraint.
export declare type EntityBase = {};

/**
 * Ordered, mutable collection of entities.
 * Duplicates are allowed; `remove` deletes only the first matching reference.
 */
class EntityCollection<T extends EntityBase> {
  private entities: T[];

  constructor() {
    this.entities = [];
  }

  /** Appends an entity to the end of the collection. */
  add(entity: T): void {
    this.entities.push(entity);
  }

  /**
   * Removes the first occurrence of `entity` (identity comparison via
   * indexOf); a no-op when the entity is not present.
   */
  remove(entity: T): void {
    const index = this.entities.indexOf(entity);
    if (index !== -1) {
      this.entities.splice(index, 1);
    }
  }

  /**
   * Returns a snapshot of the collection in insertion order.
   * Fix: the previous version returned the internal array itself, letting
   * callers mutate collection state directly; now returns a shallow copy.
   */
  getAll(): T[] {
    return [...this.entities];
  }
}
// Usage example
class CustomEntity implements EntityBase {
// Define custom properties and methods for the entity
}
const entityCollection = new EntityCollection<CustomEntity>();
const entity1 = new CustomEntity();
const entity2 = new CustomEntity();
entityCollection.add(entity1);
entityCollection.add(entity2);
console.log(entityCollection.getAll()); // Output: [entity1, entity2]
entityCollection.remove(entity1);
console.log(entityCollection.getAll()); // Output: [entity2]
|
#!/bin/bash
# Copyright 2017 MLiy Contributors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Build the system-wide profile snippet in memory. NOTE(review): `read -d ""`
# exits non-zero at end of input by design; harmless here because this script
# does not run under `set -e`.
read -r -d "" PROFILE_D <<EOF
export MLIY_HOME="$MLIY_HOME"
export MLIY_SOFTWARE="$MLIY_HOME/software"
export TZ="$TIME_ZONE"
export MAKE=/usr/bin/make
export SCALA_HOME="\$MLIY_SOFTWARE/scala/scala-2.10.6"
export SPARK_HOME="\$MLIY_SOFTWARE/spark/spark-2.4.0-bin-hadoop2.7"
export WEKA_HOME="\$MLIY_SOFTWARE/weka/weka-3-8-2"
export JUPYTER_PATH="\$MLIY_SOFTWARE/jupyter/kernels/py3/share/jupyter:\$MLIY_SOFTWARE/jupyter/kernels/py2/share/jupyter"
EOF
# Install the snippet so every login shell picks up the MLiy environment.
echo "$PROFILE_D" > /etc/profile.d/mliy.sh
chmod +x /etc/profile.d/mliy.sh
# Locate an optional proxy setup script. Quote the search roots so paths
# containing spaces survive word splitting.
PROXY_FILE=$(find "$(pwd)/../../../" "$MLIY_HOME/scripts/" -iname "proxy*.sh" | head -1)
if [[ -n "$PROXY_FILE" ]]; then
    cp "$PROXY_FILE" "/etc/profile.d/proxy.sh"
    chmod +x /etc/profile.d/proxy.sh
fi
|
#!/bin/bash
# Emit the Automake variables that declare every *.t.cc under $1 as a test
# program: <name>_TESTS plus the TESTS / check_PROGRAMS accumulators.
# $1 - directory containing the .t.cc test sources.
function addTests {
    local dir=$1
    # Derive an Automake-safe variable prefix: punctuation/whitespace -> '_'.
    # Quote the tr sets so the shell cannot glob-expand the '[...]' patterns
    # against files in the current directory.
    local name=$(echo -n "${dir}" | tr '[:punct:][:space:]' _)
    echo "${name}_TESTS = \\"
    for test in $dir/*.t.cc; do
        echo " ${test%.cc} \\"
    done
    echo " \$(NULL)"
    echo "TESTS += \$(${name}_TESTS)"
    echo "check_PROGRAMS += \$(${name}_TESTS)"
    echo
}
# Convert one test source path to its Automake target name: strip the .cc
# suffix and map punctuation/whitespace to underscores. The tr sets are
# quoted so '[...]' cannot be glob-expanded by the shell.
function amName {
    echo -n "${1%.cc}" | tr '[:punct:][:space:]' _
}
# Compiler flags shared by every generated test target below.
test_CXXFLAGS="-Icommon -Itools -Ideps/libtap++-0.01"
# --- gearbox/t/core: declare the test list, then per-test build rules ---
addTests gearbox/t/core
for test in gearbox/t/core/*.t.cc; do
    name=$(amName $test)
    # Per-test Automake variables; the \$ escapes keep $(...) literal for make.
    cat <<EOF
${name}_CXXFLAGS=${test_CXXFLAGS} -DTESTDIR='"\$(abs_top_srcdir)/gearbox/t/core"'
${name}_LDFLAGS=\$(BOOST_LDFLAGS) \$(BOOST_SYSTEM_LIB)
${name}_LDADD=\$(LIBTAP) gearbox/core/libgearbox_core.la
${name}_SOURCES=$test
EOF
done
# Extra defines and the hello/goodbye plugin modules used by the core
# plugin-loading tests. Quoted 'EOF' delimiter: emitted verbatim, no expansion.
cat <<'EOF'
gearbox_t_core_Plugin_t_CXXFLAGS+=-DTESTPLUGINDIR='"$(abs_top_srcdir)/gearbox/t/core/plugins/.libs"'
gearbox_t_core_PluginAll_t_CXXFLAGS+=-DTESTPLUGINDIR='"$(abs_top_srcdir)/gearbox/t/core/plugins/.libs"'
check_LTLIBRARIES += gearbox/t/core/plugins/hello.la gearbox/t/core/plugins/goodbye.la
gearbox_t_core_plugins_hello_la_LDFLAGS = -module -rpath /dev/null
gearbox_t_core_plugins_hello_la_LIBADD=gearbox/core/libgearbox_core.la
gearbox_t_core_plugins_hello_la_SOURCES=gearbox/t/core/plugins/TestPlugin.cc gearbox/t/core/plugins/hello.cc
gearbox_t_core_plugins_goodbye_la_LDFLAGS = -module -rpath /dev/null
gearbox_t_core_plugins_goodbye_la_LIBADD=gearbox/core/libgearbox_core.la
gearbox_t_core_plugins_goodbye_la_SOURCES=gearbox/t/core/plugins/TestPlugin.cc gearbox/t/core/plugins/goodbye.cc
EOF
# --- gearbox/t/job ---
addTests gearbox/t/job
for test in gearbox/t/job/*.t.cc; do
    name=$(amName $test)
    cat <<EOF
${name}_CXXFLAGS=${test_CXXFLAGS} -DTESTDIR='"\$(abs_top_srcdir)/gearbox/t/job"'
${name}_LDFLAGS=\$(BOOST_LDFLAGS) \$(BOOST_SYSTEM_LIB)
${name}_LDADD=\$(LIBTAP) gearbox/job/libgearbox_job.la
${name}_SOURCES=$test
EOF
done
# --- gearbox/t/store ---
addTests gearbox/t/store
for test in gearbox/t/store/*.t.cc; do
    name=$(amName $test)
    cat <<EOF
${name}_CXXFLAGS=${test_CXXFLAGS} -DTESTDIR='"\$(abs_top_srcdir)/gearbox/t/store"'
${name}_LDFLAGS=\$(BOOST_LDFLAGS) \$(BOOST_SYSTEM_LIB)
${name}_LDADD=\$(LIBTAP) gearbox/store/libgearbox_store.la
${name}_SOURCES=$test
EOF
done
# --- gearbox/t/worker ---
addTests gearbox/t/worker
for test in gearbox/t/worker/*.t.cc; do
    name=$(amName $test)
    cat <<EOF
${name}_CXXFLAGS=${test_CXXFLAGS} -DTESTDIR='"\$(abs_top_srcdir)/gearbox/t/worker"'
${name}_LDFLAGS=\$(BOOST_LDFLAGS) \$(BOOST_SYSTEM_LIB)
${name}_LDADD=\$(LIBTAP) gearbox/worker/libgearbox_worker.la
${name}_SOURCES=$test
EOF
done
# --- workers/t/gearbox: links worker, job, core plus the worker library ---
addTests workers/t/gearbox
for test in workers/t/gearbox/*.t.cc; do
    name=$(amName $test)
    cat <<EOF
${name}_CXXFLAGS=${test_CXXFLAGS} -DTESTDIR='"\$(abs_top_srcdir)/workers/t/gearbox"'
${name}_LDFLAGS=\$(BOOST_LDFLAGS) \$(BOOST_SYSTEM_LIB)
${name}_LDADD=\$(LIBTAP) gearbox/worker/libgearbox_worker.la gearbox/job/libgearbox_job.la gearbox/core/libgearbox_core.la workers/gearbox/libworker_gearbox.la
${name}_SOURCES=$test
EOF
done
# --- plugins/status/t/sql ---
addTests plugins/status/t/sql
for test in plugins/status/t/sql/*.t.cc; do
    name=$(amName $test)
    cat <<EOF
${name}_CXXFLAGS=${test_CXXFLAGS} -DTESTDIR='"\$(abs_top_srcdir)/plugins/status/t/sql"'
${name}_LDFLAGS=\$(BOOST_SYSTEM_LIB)
${name}_LDADD=\$(LIBTAP) gearbox/job/libgearbox_job.la gearbox/core/libgearbox_core.la gearbox/store/libgearbox_store.la
${name}_SOURCES=$test
EOF
done
# Stub gearman library linked by the tests in place of a real gearmand.
cat <<EOF
check_LTLIBRARIES += common/stub/libgearman_stub.la
common_stub_libgearman_stub_la_CXXFLAGS = ${test_CXXFLAGS} \$(LOG4CXX_CFLAGS)
common_stub_libgearman_stub_la_LIBADD = gearbox/core/libgearbox_core.la
common_stub_libgearman_stub_la_LDFLAGS = -rpath /dev/null -avoid-version \$(LOG4CXX_LIBS) \$(BOOST_LDFLAGS) \$(BOOST_SYSTEM_LIB)
common_stub_libgearman_stub_la_SOURCES = \
common/stub/gearman_stub.cc \
\$(NONE)
EOF
# --- SWIG binding tests: perl (always), php (conditional), python ---
echo "gearbox_t_swig_perl_TESTS = \\"
for test in gearbox/t/swig/perl/*.t; do
    echo " ${test} \\"
done
echo " \$(NULL)"
echo
echo "TESTS += \$(gearbox_t_swig_perl_TESTS)"
# PHP tests only run when the build was configured with PHP support.
echo "if WITH_PHP"
echo "gearbox_t_swig_php_TESTS = \\"
for test in gearbox/t/swig/php/*.t; do
    echo " ${test} \\"
done
echo " \$(NULL)"
echo
echo "TESTS += \$(gearbox_t_swig_php_TESTS)"
echo "endif"
echo "gearbox_t_swig_python_TESTS = \\"
for test in gearbox/t/swig/python/*.t; do
    echo " ${test} \\"
done
echo " \$(NULL)"
echo
echo "TESTS += \$(gearbox_t_swig_python_TESTS)"
# Expose the source tree's bin/ on PATH for the generated test runs.
echo "export PATH=$PATH:\$(abs_top_srcdir)/bin"
|
#!/bin/bash -e
# Copyright 2020. IBM All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
BASEDIR="$(dirname "$0")"
# shellcheck disable=SC2034
# shellcheck disable=SC1090
# Quote the path so a BASEDIR containing spaces does not split (script runs
# under bash -e, where a failed source aborts immediately).
source "${BASEDIR}/env.sh"
#PATCHLIST
PATCH1FILE=patch-v1200-service.tar #PATCH 1 - V1200 - vision-service - resolved in tracker/14394
declare -a PATCHLIST=(
    "${COS_BUCKET_BASE}/${PATCH1FILE}"
)
echo "Installing aria2..."
apt-get -o Dpkg::Use-Pty=0 update -qq || echo " RC${?} Got an error on update???"
apt-get -o Dpkg::Use-Pty=0 install -qq aria2
#Fetch the patches
echo "Downloading to ${RAMDISK}..."
# Quoted: an unset/odd RAMDISK would otherwise change pushd's argument count.
pushd "$RAMDISK"
echo "Fetching patches..."
for patchfile in "${PATCHLIST[@]}"; do
    echo "Fetching patch ${patchfile} from COS..."
    # Quote the URL so '&'/'?' query characters cannot be shell-interpreted.
    aria2c -q -s160 -x16 "$patchfile"
done
#Apply patches
#PATCH 1
# Keep a backup tag of the current image before loading the patched one.
docker tag vision-service:1.2.0.0 vision-service:1.2.0.0-backup
docker load -i "$PATCH1FILE"
echo "Uninstalling aria2"
apt-get -o Dpkg::Use-Pty=0 remove -qq aria2
echo "SUCCESS: Patches applied successfully!"
popd
|
<filename>src/controller/Controller.java
package controller;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.sql.Connection;
import java.sql.Date;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.servlet.ServletConfig;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.annotation.MultipartConfig;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.servlet.http.Part;
import javax.sql.DataSource;
import org.json.JSONException;
import org.json.JSONObject;
import beans.User;
import database.Account;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.CannedAccessControlList;
import com.amazonaws.services.s3.model.ObjectMetadata;
import java.util.List;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.auth.profile.ProfileCredentialsProvider;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.PutObjectResult;
import org.apache.commons.io.FilenameUtils;
/**
* Servlet implementation class Controller
*/
@MultipartConfig
@WebServlet("/Controller")
public class Controller extends HttpServlet {
private static final long serialVersionUID = 1L;
private DataSource ds;
	/**
	 * No-argument constructor required by the servlet container.
	 * All real initialization (the JNDI DataSource lookup) happens in
	 * init(ServletConfig), not here.
	 * @see HttpServlet#HttpServlet()
	 */
	public Controller()
	{
		super();
		// TODO Auto-generated constructor stub
	}
/**
* @see Servlet#init(ServletConfig)
*/
public void init(ServletConfig config) throws ServletException
{
try
{
InitialContext initContext = new InitialContext();
Context env = (Context)initContext.lookup("java:comp/env");
//now I can use this context to look up my data source (the mysql aws database)
ds = (DataSource)env.lookup("jdbc/critiqueudb");
/***storage test***
final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
List<Bucket> buckets = s3.listBuckets();
System.out.println("Your Amazon S3 buckets are:");
for (Bucket b : buckets) {
System.out.println("* " + b.getName());
}
//***storage test***/
}
catch (NamingException e)
{
// TODO Auto-generated catch block
//e.printStackTrace();
throw new ServletException();
}
}
/**
* @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
*/
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
PrintWriter out = response.getWriter();
String action = request.getParameter("action");
if (action == null)
{
request.getRequestDispatcher("/home.jsp").forward(request, response);
}
else if(action.equals("login"))
{
request.setAttribute("email", "");
request.setAttribute("password", "");
request.setAttribute("message", "");
request.getRequestDispatcher("/login.jsp").forward(request, response);
}
else if(action.equals("createaccount"))
{
request.setAttribute("email", "");
request.setAttribute("password", "");
request.setAttribute("repeatpassword", "");
request.setAttribute("message", "");
request.getRequestDispatcher("/createaccount.jsp").forward(request, response);
}
else if(action.equals("myart"))
{
request.getRequestDispatcher("/loginsuccess.jsp").forward(request, response);
}
else if(action.equals("more"))
{
/*TODO:
* the idea here is to query the database to return json containing 9 additional image stems,
* extensions, and the email address associated with them. This data will then be looped through
* in the javascript function to create the html and append to the proper div
*
*
* select * from artwork
where email='<EMAIL>'
order by datetime desc
limit index, 9;
*
*/
//TODO: query the database and return data needed for the image grid and the modal (json)
HttpSession mySession = request.getSession();
String emailTemp = (String) mySession.getAttribute("email");
String startIndex = request.getParameter("index");
//startIndex = "0"; //delete this! using for testing
String sql = "SELECT * FROM artwork AS result WHERE email=? ORDER BY datetime DESC LIMIT " + startIndex +", 9"; // ? character is a wildcard
//declare and initialize Json object to return
JSONObject obj = new JSONObject();
PreparedStatement statement;
try {
Connection conn = null;
conn = ds.getConnection();
statement = conn.prepareStatement(sql);
statement.setString(1, emailTemp);
//the result of a SQL query gets returned to ResultSet type object
ResultSet rs = statement.executeQuery();
//declare inner Json to hold each of 9 new image data, one at at time
JSONObject innerObj;
//get context path. This will be included in the returned json so that it can then be used in
//the javascript function in each artwork modal to link to that image's critique page
String contextPath = request.getContextPath();
//declare int to iterate over for json
int jsonIndex = 0;
//the result has an internal pointer that begins before the first entry, so we first must move it up
while(rs.next())
{
innerObj = new JSONObject();
innerObj.put("email", emailTemp);
innerObj.put("title", rs.getString("title"));
innerObj.put("description", rs.getString("description"));
innerObj.put("contextPath", contextPath);
innerObj.put("wip", rs.getBoolean("work_in_progress"));
String url = rs.getString("image_stem") + "." + rs.getString("image_extension");
innerObj.put("url", url);
//System.out.println(rs.getString("title"));
obj.put(jsonIndex+"", innerObj);
jsonIndex++;
}
System.out.println(obj.toString());
rs.close();
statement.close();
} catch (SQLException | JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//System.out.println(request.getContextPath());
//old test portion being returned to the javascript function
//String index = request.getParameter("index");
//String text = "<p>This was generated on the server with index "+index+"</p>";
response.setContentType("text/plain"); // Set content type of the response so that jQuery knows what it can expect.
response.setCharacterEncoding("UTF-8");
response.getWriter().write(obj.toString()); // Write response body.
}
else if(action.equals("browse"))
{
request.getRequestDispatcher("/browse.jsp").forward(request, response);
}
else if(action.equals("browsemore"))
{
/*TODO:
* the idea here is to query the database to return json containing 9 additional image stems,
* extensions, and the email address associated with them. This data will then be looped through
* in the javascript function to create the html and append to the proper div
*
*
* select * from artwork
where email='<EMAIL>'
order by datetime desc
limit index, 9;
*
*/
//TODO: query the database and return data needed for the image grid and the modal (json)
HttpSession mySession = request.getSession();
//String emailTemp = (String) mySession.getAttribute("email");
String startIndex = request.getParameter("index");
//startIndex = "0"; //delete this! using for testing
String sql = "SELECT * FROM artwork AS result ORDER BY datetime DESC LIMIT " + startIndex +", 9"; // ? character is a wildcard
//declare and initialize Json object to return
JSONObject obj = new JSONObject();
PreparedStatement statement;
try {
Connection conn = null;
conn = ds.getConnection();
statement = conn.prepareStatement(sql);
//statement.setString(1, emailTemp);
//the result of a SQL query gets returned to ResultSet type object
ResultSet rs = statement.executeQuery();
//declare inner Json to hold each of 9 new image data, one at at time
JSONObject innerObj;
//get context path. This will be included in the returned json so that it can then be used in
//the javascript function in each artwork modal to link to that image's critique page
String contextPath = request.getContextPath();
//declare int to iterate over for json
int jsonIndex = 0;
//the result has an internal pointer that begins before the first entry, so we first must move it up
while(rs.next())
{
innerObj = new JSONObject();
System.out.println(rs.getString("email")); //test email for each image entry
innerObj.put("email", rs.getString("email"));
innerObj.put("title", rs.getString("title"));
innerObj.put("description", rs.getString("description"));
innerObj.put("contextPath", contextPath);
innerObj.put("wip", rs.getBoolean("work_in_progress"));
String url = rs.getString("image_stem") + "." + rs.getString("image_extension");
innerObj.put("url", url);
//System.out.println(rs.getString("title"));
obj.put(jsonIndex+"", innerObj);
jsonIndex++;
}
System.out.println(obj.toString());
rs.close();
statement.close();
} catch (SQLException | JSONException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//System.out.println(request.getContextPath());
//old test portion being returned to the javascript function
//String index = request.getParameter("index");
//String text = "<p>This was generated on the server with index "+index+"</p>";
response.setContentType("text/plain"); // Set content type of the response so that jQuery knows what it can expect.
response.setCharacterEncoding("UTF-8");
response.getWriter().write(obj.toString()); // Write response body.
}
else if(action.equals("image"))
{
/*If action is "image", we will grab artist and image parameters from the url
*then forward to a new .jsp which will contain high-res image, plus the form
*to complete a full critique. The idea is to do it this way so that any image url can be easily
*shared publicly. The page should only allow critique submission if user is logged in and if the
*the user is not the artist (use condition to render multiple versions in the jsp)
*/
request.setAttribute("message", "");
String artist = request.getParameter("artist");
String title = request.getParameter("title");
System.out.println("Controller successfully accessed '"+ title + "' image data by artist: " + artist + ".");
//example url: http://localhost:8080/CritiqueU/Controller?action=image&artist=<EMAIL>&title=raccoon
request.getRequestDispatcher("/imagecritique.jsp").forward(request, response);
}
else if(action.equals("browse"))
{
System.out.println("browse section accessed");
}
else
{
out.println("unrecognized action");
return;
}
//getServletContext().getRequestDispatcher("/home.jsp").forward(request, response);
}
/**
* @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
*/
protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException
{
PrintWriter out = response.getWriter();
String action = request.getParameter("action");
if(action == null)
{
out.println("unrecognized action");
return;
}
HttpSession mySession = request.getSession();
Connection conn = null;
try
{
conn = ds.getConnection();
}
catch (SQLException e)
{
throw new ServletException();
}
Account account = new Account(conn);
if(action.equals("dologin"))
{
String email = request.getParameter("email");
String password = request.getParameter("password");
//User user = new User(email, password);
//set these request attributes. In case of login failure, they will
//autopopulate in the form for a retry (empty string password is intended)
request.setAttribute("email", email);
request.setAttribute("password", "");
//if we successfully log in, forward to the loginsuccess.jsp page
try {
if(account.login(email, password)) //Account object attempts authentication and returns boolean
{
//set the User bean as a session variable
// get the session object
mySession = request.getSession();
String emailTemp = (String) mySession.getAttribute("email");
if(emailTemp == null)
emailTemp = email;
mySession.setAttribute("email", email);
mySession.setAttribute("loggedin", "true");
//close database connection
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
request.getRequestDispatcher("/loginsuccess.jsp").forward(request, response);
}
else
{
//set an error message as an attribute "message"
request.setAttribute("message", "Error! Email address or password is incorrect.");
//close database connection
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
//and forward back to the login page
request.getRequestDispatcher("/login.jsp").forward(request, response);
}
}
catch (SQLException e)
{
// TODO: Do something sensible here, like forward to an error.jsp
e.printStackTrace();
}
finally
{
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
//TODO: if the users login info can be found in the db, then we go to the success page
// if it can't, we go back to the form.
//request.getRequestDispatcher("/loginsuccess.jsp").forward(request, response);
}
else if(action.equals("createaccount"))
{
String email = request.getParameter("email");
String password = request.getParameter("password");
String repeatPassword = request.getParameter("repeatpassword");
request.setAttribute("email", email);
request.setAttribute("password", "");
request.setAttribute("repeatpassword", "");
request.setAttribute("message", "");
if(!password.equals(repeatPassword))
{
request.setAttribute("message", "Error! Passwords do not match.");
//close database connection
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
request.getRequestDispatcher("/createaccount.jsp").forward(request, response);
}
else
{
User user = new User(email, password);
if(!user.validate())
{
//the email or password is in the wrong format
request.setAttribute("message", user.getMessage());
//close database connection
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
request.getRequestDispatcher("/createaccount.jsp").forward(request, response);
}
else
{
try
{
if(account.exists(email))
{
//the email already exists in the user database
//close database connection
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
request.setAttribute("message", "Error! An account with this email address already exists.");
request.getRequestDispatcher("/createaccount.jsp").forward(request, response);
}
else
{
//passes all checks. Create the account
account.create(email, password);
//close database connection
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
//set the user's email as a session variable
// get the session object
mySession = request.getSession();
String emailTemp = (String) mySession.getAttribute("email");
if(emailTemp == null)
emailTemp = email;
mySession.setAttribute("email", email);
request.getRequestDispatcher("/createsuccess.jsp").forward(request, response);
}
}
catch(SQLException e)
{
e.printStackTrace();
//request.getRequestDispatcher("/error.jsp").forward(request, response);
}
finally
{
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}
}
else if(action.equals("uploadimage"))
{
// //***storage test***
// final AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient();
// List<Bucket> buckets = s3.listBuckets();
// System.out.println("Your Amazon S3 buckets are:");
// for (Bucket b : buckets) {
// System.out.println("* " + b.getName());
// }
// //***storage test***/
String emailTemp = (String) mySession.getAttribute("email");
System.out.println("inside upload image section");
System.out.println(request.getParameter("title"));
//System.out.println(request.getParameter("image-to-upload"));
//****** happy coding!
//get the file chosen by the user
Part filePart = request.getPart("image-to-upload");
String fileName = filePart.getSubmittedFileName();
if(fileName.endsWith(".jpg") || fileName.endsWith(".png")){
InputStream fileInputStream = filePart.getInputStream();
//String accessKeyId = "YOUR_ACCESS_KEY_ID";
//String secretAccessKey = "YOUR_SECRET_ACCESS_KEY";
//String region = "YOUR_BUCKET REGION";
String bucketName = "critique-u";
String subdirectory = emailTemp + "/";
//AWS Access Key ID and Secret Access Key
//BasicAWSCredentials awsCreds = new BasicAWSCredentials(accessKeyId, secretAccessKey);
//This class connects to AWS S3 for us
//AmazonS3 s3client = AmazonS3ClientBuilder.standard().withRegion(region)
//.withCredentials(new AWSStaticCredentialsProvider(awsCreds)).build();
@SuppressWarnings("deprecation")
AmazonS3 s3client = new AmazonS3Client(new ProfileCredentialsProvider());
//Specify the file's size
ObjectMetadata metadata = new ObjectMetadata();
metadata.setContentLength(filePart.getSize());
//Create the upload request, giving it a bucket name, subdirectory, filename, input stream, and metadata
PutObjectRequest uploadRequest = new PutObjectRequest(bucketName, subdirectory + fileName, fileInputStream, metadata);
//Make it public so we can use it as a public URL on the internet
uploadRequest.setCannedAcl(CannedAccessControlList.PublicRead);
//Upload the file. This can take a while for big files!
s3client.putObject(uploadRequest);
//Create a URL using the bucket, subdirectory, and file name
//String fileUrl = "http://s3.amazonaws.com/" + bucketName + "/" + subdirectory + "/" + fileName;
//get other data from the form to submit to the database
String title = request.getParameter("title");
String description = request.getParameter("artwork-description");
String workInProgressString = request.getParameter("work-in-progress");
String stem = FilenameUtils.removeExtension(fileName);
String extension = FilenameUtils.getExtension(fileName);
boolean workInProgress = false;
if(workInProgressString != null)
{
if(workInProgressString.equals("on"))
{
workInProgress = true;
}
}
//System.out.println(title + " " + stem + " " + extension + " " + emailTemp + " " + description);
///////////////////////
String sql = "INSERT INTO artwork (email, title, description, datetime, work_in_progress, average_successfulness, image_stem, image_extension) VALUES (?, ?, ?, ?, ?, ?, ?, ?)";
//This will execute an insert statement, even with nulls. In the future, this could be not allowed, and error message sent back (?)
PreparedStatement statement;
try {
//java.util.Date today = new java.util.Date();
//String now = new java.sql.Date(today.getTime()).toString();
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
java.util.Date date = new java.util.Date();
String now = dateFormat.format(date);
System.out.println(now);
statement = conn.prepareStatement(sql);
statement.setString(1, emailTemp);
statement.setString(2, title);
statement.setString(3, description);
statement.setString(4, now);
statement.setBoolean(5, workInProgress); //work in progress (should operate off of a checkbox in upload form)
statement.setFloat(6, 0.0f);
statement.setString(7, stem);
statement.setString(8, extension);
//the result of a SQL query gets returned to ResultSet type object
statement.executeUpdate();
statement.close();
//set a flag to allow an alert to be displayed that the image was successfully uploaded
mySession.setAttribute("uploadflag", "success");
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
mySession.setAttribute("uploadflag", "failure");
} finally {
request.getRequestDispatcher("/loginsuccess.jsp").forward(request, response);
}
//response.getOutputStream().println("<p>Thanks " + name + "! Here's the image you uploaded:</p>");
//response.getOutputStream().println("<img src=\"" + fileUrl + "\" />");
//response.getOutputStream().println("<p>Upload another image <a href=\"http://localhost:8080/index.html\">here</a>.</p>");
}
else{
//the file was not a JPG or PNG
response.getOutputStream().println("<p>Please only upload JPG or PNG files.</p>");
response.getOutputStream().println("<p>Upload another file <a href=\"http://localhost:8080/index.html\">here</a>.</p>");
}
}
else if(action.equals("submitcritique"))
{
response.setContentType("text/plain"); // Set content type of the response so that jQuery knows what it can expect.
response.setCharacterEncoding("UTF-8");
String criticEmail = (String) mySession.getAttribute("email");
String artistEmail = request.getParameter("artist-email");
String title = request.getParameter("title");
String compositionRating = request.getParameter("composition-rating");
String lineRating = request.getParameter("line-rating");
String formRating = request.getParameter("form-rating");
String colorRating = request.getParameter("color-rating");
String craftRating = request.getParameter("craft-rating");
String successfulnessRating = request.getParameter("successfulness-rating");
String compositionComments = request.getParameter("composition-comments");
String lineComments = request.getParameter("line-comments");
String formComments = request.getParameter("form-comments");
String colorComments = request.getParameter("color-comments");
String craftComments = request.getParameter("craft-comments");
String successfulnessComments = request.getParameter("successfulness-comments");
System.out.println("**********");
System.out.println("critic email: " + criticEmail);
System.out.println("artwork: " + title + " by " + "artist: " + artistEmail);
System.out.println("composition: " + compositionRating + ": " + compositionComments);
System.out.println("line: " + lineRating + ": " + lineComments);
System.out.println("form: " + formRating + ": " + formComments);
System.out.println("color: " + colorRating + ": " + colorComments);
System.out.println("craft: " + craftRating + ": " + craftComments);
System.out.println("Successfulness: " + successfulnessRating + ": " + successfulnessComments);
//insert query into database here
//first create a prepared statement in jdbc
// this is a class that encapsulates a SQL statement
// great thing about it is that wildcards can be used
// don't ever concatenate this sql statement with username and password, because
//it will open you up to SQL injection attacks.
//String sql = "SELECT COUNT(*) AS count FROM user WHERE email=? AND password=?"; // ? character is a wildcard
//String sql = "INSERT INTO user (email, password) VALUES(?, ?)";
String sql = "INSERT INTO critique (email, title, criticEmail, composition, compositionComments, line, lineComments, form, formComments, color, colorComments, craft, craftComments, successfulness, comments, datetime) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
//This will execute an insert statement, even with nulls. In the future, this could be not allowed, and error message sent back (?)
PreparedStatement statement;
try {
DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
java.util.Date date = new java.util.Date();
String now = dateFormat.format(date);
statement = conn.prepareStatement(sql);
statement.setString(1, artistEmail);
statement.setString(2, title);
statement.setString(3, criticEmail);
statement.setString(4, compositionRating);
statement.setString(5, compositionComments);
statement.setString(6, lineRating);
statement.setString(7, lineComments);
statement.setString(8, formRating);
statement.setString(9, formComments);
statement.setString(10, colorRating);
statement.setString(11, colorComments);
statement.setString(12, craftRating);
statement.setString(13, craftComments);
statement.setString(14, successfulnessRating);
statement.setString(15, successfulnessComments);
statement.setString(16, now);
//the result of a SQL query gets returned to ResultSet type object
statement.executeUpdate();
statement.close();
//TODO: Now that the critique has been added to the database,
//call a helper function to return critiques (most recent first)
//in JSON format. This will be appended to the bottom of the modal
response.getWriter().write("success"); // Write response body.
} catch (SQLException e) {
// TODO Auto-generated catch block
e.printStackTrace();
response.getWriter().write("failure"); // Write response body.
}
//response.setContentType("text/plain"); // Set content type of the response so that jQuery knows what it can expect.
//response.setCharacterEncoding("UTF-8");
//test response
//response.getWriter().write("artist email: " + artistEmail); // Write response body.
//request.setAttribute("artist", artistEmail);
//request.setAttribute("title", title);
//System.out.println("line before the forward");
//request.getRequestDispatcher("/imagecritique.jsp").forward(request, response);
}
else if(action.equals("logout"))
{
//close the database connection
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
//invalidate the session
request.setAttribute("email", null);
mySession.setAttribute("email", null);
mySession.setAttribute("loggedin", "false");
mySession.invalidate();
request.getRequestDispatcher("/home.jsp").forward(request, response);
}
else
{
out.println("unrecognized action");
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
return;
}
try
{
conn.close();
account.closeDBConnection();
}
catch (SQLException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
}
protected String loadCritiques(HttpServletRequest request, HttpServletResponse response, int startIndex, String artistEmail, String title)
{
    // Loads up to 5 critiques for one artwork (identified by artist email +
    // title), newest first, starting at offset startIndex, and returns them
    // serialized as a JSON object keyed "0".."4".
    // The LIMIT offset is bound as a parameter instead of being concatenated
    // into the SQL text (the original concatenation invited SQL injection
    // habits even though startIndex is an int here).
    String sql = "SELECT * FROM critique AS result WHERE email=? AND title=? ORDER BY datetime DESC LIMIT ?, 5";
    JSONObject obj = new JSONObject();
    // try-with-resources closes the connection and statement even on error;
    // the original leaked the Connection on every call.
    try (Connection conn = ds.getConnection();
         PreparedStatement statement = conn.prepareStatement(sql)) {
        // Bug fix: the original never bound the two wildcards, so
        // executeQuery() always threw SQLException.
        statement.setString(1, artistEmail);
        statement.setString(2, title);
        statement.setInt(3, startIndex);
        try (ResultSet rs = statement.executeQuery()) {
            // Context path is included in the JSON so the javascript in each
            // artwork modal can build links to that image's critique page.
            String contextPath = request.getContextPath();
            JSONObject innerObj;
            int jsonIndex = 0;
            // The result set's cursor starts before the first row.
            while (rs.next())
            {
                innerObj = new JSONObject();
                // NOTE(review): these column names (description, image_stem,
                // image_extension) look copied from the artwork loader —
                // confirm they exist on the critique table.
                innerObj.put("title", rs.getString("title"));
                innerObj.put("description", rs.getString("description"));
                innerObj.put("contextPath", contextPath);
                String url = rs.getString("image_stem") + "." + rs.getString("image_extension");
                innerObj.put("url", url);
                obj.put(jsonIndex + "", innerObj);
                jsonIndex++;
            }
        }
        System.out.println(obj.toString());
    } catch (SQLException | JSONException e) {
        // Log and fall through: callers receive whatever was accumulated.
        e.printStackTrace();
    }
    // Bug fix: return the JSON payload instead of the empty string the
    // original always returned (its own TODO asked for JSON output).
    return obj.toString();
}
}
|
package main
import (
"fmt"
)
// main demonstrates how append grows a slice's backing array: starting from
// length 1 / capacity 4, each append past the current capacity triggers a
// reallocation, and the printed capacities show the growth steps.
func main() {
	demo := make([]int, 1, 4)
	fmt.Printf("Length is: %d.\nCapacity is: %d.\n", len(demo), cap(demo))
	for value := 1; value < 17; value++ {
		demo = append(demo, value)
		fmt.Println("Capacity is:", cap(demo))
	}
}
|
package server
import (
"fmt"
"log"
"net/http"
"time"
"github.com/gin-gonic/gin"
"github.com/shien/weightrec-backend/pkg/auth"
"github.com/shien/weightrec-backend/pkg/csvparser"
"github.com/shien/weightrec-backend/pkg/globalcfg"
)
const (
	// cookieUserInfo is the name of the cookie that carries the encoded
	// user info blob (set in /api/callback, cleared in /logout).
	cookieUserInfo = "UserInfo"
)
// NewRouter builds the gin engine with all WeightRec routes attached.
func NewRouter() *gin.Engine {
	router := gin.New()
	router.Use(gin.Logger())
	router.Use(gin.Recovery())

	// renderAuthPage renders tmpl, resolving the visitor's mail address from
	// the UserInfo cookie when present. It replaces three identical copies of
	// this logic previously inlined in the "/", "/graphs" and "/upload"
	// handlers.
	renderAuthPage := func(c *gin.Context, tmpl string) {
		// Defaults used for anonymous visitors.
		name := "My Account"
		authstatus := false
		if userinfo, err := c.Cookie(cookieUserInfo); err == nil {
			// User name is the mail address.
			mail, mailErr := auth.GetMailAddress(userinfo)
			if mailErr != nil {
				log.Println("Failed to get mail address:", mailErr)
			} else {
				name = mail
				authstatus = true
			}
		}
		c.HTML(http.StatusOK, tmpl, gin.H{
			"title":       "WeightRec",
			"mailAddress": name,
			"authstatus":  authstatus,
		})
	}

	// Liveness probe.
	router.GET("/ping", func(c *gin.Context) {
		c.JSON(200, gin.H{
			"message": "pong",
		})
	})
	router.GET("/graphs", func(c *gin.Context) {
		renderAuthPage(c, "graphs.tmpl")
	})
	router.GET("/upload", func(c *gin.Context) {
		renderAuthPage(c, "upload.tmpl")
	})
	// Kick off the OAuth flow by redirecting to the provider's login URL.
	router.GET("/api/login", func(c *gin.Context) {
		c.Redirect(http.StatusSeeOther, auth.GetLoginURL())
	})
	router.GET("/", func(c *gin.Context) {
		renderAuthPage(c, "index.tmpl")
	})
	router.GET("/login", func(c *gin.Context) {
		// Bug fix: the original tested `err != nil || userinfo != ""`, which
		// showed the login page to *logged-in* users, and it lacked a return,
		// so the redirect was always written on top of the rendered page.
		userinfo, err := c.Cookie(cookieUserInfo)
		if err != nil || userinfo == "" {
			c.HTML(http.StatusOK, "login.tmpl", gin.H{
				"title": "WeightRec Login",
			})
			return
		}
		c.Redirect(http.StatusSeeOther, "/")
	})
	router.GET("/weight/:userid/data", func(c *gin.Context) {
		// Default the start date to today when the query string omits it.
		today := time.Now()
		year, month, day := today.Date()
		date := fmt.Sprintf(
			"%d-%d-%d",
			year, month, day,
		)
		s := c.DefaultQuery("startDate", date)
		e := c.Query("endDate")
		// NOTE(review): s is used as the printf-style format string here, so
		// a '%' in startDate would be misinterpreted — confirm intent.
		c.String(http.StatusOK, s, e)
	})
	router.POST("/api/upload", func(c *gin.Context) {
		file, _, err := c.Request.FormFile("csv")
		if err != nil {
			log.Println("FormFile ", err)
			c.String(http.StatusBadRequest, "Bad Request")
			return
		}
		_, err = csvparser.Parse(file)
		if err != nil {
			log.Println("Parse Error ", err)
			c.String(http.StatusBadRequest, "Bad Request")
			return
		}
		// NOTE(review): no explicit success body is written (gin responds
		// 200 with an empty body) — confirm the frontend expects that.
	})
	router.GET("/user/:id/*action", func(c *gin.Context) {
		// Bug fix: the route declares ":id" but the original read the
		// non-existent "name" parameter, which is always empty.
		name := c.Param("id")
		action := c.Param("action")
		message := name + " is " + action
		c.String(http.StatusOK, message)
	})
	router.GET("/logout", func(c *gin.Context) {
		// Expire the auth cookie (max-age -1) and send the user home.
		domain := globalcfg.GetDomain()
		c.SetCookie(cookieUserInfo, "", -1, "/", domain, false, true)
		c.Redirect(http.StatusSeeOther, "/")
	})
	router.GET("/api/callback", func(c *gin.Context) {
		// OAuth callback: exchange the code for user info, then persist it
		// in the UserInfo cookie for one hour.
		code := c.Query("code")
		userinfo, err := auth.GetUserInfo(code)
		if err != nil {
			c.HTML(http.StatusInternalServerError, "internalservererror.tmpl", gin.H{
				"title": "Internal Server Error",
			})
			// Bug fix: without this return, the handler went on to set an
			// empty/invalid cookie and redirect despite the failure.
			return
		}
		domain := globalcfg.GetDomain()
		c.SetCookie(cookieUserInfo, userinfo, 3600, "/", domain, false, true)
		c.Redirect(http.StatusSeeOther, "/")
	})
	return router
}
|
# Build the example against liboqs (library expected in ../../build/lib) and
# run it immediately.
# NOTE(review): the output is a full executable despite the ".o" suffix —
# consider naming it "Message_lengths" instead.
g++ -o Message_lengths.o Message_lengths.cpp -L../../build/lib -loqs
./Message_lengths.o
|
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/go.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./go-debian.sh [Go version] [GOROOT] [GOPATH] [non-root user] [Add GOPATH, GOROOT to rc files flag] [Install tools flag]
# Positional arguments (all optional; see "Syntax" in the header above).
TARGET_GO_VERSION=${1:-"latest"}
TARGET_GOROOT=${2:-"/usr/local/go"}
TARGET_GOPATH=${3:-"/go"}
USERNAME=${4:-"automatic"}
UPDATE_RC=${5:-"true"}
INSTALL_GO_TOOLS=${6:-"true"}
# Abort on the first failing command.
set -e
# The script writes to system paths, so it must run as root.
if [ "$(id -u)" -ne 0 ]; then
    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
    exit 1
fi
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh
# Determine the appropriate non-root user.
# "auto"/"automatic": pick the first common dev-container user (or whichever
# account owns UID 1000) that exists, falling back to root.
# "none" or a nonexistent name: use root.
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
    USERNAME=""
    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
            USERNAME=${CURRENT_USER}
            break
        fi
    done
    if [ "${USERNAME}" = "" ]; then
        USERNAME=root
    fi
elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
    USERNAME=root
fi
# Append the given snippet to the system-wide bash rc file (and the zsh one,
# if present), unless rc updates were disabled via the UPDATE_RC argument.
updaterc() {
    if [ "${UPDATE_RC}" != "true" ]; then
        return
    fi
    echo "Updating /etc/bash.bashrc and /etc/zsh/zshrc..."
    echo -e "$1" >> /etc/bash.bashrc
    if [ -f "/etc/zsh/zshrc" ]; then
        echo -e "$1" >> /etc/zsh/zshrc
    fi
}
# Figure out correct version of a three part version number is not passed.
# Resolves the version stored in the variable NAMED by $1 against the git
# tags of repository $2, updating the variable in place (via declare -g).
#   $1 - name of the variable holding the requested version
#   $2 - git repository URL whose tags enumerate valid versions
#   $3 - tag prefix to strip (default "tags/v")
#   $4 - version separator (default ".")
#   $5 - "true" if the last version component is optional
find_version_from_git_tags() {
    local variable_name=$1
    local requested_version=${!variable_name}
    if [ "${requested_version}" = "none" ]; then return; fi
    local repository=$2
    local prefix=${3:-"tags/v"}
    local separator=${4:-"."}
    local last_part_optional=${5:-"false"}
    # NOTE(review): grep -o "." matches every CHARACTER, not dots, so this
    # count is the string length and the branch is taken for almost any
    # input. The validation at the bottom relies on version_list being
    # populated here, so "fixing" the pattern alone would break exact
    # three-part versions — confirm against the upstream script.
    if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
        local escaped_separator=${separator//./\\.}
        local last_part
        if [ "${last_part_optional}" = "true" ]; then
            last_part="(${escaped_separator}[0-9]+)?"
        else
            last_part="${escaped_separator}[0-9]+"
        fi
        local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
        # All tags matching the version pattern, newest first.
        local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
        if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
            declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
        else
            # Partial version: take the newest tag with that prefix. grep may
            # legitimately find nothing, so suspend errexit around it.
            set +e
            declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
            set -e
        fi
    fi
    # Fail with the list of valid values when resolution produced nothing or
    # something absent from the tag list.
    if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
        echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
        exit 1
    fi
    echo "${variable_name}=${!variable_name}"
}
# Run "apt-get update" only when the package lists are missing or empty.
apt_get_update_if_needed()
{
    if [ -d "/var/lib/apt/lists" ] && [ "$(ls /var/lib/apt/lists/ | wc -l)" != "0" ]; then
        echo "Skipping apt-get update."
    else
        echo "Running apt-get update..."
        apt-get update
    fi
}
# Install the given apt packages unless every one of them is already present.
check_packages() {
    if dpkg -s "$@" > /dev/null 2>&1; then
        return 0
    fi
    apt_get_update_if_needed
    apt-get -y install --no-install-recommends "$@"
}
# Keep apt non-interactive for the whole install.
export DEBIAN_FRONTEND=noninteractive
# Install curl, tar, git, other dependencies if missing
check_packages curl ca-certificates tar g++ gcc libc6-dev make pkg-config
if ! type git > /dev/null 2>&1; then
    apt_get_update_if_needed
    apt-get -y install --no-install-recommends git
fi
# Get closest match for version number specified (queries the upstream Go
# repository's tags, e.g. "latest" -> newest release).
find_version_from_git_tags TARGET_GO_VERSION "https://go.googlesource.com/go" "tags/go" "." "true"
# Map the machine architecture to the suffix used by Go download URLs.
architecture="$(uname -m)"
case $architecture in
    x86_64) architecture="amd64";;
    aarch64 | armv8*) architecture="arm64";;
    aarch32 | armv7* | armvhf*) architecture="armv6l";;
    i?86) architecture="386";;
    *) echo "(!) Architecture $architecture unsupported"; exit 1 ;;
esac
# Install Go by downloading and unpacking the official tarball into
# TARGET_GOROOT. The steps are captured in a script string so they can be
# executed as the non-root USERNAME via su.
GO_INSTALL_SCRIPT="$(cat <<EOF
set -e
echo "Downloading Go ${TARGET_GO_VERSION}..."
curl -sSL -o /tmp/go.tar.gz "https://golang.org/dl/go${TARGET_GO_VERSION}.linux-${architecture}.tar.gz"
echo "Extracting Go ${TARGET_GO_VERSION}..."
tar -xzf /tmp/go.tar.gz -C "${TARGET_GOROOT}" --strip-components=1
rm -f /tmp/go.tar.gz
EOF
)"
# Skip the download when installation was disabled ("none") or go is already
# on the PATH.
if [ "${TARGET_GO_VERSION}" != "none" ] && ! type go > /dev/null 2>&1; then
    mkdir -p "${TARGET_GOROOT}" "${TARGET_GOPATH}"
    chown -R ${USERNAME} "${TARGET_GOROOT}" "${TARGET_GOPATH}"
    su ${USERNAME} -c "${GO_INSTALL_SCRIPT}"
else
    echo "Go already installed. Skipping."
fi
# Install Go tools that are isImportant && !replacedByGopls based on
# https://github.com/golang/vscode-go/blob/0c6dce4a96978f61b022892c1376fe3a00c27677/src/goTools.ts#L188
# exception: golangci-lint is installed using their install script below.
GO_TOOLS="\
    golang.org/x/tools/gopls \
    honnef.co/go/tools/... \
    golang.org/x/lint/golint \
    github.com/mgechev/revive \
    github.com/uudashr/gopkgs/v2/cmd/gopkgs \
    github.com/ramya-rao-a/go-outline \
    github.com/go-delve/delve/cmd/dlv \
    github.com/golangci/golangci-lint/cmd/golangci-lint"
if [ "${INSTALL_GO_TOOLS}" = "true" ]; then
    echo "Installing common Go tools..."
    export PATH=${TARGET_GOROOT}/bin:${PATH}
    mkdir -p /tmp/gotools /usr/local/etc/vscode-dev-containers ${TARGET_GOPATH}/bin
    cd /tmp/gotools
    # Build in a throwaway GOPATH/GOCACHE so the user's tree stays clean.
    export GOPATH=/tmp/gotools
    export GOCACHE=/tmp/gotools/cache
    # Go tools w/module support
    export GO111MODULE=on
    # Install each tool; the full transcript is kept for troubleshooting.
    (echo "${GO_TOOLS}" | xargs -n 1 go get -v )2>&1 | tee -a /usr/local/etc/vscode-dev-containers/go.log
    # Move Go tools into path and clean up
    mv /tmp/gotools/bin/* ${TARGET_GOPATH}/bin/
    rm -rf /tmp/gotools
    chown -R ${USERNAME} "${TARGET_GOPATH}"
fi
# Add GOPATH variable and bin directory into PATH in bashrc/zshrc files
# (unless disabled). The PATH checks keep repeated runs from appending
# duplicate entries.
updaterc "$(cat << EOF
export GOPATH="${TARGET_GOPATH}"
if [[ "\${PATH}" != *"\${GOPATH}/bin"* ]]; then export PATH="\${PATH}:\${GOPATH}/bin"; fi
export GOROOT="${TARGET_GOROOT}"
if [[ "\${PATH}" != *"\${GOROOT}/bin"* ]]; then export PATH="\${PATH}:\${GOROOT}/bin"; fi
EOF
)"
echo "Done!"
|
/// Transmits every packet in `packets` from `sender_id` to `receiver_id`,
/// pausing 10ms after each send to mimic network latency, and returns the
/// total count reported by `send_packet`.
fn simulate_packet_transmission<'a, I>(packets: I, sender_id: usize, receiver_id: usize) -> usize
where
    I: Iterator<Item = &'a [u8]>
{
    packets
        .map(|payload| {
            let sent = send_packet(sender_id, receiver_id, payload);
            // Introduce network delay between transmissions.
            std::thread::sleep(std::time::Duration::from_millis(10));
            sent
        })
        .sum()
}
/// Simulates transmitting `packet` between two endpoints.
///
/// For simulation purposes every byte is assumed delivered, so the returned
/// "number of packets successfully sent" is simply the payload length.
/// The endpoint ids are unused by the stub (underscore-prefixed to silence
/// unused-variable warnings); replace this with actual packet sending logic.
fn send_packet(_sender_id: usize, _receiver_id: usize, packet: &[u8]) -> usize {
    packet.len()
}
|
<gh_stars>0
/*
* Copyright (C) 2005-2017 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbHooverInstanceFilter_hxx
#define otbHooverInstanceFilter_hxx
#include "otbHooverInstanceFilter.h"
#include "otbMacro.h"
namespace otb
{
/** Constructor: the filter takes two inputs (ground truth and machine
 *  segmentation label maps) and produces two outputs. Output 0 is created by
 *  the superclass; the second output is added here. All cardinality arrays,
 *  the Hoover confusion matrix and the mean scores start out empty/zero —
 *  the matrix must be supplied by the caller before the update. */
template <class TLabelMap>
HooverInstanceFilter<TLabelMap>
::HooverInstanceFilter() : m_NumberOfRegionsGT(0), m_NumberOfRegionsMS(0), m_Threshold(0.8), m_UseExtendedAttributes(false)
{
  this->SetNumberOfRequiredInputs(2);
  this->SetNumberOfRequiredOutputs(2);
  typename LabelMapType::Pointer secondOutput = LabelMapType::New();
  this->AddOutput(secondOutput);
  m_HooverMatrix.SetSize(0, 0);
  m_CardRegGT.SetSize(0);
  m_CardRegMS.SetSize(0);
  m_LabelsGT.resize(0);
  m_MeanRC = static_cast<AttributesValueType>(0);
  m_MeanRF = static_cast<AttributesValueType>(0);
  m_MeanRA = static_cast<AttributesValueType>(0);
  m_MeanRM = static_cast<AttributesValueType>(0);
  m_MeanRN = static_cast<AttributesValueType>(0);
}
/** Set the ground truth label map (filter input 0). */
template <class TLabelMap>
void HooverInstanceFilter<TLabelMap>
::SetGroundTruthLabelMap(const LabelMapType *gt)
{
  this->SetInput(0, gt);
}
/** Set the machine segmentation label map (filter input 1). */
template <class TLabelMap>
void HooverInstanceFilter<TLabelMap>
::SetMachineSegmentationLabelMap(const LabelMapType *ms)
{
  this->SetInput(1, ms);
}
/** Get the input ground truth label map (input 0, read-only). */
template <class TLabelMap>
const TLabelMap* HooverInstanceFilter<TLabelMap>
::GetGroundTruthLabelMap()
{
  return this->GetInput(0);
}
/** Get the input machine segmentation label map (input 1).
 *  Returned non-const: the in-place path may modify/release this input. */
template <class TLabelMap>
TLabelMap* HooverInstanceFilter<TLabelMap>
::GetMachineSegmentationLabelMap()
{
  return const_cast<TLabelMap*> (this->GetInput(1));
}
/** Get the output ground truth label map (output 0), enriched with Hoover
 *  attributes after the filter runs. */
template <class TLabelMap>
TLabelMap* HooverInstanceFilter<TLabelMap>
::GetOutputGroundTruthLabelMap()
{
  return this->GetOutput(0);
}
/** Get the output machine segmentation label map (output 1), enriched with
 *  Hoover attributes after the filter runs. */
template <class TLabelMap>
TLabelMap* HooverInstanceFilter<TLabelMap>
::GetOutputMachineSegmentationLabelMap()
{
  return this->GetOutput(1);
}
/** Allocate both outputs. The superclass handles output 0; this override
 *  also populates output 1 from input 1 — by grafting when running in place
 *  (cheap, shares data), otherwise by deep-copying every label object. */
template <class TLabelMap>
void HooverInstanceFilter<TLabelMap>
::AllocateOutputs()
{
  Superclass::AllocateOutputs();
  if( this->GetInPlace() && this->CanRunInPlace() )
    {
    // In-place: graft the second input onto the second output, preserving
    // the output's previously computed largest possible region.
    LabelMapPointer secondInput = const_cast<TLabelMap *>(this->GetMachineSegmentationLabelMap());
    if( secondInput )
      {
      ImageRegionType region = this->GetOutput(1)->GetLargestPossibleRegion();
      this->GraftNthOutput( 1 , secondInput );
      this->GetOutput(1)->SetRegions( region );
      }
    }
  else
    {
    // copying the second input : machine segmentation
    const TLabelMap * inputMS = this->GetInput(1);
    TLabelMap * outputMS = this->GetOutput(1);
    assert( inputMS != NULL );
    assert( outputMS != NULL );
    outputMS->SetBackgroundValue( inputMS->GetBackgroundValue() );
    // Deep-copy each label object so the input stays untouched.
    ConstIteratorType it = ConstIteratorType( inputMS );
    while( !it.IsAtEnd() )
      {
      const LabelObjectType * labeObject = it.GetLabelObject();
      assert( labeObject != NULL );
      assert( labeObject->GetLabel() == it.GetLabel() );
      typename LabelObjectType::Pointer newLabelObject = LabelObjectType::New();
      newLabelObject->CopyAllFrom( labeObject );
      outputMS->AddLabelObject( newLabelObject );
      ++it;
      }
    }
}
/** Release input data after the update. The superclass releases input 0;
 *  in-place execution additionally releases input 1, whose data now lives
 *  in (was grafted onto) output 1. */
template <class TLabelMap>
void HooverInstanceFilter<TLabelMap>
::ReleaseInputs()
{
  Superclass::ReleaseInputs();
  if( this->GetInPlace() )
    {
    // Release second input
    TLabelMap * ptr = const_cast<TLabelMap*>( this->GetInput(1) );
    if( ptr )
      {
      ptr->ReleaseData();
      }
    }
}
/** Pre-threading setup: validates the inputs against the user-supplied
 *  Hoover confusion matrix, sizes and fills the per-region cardinality
 *  arrays, resets any Hoover attributes left over on the machine
 *  segmentation regions from a previous run, and caches the ground truth
 *  label list for the threaded pass. */
template <class TLabelMap>
void HooverInstanceFilter<TLabelMap>
::BeforeThreadedGenerateData()
{
  // first : call superclass method
  Superclass::BeforeThreadedGenerateData();
  m_NumberOfRegionsGT = this->GetGroundTruthLabelMap()->GetNumberOfLabelObjects();
  m_NumberOfRegionsMS = this->GetMachineSegmentationLabelMap()->GetNumberOfLabelObjects();
  if (m_NumberOfRegionsGT == 0 || m_NumberOfRegionsMS == 0)
    {
    itkExceptionMacro("Empty label map");
    }
  //Check the matrix size : must be (number of GT regions) x (number of MS regions)
  if (m_NumberOfRegionsGT != m_HooverMatrix.Rows() || m_NumberOfRegionsMS != m_HooverMatrix.Cols())
    {
    itkExceptionMacro("The given Hoover confusion matrix ("<<m_HooverMatrix.Rows()<<" x "<<m_HooverMatrix.Cols() <<
                      ") doesn't match with the input label maps ("<<m_NumberOfRegionsGT<<" x "<<m_NumberOfRegionsMS<<")");
    }
  //Init cardinalities lists
  m_CardRegGT.SetSize(m_NumberOfRegionsGT);
  m_CardRegGT.Fill(0);
  m_CardRegMS.SetSize(m_NumberOfRegionsMS);
  m_CardRegMS.Fill(0);
  //Fill cardinalities list for MS (GT cardinalities are filled per-object in
  //ThreadedProcessLabelObject)
  unsigned long i = 0;
  IteratorType iter = IteratorType( this->GetMachineSegmentationLabelMap() );
  typename LabelObjectType::Pointer blankRegion;
  while ( !iter.IsAtEnd() )
    {
    LabelObjectType *regionMS = iter.GetLabelObject();
    m_CardRegMS[i] = regionMS->Size();
    if (m_CardRegMS[i] == 0)
      {
      otbWarningMacro("Region "<<i<<" in machine segmentation label map is empty");
      }
    // reset any Hoover attribute already present
    regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RC), 0.0);
    regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RF), 0.0);
    regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RA), 0.0);
    regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RN), 0.0);
    if (m_UseExtendedAttributes)
      {
      // Rebuild the attribute set without the "HooverInstance_Ext_" entries:
      // copy every non-extended attribute to a blank region, then copy the
      // blank region's attributes back.
      blankRegion = LabelObjectType::New();
      blankRegion->SetLabel(regionMS->GetLabel());
      std::vector< std::string > attKeys = regionMS->GetAvailableAttributes();
      for (unsigned int k=0; k<attKeys.size(); k++)
        {
        if (attKeys[k].find("HooverInstance_Ext_") == 0)
          {
          continue;
          }
        else
          {
          blankRegion->SetAttribute(attKeys[k].c_str(), regionMS->GetAttribute(attKeys[k].c_str()));
          }
        }
      regionMS->CopyAttributesFrom(blankRegion);
      }
    i++;
    ++iter;
    }
  // Cache the GT labels so the threaded pass can map a label to its matrix row.
  m_LabelsGT = this->GetGroundTruthLabelMap()->GetLabels();
}
/** Threaded pass over the ground truth label objects: records each region's
 *  cardinality in the row slot matching its label, and resets any Hoover
 *  attributes (plus extended "HooverInstance_Ext_*" attributes when enabled)
 *  left over from a previous run. */
template <class TLabelMap>
void HooverInstanceFilter<TLabelMap>
::ThreadedProcessLabelObject( LabelObjectType * labelObject )
{
  // Find the index corresponding to the current label object in GT
  // (linear search over the cached label list).
  unsigned long currentRegionGT = 0;
  LabelType currentLabelGT = labelObject->GetLabel();
  for (unsigned long k=0; k<m_NumberOfRegionsGT; k++)
    {
    if (currentLabelGT == m_LabelsGT[k])
      {
      currentRegionGT = k;
      break;
      }
    }
  m_CardRegGT[currentRegionGT] = labelObject->Size();
  if (m_CardRegGT[currentRegionGT] == 0)
    {
    otbWarningMacro("Region "<<currentRegionGT<<" in ground truth label map is empty");
    }
  // reset any Hoover attribute already present
  labelObject->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RC), 0.0);
  labelObject->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RF), 0.0);
  labelObject->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RA), 0.0);
  labelObject->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RM), 0.0);
  if (m_UseExtendedAttributes)
    {
    // Same scheme as in BeforeThreadedGenerateData: rebuild the attribute
    // set without the "HooverInstance_Ext_" entries via a blank region.
    typename LabelObjectType::Pointer blankRegion;
    blankRegion = LabelObjectType::New();
    blankRegion->SetLabel(labelObject->GetLabel());
    std::vector< std::string > attKeys = labelObject->GetAvailableAttributes();
    for (unsigned int k=0; k<attKeys.size(); k++)
      {
      if (attKeys[k].find("HooverInstance_Ext_") == 0)
        {
        continue;
        }
      else
        {
        blankRegion->SetAttribute(attKeys[k].c_str(), labelObject->GetAttribute(attKeys[k].c_str()));
        }
      }
    labelObject->CopyAttributesFrom(blankRegion);
    }
}
template <class TLabelMap>
void HooverInstanceFilter<TLabelMap>
::AfterThreadedGenerateData()
{
LabelMapType* outGT = this->GetOutput(0);
LabelMapType* outMS = this->GetOutput(1);
// Iterators on label object container (to gain efficiency when accessing them)
IteratorType iterGT = IteratorType( outGT );
IteratorType iterMS = IteratorType( outMS );
// Set of classified regions
RegionSetType GTindices;
RegionSetType MSindices;
// flags to detect empty rows or columns
bool IsRowEmpty;
bool IsColEmpty;
// temporary buffers to compute average scores
double bufferRC = 0.0;
double bufferRF = 0.0;
double bufferRA = 0.0;
double bufferRM = 0.0;
double bufferRN = 0.0;
double areaGT = 0.0;
double areaMS = 0.0;
// first pass : loop on GT regions first
for(unsigned int row=0; row<m_NumberOfRegionsGT; row++, iterGT++)
{
double sumOS = 0.0; // sum of coefT for potential over-segmented regions
double sumScoreRF = 0.0; // temporary sum of (Tij x (Tij - 1)) terms for the RF score
RegionSetType regionsOfMS; // stores region indexes
ObjectVectorType objectsOfMS; // stores region pointers
double tGT = static_cast<double>(m_CardRegGT[row]) * m_Threshold; // card Ri x t
IsRowEmpty = true;
iterMS.GoToBegin();
for(unsigned int col=0; col<m_NumberOfRegionsMS; col++, iterMS++)
{
// Tij
double coefT = static_cast<double>(m_HooverMatrix(row, col));
if(coefT < 0.5)
{
// the regions Ri and ^Rj have an empty intersection : we can jump to the next matrix cell
continue;
}
else
{
IsRowEmpty = false;
}
double tMS = static_cast<double>(m_CardRegMS[col]) * m_Threshold; // card Rj x t
otbDebugMacro(<< "* coef[" << row << "," << col << "]=" << coefT << " #tGT=" << tGT << " #tMS=" << tMS);
// Looking for Correct Detection and Over Segmentation (both can happen for the same GT region)
if(coefT>=tMS)
{
if(coefT>=tGT)
{
otbDebugMacro(<< "1 coef[" << row << "," << col << "]=" << coefT << " #tGT=" << tGT << " #tMS=" << tMS << " -> CD");
LabelObjectType *regionGT = iterGT.GetLabelObject();
LabelObjectType *regionMS = iterMS.GetLabelObject();
double scoreRC = m_Threshold * (std::min(coefT / tGT, coefT / tMS));
bufferRC += scoreRC * static_cast<double>(m_CardRegGT[row]);
regionGT->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RC), static_cast<AttributesValueType>(scoreRC));
regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RC), static_cast<AttributesValueType>(scoreRC));
if (m_UseExtendedAttributes)
{
regionGT->SetAttribute(GetNameFromAttribute(ATTRIBUTE_CD), static_cast<AttributesValueType>(regionMS->GetLabel()));
regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_CD), static_cast<AttributesValueType>(regionGT->GetLabel()));
}
GTindices.insert(row);
MSindices.insert(col);
}
else
{
otbDebugMacro(<< "2 coef[" << row << "," << col << "]=" << coefT << " #tGT=" << tGT << " #tMS=" << tMS << " -> OSmaybe");
}
objectsOfMS.push_back(iterMS.GetLabelObject()); // candidate region for over-segmentation
regionsOfMS.insert(col);
sumOS += coefT;
sumScoreRF += coefT*(coefT-1.0);
}
} // end of column loop
otbDebugMacro(<< "end of line " << row << "; sumOS=" << sumOS << " " << regionsOfMS.size() << " of MS region");
if(sumOS>=tGT && sumOS>0)
{
// CD
if(regionsOfMS.size()==1)
{
otbDebugMacro(<< "CD only");
}
// OS
else if(regionsOfMS.size()>1)
{
otbDebugMacro(<< row << " OS by ");
LabelObjectType *regionGT = iterGT.GetLabelObject();
double cardRegGT = static_cast<double>(m_CardRegGT[row]);
double scoreRF = 1.0 - sumScoreRF / (cardRegGT * (cardRegGT - 1.0));
bufferRF += scoreRF * cardRegGT;
regionGT->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RF), static_cast<AttributesValueType>(scoreRF));
unsigned int indexOS=1;
for(typename ObjectVectorType::iterator it=objectsOfMS.begin(); it!=objectsOfMS.end(); ++it)
{
LabelObjectType *regionMS = *it;
std::ostringstream attribute;
attribute << ATTRIBUTE_OS << "_" << indexOS;
regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RF), static_cast<AttributesValueType>(scoreRF));
if (m_UseExtendedAttributes)
{
regionGT->SetAttribute(attribute.str().c_str(), static_cast<AttributesValueType>(regionMS->GetLabel()));
regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_OS), static_cast<AttributesValueType>(regionGT->GetLabel()));
}
indexOS++;
}
GTindices.insert(row);
for(RegionSetType::iterator it=regionsOfMS.begin(); it!=regionsOfMS.end(); ++it)
{
MSindices.insert(*it);
otbDebugMacro(<< *it << " ");
}
}
else
{
otbDebugMacro(<< "No MS region present in potential OS instance");
}
}
// check for empty rows : they should be ignored and have no Hoover attribute
if (IsRowEmpty)
{
GTindices.insert(row);
}
else
{
areaGT += static_cast<double>(m_CardRegGT[row]);
}
} // end of line loop
// second pass : loop on MS regions first
iterMS.GoToBegin();
for(unsigned int col=0; col<m_NumberOfRegionsMS; col++, iterMS++)
{
double sumUS = 0.0; // sum of coefT for potential under-segmented regions
double sumScoreUS = 0.0; // temporary sum of the (Tij x (Tij - 1)) for RA score
double sumCardUS = 0.0; // temporary sum of under segmented region sizes
RegionSetType regionsOfGT; // stores region indexes
ObjectVectorType objectsOfGT; // stores region pointers
double tMS = static_cast<double>(m_CardRegMS[col]) * m_Threshold;
IsColEmpty = true;
iterGT.GoToBegin();
for(unsigned int row=0; row<m_NumberOfRegionsGT; row++, iterGT++)
{
double coefT = static_cast<double>(m_HooverMatrix(row, col));
if(coefT < 0.5)
{
// the regions Ri and ^Rj have an empty intersection : we can jump to the next matrix cell
continue;
}
else
{
IsColEmpty = false;
}
double tGT = static_cast<double>(m_CardRegGT[row]) * m_Threshold;
// Looking for Under-Segmented regions
if(coefT>=tGT)
{
otbDebugMacro(<< "3 coef[" << row << "," << col << "]=" << coefT << " #tGT=" << tGT << " #tMS=" << tMS << " -> USmaybe");
regionsOfGT.insert(row);
objectsOfGT.push_back(iterGT.GetLabelObject());
sumUS += coefT;
sumScoreUS += coefT * (coefT - 1.0);
sumCardUS += static_cast<double>(m_CardRegGT[row]);
}
} // end of line loop
// US
if(sumUS>=tMS)
{
if(regionsOfGT.size()==1)
{
otbDebugMacro(<< "CD already registered");
}
else if(regionsOfGT.size()>1) // Under Segmentation
{
LabelObjectType *regionMS = iterMS.GetLabelObject();
double scoreRA = 1.0 - sumScoreUS / (sumCardUS * (sumCardUS - 1.0));
bufferRA += scoreRA * sumCardUS;
regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RA), static_cast<AttributesValueType>(scoreRA));
unsigned int indexUS=1;
for(typename ObjectVectorType::iterator it=objectsOfGT.begin(); it!=objectsOfGT.end(); ++it)
{
LabelObjectType *regionGT = *it;
std::ostringstream attribute;
attribute << ATTRIBUTE_US << "_" << indexUS;
regionGT->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RA), static_cast<AttributesValueType>(scoreRA));
if (m_UseExtendedAttributes)
{
regionMS->SetAttribute(attribute.str(), static_cast<AttributesValueType>(regionGT->GetLabel()));
regionGT->SetAttribute(GetNameFromAttribute(ATTRIBUTE_US), static_cast<AttributesValueType>(regionMS->GetLabel()));
}
indexUS++;
}
MSindices.insert(col);
for(RegionSetType::iterator it=regionsOfGT.begin(); it!=regionsOfGT.end(); ++it)
{
GTindices.insert(*it);
otbDebugMacro(<< *it << " ");
}
otbDebugMacro(<< "US " << col);
}
else
{
otbDebugMacro(<< "No GT region present in potential US instance.");
}
}
// check for empty columns (MS region that doesn't intersect any GT region)
if (IsColEmpty)
{
MSindices.insert(col);
}
else
{
areaMS += static_cast<double>(m_CardRegMS[col]);
}
} // end of column loop
// check for Missed regions (unregistered regions in GT)
iterGT.GoToBegin();
for(unsigned int i=0; i<m_NumberOfRegionsGT; ++i , ++iterGT)
{
if(GTindices.count(i)==0)
{
otbDebugMacro(<< "M " << i);
LabelObjectType *regionGT = iterGT.GetLabelObject();
bufferRM += static_cast<double>(m_CardRegGT[i]);
regionGT->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RM), 1.0);
if (m_UseExtendedAttributes)
{
regionGT->SetAttribute(GetNameFromAttribute(ATTRIBUTE_M),
static_cast<AttributesValueType>(regionGT->GetLabel()));
}
}
}
// check for Noise regions (unregistered regions in MS)
iterMS.GoToBegin();
for(unsigned int i=0; i<m_NumberOfRegionsMS; ++i , ++iterMS)
{
if(MSindices.count(i)==0)
{
LabelObjectType *regionMS = iterMS.GetLabelObject();
bufferRN += static_cast<double>(m_CardRegMS[i]);
regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_RN), 1.0);
if (m_UseExtendedAttributes)
{
regionMS->SetAttribute(GetNameFromAttribute(ATTRIBUTE_N),
static_cast<AttributesValueType>(regionMS->GetLabel()));
}
}
}
// Compute average scores
m_MeanRC = static_cast<AttributesValueType>(bufferRC / areaGT);
m_MeanRF = static_cast<AttributesValueType>(bufferRF / areaGT);
m_MeanRA = static_cast<AttributesValueType>(bufferRA / areaGT);
m_MeanRM = static_cast<AttributesValueType>(bufferRM / areaGT);
m_MeanRN = static_cast<AttributesValueType>(bufferRN / areaMS);
}
}
#endif
|
<gh_stars>1-10
module Tracksale
  # One survey answer returned by the Tracksale API.
  class Answer
    # -1 asks the API for an unbounded page size.
    LIMIT = -1

    attr_accessor :time, :type, :name,
                  :email, :identification, :phone,
                  :nps_answer, :last_nps_answer, :nps_comment,
                  :campaign_name, :campaign_code, :id,
                  :deadline, :elapsed_time, :dispatch_time,
                  :reminder_time, :status, :tags,
                  :categories, :justifications

    # Resolves the campaign this answer belongs to via its code.
    def campaign
      Tracksale::Campaign.find_by_code(campaign_code)
    end

    class << self
      # Fetches every answer in the window (default: yesterday..tomorrow)
      # and wraps each raw hash in an Answer.
      def all(start_time = (Time.now - 86_400), end_time = (Time.now + 86_400))
        raw_all(start_time, end_time).map do |raw|
          create_from_response(raw)
        end
      end

      # Builds an Answer from a single raw API hash.
      def create_from_response(raw_response)
        answer = new
        answer.time = Time.at(raw_response['time'].to_i)
        # Plain pass-through fields are assigned via their writers.
        %w[type name email identification phone nps_answer
           last_nps_answer nps_comment campaign_name campaign_code id
           deadline elapsed_time dispatch_time reminder_time
           status].each do |field|
          answer.public_send("#{field}=", raw_response[field])
        end
        answer.tags = convert_tags(raw_response['tags'])
        answer.categories = raw_response['categories'].map { |c| c['name'] }
        answer.justifications = convert_justif(raw_response['justifications'])
        answer
      end

      def raw_all(start_time = (Time.now - 86_400), end_time = (Time.now + 86_400))
        from = start_time.strftime('%Y-%m-%d')
        upto = end_time.strftime('%Y-%m-%d')
        client.get("report/answer?tags=true&limit=#{LIMIT}&start=#{from}&end=#{upto}")
      end

      def client
        Tracksale.configuration.client.new
      end

      # Collapses [{'name' => n, 'value' => v}, ...] into one hash.
      # Mirrors the original reduce(&:merge): an empty list yields nil.
      def convert_tags(tags)
        tags.map { |tag| { tag['name'] => tag['value'] } }.reduce { |acc, pair| acc.merge(pair) }
      end

      # Justification names/children arrive JSON-encoded; fall back to the
      # raw values when they are plain strings.
      def convert_justif(multiple_answers)
        multiple_answers.map do |single|
          begin
            {
              JSON.parse(single['name']).values.first =>
                single['children'].map { |c| JSON.parse(c).values.first }
            }
          rescue JSON::ParserError
            { single['name'] => single['children'] }
          end
        end
      end
    end
  end
end
|
def get_max(nums):
    """Return the largest element of a non-empty sequence.

    Args:
        nums: A non-empty sequence of mutually comparable values.

    Returns:
        The maximum element of ``nums``.

    Raises:
        ValueError: If ``nums`` is empty (the original implementation
            crashed with a bare IndexError instead).
    """
    if not nums:
        raise ValueError("get_max() arg is an empty sequence")
    max_num = nums[0]
    # The first element seeded max_num, so start from the second.
    for num in nums[1:]:
        if num > max_num:
            max_num = num
    return max_num
if __name__ == '__main__':
    # Quick demo run: print the maximum of a small sample list.
    sample = [2, 8, 5, 4, 1, 9]
    print("Maximum value:", get_max(sample))
|
// 1071. 字符串的最大公因子
// https://leetcode-cn.com/problems/greatest-common-divisor-of-strings/
package question1013
import "testing"
// Test_gcdOfStrings verifies gcdOfStrings against the LeetCode 1071 examples.
// The previous version only logged comparison results via t.Log, so a
// regression could never fail the test; t.Errorf makes each case assert.
func Test_gcdOfStrings(t *testing.T) {
	cases := []struct {
		str1, str2, want string
	}{
		{"ABCABC", "ABC", "ABC"},
		{"ABABAB", "ABAB", "AB"},
		{"LEET", "CODE", ""},
	}
	for _, c := range cases {
		if got := gcdOfStrings(c.str1, c.str2); got != c.want {
			t.Errorf("gcdOfStrings(%q, %q) = %q, want %q", c.str1, c.str2, got, c.want)
		}
	}
}
// gcdOfStrings returns the largest string that divides both inputs: the
// prefix of length gcd(len(str1), len(str2)) when it tiles both strings,
// and the empty string otherwise.
func gcdOfStrings(str1 string, str2 string) string {
	candidate := str1[:_gcd(len(str1), len(str2))]
	if !check(candidate, str1) || !check(candidate, str2) {
		return ""
	}
	return candidate
}
// check reports whether s equals t repeated a whole number of times.
// Instead of building the repeated string (as the original did), it
// compares s segment by segment against t.
func check(t, s string) bool {
	if len(s)%len(t) != 0 {
		return false
	}
	for i := 0; i < len(s); i += len(t) {
		if s[i:i+len(t)] != t {
			return false
		}
	}
	return true
}
// _gcd computes the greatest common divisor with the iterative Euclidean
// algorithm (the original recursed); it stops as soon as x%y is not
// positive and returns y, exactly as the recursive form did.
func _gcd(x, y int) int {
	for x%y > 0 {
		x, y = y, x%y
	}
	return y
}
|
#!/bin/bash
# Geolocate every established cardano-node peer connection on this host
# and append one key=value record per peer to /tmp/ip2trace_in.log
# (inbound) and /tmp/ip2trace_out.log (outbound).
export CNODE_HOME=/opt/cardano/cnode
# Reset output logs in place without recreating the files.
truncate -s 0 /tmp/ip2trace_out.log;
truncate -s 0 /tmp/ip2trace_in.log;
# Local identity: hostname, eth0 IPv4 address, and the port the running
# "cardano-node run" process was started with.
pHOST=$HOSTNAME
pIP=$(ifconfig eth0 | grep inet | grep -v inet6 | awk '{print $2}')
pPORT=$(ps ax | grep "cardano-node run" | grep -v grep |sed 's/[^ ].*port //' | awk '{print $1}')
# Split established TCP peers: inbound vs outbound by whether the local
# endpoint is $pIP:$pPORT, and remote vs local by the "172" filter
# (presumably meant to exclude 172.x private addresses -- TODO confirm;
# a plain 'grep 172' also matches 172 anywhere in the address).
netstat -nt | grep tcp | grep EST | grep "$pIP:$pPORT" | awk '{ print $5 }' | cut -d ':' -f 1 | grep -v 172 > /tmp/iptrace_list_in.csv
netstat -nt | grep tcp | grep EST | grep "$pIP:$pPORT" | awk '{ print $5 }' | cut -d ':' -f 1 | grep 172 > /tmp/iptrace_list_in_local.csv
netstat -nt | grep tcp | grep EST | grep -v "$pIP:$pPORT" | awk '{ print $5 }' | cut -d ':' -f 1 | grep -v 172 > /tmp/iptrace_list_out.csv
netstat -nt | grep tcp | grep EST | grep -v "$pIP:$pPORT" | awk '{ print $5 }' | cut -d ':' -f 1 | grep 172 > /tmp/iptrace_list_out_local.csv
sleep 3 2>&1;
# Resolve the remote (non-local) peer lists to geolocation records.
/usr/local/bin/ip2location -list /tmp/iptrace_list_in.csv -t all > /tmp/ip2trace_list_in.plog
sleep 2;
/usr/local/bin/ip2location -list /tmp/iptrace_list_out.csv -t all > /tmp/ip2trace_list_out.plog
LinesIN=$(cat /tmp/ip2trace_list_in.plog | wc -l)
LinesOUT=$(cat /tmp/ip2trace_list_out.plog | wc -l)
timestamp=$(date +%D)
time=$(date +%T)
# Reformat each geolocation line into space-separated key=value pairs
# prefixed with timestamp/host/ip/port metadata, then append to the logs.
for ((i=1;i<=$LinesIN;i++)); do ADD=$(sed -n "$i"p /tmp/ip2trace_list_in.plog); echo "timestamp=$timestamp,time=$time,pHOST=$pHOST,pIP=$pIP,pPORT=$pPORT,app=$ADD" | sed s/" country_long"/",country_long"/g | sed s/" "/"_"/g | sed s/","/" "/g | sed s/"\""/""/g >> /tmp/ip2trace_in.log; done
for ((i=1;i<=$LinesOUT;i++)); do ADD=$(sed -n "$i"p /tmp/ip2trace_list_out.plog); echo "timestamp=$timestamp,time=$time,pHOST=$pHOST,pIP=$pIP,pPORT=$pPORT,app=$ADD" | sed s/" country_long"/",country_long"/g | sed s/" "/"_"/g | sed s/","/" "/g | sed s/"\""/""/g >> /tmp/ip2trace_out.log; done
|
<filename>rollup-plugins/export-in-place.js
/**
* Copyright 2019 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// This plugin is a workaround for a suboptimal format of Rollup
// exports in ES module mode, where it produces code like:
//
// const A = 1, B = 2, ...;
// export { A, B, ... };
//
// But for optimal size we can inline exports back as:
//
// export const A = 1, B = 2, ...;
//
// For now this plugin expects this specific form of exports
// and isn't meant for general use, but still provides helpful size
// optimisation for this project (~7% gzipped).
import MagicString from "magic-string";
import assert from "assert";
// Rollup plugin: rewrite the emitted `const A = …; export { A };` pair
// into a single `export const A = …;` statement to shrink the bundle.
// Expects exactly that chunk shape; anything else falls through
// unchanged (with a warning).
export default function() {
  return {
    name: "export-in-place",
    renderChunk(source) {
      try {
        const tree = this.parse(source);
        // The chunk must be exactly: one variable declaration followed
        // by one local export list. Bail out (via catch) otherwise.
        assert.strictEqual(
          tree.body.length,
          2,
          "Bundle should have only two items at the top level."
        );
        const [declaration, exports] = tree.body;
        assert.strictEqual(
          declaration.type,
          "VariableDeclaration",
          "First top-level item should be a variable declaration."
        );
        assert.strictEqual(
          exports.type,
          "ExportNamedDeclaration",
          "Second top-level item should be an export declaration."
        );
        assert.strictEqual(
          exports.declaration,
          null,
          "Export declaration should contain a list of items."
        );
        assert.strictEqual(
          exports.source,
          null,
          "Export declaration should export local items."
        );
        assert.strictEqual(
          declaration.declarations.length,
          exports.specifiers.length,
          "List of exports should contain as many items as there are variables."
        );
        // Map each local name to its exported alias, then rewrite every
        // declarator identifier in place.
        const magic = new MagicString(source);
        const aliases = new Map();
        for (const spec of exports.specifiers) {
          aliases.set(spec.local.name, spec.exported.name);
        }
        for (const { id } of declaration.declarations) {
          const alias = aliases.get(id.name);
          // Only declared variables may appear in the export list.
          assert(
            alias,
            `Export declaration for ${id.name} does not match a local variable.`
          );
          aliases.delete(id.name);
          magic.overwrite(id.start, id.end, alias);
        }
        // Drop the export list and prefix the declaration with `export`.
        return `export ${magic.slice(declaration.start, declaration.end)}`;
      } catch (e) {
        console.warn("Could not inline exports:", e);
        return source;
      }
    }
  };
}
|
package ch.raiffeisen.openbank.branch.controller.api;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;
/**
 * Postal address of a branch, modelled after the Open Banking
 * "PostalAddress" structure.
 *
 * <p>{@code addressLines} is backed by a {@link LinkedHashSet} so the
 * free-form address lines keep their insertion order when iterated. The
 * previous {@code HashSet} iterated them in arbitrary order, which
 * scrambles a multi-line address when it is rendered line by line.</p>
 */
public class PostalAddress {

    /** Free-form address lines, de-duplicated, in insertion order. */
    private Set<String> addressLines = new LinkedHashSet<>();

    /** Building number within the street. */
    private String buildingNumber;

    /** Street name. */
    private String streetName;

    /** Town or city name. */
    private String townName;

    /** Region / state / county subdivision of the country. */
    private String countrySubDivision;

    /** Country (format not enforced here). */
    private String country;

    /** Postal / ZIP code. */
    private String postCode;

    /** @return the mutable, insertion-ordered set of address lines */
    public Set<String> getAddressLines() {
        return addressLines;
    }

    /** @param addressLines replacement collection of address lines */
    public void setAddressLines(Set<String> addressLines) {
        this.addressLines = addressLines;
    }

    /** @return the building number */
    public String getBuildingNumber() {
        return buildingNumber;
    }

    /** @param buildingNumber the building number */
    public void setBuildingNumber(String buildingNumber) {
        this.buildingNumber = buildingNumber;
    }

    /** @return the street name */
    public String getStreetName() {
        return streetName;
    }

    /** @param streetName the street name */
    public void setStreetName(String streetName) {
        this.streetName = streetName;
    }

    /** @return the town name */
    public String getTownName() {
        return townName;
    }

    /** @param townName the town name */
    public void setTownName(String townName) {
        this.townName = townName;
    }

    /** @return the country subdivision (region/state) */
    public String getCountrySubDivision() {
        return countrySubDivision;
    }

    /** @param countrySubDivision the country subdivision (region/state) */
    public void setCountrySubDivision(String countrySubDivision) {
        this.countrySubDivision = countrySubDivision;
    }

    /** @return the country */
    public String getCountry() {
        return country;
    }

    /** @param country the country */
    public void setCountry(String country) {
        this.country = country;
    }

    /** @return the postal code */
    public String getPostCode() {
        return postCode;
    }

    /** @param postCode the postal code */
    public void setPostCode(String postCode) {
        this.postCode = postCode;
    }
}
|
#!/bin/bash
# Helpers that open an interactive shell inside the project's Docker
# images, bind-mounting the local ProcessManager.py over the copy baked
# into each image so local edits are visible without a rebuild.
#
# Usage: ./<script> tesseract | recogniser

tesseract() {
  # $(pwd) replaces the legacy backtick substitution; quoting the -v
  # argument tolerates spaces in the working-directory path.
  docker run --rm -it -v \
    "$(pwd)/app/base/ProcessManager.py:/tesseract/base/ProcessManager.py" \
    personal-expenses-accounting_tesseract /bin/bash
}

recogniser() {
  docker run --rm -it -v \
    "$(pwd)/app/base/ProcessManager.py:/recogniser/base/ProcessManager.py" \
    personal-expenses-accounting_recogniser /bin/bash
}

# Dispatch: the first CLI argument names the function to run.
"$@"
|
#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>
// Function to convert string to uppercase.
// Takes the argument by value on purpose so the caller's string is
// left untouched; returns the uppercased copy.
std::string toUpperString(std::string str)
{
    // Cast through unsigned char before calling std::toupper: passing a
    // negative plain-char value (e.g. from Latin-1 text) is undefined
    // behavior, which the old bare ::toupper call risked.
    std::transform(str.begin(), str.end(), str.begin(),
                   [](unsigned char ch) { return static_cast<char>(std::toupper(ch)); });
    return str;
}
|
#!/bin/bash
# © Copyright 2020 UCAR
# This software is licensed under the terms of the Apache Licence Version 2.0 which can be obtained at
# http://www.apache.org/licenses/LICENSE-2.0.

# Builds and installs TAU (Tuning and Analysis Utilities) 2.28.1 for the
# jedi-stack, either under the modules tree (when $MODULES is true) or
# into a plain prefix. Requires the pdtoolkit package to be present.
set -ex

name="tau2"
version="2.28.1"

# Hyphenated version used for install prefix
compiler=$(echo $JEDI_COMPILER | sed 's/\//-/g')
mpi=$(echo $JEDI_MPI | sed 's/\//-/g')

# manage package dependencies here
if $MODULES; then
set +x
source $MODULESHOME/init/bash
module load jedi-$JEDI_COMPILER
module load jedi-$JEDI_MPI
module try-load pdtoolkit
module try-load zlib
module list
set -x
prefix="${PREFIX:-"/opt/modules"}/$compiler/$mpi/$name/$version"
# Refuse to clobber an existing install unless OVERWRITE is truthy.
if [[ -d $prefix ]]; then
[[ $OVERWRITE =~ [yYtT] ]] && ( echo "WARNING: $prefix EXISTS: OVERWRITING!";$SUDO rm -rf $prefix ) \
|| ( echo "WARNING: $prefix EXISTS, SKIPPING"; exit 1 )
fi
else
prefix=${TAU_ROOT:-"/usr/local/$name/$version"}
fi

# MPI compiler wrappers. NOTE(review): CXX defaults to "mpiicpc" (the
# Intel wrapper) while CC defaults to the generic "mpicc" -- possibly a
# typo for "mpicxx"; confirm before changing.
export CC=${MPI_CC:-"mpicc"}
export CXX=${MPI_CXX:-"mpiicpc"}
# TAU's configure expects mpif90 even when the stack advertises mpifort.
if [[ $MPI_FC = "mpifort" ]]; then
export FC="mpif90"
else
export FC=${MPI_FC:-"mpif90"}
fi

export PDTOOLKIT_ROOT=$PDT_ROOT

cd ${JEDI_STACK_ROOT}/${PKGDIR:-"pkg"}

software=tau2
# TAU needs PDT (Program Database Toolkit) for source instrumentation.
[[ -d $PDTOOLKIT_ROOT ]] || ( echo "$software requires pdtoolkit, ABORT!"; exit 1 )
[[ -d $software ]] || git clone https://github.com/UO-OACISS/tau2
[[ ${DOWNLOAD_ONLY} =~ [yYtT] ]] && exit 0
[[ -d $software ]] && cd $software || ( echo "$software does not exist, ABORT!"; exit 1 )
[[ -d build ]] && rm -rf build

# -bfd/-dwarf/-unwind=download let TAU fetch binutils/libdwarf/libunwind itself.
$SUDO ./configure -prefix=$prefix -c++=$CXX -cc=$CC -fortran=$FC -mpi -ompt -bfd=download \
-dwarf=download -unwind=download -iowrapper -pdt=$PDTOOLKIT_ROOT

# Note - if this doesn't work you might have to run the entire script as root
$SUDO make install

# generate modulefile from template
$MODULES && update_modules mpi $name $version \
|| echo $name $version >> ${JEDI_STACK_ROOT}/jedi-stack-contents.log
|
<gh_stars>10-100
package io.opensphere.csvcommon.common.datetime;
/**
 * Contains the picked date columns, or null if one could not be determined.
 */
public class DateColumnResults
{
    /** The single date column, or the start ("up") time column. */
    private DateColumn myStartColumn;

    /** The end ("down") time column, when the CSV defines two date columns. */
    private DateColumn myEndColumn;

    /**
     * If multiple dates are defined within the csv, gets the down time or end
     * time column.
     *
     * @return The down time column or null if there isn't one.
     */
    public DateColumn getDownTimeColumn()
    {
        return myEndColumn;
    }

    /**
     * Gets the date column or the up time column.
     *
     * @return The date column or the up time column.
     */
    public DateColumn getUpTimeColumn()
    {
        return myStartColumn;
    }

    /**
     * Sets the down time or end time column.
     *
     * @param downTimeColumn The down time column or null if there isn't one.
     */
    public void setDownTimeColumn(DateColumn downTimeColumn)
    {
        myEndColumn = downTimeColumn;
    }

    /**
     * Sets the date column or the up time column.
     *
     * @param upTimeColumn The date column or the up time column.
     */
    public void setUpTimeColumn(DateColumn upTimeColumn)
    {
        myStartColumn = upTimeColumn;
    }
}
|
<reponame>astenstrasser/HelloJava<gh_stars>0
package br.com.neobank.bank.model;
/**
 * Taxable charge applied to stock-exchange operations.
 */
public class StocksExchange implements Taxable {

    /**
     * {@inheritDoc}
     *
     * @return the flat fee charged for a stock-exchange operation
     *         (fixed at 50; currency/units are defined by the caller's
     *         context -- not visible here).
     */
    @Override
    public double getTaxes() {
        return 50;
    }
}
|
import React, {Component} from 'react'
import {connect} from 'react-redux';
import {fetctPackages} from '../../Store/Actions/UserActions';
import ListOfCmds from '../List/List';
// Search box wired to redux: forwards each keystroke to the `Search`
// action and swaps the magnifier icon for a spinner while
// `userReducers.Loading` is true. Results render via <ListOfCmds/>.
class SearchArea extends Component {
  // Fires on every input *change* (despite the "keyDown" name).
  // NOTE(review): `this.props.LoadedItems` is not supplied by mapState
  // below, and `loadedItems` state is never read in render() -- this
  // setState call looks vestigial; confirm before removing.
  onkeyDown = async e => {
    let query = e.target.value;
    await this.props.Search(query);
    this.setState({loadedItems:this.props.LoadedItems});
  }

  render() {
    const {Loading} = this.props;
    return (
      <React.Fragment>
        <header className=" bg-primary">
          <div className="s008">
            <form>
              <div className="inner-form">
                <div className="basic-search">
                  <div className="input-field">
                    <input id="search" type="text" onChange={this.onkeyDown} placeholder="Type Keywords" autoComplete="off"/>
                    <div className="icon-wrap">
                      {
                        Loading ? <i className="fa fa-circle-o-notch fa-spin" aria-hidden="true"></i>:
                        <i className="fa fa-search" aria-hidden="true"></i>
                      }
                    </div>
                  </div>
                </div>
                <ListOfCmds/>
              </div>
            </form>
          </div>
        </header>
      </React.Fragment>
    )
  }
}
// Expose only the loading flag from the user reducer slice.
const mapState = (state) => ({
  Loading: state.userReducers.Loading,
});
// Wrap the fetctPackages action creator so the component can call
// this.props.Search(query).
const mapDispatch = (dispatch) => ({
  Search: (query) => dispatch(fetctPackages(query)),
});

export default connect(mapState, mapDispatch)(SearchArea);
|
# Install the Beekeeper Studio SQL client via snap (requires sudo and snapd).
sudo snap install beekeeper-studio
|
#!/bin/bash -e
# ----------------------------------------------------------------------------
#
# Package : carbon-charts
# Version : 0.41.31
# Source repo : https://github.com/carbon-design-system/carbon-charts
# Tested on : ubuntu_18.04 (Docker)
# Language : Node
# Travis-Check : True
# Script License: Apache License, Version 2 or later
# Maintainer : Manik Fulpagar <Manik.Fulpagar@ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
set -ex

#Variables
PACKAGE_NAME="carbon-charts"
PACKAGE_URL=https://github.com/carbon-design-system/carbon-charts.git
PACKAGE_VERSION=v0.41.31

echo "Usage: $0 [-v <PACKAGE_VERSION>]"
echo "PACKAGE_VERSION is an optional paramater whose default value is 0.41.31, not all versions are supported."
# First positional argument, when given, overrides the default version.
PACKAGE_VERSION="${1:-$PACKAGE_VERSION}"

#Install dependencies
apt-get update && apt-get install -y git curl build-essential make python sed unzip python3 libpng-dev

#install nodejs
# nvm is installed for the current user and used to pin node 12.0.0.
curl https://raw.githubusercontent.com/creationix/nvm/master/install.sh | bash
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion"  # This loads nvm bash_completion
nvm install 12.0.0
node -v

#Install yarn
curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add -
echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list
apt-get update
apt-get install yarn -y
yarn -v

# Work as the "tester" user layout; results land in /home/tester/output.
export HOME=/home/tester
mkdir -p /home/tester/output
cd /home/tester
ln -s /usr/bin/python3 /bin/python
OS_NAME=$(cat /etc/os-release | grep ^PRETTY_NAME | cut -d= -f2)

# Strip a "/tree/..." suffix from a GitHub URL, leaving the clone URL.
function get_checkout_url(){
        url=$1
        CHECKOUT_URL=`python3 -c "url='$url';github_url=url.split('tree')[0];print(github_url);"`
        echo $CHECKOUT_URL
}

# Extract the in-repo subdirectory from a GitHub ".../tree/<branch>/<path>" URL.
function get_working_path(){
        url=$1
        CHECKOUT_URL=`python3 -c "url='$url';github_url,uri=url.split('tree');uris=uri.split('/');print('/'.join(uris[2:]));"`
        echo $CHECKOUT_URL
}

CLONE_URL=$(get_checkout_url $PACKAGE_URL)
if [ "$PACKAGE_URL" = "$CLONE_URL" ]; then
        WORKING_PATH="./"
else
        WORKING_PATH=$(get_working_path $PACKAGE_URL)
fi

# Clone; on failure record the result for the tracking reports and stop.
if ! git clone $CLONE_URL $PACKAGE_NAME; then
        echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
        echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
        echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
        exit 1
fi

cd /home/tester/$PACKAGE_NAME
git checkout $PACKAGE_VERSION

#patch
# Bump node-sass (4.10.0 does not build here) and the svelte toolchain
# pins so the workspace installs on this platform.
sed -i 's#"node-sass": "4.10.0"#"node-sass": "4.12.0"#g' packages/react/package.json
sed -i 's#"node-sass": "4.10.0"#"node-sass": "4.12.0"#g' packages/vue/package.json
sed -i 's#"node-sass": "4.10.0"#"node-sass": "4.12.0"#g' packages/angular/package.json
sed -i 's#"node-sass": "4.10.0"#"node-sass": "4.12.0"#g' packages/core/package.json
sed -i 's#"svelte": "^3.31.x"#"svelte": "^3.43.1"#g' packages/svelte/package.json
sed -i 's#"rollup-plugin-svelte": "^5.2.1"#"rollup-plugin-svelte": "^7.1.0"#g' packages/svelte/package.json
sed -i 's#"rollup-plugin-terser": "5.1.2"#"rollup-plugin-terser": "7.0.2"#g' packages/svelte/package.json
sed -i 's#"svelte": "3.31.x"#"svelte": "3.43.1"#g' packages/svelte/package.json
sed -i 's#"svelte-check": "^1.1.26"#"svelte-check": "^2.2.6"#g' packages/svelte/package.json
sed -i 's#"svelte-loader": "2.13.6"#"svelte-loader": "3.1.2"#g' packages/svelte/package.json

#Build
# run the test command from test.sh
# Build and test package
if ! yarn install; then
        echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
        echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/install_fails
        echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails" > /home/tester/output/version_tracker
        exit 1
fi

if ! yarn build-all; then
        echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
        echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/install_fails
        echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails" > /home/tester/output/version_tracker
        exit 1
fi

#conclude
echo "Build Complete. Uncomment the following lines to run tests, they may take a while to complete."
#apt-get install -y phantomjs firefox libxss1
#firefox --version
##manually copy chromuim binary to root folder
#export CHROME_BIN='/root/chromium_84_0_4118_0/chrome'
#chmod 777 $CHROME_BIN
#sed -i "s#'--headless'#'--headless', '--no-sandbox'#g" /opt/carbon-charts/node_modules/karma-chrome-launcher/index.js
#sed -i "s#this.browserDisconnectTimeout = 2000#this.browserDisconnectTimeout = 210000#g" /opt/carbon-charts/node_modules/karma/lib/config.js
#sed -i "s#this.captureTimeout = 60000#this.captureTimeout = 210000#g" /opt/carbon-charts/node_modules/karma/lib/config.js
#sed -i "s#this.browserNoActivityTimeout = 30000#this.browserNoActivityTimeout = 210000#g" /opt/carbon-charts/node_modules/karma/lib/config.js
#sed -i "s#this.browserDisconnectTolerance = 0#this.browserDisconnectTolerance = 3#g" /opt/carbon-charts/node_modules/karma/lib/config.js
#yarn test
#echo "Tests Complete!"
|
<filename>src/core/utils/helpers/deviceInfo.ts<gh_stars>0
/**
 * Returns an identifier for the first camera ("videoinput") device, or
 * null/undefined when no camera or media-device API is available.
 *
 * Falls back to `groupId` when `deviceId` is absent or empty (browsers
 * may blank deviceId before the user has granted camera permission).
 */
export const getDeviceId = async (): Promise<string | null | undefined> => {
  try {
    const devices = await navigator?.mediaDevices?.enumerateDevices?.()
    // `devices?.` guards environments where enumerateDevices is missing;
    // the original dereferenced `devices.find` and relied on the catch.
    const camera = devices?.find((device) => device.kind === 'videoinput')
    return camera?.deviceId || camera?.groupId
  } catch {
    // Enumeration failed (permissions, insecure context, ...): report
    // "no device" instead of propagating. The old `catch (error: any)`
    // bound an unused, untyped variable.
    return null
  }
}
|
package org.moskito.control.plugins.opsgenie;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import org.configureme.annotations.Configure;
import org.configureme.annotations.ConfigureMe;
import org.moskito.control.plugins.notifications.config.BaseNotificationProfileConfig;
import org.moskito.control.plugins.notifications.config.NotificationStatusChange;
/**
 * OpsGenie configuration unit for per-status notification of specified recipients.
 */
@ConfigureMe
@SuppressFBWarnings(value = {"EI_EXPOSE_REP2", "EI_EXPOSE_REP"},
        justification = "This is the way configureMe works, it provides beans for access")
public class OpsgenieNotificationConfig extends BaseNotificationProfileConfig{

    /**
     * Statuses changes appliable for this config
     */
    @Configure
    private NotificationStatusChange[] notificationStatusChanges = new NotificationStatusChange[0];

    /**
     * Mail recipients.
     */
    @Configure
    private String[] recipients = new String[0];

    /**
     * Teams responsible for alerts
     */
    @Configure
    private String[] teams = new String[0];

    /**
     * Tags of alert
     */
    @Configure
    private String[] tags = new String[0];

    /**
     * OpsGenie account custom actions
     */
    @Configure
    private String[] actions = new String[0];

    /** @return recipients to be notified (internal array, exposed by design) */
    public String[] getRecipients() {
        return recipients;
    }

    /** @param recipients recipients to be notified */
    public void setRecipients(String[] recipients) {
        this.recipients = recipients;
    }

    /** @return teams responsible for alerts */
    public String[] getTeams() {
        return teams;
    }

    /** @param teams teams responsible for alerts */
    public void setTeams(String[] teams) {
        this.teams = teams;
    }

    /** @return tags attached to the alert */
    public String[] getTags() {
        return tags;
    }

    /** @param tags tags attached to the alert */
    public void setTags(String[] tags) {
        this.tags = tags;
    }

    /** @return OpsGenie custom actions for the alert */
    public String[] getActions() {
        return actions;
    }

    /** @param actions OpsGenie custom actions for the alert */
    public void setActions(String[] actions) {
        this.actions = actions;
    }

    /** @return the status changes this profile reacts to */
    public NotificationStatusChange[] getStatusChanges() {
        return notificationStatusChanges;
    }

    /** @param notificationStatusChanges the status changes this profile reacts to */
    public void setNotificationStatusChanges(NotificationStatusChange[] notificationStatusChanges) {
        this.notificationStatusChanges = notificationStatusChanges;
    }
}
|
package core.shape_interface.editor.geom;
import core.geom.Line;
import core.shape_interface.editor.Control;
import core.shape_interface.editor.ShapeEditor;
import java.awt.Cursor;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.event.MouseEvent;
/**
 * Interactive editor for a {@link Line} shape: exposes three drag
 * handles (the two endpoints and the center) and applies mouse drags to
 * the underlying shape.
 *
 * NOTE(review): the fields {@code ctrl}, {@code idx}, {@code lastPoint}
 * and {@code curPoint} are presumably inherited from {@link ShapeEditor}
 * -- confirm against the base class, which is not visible here.
 *
 * @author <NAME>
 */
public class LineShapeEditor extends ShapeEditor {

    /** The line being edited. */
    private Line shape;

    public LineShapeEditor(Line shape) {
        super();
        this.shape = shape;
        createControlPoints();
    }

    /**
     * Creates the three handles: endpoint P1, endpoint P2 and the center.
     */
    @Override
    public final void createControlPoints() {
        ctrl = new Control[3];
        ctrl[0] = new Control(shape.getX1(), shape.getY1(), Control.CONTROL_LOCS.P1);
        ctrl[1] = new Control(shape.getX2(), shape.getY2(), Control.CONTROL_LOCS.P2);
        ctrl[2] = new Control(shape.getCX(), shape.getCY(), Control.CONTROL_LOCS.CEN);
    }

    /**
     * Applies a mouse drag to the currently selected handle: the center
     * translates the whole line, an endpoint moves only itself.
     */
    @Override
    public void dragControl(MouseEvent e) {
        curPoint = e.getPoint();
        // Delta since the previous drag event (only used for translation).
        int ddx = curPoint.x - lastPoint.x;
        int ddy = curPoint.y - lastPoint.y;
        switch (ctrl[idx].type) {
            case CEN: {
                shape.translate(ddx, ddy);
                break;
            }
            case P1: {
                // Move P1, keep P2 fixed.
                shape.resize(curPoint.x, curPoint.y, shape.getX2(), shape.getY2());
                break;
            }
            case P2: {
                // Move P2, keep P1 fixed.
                shape.resize(curPoint.x, curPoint.y, shape.getX1(), shape.getY1());
            }
        }
        updateControlPositions();
        lastPoint = curPoint;
        /*
         * As the shape is being dragged, it keeps
         * changing its respective P1 and P2 points.
         * To keep track of which point is being is pressed,
         * we need to continuously update the index of the
         * selected control, after every drag.
         */
        idx = determineSelectedControl(e.getPoint());
    }

    /**
     * Paints the handles in reverse order (center first in the array is
     * drawn last) so endpoint handles end up on top.
     */
    @Override
    public void paintEditLines(Graphics2D g) {
        for (int i = ctrl.length - 1; i >= 0; i--) {
            ctrl[i].paint(g);
        }
    }

    /**
     * Records which handle (if any) the press hit and resets drag state.
     */
    @Override
    public void pressControl(Point p) {
        idx = determineSelectedControl(p);
        shape.resetInverse();
        lastPoint = p;
        curPoint = p;
    }

    /**
     * @return index of the handle containing {@code p}, or -1 when none does.
     */
    @Override
    protected int determineSelectedControl(Point p) {
        int ind = -1;
        for (int i = 0; i < ctrl.length; i++) {
            if (ctrl[i].contains(p)) {
                ind = i;
                break;
            }
        }
        return ind;
    }

    /**
     * Re-syncs the handle positions with the shape's current geometry.
     */
    @Override
    public void updateControlPositions() {
        ctrl[0].updateBounds(shape.getX1(), shape.getY1());
        ctrl[1].updateBounds(shape.getX2(), shape.getY2());
        ctrl[2].updateBounds(shape.getCX(), shape.getCY());
    }

    /**
     * @return the cursor of the handle under {@code p}, or the default cursor.
     */
    @Override
    public Cursor getCursor(Point p) {
        for (int i = 0; i < ctrl.length; i++) {
            if (ctrl[i].contains(p)) {
                return ctrl[i].getControlCursor();
            }
        }
        return Cursor.getDefaultCursor();
    }
}
|
import { config } from "../config.js";

// Backend base address comes from the shared app config; every request
// below is built as host + port + path.
const { backend } = config;
const { host, port } = backend;
// Fetch the tasks of one team, scoped to the given member.
// Returns the raw fetch Response (callers parse the body themselves).
export const getTasksByTeam = async (teamId, memberId) => {
  console.log("---at get tasks by team---");
  const url = host + port + "/api/task/team/" + teamId + "/" + memberId;
  const tasks = await fetch(url);
  return tasks;
};
// Fetch every task belonging to one company. Returns the raw Response.
export const getTasksByCompanyId = async (Id) => {
  console.log("---at get tasks by company id---");
  const response = await fetch(host + port + "/api/task/company/" + Id);
  return response;
};
// Fetch the tasks assigned to a single member. Returns the raw Response.
export const getTasksByAssignedTo = async (memberId) => {
  // Fixed copy-pasted log text (previously logged "get tasks by team").
  console.log("---at get tasks by assigned to---");
  const tasks = await fetch(host + port + "/api/task/toMember/" + memberId);
  return tasks;
};
// Mark a task as finished. Returns the raw Response.
export const finishTask = async (taskId) => {
  // Fixed copy-pasted log text (previously logged "---read all---") and
  // dropped the stray duplicate "read all" log.
  console.log("---finish task---");
  // Create our request constructor with all the parameters we need
  const request = new Request(host + port + "/api/task/finish", {
    method: "post",
    body: JSON.stringify({
      taskId: taskId,
    }),
    headers: {
      Accept: "application/json, text/plain, */*",
      "Content-Type": "application/json",
    },
  });
  const result = await fetch(request);
  return result;
};
// Add a member to a task. Returns the raw Response.
export const joinTask = async (taskId, memberId) => {
  console.log("---join task---");
  // Create our request constructor with all the parameters we need
  const request = new Request(host + port + "/api/task/join", {
    method: "post",
    body: JSON.stringify({
      taskId: taskId,
      memberId: memberId,
    }),
    headers: {
      Accept: "application/json, text/plain, */*",
      "Content-Type": "application/json",
    },
  });
  // (Removed a stray copy-pasted `console.log("read all")` here.)
  const result = await fetch(request);
  return result;
};
// Fetch one task by its id. Returns the raw Response.
export const getTasksById = async (Id) => {
  console.log("---at get tasks by task id---");
  const response = await fetch(host + port + "/api/task/" + Id);
  return response;
};
// Create a task. Now returns the fetch promise so callers may await the
// server response (the original returned undefined); callers that
// ignored the return value are unaffected.
export const addTask = (data) => {
  console.log("---Add Task---");
  const {
    teamId,
    companyId,
    name,
    estimatedTime,
    usedTime,
    assignedToId,
    assignedById,
    taskDetail,
    isFinish,
  } = data;
  console.log(data);
  // Create our request constructor with all the parameters we need
  const request = new Request(host + port + "/api/task/add", {
    method: "put",
    body: JSON.stringify({
      teamId: teamId,
      companyId: companyId,
      name: name,
      estimatedTime: estimatedTime,
      usedTime: usedTime,
      assignedToId: assignedToId,
      assignedById: assignedById,
      taskDetail: taskDetail,
      isFinish: isFinish,
    }),
    headers: {
      Accept: "application/json, text/plain, */*",
      "Content-Type": "application/json",
    },
  });
  return fetch(request)
    .then((res) => {
      console.log("success add task");
      console.log(res);
      return res;
    })
    .catch((error) => {
      console.log("fail send addTask");
      console.log(error);
    });
};
// Update an existing task. Now returns the fetch promise so callers may
// await completion (the original returned undefined).
export const updateTask = async (data) => {
  console.log("---update task---");
  const {
    _id,
    teamId,
    companyId,
    name,
    estimatedTime,
    usedTime,
    assignedToId,
    assignedById,
    taskDetail,
  } = data;
  console.log(data);
  // Create our request constructor with all the parameters we need
  const request = new Request(host + port + "/api/task/update", {
    method: "post",
    body: JSON.stringify({
      id: _id,
      teamId: teamId,
      companyId: companyId,
      name: name,
      estimatedTime: estimatedTime,
      usedTime: usedTime,
      assignedToId: assignedToId,
      assignedById: assignedById,
      taskDetail: taskDetail,
    }),
    headers: {
      Accept: "application/json, text/plain, */*",
      "Content-Type": "application/json",
    },
  });
  return fetch(request)
    .then((res) => {
      console.log("success update task");
      console.log(res);
      return res;
    })
    .catch((error) => {
      // Fixed copy-pasted log text (previously "fail update addTask").
      console.log("fail update task");
      console.log(error);
    });
};
|
import React from "react"
import { Link } from "gatsby"
import PropTypes from "prop-types"
import "@styles/header.scss"
const Header = ({ siteTitle }) => (
<header>
<div className="header-inner">
<h1 className="site-title">
<Link to="/">
{siteTitle}
</Link>
</h1>
<nav className="navigation">
<ul>
<li>
<Link to="/about">About</Link>
</li>
<li>
<Link to="/project">Project</Link>
</li>
<li>
<Link to="/blog">Blog</Link>
</li>
<li>
<Link to="/tag">Tag</Link>
</li>
</ul>
</nav>
</div>
</header>
)
Header.propTypes = {
siteTitle: PropTypes.string,
}
Header.defaultProps = {
siteTitle: ``,
}
export default Header
|
from rest_framework.serializers import ModelSerializer
from rest_framework_queryfields import QueryFieldsMixin
class drf_mscnt_Timestamp_Serializer(QueryFieldsMixin, ModelSerializer):
    # Serializer exposing every field of the drf_mscnt_Timestamp model.
    # QueryFieldsMixin lets API clients pick/omit fields via query parameters.
    # NOTE(review): drf_mscnt_Timestamp is not imported in this chunk --
    # confirm the model import exists elsewhere in the file.
    class Meta:
        model = drf_mscnt_Timestamp
        id_field = 'id'
        fields = ('__all__')
|
#!/bin/bash
# Run the falco-driver-loader container on one worker node via `oc debug`.
#   $1 = index into the global nodearray
#   $2 = optional debug image passed through to `oc debug --image`
# Returns the exit status captured from the remote shell.
function install_falco_driver_loader () {
  node_number=$1
  debug_image=$2
  image_option=""
  # Only add the --image flag when a custom debug image was supplied.
  if [[ ! -z "$debug_image" ]] ; then
    echo "debug_image: $debug_image"
    image_option="--image"
  fi
  #echo $node_number
  nodename=${nodearray[$node_number]}
  echo "---------------------------------------------------------"
  echo ""
  echo "Start to install kernel headers on the node $nodename."
  echo ""
  # Chroot into the host filesystem and run falco-driver-loader with podman.
  # The nested quoting/escaping below is deliberate; do not reformat the
  # quoted command string.
  oc debug ${image_option} ${debug_image} node/${nodename} -- bash -c "chroot /host bash -c \"
#!/bin/bash
echo ''
echo '---------------------------------------------------------'
mkdir -p /root/.falco
echo '---------------------------------------------------------'
podman run --rm -i -t \
--privileged \
-v /root/.falco:/root/.falco \
-v /proc:/host/proc:ro \
-v /boot:/host/boot:ro \
-v /lib/modules:/host/lib/modules:ro \
-v /usr:/host/usr:ro \
-v /etc:/host/etc:ro \
docker.io/falcosecurity/falco-driver-loader:latest
echo Exiting...
exit 0
\\\\\\\" # end of bash
\\\" # end of nsenter
\" # end of chroot
echo \$?
" 2>&1 | tee /tmp/ma-install-kernel.log # end of oc debug
  # The remote `echo $?` lands three lines from the end of the captured log.
  # NOTE(review): ret_val is assumed to be a bare number -- verify the log
  # layout holds for all oc versions before relying on the return status.
  ret_val=`cat /tmp/ma-install-kernel.log | tail -n 3 | head -n 1`
  rm /tmp/ma-install-kernel.log
  return $ret_val
}
# Collect all worker nodes and install the falco driver on each one.
declare -a nodearray=()
# `oc get node` output includes a header line; it becomes nodearray[0]
# ("NAME"), which is why the install loop below starts at index 1.
nodes=`oc get node --selector='node-role.kubernetes.io/worker' | awk '{print $0}'`
echo ""
echo "---------------------------------------------------------"
echo ""
num=0
# echo -e "${num} ) ALL nodes"
# num=$((++num))
# nodearray=("${nodearray[@]}" "ALL")
IFS=$'\n'
# Print the node table (numbering every row after the header) and record
# each node name in nodearray.
for line in `echo -e "$nodes"`
do
  if [[ $num -eq 0 ]]; then
    echo -e "  $line"
  else
    echo -e "${num} ) $line"
  fi
  nodename=`echo $line | awk '{print $1}'`
  nodearray=("${nodearray[@]}" $nodename)
  num=$((++num))
done
echo "---------------------------------------------------------"
echo ""
echo "Installing kernel headers on all nodes shown above."
echo ""
# read userinput
node_num=${#nodearray[@]}
# if [[ $userinput -lt 1 || $userinput -gt $node_num ]]; then
# echo "Input outside acceptable range. [ 1 - ${node_num} ]"
# exit 1
# fi
# Install sequentially on every node; abort on the first failure.
# NOTE(review): $debug_image is read here but never set in this chunk --
# confirm it is exported/assigned elsewhere.
for (( i = 1; i < $node_num; i++ ))
do
  if install_falco_driver_loader "$i" "$debug_image"; then
    echo "successfully installed falco-driver-loader"
    continue
  else
    echo "Failed to install kernel header on ${nodearray[$i]}"
    exit 1
  fi
done
|
//
// FSShowDeleteView.h
// Expand
//
// Created by Fudongdong on 2017/11/3.
// Copyright © 2017年 china. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Overlay view that displays a short text/notice with an animated reveal.
@interface FSShowDeleteView : UIView
/// Sets the primary text shown by the view.
- (void)setText:(NSString *)text;
/// Sets the secondary notice text shown by the view.
- (void)setNotice:(NSString *)notice;
/// Starts the view's presentation animation.
- (void)startAnimation;
@end
|
// An expression to calculate the area of a triangle given the three sides
// (Heron's formula in the form (1/4)*sqrt((a+b+c)(-a+b+c)(a-b+c)(a+b-c)),
// here with a=3, b=4, c=5; the expected result is 6)
area = (1/4)*√( (3 + 4 + 5)*(-3 + 4 + 5)*(3 - 4 + 5)*(3 + 4 - 5))
|
const express = require('express');
const app = express();

// GET /sequence?numbers=1,2,3 -> { next: "4" }
// Returns the successor of the last number in the comma-separated list.
app.get('/sequence', (req, res) => {
  const query = req.query.numbers;
  // Fix: a missing `numbers` parameter previously crashed on .split().
  if (typeof query !== 'string' || query.length === 0) {
    return res.status(400).send({ error: 'numbers query parameter is required' });
  }
  const dataSplit = query.split(",");
  const lastElement = dataSplit[dataSplit.length - 1];
  // Fix: guard against a non-numeric trailing element (NaN).
  const parsed = parseInt(lastElement, 10);
  if (Number.isNaN(parsed)) {
    return res.status(400).send({ error: 'numbers must end with an integer' });
  }
  res.send({ next: (parsed + 1).toString() });
});

app.listen(3000);
|
<reponame>deltd3v/bloog
/**
 * Generate an array of all 16,777,216 6-digit hex color strings
 * ("000000".."ffffff"), indexed by their numeric value.
 * Fix: the `colorArray` parameter was untyped, so its default `[]` was
 * inferred as `any[]`; it is now explicitly `string[]`.
 */
export const genAllColors = (colorArray: string[] = []): string[] => {
  for (let i = 0xffffff; i >= 0; --i)
    colorArray[i] = i.toString(16).padStart(6, '0');
  return colorArray; // exceeds 25m can't even cache this ish in localStorage
};
/** Generate a random color as a "#rrggbb" lowercase hex string. */
export const genRandomColor = () => {
  const value = (Math.random() * (1 << 24)) | 0;
  return '#' + value.toString(16).padStart(6, '0');
};
export default genAllColors;
|
# middleware.py
from django.http import HttpResponseForbidden
from .models import Article
class EditRestrictionMiddleware:
    """Forbid non-staff users from editing locked wiki articles.

    Applies only to paths under /wiki/locked/edit/; all other requests
    pass straight through to the next middleware/view.
    """

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if request.path.startswith('/wiki/locked/edit/'):
            article_id = request.path.split('/')[-3]  # Extract article ID from the URL
            # Fix: Article.objects.get previously raised DoesNotExist (or
            # ValueError on a malformed id) and produced a 500. Fall through
            # instead so the URL resolver/view returns its usual 404.
            try:
                article = Article.objects.get(id=article_id)
            except (Article.DoesNotExist, ValueError):
                article = None
            if article is not None and article.is_locked and not request.user.is_staff:
                return HttpResponseForbidden("You do not have permission to edit this locked article.")
        return self.get_response(request)
|
#!/bin/bash
# Rebuild the lovelace engine container and (re)fetch the solutions repo.
# Step 1: remove any previous solutions symlink and checkout.
rm -f solutions && \
rm -rf lovelace-solutions/
# Step 2: build the engine image and run it detached; the host docker socket
# is mounted so the engine can launch sibling containers.
docker build -t lovelace-engine . && \
docker run -d -v /var/run/docker.sock:/var/run/docker.sock -p 14714:14714 lovelace-engine && \
docker ps -a
# Step 3: clone the reference solutions, expose the python ones as
# ./solutions, and install the engine's python dependencies.
git clone https://github.com/project-lovelace/lovelace-solutions.git && \
ln -s lovelace-solutions/python/ solutions && \
pip install -r requirements.txt
# NOTE(review): this export only affects the script's own process unless the
# file is sourced -- confirm how the variable is consumed.
export LOVELACE_SOLUTIONS_DIR=./lovelace-solutions/
|
#!/bin/bash
# Read a number from stdin and print its multiplication table from 1 to 10.
read n
for (( i = 1; i <= 10; i++ ))
do
    echo "$n x $i = $(( n * i ))"
done
|
import { MeshPhongMaterialDirective } from './mesh-phong-material.directive';
// Smoke test: the directive class can be constructed without any Angular
// dependency-injection context.
describe('MeshPhongMaterialDirective', () => {
  it('should create an instance', () => {
    const directive = new MeshPhongMaterialDirective();
    expect(directive).toBeTruthy();
  });
});
|
#ifndef _llvm_functions_h
#define _llvm_functions_h
/*
 * Contains all of the CLIPS=>LLVM and LLVM=>CLIPS wrapper functions.
 * These functions show up in CLIPS as various different things
 */
#include "ExpertSystem/Types.h"
/* Environment registration and basic I/O helpers. */
extern "C" void RegisterExpertSystemFunctions(void *theEnv);
extern "C" void LLVMPrint(void *theEnv);
extern "C" void BatchLoad(void *theEnv);
/* Instruction comparison predicates. */
extern "C" void* IsIdenticalTo(void *theEnv);
extern "C" void* IsIdenticalToWhenDefined(void *theEnv);
extern "C" void* IsSameOperationAs(void *theEnv);
/* Instruction creation, movement, and deletion. */
extern "C" PointerAddress CloneInstruction(void *theEnv);
extern "C" void* UnlinkAndMoveInstructionBefore(void *theEnv);
extern "C" void* UnlinkAndMoveInstructionAfter(void *theEnv);
extern "C" void* MoveInstructionBefore(void *theEnv);
extern "C" void* MoveInstructionAfter(void *theEnv);
extern "C" void* UnlinkAndDeleteInstruction(void *theEnv);
extern "C" void* ScheduleInstructions(void *theEnv);
/* Use/def rewriting and SSA helpers. */
extern "C" void* ReplaceUsesOf(void *theEnv);
extern "C" void* ReplaceAllUsesOf(void *theEnv);
extern "C" PointerAddress CreatePhiNode(void *theEnv);
extern "C" void* UnlinkInstruction(void *theEnv);
extern "C" void* DeleteInstruction(void *theEnv);
#endif
|
<gh_stars>1-10
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by <NAME>, <EMAIL>, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Opari2(AutotoolsPackage):
    """OPARI2 is a source-to-source instrumentation tool for OpenMP and hybrid
    codes. It surrounds OpenMP directives and runtime library calls with calls
    to the POMP2 measurement interface. OPARI2 will provide you with a new
    initialization method that allows for multi-directory and parallel builds
    as well as the usage of pre-instrumented libraries. Furthermore, an
    efficient way of tracking parent-child relationships was added.
    Additionally, we extended OPARI2 to support instrumentation of OpenMP 3.0
    tied tasks.
    """

    # Project homepage and canonical source tarball location.
    homepage = "http://www.vi-hps.org/projects/score-p"
    url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz"

    # Known releases with their md5 checksums, newest first.
    version('2.0.3', 'f34674718ffdb098a48732a1eb9c1aa2')
    version('2.0.1', '74af78f1f27b8caaa4271e0b97fb0fba')
    version('2.0', '72350dbdb6139f2e68a5055a4f0ba16c')
    version('1.1.4', '245d3d11147a06de77909b0805f530c0')
    version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e')

    def configure_args(self):
        # Always build shared libraries.
        return ["--enable-shared"]
|
#pragma once
#include "Common.h"
#include "HostCapability.h"
#include "Capability.h"
#include "Host.h"
#include "Session.h"
|
<reponame>ComputerArchitectureGroupPWr/Floorplan-Maker<filename>src/floorplan.py
# -*- coding: utf-8 -*-
from PyQt4 import QtGui
import sys
from PyQt4.QtCore import QString, Qt
from PyQt4.QtGui import QColor
__author__ = 'pawel'
from Parameters import Param
class Floorplan:
    """Grid model of a chip floorplan (Python 2 / PyQt4).

    Each cell of the cols x rows `floorplan` list holds an integer code:
    0 = empty, 1..29 = heater index, 30+ = thermometer index, plus the
    special Param.obstacle / Param.unit markers.
    """

    # Localized (Polish) color names mapped to Qt color constants, used by
    # the GUI when rendering heaters.
    colors = {QString("niebieski"):Qt.blue, QString("ciemny niebieski"): Qt.darkBlue,
              QString("czerwony"):Qt.red, QString("ciemny czerwony"): Qt.darkRed,
              QString("zielony"):Qt.green, QString("ciemny zielony"): Qt.darkGreen,
              QString("cyjan"): Qt.cyan, QString("ciemny cyjan"): Qt.darkCyan,
              QString("magenta"): Qt.magenta, QString("ciemna magenta"): Qt.darkMagenta,
              QString("szary"): Qt.gray, QString("ciemny szary"): Qt.darkGray,
              QtGui.QApplication.translate("newHeaterDialog", "żółty", None, QtGui.QApplication.UnicodeUTF8): Qt.yellow,
              QtGui.QApplication.translate("newHeaterDialog", "ciemny żółty", None,
                                           QtGui.QApplication.UnicodeUTF8): Qt.darkYellow}

    def __init__(self, cols, rows):
        # Define an empty floorplan and clear the contents list
        # (comment translated from Polish).
        self.floorplan = []
        self.cols = cols
        self.rows = rows
        for i in range(rows):
            for j in range(cols):
                self.floorplan.append(0)
        # List holding name/identifier information for each heater; entries
        # are [name, index, color, type] (translated from Polish).
        self.heaters = []
        # Thermometers keyed by name -> {'index', 'type', 'placed', ...}.
        self.therms = {}
        # Thermometer indices start at 30 so they never collide with heater
        # indices (1..29).
        self.term_index = 30

    def addNewHeater(self, params):
        # Append a heater with the next free index (1 if the list is empty).
        color = params["color"]
        if self.heaters:
            self.heaters.append([params["name"], self.heaters[-1][1]+1, color, params["type"]])
        else:
            self.heaters.append([params["name"], 1, color, params["type"]])

    def addNewTerm(self, params):
        # Register a thermometer under the next thermometer index (>= 30).
        attributes = {'index': self.term_index, 'type': params['type'], 'placed': False}
        self.therms[params['name']] = attributes
        self.term_index += 1

    def modifyHeater(self, params,index):
        # Update name/color/type of the heater at list position `index`.
        # A throwaway table item converts the color to its "#rrggbb" name.
        item = QtGui.QTableWidgetItem("")
        item.setBackgroundColor(QColor(params["color"]))
        color = item.backgroundColor().name()
        self.heaters[index][0] = params["name"]
        self.heaters[index][2] = color
        self.heaters[index][3] = params["type"]

    def modifyThermometer(self,name,new_name):
        # Rename a thermometer, keeping its attribute dict.
        self.therms[new_name] = self.therms[name]
        del self.therms[name]

    def removeHeater(self, params):
        # Remove a heater and compact the remaining heater indices.
        # NOTE(review): list.remove relies on rebuilding the exact entry
        # (name, index, converted color, type) -- verify the color round-trip
        # always matches the stored value.
        index = self.indexOfHeater(params["name"])
        item = QtGui.QTableWidgetItem("")
        item.setBackgroundColor(QColor(params["color"]))
        color = item.backgroundColor().name()
        self.heaters.remove([params["name"], index, color, params["type"]])
        # Clear every grid cell that used this heater's index.
        for i in range(self.cols*self.rows):
            if self.floorplan[i] == index:
                self.floorplan[i]=0
        # Shift down the grid indices of heaters that followed it (< 30 keeps
        # thermometer indices untouched).
        for i in range(self.cols*self.rows):
            if index < self.floorplan[i] < 30:
                self.floorplan[i]-=1
        # Shift down the stored indices in the heater list as well.
        for i in range(len(self.heaters)):
            if index < self.heaters[i][1] < 30:
                self.heaters[i][1]-= 1

    def removeThermometer(self, params):
        # Remove a thermometer from the grid and re-index the ones above it.
        index = self.therms[params['name']]['index']
        self.removeTermUnit(params['name'],params['pos_x'],params['pos_y'])
        self.therms.pop(params['name'])
        self.term_index-=1
        for key,value in self.therms.items():
            if value['index']>index:
                self.reIndexTermUnit(value['index'])
                value['index']-=1

    def heatersNumber(self):
        # Highest heater index in use.
        # NOTE(review): raises IndexError when no heaters exist -- confirm
        # callers guard against an empty list.
        return self.heaters[-1][1]

    def indexOfHeater(self,name):
        # Look up a heater's index by name; implicitly returns None if absent.
        for h in self.heaters:
            if h[0] == name:
                return h[1]

    def addHeaterUnit(self,name,x,y):
        # Mark grid cell (x, y) as belonging to the named heater.
        self.floorplan[y*self.cols+x] = self.indexOfHeater(name)

    def removeHeaterUnit(self,name,x,y):
        # Clear cell (x, y) only if it currently holds this heater's index.
        if self.floorplan[y*self.cols+x] == self.indexOfHeater(name):
            self.floorplan[y*self.cols+x] = 0

    def addTermUnit(self, name, x, y, openProject = False):
        # Place the named thermometer at (x, y); when loading a saved
        # project also record its position in the attribute dict.
        self.floorplan[y*self.cols+x] = self.therms[name]['index']
        self.therms[name]['placed'] = True
        if openProject:
            self.therms[name]['x_pos'] = x
            self.therms[name]['y_pos'] = y

    def replaceTermUnit(self,name,x,y,x_new,y_new):
        # Move a thermometer from (x, y) to (x_new, y_new).
        if self.floorplan[y*self.cols+x] == self.therms[str(name)]['index']:
            self.floorplan[y*self.cols+x] = 0
        self.floorplan[y_new*self.cols+x_new] = self.therms[str(name)]['index']

    def removeTermUnit(self,name,x,y):
        # Clear cell (x, y) only if it holds this thermometer's index.
        if self.floorplan[y*self.cols+x] == self.therms[name]['index']:
            self.floorplan[y*self.cols+x] = 0

    def reIndexTermUnit(self,index):
        # Decrement every grid cell that carries the given thermometer index.
        for i in range(self.cols*self.rows):
            if self.floorplan[i] == index:
                self.floorplan[i]-=1

    def addObstacle(self,x,y):
        # Mark cell (x, y) as an obstacle.
        self.floorplan[y*self.cols+x] = Param.obstacle

    def addUnit(self,x,y):
        # Mark cell (x, y) as a plain unit.
        self.floorplan[y*self.cols+x] = Param.unit

    def getElementIndex(self,x,y):
        # Raw integer code stored at cell (x, y).
        return self.floorplan[y*self.cols+x]

    def getTermPosition(self,name):
        # Scan the grid for the named thermometer and return its position.
        # NOTE(review): `i` ranges over cols but is passed as x while `j`
        # ranges over rows -- verify the intended (row, col) axis order.
        for i in range(self.cols):
            for j in range(self.rows):
                if self.getElementIndex(i,j) == self.therms[name]['index']:
                    return j,i

    def emptyElement(self,x,y):
        # True when cell (x, y) holds 0 (empty).
        if self.floorplan[y*self.cols+x]:
            return False
        else:
            return True

    def getColorByIndex(self, index):
        # Color of the heater whose index is `index` (list is 0-based).
        return self.heaters[int(index)-1][2]

    def sortTherms(self):
        # Debug helper: print thermometers sorted by name.
        for therm in sorted(self.therms.items()):
            print therm

    '''Metoda wypisująca tekstowo wyglad płytki'''
    # (Method that prints a textual view of the board.)
    def printTextFloorplan(self):
        k = 0
        for i in range(self.rows):
            if i!=0:
                sys.stdout.write(str(i) + "\t")
            for j in range(self.cols):
                if i==0:
                    # First pass prints the column header row.
                    sys.stdout.write("\t" + str(j+1))
                else:
                    sys.stdout.write(str(self.floorplan[self.cols*(i-1) + j]) + " ")
                    k += 1
            sys.stdout.write("\n")
        print "Razem bloków funkcyjnych: " + str(k)
|
# Build the Spring Boot jar, then build and push the "linux" docker image.
./gradlew clean bootJar
docker-compose build linux
docker-compose push linux
|
package com.ervin.litepal.utils;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import java.io.InputStream;
/**
* Created by Ervin on 2015/12/22.
*/
public class Tools {
    /**
     * Decode a raw/drawable resource into an RGB_565 bitmap, using the
     * memory-friendly purgeable/shareable decode options.
     *
     * @param context context used to resolve the resource
     * @param resId   resource id of the image to decode
     * @return the decoded bitmap, or null if decoding fails
     */
    public static Bitmap readBitMap(Context context, int resId){
        BitmapFactory.Options opt = new BitmapFactory.Options();
        opt.inPreferredConfig = Bitmap.Config.RGB_565;
        opt.inPurgeable = true;
        opt.inInputShareable = true;
        // Load the resource image (comment translated from Chinese).
        InputStream is = context.getResources().openRawResource(resId);
        try {
            return BitmapFactory.decodeStream(is, null, opt);
        } finally {
            // Fix: the stream was previously never closed (resource leak).
            try {
                is.close();
            } catch (java.io.IOException ignored) {
                // Closing failures are non-fatal here.
            }
        }
    }
}
|
<reponame>hapify/cli<gh_stars>1-10
import 'reflect-metadata';
import { expect } from '@hapi/code';
import 'mocha';
import { CLI, Sandbox } from './helpers';
// End-to-end tests for the `generate` CLI command. Each test bootstraps a
// project in a fresh Sandbox with `new`, then runs `generate` and checks
// the produced files and CLI output.
describe('generate command', () => {
  it('success', async () => {
    const sandbox = new Sandbox();
    sandbox.clear();
    // Clone repository first
    const responseNew = await CLI('new', [
      '--dir',
      sandbox.getPath(),
      '--boilerplate',
      'hapijs_tractr',
      '--preset',
      '60104aabe0fe50001033f10e', // User
      '--preset',
      '60104aabe0fe50001033f10f', // Place
      '--project-name',
      'The Name',
      '--project-desc',
      'The Description',
    ]);
    expect(responseNew.stderr).to.be.empty();
    expect(responseNew.code).to.equal(0);
    expect(responseNew.stdout).to.contains('Created 1 new dynamic boilerplate');
    // Generate code
    const response = await CLI('generate', ['--dir', sandbox.getPath()]);
    expect(response.stderr).to.be.empty();
    expect(response.code).to.equal(0);
    expect(response.stdout).to.contains(['Generated', 'files', 'for channel', 'HapiJS']);
    // Spot-check a few generated files from both presets.
    expect(sandbox.fileExists(['routes', 'user', 'create.js'])).to.be.true();
    expect(sandbox.fileExists(['routes', 'place', 'delete.js'])).to.be.true();
    expect(sandbox.fileExists(['cmd', 'setup', 'indexes.json'])).to.be.true();
  });
  it('success with two boilerplates', async () => {
    const sandbox = new Sandbox();
    sandbox.clear();
    // Clone repository first
    const responseNew = await CLI('new', [
      '--dir',
      sandbox.getPath(),
      '--boilerplate-url',
      'https://github.com/Tractr/boilerplate-hapijs.git',
      '--boilerplate-url',
      'https://github.com/Tractr/boilerplate-ngx-components.git',
      '--preset',
      '60104aabe0fe50001033f10e', // User
      '--project-name',
      'The Name',
      '--project-desc',
      'The Description',
    ]);
    expect(responseNew.stderr).to.be.empty();
    expect(responseNew.code).to.equal(0);
    expect(responseNew.stdout).to.contains('Created 2 new dynamic boilerplates');
    // Generate code
    const response = await CLI('generate', ['--dir', sandbox.getPath()]);
    expect(response.stderr).to.be.empty();
    expect(response.code).to.equal(0);
    expect(response.stdout).to.contains(['Generated', 'files', 'for channel', 'HapiJS', 'Angular Components']);
    // Each boilerplate generates into its own sub-directory.
    expect(sandbox.fileExists(['boilerplate-hapijs', 'routes', 'user', 'create.js'])).to.be.true();
    expect(sandbox.fileExists(['boilerplate-ngx-components', 'src', 'app', 'models', 'user', 'user.ts'])).to.be.true();
  });
  it('error during generation', async () => {
    const sandbox = new Sandbox();
    sandbox.clear();
    // Clone repository first
    const responseNew = await CLI('new', [
      '--dir',
      sandbox.getPath(),
      '--boilerplate',
      'hapijs_tractr',
      '--preset',
      '60104aabe0fe50001033f10e', // User
      '--preset',
      '60104aabe0fe50001033f10f', // Place
      '--project-name',
      'The Name',
      '--project-desc',
      'The Description',
    ]);
    expect(responseNew.stderr).to.be.empty();
    expect(responseNew.code).to.equal(0);
    expect(responseNew.stdout).to.contains('Created 1 new dynamic boilerplate');
    // Introduce an error in file: reference an undefined template variable
    // so generation fails with a SyntaxEvaluationError.
    const path = ['hapify', 'routes', 'model', 'create.js.hpf'];
    const content = sandbox.getFileContent(path);
    const newContent = `${content}\n\n<<@ S f>>\n...\n<<@>>`;
    sandbox.setFileContent(path, newContent);
    // Generate code
    const response = await CLI('generate', ['--dir', sandbox.getPath()]);
    expect(response.stderr).to.contains(['SyntaxEvaluationError', 'S is not defined', 'Column', 'Line', 'File']);
    expect(response.code).to.equal(1);
    expect(response.stdout).to.be.a.string();
  });
  it('empty templates', async () => {
    const sandbox = new Sandbox();
    sandbox.clear();
    // Clone repository first
    const responseNew = await CLI('new', [
      '--dir',
      sandbox.getPath(),
      '--boilerplate',
      'hapijs_tractr',
      '--preset',
      '60104aabe0fe50001033f10e', // User
      '--project-name',
      'The Name',
      '--project-desc',
      'The Description',
    ]);
    expect(responseNew.stderr).to.be.empty();
    expect(responseNew.code).to.equal(0);
    // Empty template: only comments/evaluation blocks, so no file should be
    // written for this route.
    const path = ['hapify', 'routes', 'model', 'create.js.hpf'];
    sandbox.setFileContent(path, '\t\n <<# This is a comment >> <<< function testFunc () {} >>> \n');
    // Generate code
    const response = await CLI('generate', ['--dir', sandbox.getPath()]);
    expect(response.stderr).to.be.empty();
    expect(response.code).to.equal(0);
    expect(response.stdout).to.contains(['Generated', 'files', 'for channel', 'HapiJS']);
    expect(sandbox.fileExists(['routes', 'user', 'create.js'])).to.be.false();
    expect(sandbox.fileExists(['routes', 'user', 'delete.js'])).to.be.true();
    expect(sandbox.fileExists(['cmd', 'setup', 'indexes.json'])).to.be.true();
  });
});
|
//#####################################################################
// Copyright 2009, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Class ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM
//#####################################################################
#include <PhysBAM_Tools/Advection/ADVECTION.h>
#include <PhysBAM_Tools/Grids_Uniform/UNIFORM_GRID_ITERATOR_FACE.h>
#include <PhysBAM_Tools/Grids_Uniform_Interpolation/AVERAGING_UNIFORM.h>
#include <PhysBAM_Geometry/Basic_Geometry/RAY.h>
#include <PhysBAM_Geometry/Grids_Uniform_Advection_Collidable/ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM.h>
#include <PhysBAM_Geometry/Grids_Uniform_Interpolation_Collidable/FACE_LOOKUP_COLLIDABLE_SLIP_UNIFORM.h>
#include <PhysBAM_Geometry/Grids_Uniform_Interpolation_Collidable/LINEAR_INTERPOLATION_COLLIDABLE_FACE_UNIFORM.h>
using namespace PhysBAM;
//#####################################################################
// Constructor
//#####################################################################
// Stores a reference to the externally owned collision-geometry list.
template<class T_GRID,class T_FACE_LOOKUP> ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<T_GRID,T_FACE_LOOKUP>::
ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM(T_GRID_BASED_COLLISION_GEOMETRY& body_list_input)
    :body_list(body_list_input)
{
}
//#####################################################################
// Destructor
//#####################################################################
// Nothing to release: body_list is a non-owning reference.
template<class T_GRID,class T_FACE_LOOKUP> ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<T_GRID,T_FACE_LOOKUP>::
~ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM()
{
}
//#####################################################################
// Function Update_Advection_Equation_Face_Lookup
//#####################################################################
// Semi-Lagrangian advection of face-centered values with collision handling:
// unoccupied faces use plain backtrace + clamped interpolation; faces near
// collision bodies backtrace from an adjacent fluid cell center and either
// take the body's velocity (when the ray hits or ends inside a body) or use
// collidable interpolation.
template<class T_GRID,class T_FACE_LOOKUP> void ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<T_GRID,T_FACE_LOOKUP>::
Update_Advection_Equation_Face_Lookup(const T_GRID& grid,T_FACE_ARRAYS_SCALAR& Z,const T_FACE_LOOKUP& Z_ghost,const T_FACE_LOOKUP& face_velocities,T_BOUNDARY& boundary,const T dt,const T time,
    const T_FACE_LOOKUP* Z_min_ghost,const T_FACE_LOOKUP* Z_max_ghost,T_FACE_ARRAYS_SCALAR* Z_min,T_FACE_ARRAYS_SCALAR* Z_max)
{
    // Min/max clamping variants are not supported by this scheme.
    PHYSBAM_ASSERT(!Z_min_ghost && !Z_max_ghost && !Z_min && !Z_max);
    for(FACE_ITERATOR iterator(grid);iterator.Valid();iterator.Next()){
        TV_INT face=iterator.Face_Index();int axis=iterator.Axis();
        if(!body_list.Occupied_Face_Center(iterator)){
            // Free face: standard semi-Lagrangian backtrace and clamped lookup.
            TV grid_point_location=iterator.Location(),length_and_direction=-dt*averaging.Face_To_Face_Vector(grid,axis,face,face_velocities.Nested()),
                interpolation_point=grid_point_location+length_and_direction;
            Z(axis,face)=linear_interpolation.Clamped_To_Array_Face_Component(axis,grid,Z_ghost.Nested().Starting_Point_Face(axis,face),interpolation_point);}
        else{
            FACE_ITERATOR lookup_iterator(grid,axis,face);
            const typename T_FACE_LOOKUP::LOOKUP& lookup=Z_ghost.Starting_Point_Face(axis,face);
            TV velocity=AVERAGING_UNIFORM<T_GRID,T_FACE_LOOKUP>::Average_Face_To_Face_Vector_Helper(grid,iterator,lookup);
            TV length_and_direction=-dt*velocity;
            // Backtrace from the adjacent cell center that lies in the fluid.
            TV_INT adjacent_cell_center=iterator.First_Cell_Index();
            if((*body_list.outside_fluid)(iterator.First_Cell_Index()))
                adjacent_cell_center=iterator.Second_Cell_Index();
            TV interpolation_point=iterator.Location()+length_and_direction;
            TV cell_center_location=grid.Center(adjacent_cell_center);
            length_and_direction=interpolation_point-cell_center_location;
            RAY<TV> backtrace_ray;COLLISION_GEOMETRY_ID body_id;
            if(RAY<TV>::Create_Non_Degenerate_Ray(cell_center_location,length_and_direction,backtrace_ray) && body_list.Closest_Non_Intersecting_Point_Of_Any_Body(backtrace_ray,body_id)){
                // Ray hits a body: clip the backtrace at the body surface and
                // take the body's velocity component along this axis.
                int aggregate_id=0;
                body_list.collision_geometry_collection.Intersection_Between_Points(cell_center_location,cell_center_location+length_and_direction,body_id,aggregate_id,interpolation_point);
                Z(axis,face)=body_list.Object_Velocity(body_id,aggregate_id,interpolation_point)[axis];}
            else if(body_list.Inside_Any_Body(cell_center_location,body_id)){
                // Starting point is already inside a body: use its pointwise velocity.
                COLLISION_GEOMETRY<TV>& collision_geometry=body_list.collision_geometry_collection(body_id);
                int simplex_id;
                if(!collision_geometry.Has_Volumetric_Geometry())
                    Z(axis,face)=collision_geometry.Pointwise_Object_Velocity(cell_center_location)[axis];
                else if(collision_geometry.Inside_Any_Simplex(cell_center_location,simplex_id))
                    Z(axis,face)=collision_geometry.Pointwise_Object_Velocity(simplex_id,cell_center_location)[axis];
                else
                    PHYSBAM_FATAL_ERROR("Inconsistent inside checks");}
            else{
                // No collision along the backtrace: collidable interpolation.
                const typename T_FACE_LOOKUP::LOOKUP lookup(Z_ghost,Z_ghost.nested_face_lookup);
                Z(axis,face)=linear_interpolation_collidable.From_Block_Face_Component(axis,grid,BLOCK_UNIFORM<T_GRID>(grid,interpolation_point,lookup.Number_Of_Ghost_Cells()),lookup,interpolation_point);}}}
}
//#####################################################################
// Function Average_To_Invalidated_Face
//#####################################################################
// Deliberate no-op stub: the immediate `return` makes the fatal error below
// unreachable, documenting that this scheme does not revalidate faces.
template<class T_GRID,class T_FACE_LOOKUP> void ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<T_GRID,T_FACE_LOOKUP>::
Average_To_Invalidated_Face(const T_GRID& grid,T_FACE_ARRAYS_SCALAR& face_values,T_FACE_ARRAYS_BOOL* faces_not_to_revalidate)
{
    return;
    PHYSBAM_FATAL_ERROR("Not doing this");
}
template class ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<GRID<VECTOR<float,1> >,FACE_LOOKUP_COLLIDABLE_SLIP_UNIFORM<GRID<VECTOR<float,1> >,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,1> > > > >;
template class ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<GRID<VECTOR<float,2> >,FACE_LOOKUP_COLLIDABLE_SLIP_UNIFORM<GRID<VECTOR<float,2> >,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,2> > > > >;
template class ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<GRID<VECTOR<float,3> >,FACE_LOOKUP_COLLIDABLE_SLIP_UNIFORM<GRID<VECTOR<float,3> >,FACE_LOOKUP_UNIFORM<GRID<VECTOR<float,3> > > > >;
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template class ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<GRID<VECTOR<double,1> >,FACE_LOOKUP_COLLIDABLE_SLIP_UNIFORM<GRID<VECTOR<double,1> >,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,1> > > > >;
template class ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<GRID<VECTOR<double,2> >,FACE_LOOKUP_COLLIDABLE_SLIP_UNIFORM<GRID<VECTOR<double,2> >,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,2> > > > >;
template class ADVECTION_SEMI_LAGRANGIAN_COLLIDABLE_FACE_SLIP_UNIFORM<GRID<VECTOR<double,3> >,FACE_LOOKUP_COLLIDABLE_SLIP_UNIFORM<GRID<VECTOR<double,3> >,FACE_LOOKUP_UNIFORM<GRID<VECTOR<double,3> > > > >;
#endif
|
/* eslint-disable no-shadow */
import { execSync, spawn } from "node:child_process";
import { existsSync, lstatSync, readFileSync, writeFileSync } from "node:fs";
import { tmpdir } from "node:os";
import { join } from "node:path";
import { URL } from "node:url";
import { watch } from "chokidar";
import { getType } from "mime";
import { buildWorker } from "../pages/functions/buildWorker";
import { generateConfigFromFileTree } from "../pages/functions/filepath-routing";
import { writeRoutesModule } from "../pages/functions/routes";
import openInBrowser from "./open-in-browser";
import { toUrlPath } from "./paths";
import type { Config } from "../pages/functions/routes";
import type { Headers, Request, fetch } from "@miniflare/core";
import type { BuildResult } from "esbuild";
import type { MiniflareOptions } from "miniflare";
import type { BuilderCallback } from "yargs";
// Defer importing miniflare until we really need it. This takes ~0.5s
// and also modifies some `stream/web` and `undici` prototypes, so we
// don't want to do this if pages commands aren't being called.
// Cleanup callbacks to run before the process exits (e.g. killing spawned
// proxy processes).
const EXIT_CALLBACKS: (() => void)[] = [];
// Central exit helper: optionally logs a message, sets the exit code, runs
// all registered cleanup callbacks, then terminates the process.
const EXIT = (message?: string, code?: number) => {
  if (message) console.log(message);
  if (code) process.exitCode = code;
  EXIT_CALLBACKS.forEach((callback) => callback());
  process.exit(code);
};
// Ensure cleanup also runs on Ctrl-C / termination signals.
process.on("SIGINT", () => EXIT());
process.on("SIGTERM", () => EXIT());
/** True when running on Windows (affects shell spawning and port scanning). */
function isWindows() {
  const { platform } = process;
  return platform === "win32";
}
// How long to wait for a freshly spawned proxy before scanning its port.
const SECONDS_TO_WAIT_FOR_PROXY = 5;
/** Resolve after `ms` milliseconds. */
async function sleep(ms: number) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}
/**
 * Recursively collect `pid` plus the ids of all its descendant processes,
 * using `wmic` on Windows and `pgrep` elsewhere.
 * Best-effort: command failures (e.g. a process with no children) are
 * swallowed and simply stop the recursion at that branch.
 */
function getPids(pid: number) {
  const pids: number[] = [pid];
  let command: string, regExp: RegExp;
  if (isWindows()) {
    command = `wmic process where (ParentProcessId=${pid}) get ProcessId`;
    regExp = new RegExp(/(\d+)/);
  } else {
    command = `pgrep -P ${pid}`;
    regExp = new RegExp(/(\d+)/);
  }
  try {
    // Parse one child pid per output line, then recurse into each child.
    const newPids = (
      execSync(command)
        .toString()
        .split("\n")
        .map((line) => line.match(regExp))
        .filter((line) => line !== null) as RegExpExecArray[]
    ).map((match) => parseInt(match[1]));
    pids.push(...newPids.map(getPids).flat());
  } catch {}
  return pids;
}
/**
 * Find the first TCP port that the given process is LISTENing on, using
 * `netstat` on Windows and `lsof` elsewhere. Returns undefined when no
 * listening port is found or the scan command fails (the error is logged).
 */
function getPort(pid: number) {
  let command: string, regExp: RegExp;
  if (isWindows()) {
    command = "\\windows\\system32\\netstat.exe -nao";
    regExp = new RegExp(`TCP\\s+.*:(\\d+)\\s+.*:\\d+\\s+LISTENING\\s+${pid}`);
  } else {
    command = "lsof -nPi";
    regExp = new RegExp(`${pid}\\s+.*TCP\\s+.*:(\\d+)\\s+\\(LISTEN\\)`);
  }
  try {
    const matches = execSync(command)
      .toString()
      .split("\n")
      .map((line) => line.match(regExp))
      .filter((line) => line !== null) as RegExpExecArray[];
    // Take the first matching listener line, if any.
    const match = matches[0];
    if (match) return parseInt(match[1]);
  } catch (thrown) {
    console.error(
      `Error scanning for ports of process with PID ${pid}: ${thrown}`
    );
  }
}
/**
 * Spawn the user's dev-server command as a child "proxy" process and
 * determine the port it listens on. Returns the port number, or exits the
 * whole process (via EXIT) when no command is given or no port can be found.
 */
async function spawnProxyProcess({
  port,
  command,
}: {
  port?: number;
  command: (string | number)[];
}): Promise<void | number> {
  if (command.length === 0)
    return EXIT(
      "Must specify a directory of static assets to serve or a command to run.",
      1
    );
  console.log(`Running ${command.join(" ")}...`);
  const proxy = spawn(
    command[0].toString(),
    command.slice(1).map((value) => value.toString()),
    {
      shell: isWindows(),
      env: {
        BROWSER: "none",
        ...process.env,
      },
    }
  );
  // Make sure the proxy is killed when this process exits.
  EXIT_CALLBACKS.push(() => {
    proxy.kill();
  });
  proxy.stdout.on("data", (data) => {
    console.log(`[proxy]: ${data}`);
  });
  proxy.stderr.on("data", (data) => {
    console.error(`[proxy]: ${data}`);
  });
  proxy.on("close", (code) => {
    console.error(`Proxy exited with status ${code}.`);
  });
  // Wait for proxy process to start...
  // NOTE(review): this busy-wait never yields to the event loop; `spawn`
  // assigns `pid` synchronously on success, but if it were ever falsy this
  // loop would hang forever -- confirm the intended failure behavior.
  while (!proxy.pid) {}
  if (port === undefined) {
    // No port given: wait for the server to boot, then scan the child
    // process tree for the first listening port.
    console.log(
      `Sleeping ${SECONDS_TO_WAIT_FOR_PROXY} seconds to allow proxy process to start before attempting to automatically determine port...`
    );
    console.log("To skip, specify the proxy port with --proxy.");
    await sleep(SECONDS_TO_WAIT_FOR_PROXY * 1000);
    port = getPids(proxy.pid)
      .map(getPort)
      .filter((port) => port !== undefined)[0];
    if (port === undefined) {
      return EXIT(
        "Could not automatically determine proxy port. Please specify the proxy port with --proxy.",
        1
      );
    } else {
      console.log(`Automatically determined the proxy port to be ${port}.`);
    }
  }
  return port;
}
/**
 * Escape all regular-expression metacharacters in `str` so it can be embedded
 * in a RegExp as a literal.
 * Fix: the previous character class `[-/\\^$*+?.()|[]{}]` was terminated
 * early by the unescaped `]`, so the pattern actually matched a metacharacter
 * followed by the literal sequence `{}]` and escaped essentially nothing.
 */
function escapeRegex(str: string) {
  return str.replace(/[.*+?^${}()|[\]\\\-\/]/g, "\\$&");
}
type Replacements = Record<string, string>;

/** Substitute each `:name` placeholder in `str` with its replacement value. */
function replacer(str: string, replacements: Replacements) {
  return Object.entries(replacements).reduce(
    (result, [placeholder, value]) => result.replace(`:${placeholder}`, value),
    str
  );
}
/**
 * Compile `_headers`/`_redirects`-style rules (keys may contain `*` splats
 * and `:placeholder` segments, optionally with an `https://host` prefix)
 * into a matcher. The returned function takes a request and yields the
 * `match` values of every rule that applies, with placeholders substituted
 * via `replacer`.
 */
function generateRulesMatcher<T>(
  rules?: Record<string, T>,
  replacer: (match: T, replacements: Replacements) => T = (match) => match
) {
  // TODO: How can you test cross-host rules?
  if (!rules) return () => [];
  const compiledRules = Object.entries(rules)
    .map(([rule, match]) => {
      const crossHost = rule.startsWith("https://");
      // `*` becomes a named splat group; the rule is regex-escaped around it.
      rule = rule.split("*").map(escapeRegex).join("(?<splat>.*)");
      // `:name` placeholders in the host part match one dot-free label...
      const host_matches = rule.matchAll(
        /(?<=^https:\\\/\\\/[^/]*?):([^\\]+)(?=\\)/g
      );
      for (const match of host_matches) {
        rule = rule.split(match[0]).join(`(?<${match[1]}>[^/.]+)`);
      }
      // ...while `:name` placeholders in the path match one path segment.
      const path_matches = rule.matchAll(/:(\w+)/g);
      for (const match of path_matches) {
        rule = rule.split(match[0]).join(`(?<${match[1]}>[^/]+)`);
      }
      rule = "^" + rule + "$";
      try {
        const regExp = new RegExp(rule);
        return [{ crossHost, regExp }, match];
      } catch {}
    })
    .filter((value) => value !== undefined) as [
    { crossHost: boolean; regExp: RegExp },
    T
  ][];
  return ({ request }: { request: Request }) => {
    const { pathname, host } = new URL(request.url);
    return compiledRules
      .map(([{ crossHost, regExp }, match]) => {
        // Cross-host rules match against the full origin+path; others
        // match the pathname only.
        const test = crossHost ? `https://${host}${pathname}` : pathname;
        const result = regExp.exec(test);
        if (result) {
          return replacer(match, result.groups || {});
        }
      })
      .filter((value) => value !== undefined) as T[];
  };
}
/**
 * Parse a `_headers` file into a request matcher. The file format is:
 * a URL/path line starts a rule; subsequent `Name: value` lines attach
 * headers to it (repeated names are comma-joined). Returns a function that
 * yields the matched header sets for a request, or undefined when the file
 * does not exist.
 * NOTE(review): relies on `validateURL`, defined elsewhere in this file.
 */
function generateHeadersMatcher(headersFile: string) {
  if (existsSync(headersFile)) {
    const contents = readFileSync(headersFile).toString();
    // TODO: Log errors
    // Strip blank lines and `#` comments.
    const lines = contents
      .split("\n")
      .map((line) => line.trim())
      .filter((line) => !line.startsWith("#") && line !== "");
    const rules: Record<string, Record<string, string>> = {};
    let rule: { path: string; headers: Record<string, string> } | undefined =
      undefined;
    for (const line of lines) {
      // A line starting with a scheme or `/` begins a new rule; flush the
      // previous one if it collected any headers.
      if (/^([^\s]+:\/\/|^\/)/.test(line)) {
        if (rule && Object.keys(rule.headers).length > 0) {
          rules[rule.path] = rule.headers;
        }
        const path = validateURL(line);
        if (path) {
          rule = {
            path,
            headers: {},
          };
          continue;
        }
      }
      // Otherwise treat the line as `Name: value` belonging to the current rule.
      if (!line.includes(":")) continue;
      const [rawName, ...rawValue] = line.split(":");
      const name = rawName.trim().toLowerCase();
      const value = rawValue.join(":").trim();
      if (name === "") continue;
      if (!rule) continue;
      // Repeated header names accumulate as a comma-separated list.
      const existingValues = rule.headers[name];
      rule.headers[name] = existingValues
        ? `${existingValues}, ${value}`
        : value;
    }
    // Flush the final rule.
    if (rule && Object.keys(rule.headers).length > 0) {
      rules[rule.path] = rule.headers;
    }
    const rulesMatcher = generateRulesMatcher(rules, (match, replacements) =>
      Object.fromEntries(
        Object.entries(match).map(([name, value]) => [
          name,
          replacer(value, replacements),
        ])
      )
    );
    return (request: Request) => {
      const matches = rulesMatcher({
        request,
      });
      if (matches) return matches;
    };
  } else {
    return () => undefined;
  }
}
/**
 * Parses a Pages `_redirects` file into a matcher which, given a
 * request, returns the first matching `{ to, status }` redirect, if any.
 *
 * File format: `FROM TO [STATUS]` per line; `#`-comments and blank lines
 * are ignored. FROM must be relative; TO keeps its query and fragment.
 * STATUS must be one of 301/302/303/307/308 (an invalid status drops the
 * rule; a missing or non-numeric one defaults to 302).
 */
function generateRedirectsMatcher(redirectsFile: string) {
  if (existsSync(redirectsFile)) {
    const contents = readFileSync(redirectsFile).toString();

    // TODO: Log errors
    const lines = contents
      .split("\n")
      .map((line) => line.trim())
      .filter((line) => !line.startsWith("#") && line !== "");

    const rules = Object.fromEntries(
      lines
        .map((line) => line.split(" "))
        .filter((tokens) => tokens.length === 2 || tokens.length === 3)
        .map((tokens) => {
          const from = validateURL(tokens[0], true, false, false);
          const to = validateURL(tokens[1], false, true, true);
          let status: number | undefined = parseInt(tokens[2]) || 302;
          // Only the standard redirect statuses are accepted.
          status = [301, 302, 303, 307, 308].includes(status)
            ? status
            : undefined;

          return from && to && status ? [from, { to, status }] : undefined;
        })
        .filter((rule) => rule !== undefined) as [
        string,
        { to: string; status?: number }
      ][]
    );

    // The redirect target may contain `:placeholder`s captured by FROM.
    const rulesMatcher = generateRulesMatcher(
      rules,
      ({ status, to }, replacements) => ({
        status,
        to: replacer(to, replacements),
      })
    );

    return (request: Request) => {
      // Only the first matching rule wins.
      const match = rulesMatcher({
        request,
      })[0];
      if (match) return match;
    };
  } else {
    // No `_redirects` file: match nothing.
    return () => undefined;
  }
}
/**
 * Normalizes `path` to a root-relative pathname, optionally retaining
 * its query string and/or fragment.
 */
function extractPathname(
  path = "/",
  includeSearch: boolean,
  includeHash: boolean
) {
  const rooted = path.startsWith("/") ? path : `/${path}`;
  // Parse against a dummy non-special scheme so the URL parser can
  // normalize the path without needing a real origin.
  const { pathname, search, hash } = new URL(`//${rooted}`, "relative://");
  let result = pathname;
  if (includeSearch) result += search;
  if (includeHash) result += hash;
  return result;
}
/**
 * Validates and normalizes a `_headers`/`_redirects` URL token.
 *
 * - Absolute `https://` URLs are returned normalized — unless
 *   `onlyRelative` is set, in which case `undefined` is returned.
 * - Relative paths are normalized via `extractPathname`; when
 *   `onlyRelative` is set, a missing leading `/` is tolerated.
 * - Anything else returns `""` (falsy — callers only truthiness-check
 *   the result, so `""` and `undefined` behave alike).
 */
function validateURL(
  token: string,
  onlyRelative = false,
  includeSearch = false,
  includeHash = false
) {
  // `\/+` tolerates extra slashes after the scheme (e.g. `https:///x`).
  const host = /^https:\/\/+(?<host>[^/]+)\/?(?<path>.*)/.exec(token);
  if (host && host.groups && host.groups.host) {
    // Absolute URLs are rejected where only relative ones are allowed.
    if (onlyRelative) return;

    return `https://${host.groups.host}${extractPathname(
      host.groups.path,
      includeSearch,
      includeHash
    )}`;
  } else {
    if (!token.startsWith("/") && onlyRelative) token = `/${token}`;
    const path = /^\//.exec(token);
    if (path) {
      try {
        return extractPathname(token, includeSearch, includeHash);
      } catch {}
      // Unparseable paths fall through to the empty-string failure case.
    }
  }
  return "";
}
/**
 * True when the final path segment appears to carry a file extension
 * (e.g. `/assets/logo.png`); the extension match is case-insensitive.
 */
function hasFileExtension(pathname: string) {
  const lastSegmentHasExtension = /\/.+\.[a-z0-9]+$/i;
  return lastSegmentHasExtension.test(pathname);
}
/**
 * Builds a `fetch`-compatible function that serves static assets from
 * `directory`, honouring `_redirects` and `_headers` control files and
 * emulating Pages' HTML routing (clean URLs, trailing-slash handling,
 * nearest-404.html fallback).
 */
async function generateAssetsFetch(directory: string): Promise<typeof fetch> {
  // Defer importing miniflare until we really need it
  const { Headers, Request, Response } = await import("@miniflare/core");

  const headersFile = join(directory, "_headers");
  const redirectsFile = join(directory, "_redirects");
  const workerFile = join(directory, "_worker.js");

  // Reserved Pages control files are never served as static assets.
  const ignoredFiles = [headersFile, redirectsFile, workerFile];

  // True when `path` (relative to `directory`) is a servable file.
  const assetExists = (path: string) => {
    path = join(directory, path);
    return (
      existsSync(path) &&
      lstatSync(path).isFile() &&
      !ignoredFiles.includes(path)
    );
  };

  // Absolute path of the asset, or `undefined` when it is not servable.
  const getAsset = (path: string) => {
    if (assetExists(path)) {
      return join(directory, path);
    }
  };

  let redirectsMatcher = generateRedirectsMatcher(redirectsFile);
  let headersMatcher = generateHeadersMatcher(headersFile);

  // Recompile the matchers whenever a control file changes on disk.
  watch([headersFile, redirectsFile], {
    persistent: true,
  }).on("change", (path) => {
    switch (path) {
      case headersFile: {
        console.log("_headers modified. Re-evaluating...");
        headersMatcher = generateHeadersMatcher(headersFile);
        break;
      }
      case redirectsFile: {
        console.log("_redirects modified. Re-evaluating...");
        redirectsMatcher = generateRedirectsMatcher(redirectsFile);
        break;
      }
    }
  });

  // NOTE(review): reads the whole file into memory per request — fine
  // for local dev; no caching is attempted.
  const serveAsset = (file: string) => {
    return readFileSync(file);
  };

  // Routing cascade: redirects, method check, then the asset lookups.
  const generateResponse = (request: Request) => {
    const url = new URL(request.url);

    const deconstructedResponse: {
      status: number;
      headers: Headers;
      body?: Buffer;
    } = {
      status: 200,
      headers: new Headers(),
      body: undefined,
    };

    // 1. `_redirects` rules take priority over everything else.
    const match = redirectsMatcher(request);
    if (match) {
      const { status, to } = match;

      let location = to;
      let search;

      if (to.startsWith("/")) {
        search = new URL(location, "http://fakehost").search;
      } else {
        search = new URL(location).search;
      }

      // Carry the request's query string over only when the redirect
      // target does not specify its own.
      location = `${location}${search ? "" : url.search}`;

      if (status && [301, 302, 303, 307, 308].includes(status)) {
        deconstructedResponse.status = status;
      } else {
        deconstructedResponse.status = 302;
      }

      deconstructedResponse.headers.set("Location", location);
      return deconstructedResponse;
    }

    // 2. Static assets are read-only: only GET/HEAD are allowed.
    if (!request.method?.match(/^(get|head)$/i)) {
      deconstructedResponse.status = 405;
      return deconstructedResponse;
    }

    // Serve the nearest ancestor 404.html; fall back to /index.html
    // (SPA-style), else a bare 404. Mutates `deconstructedResponse`
    // and the shared `asset` variable declared below.
    const notFound = () => {
      let cwd = url.pathname;

      while (cwd) {
        cwd = cwd.slice(0, cwd.lastIndexOf("/"));

        if ((asset = getAsset(`${cwd}/404.html`))) {
          deconstructedResponse.status = 404;
          deconstructedResponse.body = serveAsset(asset);
          deconstructedResponse.headers.set(
            "Content-Type",
            getType(asset) || "application/octet-stream"
          );
          return deconstructedResponse;
        }
      }

      if ((asset = getAsset(`/index.html`))) {
        deconstructedResponse.body = serveAsset(asset);
        deconstructedResponse.headers.set(
          "Content-Type",
          getType(asset) || "application/octet-stream"
        );
        return deconstructedResponse;
      }

      deconstructedResponse.status = 404;
      return deconstructedResponse;
    };

    let asset;

    // 3. Trailing-slash URLs: serve `<dir>/index.html`, or redirect
    // `<name>/` -> `<name>` when `<name>.html` exists instead.
    if (url.pathname.endsWith("/")) {
      if ((asset = getAsset(`${url.pathname}/index.html`))) {
        deconstructedResponse.body = serveAsset(asset);
        deconstructedResponse.headers.set(
          "Content-Type",
          getType(asset) || "application/octet-stream"
        );
        return deconstructedResponse;
      } else if (
        (asset = getAsset(`${url.pathname.replace(/\/$/, ".html")}`))
      ) {
        deconstructedResponse.status = 301;
        deconstructedResponse.headers.set(
          "Location",
          `${url.pathname.slice(0, -1)}${url.search}`
        );
        return deconstructedResponse;
      }
    }

    // 4. Canonicalize `/foo/index` -> `/foo/`.
    if (url.pathname.endsWith("/index")) {
      deconstructedResponse.status = 301;
      deconstructedResponse.headers.set(
        "Location",
        `${url.pathname.slice(0, -"index".length)}${url.search}`
      );
      return deconstructedResponse;
    }

    // 5. Exact file match. `.html` files are only served at their full
    // path when the extensionless path would collide with a real asset
    // (or is the root); otherwise redirect to the clean URL.
    if ((asset = getAsset(url.pathname))) {
      if (url.pathname.endsWith(".html")) {
        const extensionlessPath = url.pathname.slice(0, -".html".length);
        if (getAsset(extensionlessPath) || extensionlessPath === "/") {
          deconstructedResponse.body = serveAsset(asset);
          deconstructedResponse.headers.set(
            "Content-Type",
            getType(asset) || "application/octet-stream"
          );
          return deconstructedResponse;
        } else {
          deconstructedResponse.status = 301;
          deconstructedResponse.headers.set(
            "Location",
            `${extensionlessPath}${url.search}`
          );
          return deconstructedResponse;
        }
      } else {
        deconstructedResponse.body = serveAsset(asset);
        deconstructedResponse.headers.set(
          "Content-Type",
          getType(asset) || "application/octet-stream"
        );
        return deconstructedResponse;
      }
    } else if (hasFileExtension(url.pathname)) {
      // Looks like a concrete file but it does not exist.
      notFound();
      return deconstructedResponse;
    }

    // 6. Extensionless routes: try `<path>.html`, then redirect to the
    // directory form when `<path>/index.html` exists, else 404.
    if ((asset = getAsset(`${url.pathname}.html`))) {
      deconstructedResponse.body = serveAsset(asset);
      deconstructedResponse.headers.set(
        "Content-Type",
        getType(asset) || "application/octet-stream"
      );
      return deconstructedResponse;
    }

    if ((asset = getAsset(`${url.pathname}/index.html`))) {
      deconstructedResponse.status = 301;
      deconstructedResponse.headers.set(
        "Location",
        `${url.pathname}/${url.search}`
      );
      return deconstructedResponse;
    } else {
      notFound();
      return deconstructedResponse;
    }
  };

  // Merge `_headers` matches into the generated response; rule-supplied
  // headers override same-named generated ones.
  const attachHeaders = (
    request: Request,
    deconstructedResponse: { status: number; headers: Headers; body?: Buffer }
  ) => {
    const headers = deconstructedResponse.headers;
    const newHeaders = new Headers({});
    const matches = headersMatcher(request) || [];

    matches.forEach((match) => {
      Object.entries(match).forEach(([name, value]) => {
        newHeaders.append(name, `${value}`);
      });
    });

    const combinedHeaders = {
      ...Object.fromEntries(headers.entries()),
      ...Object.fromEntries(newHeaders.entries()),
    };

    deconstructedResponse.headers = new Headers({});
    Object.entries(combinedHeaders).forEach(([name, value]) => {
      // Drop empty values so rules can effectively delete a header.
      if (value) deconstructedResponse.headers.set(name, value);
    });
  };

  // The returned function mirrors the `fetch` signature.
  return async (input, init) => {
    const request = new Request(input, init);
    const deconstructedResponse = generateResponse(request);
    attachHeaders(request, deconstructedResponse);

    const headers = new Headers();

    [...deconstructedResponse.headers.entries()].forEach(([name, value]) => {
      if (value) headers.set(name, value);
    });

    return new Response(deconstructedResponse.body, {
      headers,
      status: deconstructedResponse.status,
    });
  };
}
// Builders started by previous invocations, stopped before each rebuild
// so that watch-mode rebuilds do not stack up.
const RUNNING_BUILDERS: BuildResult[] = [];

/**
 * Compiles the Pages Functions under `functionsDirectory` into a single
 * Worker script written to `scriptPath`.
 *
 * Derives the route configuration from the directory's file tree,
 * optionally writes it to `outputConfigPath`, generates a routes module
 * in the temp dir, and hands both to the esbuild-based worker builder.
 * `onEnd` fires after each (re)build when `watch` is enabled.
 */
async function buildFunctions({
  scriptPath,
  outputConfigPath,
  functionsDirectory,
  minify = false,
  sourcemap = false,
  fallbackService = "ASSETS",
  watch = false,
  onEnd,
}: {
  scriptPath: string;
  outputConfigPath?: string;
  functionsDirectory: string;
  minify?: boolean;
  sourcemap?: boolean;
  fallbackService?: string;
  watch?: boolean;
  onEnd?: () => void;
}) {
  // Stop any still-running builders from earlier invocations.
  RUNNING_BUILDERS.forEach(
    (runningBuilder) => runningBuilder.stop && runningBuilder.stop()
  );

  const routesModule = join(tmpdir(), "./functionsRoutes.mjs");
  const baseURL = toUrlPath("/");

  const config: Config = await generateConfigFromFileTree({
    baseDir: functionsDirectory,
    baseURL,
  });

  if (outputConfigPath) {
    writeFileSync(
      outputConfigPath,
      JSON.stringify({ ...config, baseURL }, null, 2)
    );
  }

  await writeRoutesModule({
    config,
    srcDir: functionsDirectory,
    outfile: routesModule,
  });

  RUNNING_BUILDERS.push(
    await buildWorker({
      routesModule,
      outfile: scriptPath,
      minify,
      sourcemap,
      fallbackService,
      watch,
      onEnd,
    })
  );
}
/**
 * Registers the `wrangler pages` command tree:
 *   - `pages dev [directory] [-- command]` — run a local dev server
 *     (Miniflare) serving static assets and/or Pages Functions.
 *   - `pages functions build [directory]` — (hidden) compile a folder of
 *     Functions into a single Worker script.
 */
export const pages: BuilderCallback<unknown, unknown> = (yargs) => {
  return yargs
    .command(
      "dev [directory] [-- command]",
      "🧑💻 Develop your full-stack Pages application locally",
      (yargs) => {
        return yargs
          .positional("directory", {
            type: "string",
            demandOption: undefined,
            description: "The directory of static assets to serve",
          })
          .positional("command", {
            type: "string",
            demandOption: undefined,
            description: "The proxy command to run",
          })
          .options({
            local: {
              type: "boolean",
              default: true,
              description: "Run on my machine",
            },
            port: {
              type: "number",
              default: 8788,
              description: "The port to listen on (serve from)",
            },
            proxy: {
              type: "number",
              description:
                "The port to proxy (where the static assets are served)",
            },
            "script-path": {
              type: "string",
              default: "_worker.js",
              description:
                "The location of the single Worker script if not using functions",
            },
            binding: {
              type: "array",
              description: "Bind variable/secret (KEY=VALUE)",
              alias: "b",
            },
            kv: {
              type: "array",
              description: "KV namespace to bind",
              alias: "k",
            },
            do: {
              type: "array",
              description: "Durable Object to bind (NAME=CLASS)",
              alias: "o",
            },
            "live-reload": {
              type: "boolean",
              default: false,
              description: "Auto reload HTML pages when change is detected",
            },
            // TODO: Miniflare user options
          });
      },
      async ({
        local,
        directory,
        port,
        proxy: requestedProxyPort,
        "script-path": singleWorkerScriptPath,
        binding: bindings = [],
        kv: kvs = [],
        do: durableObjects = [],
        "live-reload": liveReload,
        _: [_pages, _dev, ...remaining],
      }) => {
        if (!local) {
          console.error("Only local mode is supported at the moment.");
          return;
        }

        const functionsDirectory = "./functions";
        const usingFunctions = existsSync(functionsDirectory);

        // Everything after `--` is the static-asset proxy command.
        const command = remaining as (string | number)[];

        let proxyPort: number | void;

        // With no asset directory, spawn the user's own dev server and
        // proxy asset requests to it instead of serving from disk.
        if (directory === undefined) {
          proxyPort = await spawnProxyProcess({
            port: requestedProxyPort,
            command,
          });
          if (proxyPort === undefined) return undefined;
        }

        let miniflareArgs: MiniflareOptions = {};

        // Resolved once the Worker script is built and ready to load.
        let scriptReadyResolve: () => void;
        const scriptReadyPromise = new Promise<void>(
          (resolve) => (scriptReadyResolve = resolve)
        );

        if (usingFunctions) {
          const scriptPath = join(tmpdir(), "./functionsWorker.js");

          console.log(`Compiling worker to "${scriptPath}"...`);

          await buildFunctions({
            scriptPath,
            functionsDirectory,
            sourcemap: true,
            watch: true,
            onEnd: () => scriptReadyResolve(),
          });

          // Rebuild the Functions Worker on any change under ./functions.
          watch([functionsDirectory], {
            persistent: true,
            ignoreInitial: true,
          }).on("all", async () => {
            await buildFunctions({
              scriptPath,
              functionsDirectory,
              sourcemap: true,
              watch: true,
              onEnd: () => scriptReadyResolve(),
            });
          });

          miniflareArgs = {
            scriptPath,
          };
        } else {
          // No Functions build to wait for.
          // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
          scriptReadyResolve!();

          const scriptPath =
            directory !== undefined
              ? join(directory, singleWorkerScriptPath)
              : singleWorkerScriptPath;

          if (existsSync(scriptPath)) {
            // Advanced mode: a user-provided single `_worker.js`.
            miniflareArgs = {
              scriptPath,
            };
          } else {
            // No user Worker at all: shim one that passes every request
            // straight through to the static-asset service binding.
            console.log("No functions. Shimming...");
            miniflareArgs = {
              // TODO: The fact that these request/response hacks are necessary is ridiculous.
              // We need to eliminate them from env.ASSETS.fetch (not sure if just local or prod as well)
              script: `
export default {
  async fetch(request, env, context) {
    const response = await env.ASSETS.fetch(request.url, request)
    return new Response(response.body, response)
  }
}`,
            };
          }
        }

        // Defer importing miniflare until we really need it
        const { Miniflare, Log, LogLevel } = await import("miniflare");
        const { Response, fetch } = await import("@miniflare/core");

        // Wait for esbuild to finish building before starting Miniflare.
        // This must be before the call to `new Miniflare`, as that will
        // asynchronously start loading the script. `await startServer()`
        // internally just waits for that promise to resolve.
        await scriptReadyPromise;

        // `assetsFetch()` will only be called if there is `proxyPort` defined.
        // We only define `proxyPort`, above, when there is no `directory` defined.
        const assetsFetch =
          directory !== undefined
            ? await generateAssetsFetch(directory)
            : invalidAssetsFetch;

        const miniflare = new Miniflare({
          port,
          watch: true,
          modules: true,

          log: new Log(LogLevel.ERROR, { prefix: "pages" }),
          logUnhandledRejections: true,
          sourceMap: true,

          kvNamespaces: kvs.map((kv) => kv.toString()),

          durableObjects: Object.fromEntries(
            durableObjects.map((durableObject) =>
              durableObject.toString().split("=")
            )
          ),

          // User bindings
          bindings: {
            ...Object.fromEntries(
              bindings.map((binding) => binding.toString().split("="))
            ),
          },

          // env.ASSETS.fetch
          serviceBindings: {
            async ASSETS(request: Request) {
              if (proxyPort) {
                // Proxy mode: forward to the user's dev server.
                try {
                  const url = new URL(request.url);
                  url.host = `localhost:${proxyPort}`;
                  return await fetch(url, request);
                } catch (thrown) {
                  console.error(`Could not proxy request: ${thrown}`);

                  // TODO: Pretty error page
                  return new Response(
                    `[wrangler] Could not proxy request: ${thrown}`,
                    { status: 502 }
                  );
                }
              } else {
                // Directory mode: serve from disk.
                try {
                  return await assetsFetch(request);
                } catch (thrown) {
                  console.error(`Could not serve static asset: ${thrown}`);

                  // TODO: Pretty error page
                  return new Response(
                    `[wrangler] Could not serve static asset: ${thrown}`,
                    { status: 502 }
                  );
                }
              }
            },
          },

          kvPersist: true,
          durableObjectsPersist: true,
          cachePersist: true,
          liveReload,

          ...miniflareArgs,
        });

        try {
          // `startServer` might throw if user code contains errors
          const server = await miniflare.startServer();
          console.log(`Serving at http://localhost:${port}/`);

          if (process.env.BROWSER !== "none") {
            await openInBrowser(`http://localhost:${port}/`);
          }

          // Reload the Worker when any static asset changes.
          if (directory !== undefined && liveReload) {
            watch([directory], {
              persistent: true,
              ignoreInitial: true,
            }).on("all", async () => {
              await miniflare.reload();
            });
          }

          EXIT_CALLBACKS.push(() => {
            server.close();
            miniflare.dispose().catch((err) => miniflare.log.error(err));
          });
        } catch (e) {
          miniflare.log.error(e as Error);
          EXIT("Could not start Miniflare.", 1);
        }
      }
    )
    .command("functions", false, (yargs) =>
      // we hide this command from help output because
      // it's not meant to be used directly right now
      yargs.command(
        "build [directory]",
        "Compile a folder of Cloudflare Pages Functions into a single Worker",
        (yargs) =>
          yargs
            .positional("directory", {
              type: "string",
              default: "functions",
              description: "The directory of Pages Functions",
            })
            .options({
              "script-path": {
                type: "string",
                default: "_worker.js",
                description: "The location of the output Worker script",
              },
              "output-config-path": {
                type: "string",
                description: "The location for the output config file",
              },
              minify: {
                type: "boolean",
                default: false,
                description: "Minify the output Worker script",
              },
              sourcemap: {
                type: "boolean",
                default: false,
                description:
                  "Generate a sourcemap for the output Worker script",
              },
              "fallback-service": {
                type: "string",
                default: "ASSETS",
                description:
                  "The service to fallback to at the end of the `next` chain. Setting to '' will fallback to the global `fetch`.",
              },
              watch: {
                type: "boolean",
                default: false,
                description:
                  "Watch for changes to the functions and automatically rebuild the Worker script",
              },
            }),
        async ({
          directory,
          "script-path": scriptPath,
          "output-config-path": outputConfigPath,
          minify,
          sourcemap,
          fallbackService,
          watch,
        }) => {
          await buildFunctions({
            scriptPath,
            outputConfigPath,
            functionsDirectory: directory,
            minify,
            sourcemap,
            fallbackService,
            watch,
          });
        }
      )
    );
};
/**
 * Placeholder asset fetcher used when requests should be proxied instead
 * of served from disk; calling it indicates a routing logic error.
 */
const invalidAssetsFetch: typeof fetch = () => {
  const message =
    "Trying to fetch assets directly when there is no `directory` option specified, and not in `local` mode.";
  throw new Error(message);
};
|
// Get the app instance.
var app = getApp();

/**
 * Merchant print-order list page: filters orders by state tab and date
 * range, with pull-up pagination.
 */
Page({
  data: {
    imageCtx: app.globalData.imageCtx,
    start: '',
    end: '',
    list: [],
    page: 1,
    // Order-state filter tabs (names are user-facing strings).
    stateList: [
      { state: 1, name: '接单中' },
      { state: 2, name: '生产中' },
      { state: 3, name: '生产完成' },
      { state: 4, name: '已发货' },
      { state: 5, name: '已完成' },
      { state: 6, name: '已取消' },
      { state: 7, name: '不接单' },
    ],
    state: 1,
    hidden: false,
    emptyShow: false,
    hasMore: true,
    scrollTop: 0,
    scrollHeight: 0
  },
  onLoad: function () {
    var that = this;
    // Size the scroll view to the window height.
    wx.getSystemInfo({
      success: function (res) {
        that.setData({
          scrollHeight: res.windowHeight
        });
      }
    });
    that.getOrders();
  },
  scroll: function (e) {
    // Bound to the page's scroll event: record the current scroll
    // offset so the view can be repositioned after data loads.
    this.setData({
      scrollTop: e.detail.scrollTop
    });
  },
  search: function (e) {
    this.setData({
      search: e.detail.value
    })
  },
  getOrders: function (e) {
    var that = this;
    // When invoked from a state-tab tap, switch the active state filter.
    if (e != undefined) {
      var state = e.currentTarget.dataset.state;
      if (state != undefined) {
        that.setData({
          state: state
        })
      }
    }
    // Reset pagination and reload from the first page.
    that.setData({
      list: [],
      page: 1,
      emptyShow: false,
      hidden: false,
      scrollTop: 0
    })
    that.commonSearch(that, 'one');
  },
  bindDownLoad: function () {
    // Pull-up: fetch the next page.
    var that = this;
    that.commonSearch(that, 'more');
  },
  goTop: function () {
    this.setData({
      scrollTop: 0
    });
  },
  bindStartChange: function (e) {
    this.setData({
      start: e.detail.value
    })
  },
  bindEndChange: function (e) {
    this.setData({
      end: e.detail.value
    })
  },
  commonSearch: function (that, difference) {
    var params = {}, page = that.data.page, adminObj = app.globalData.adminObj;
    params.state = that.data.state;
    params.start = that.data.start;
    params.end = that.data.end;
    params.pageNumber = page;
    // Admin credentials accompany every request.
    // Fix: `<PASSWORD>` was a redaction placeholder and not valid
    // JavaScript; send the stored admin password like the other
    // adminObj credential fields.
    params.phone = adminObj.phone;
    params.password = adminObj.password;
    params.sessionId = adminObj.sessionId;
    wx.request({
      url: app.globalData.requestUrl + "weixinMerchant/getPrintOrders",
      data: params,
      success: function (res) {
        that.setData({
          hidden: true
        })
        if (res.data.code == '0') {
          var mapResults = res.data.mapResults;
          that.setData({
            start: mapResults.start,
            end: mapResults.end
          })
          var reqList = mapResults.orders.list;
          if (reqList != null && reqList.length > 0) {
            var listNew = that.data.list.concat(reqList);
            that.setData({
              list: listNew,
              page: page + 1,
              hasMore: true
            })
          } else {
            that.setData({
              hasMore: false
            })
            // 'one' = state-tab switch, 'more' = pull-up load.
            if (difference == 'one') {
              that.setData({
                emptyShow: true
              })
            } else {
              that.setData({
                emptyShow: false
              })
            }
          }
        } else {
          app.noLogin(res.data.msg);
        }
      },
      fail: function (res) {
        that.setData({
          hidden: true
        })
        app.warning("服务器无响应");
      }
    })
  }
})
|
#include "libft.h"
/*
** ft_strndup - duplicate at most n characters of str into a freshly
** allocated, NUL-terminated string.
**
** Returns the new string, or NULL if allocation fails. The copy is
** zero-filled by ft_calloc, so the result is always NUL-terminated.
*/
char	*ft_strndup(const char *str, size_t n)
{
	char	*temp;

	/* Allocate n characters plus one for the terminating NUL. The
	** previous `sizeof(char) * n + 1` only allocated the right amount
	** because sizeof(char) == 1; parenthesizing (n + 1) keeps the
	** intent correct if the element type ever changes. */
	temp = (char *)ft_calloc(1, sizeof(char) * (n + 1));
	if (temp == NULL)
		return (NULL);
	ft_strlcpy(temp, str, n + 1);
	return (temp);
}
|
# Write the Streamlit credentials and server config for the deploy
# environment (the listening port comes from the platform's $PORT).
mkdir -p ~/.streamlit/

# Fix: POSIX leaves `echo`'s handling of backslash escapes unspecified —
# under bash the previous `echo "...\n..."` wrote literal `\n` characters
# into the TOML files. printf emits one real line per argument instead.
printf '%s\n' \
  "[general]" \
  "email = \"quimicokalfa@gmail.com\"" \
  > ~/.streamlit/credentials.toml

printf '%s\n' \
  "[server]" \
  "headless = true" \
  "enableCORS=false" \
  "port= $PORT" \
  > ~/.streamlit/config.toml
|
#! /bin/sh
# CI setup: install SDK credentials and the IBM Watson Unity SDK into a
# scratch Unity test project, then relocate the integration test script.
#
# Fixes: (1) the "Creating .../Editor/" failure branch previously fell
# through without exiting, so the script ended with status 0 on failure;
# (2) `$?` inside the echo strings reported the status of the preceding
# `[` test rather than the command — each status is now captured into a
# variable immediately after the command it belongs to.
project="unity-sdk-travis"

echo "Attempting to install credentials"
git clone https://$CREDENTIALS_GITHUB_TOKEN@github.ibm.com/germanatt/sdk-credentials.git Travis/sdk-credentials/
status=$?
if [ $status = 0 ] ; then
	echo "Credentials install SUCCEEDED! Exited with $status"
else
	echo "Credentials install FAILED! Exited with $status"
	exit 1
fi

echo "Attempting to install IBM Watson SDK for Unity into the test project..."
mkdir -p Travis/UnityTestProject/Assets/Watson/
git clone https://github.com/watson-developer-cloud/unity-sdk.git Travis/UnityTestProject/Assets/Watson/
#git clone -b feature-97-integrationTesting --single-branch https://github.com/watson-developer-cloud/unity-sdk.git Travis/UnityTestProject/Assets/Watson/
status=$?
if [ $status != 0 ] ; then
	echo "WDC Unity SDK install FAILED! Exited with $status"
	exit 1
fi
echo "WDC Unity SDK install SUCCEEDED! Exited with $status"

echo "Attempting to remove TravisIntegrationTests from Travis directory..."
rm Travis/TravisIntegrationTests.cs
status=$?
if [ $status = 0 ] ; then
	echo "Removing travis build script SUCCEEDED! Exited with $status"
else
	echo "Removing travis build script FAILED! Exited with $status"
	exit 1
fi

echo "Attempting to create Travis/UnityTestProject/Assets/Scripts/Editor/"
mkdir -p Travis/UnityTestProject/Assets/Scripts/Editor/
status=$?
if [ $status != 0 ] ; then
	echo "Creating Travis/UnityTestProject/Assets/Scripts/Editor/ FAILED! EXITED WITH $status"
	exit 1
fi
echo "Creating Travis/UnityTestProject/Assets/Scripts/Editor/ SUCCEEDED! Exited with $status"

echo "Attempting to move integration tests script..."
mv Travis/UnityTestProject/Assets/Watson/Travis/TravisIntegrationTests.cs Travis/UnityTestProject/Assets/Scripts/Editor/TravisIntegrationTests.cs
status=$?
if [ $status = 0 ] ; then
	echo "Moving travis integration tests script SUCCEEDED! Exited with $status"
	exit 0
else
	echo "Moving travis integration tests script FAILED! Exited with $status"
	exit 1
fi
|
<reponame>tdm1223/Algorithm
// BOJ 2204: Dobby's Dyslexia Test
// 2021.06.10
// Topic: strings
#include<iostream>
#include<string>
#include<algorithm>
using namespace std;
/*
 * For each group of n words, print the case-insensitively smallest word
 * (ties keep the first occurrence). A leading 0 terminates the input.
 */
int main()
{
    int n;
    string s;

    // Fix: checking the stream state prevents an infinite loop — the
    // original `while (1) { cin >> n; ... }` spun forever once input was
    // exhausted, because a failed extraction leaves n unchanged.
    while (cin >> n && n != 0)
    {
        string ans = "";
        cin >> ans;
        for (int i = 1; i < n; i++)
        {
            cin >> s;
            // Compare lowercased copies; strict `>` keeps the earlier
            // word when two compare equal case-insensitively.
            string sToLower = s;
            string ansToLower = ans;
            transform(sToLower.begin(), sToLower.end(), sToLower.begin(), ::tolower);
            transform(ansToLower.begin(), ansToLower.end(), ansToLower.begin(), ::tolower);
            if (ansToLower > sToLower)
            {
                ans = s;
            }
        }
        cout << ans << endl;
    }
    return 0;
}
|
#! /bin/sh -e
# This script downgrades MSVC 2008 projects to MSVC 2005 projects, allowing
# people with MSVC 2005 to open them. Otherwise, MSVC 2005 simply refuses to
# open projects created with 2008. We run this as part of our release process.
# If you obtained the code direct from version control and you want to use
# MSVC 2005, you may have to run this manually. (Hint: Use Cygwin or MSYS.)
#
# Fix: "$file" is now quoted so filenames containing whitespace are not
# word-split when passed to sed/myunix2dos.
for file in *.sln; do
	echo "downgrading $file..."
	sed -i -re 's/Format Version 10.00/Format Version 9.00/g;
	            s/Visual Studio 2008/Visual Studio 2005/g;' "$file"
	myunix2dos "$file"
done
for file in *.vcproj; do
	echo "downgrading $file..."
	sed -i -re 's/Version="9.00"/Version="8.00"/g;' "$file"
done
# Yes, really, that's it.
|
package org.rs2server.rs2.domain.service.impl;
import com.google.common.eventbus.EventBus;
import com.google.common.eventbus.SubscriberExceptionContext;
import com.google.common.eventbus.SubscriberExceptionHandler;
import com.google.inject.Inject;
import org.rs2server.rs2.domain.service.api.HookService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.annotation.Nonnull;
/**
 * Guava-EventBus-backed implementation of {@link HookService}. All hook
 * events are dispatched through a single bus instance, and this service
 * installs itself as the bus's subscriber-exception handler so failures
 * in subscribers are logged instead of silently swallowed.
 *
 * @author tommo
 */
public class HookServiceImpl implements HookService, SubscriberExceptionHandler {

	private static final Logger logger = LoggerFactory.getLogger(HookServiceImpl.class);

	/** The bus through which all hook events flow. */
	private final EventBus eventBus = new EventBus(this);

	@Inject
	HookServiceImpl() {
	}

	/** Subscribes the given object's annotated handler methods to the bus. */
	@Override
	public void register(@Nonnull Object subscriber) {
		eventBus.register(subscriber);
	}

	/** Dispatches an event to all currently registered subscribers. */
	@Override
	public void post(@Nonnull Object event) {
		eventBus.post(event);
	}

	/** Removes a previously registered subscriber from the bus. */
	@Override
	public void unregister(@Nonnull Object subscriber) {
		eventBus.unregister(subscriber);
	}

	@Override
	public void handleException(Throwable exception, SubscriberExceptionContext context) {
		// Identify the offending subscriber as Class#method in the log entry.
		final String location = context.getSubscriber().getClass().getSimpleName()
				+ "#" + context.getSubscriberMethod().getName();
		logger.error("Exception occurred in hook service at " + location, exception);
	}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.