text
stringlengths 1
1.05M
|
|---|
sync; dd if=/dev/zero of=/tmp/ramdisk/ bs=1M count=1024; sync
dd: failed to open '/tmp/ramdisk/': Is a directory
ysdede@instance-1:~/jesse-projects/git/jesse_strategies/Optuna/OttBands5minFixedOttOptuna1Test$ sync; dd if=/dev/zero of=/tmp/ramdisk/benchfile bs=1M count=1024; sync
1024+0 records in
1024+0 records out
1073741824 bytes (1.1 GB, 1.0 GiB) copied, 0.360476 s, 3.0 GB/s
ysdede@instance-1:~/jesse-projects/git/jesse_strategies/Optuna/OttBands5minFixedOttOptuna1Test$ dd if=/tmp/ramdisk/benchfile of=/dev/null bs=1M count=1024
1024+0 records in
1024+0 records out
1073741824 bytes (1.1 GB, 1.0 GiB) copied, 0.178526 s, 6.0 GB/s
|
<filename>src/main/java/seoul/democracy/post/service/PostService.java<gh_stars>10-100
package seoul.democracy.post.service;
import com.mysema.query.types.Expression;
import com.mysema.query.types.Predicate;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import seoul.democracy.common.exception.NotFoundException;
import seoul.democracy.post.domain.Post;
import seoul.democracy.post.domain.PostType;
import seoul.democracy.post.dto.PostCreateDto;
import seoul.democracy.post.dto.PostDto;
import seoul.democracy.post.dto.PostUpdateDto;
import seoul.democracy.post.repository.PostRepository;
@Service
@Transactional(readOnly = true)
public class PostService {

    private final PostRepository postRepository;

    @Autowired
    public PostService(PostRepository postRepository) {
        this.postRepository = postRepository;
    }

    /**
     * Returns a page of posts matching the predicate, projected into {@link PostDto}.
     */
    public Page<PostDto> getPosts(Predicate predicate, Pageable pageable, Expression<PostDto> projection) {
        return postRepository.findAll(predicate, pageable, projection);
    }

    /**
     * Returns a single post matching the predicate, projected into {@link PostDto}.
     */
    public PostDto getPost(Predicate predicate, Expression<PostDto> projection) {
        return postRepository.findOne(predicate, projection);
    }

    /**
     * 글 등록 — creates a new post of the given type. Admin only.
     */
    @Transactional
    @PreAuthorize("hasRole('ADMIN')")
    public Post create(PostType type, PostCreateDto createDto) {
        Post newPost = Post.create(type, createDto);
        return postRepository.save(newPost);
    }

    /**
     * 글 수정 — updates an existing post. Admin only.
     *
     * @throws NotFoundException when no post exists for the given id
     */
    @Transactional
    @PreAuthorize("hasRole('ADMIN')")
    public Post update(PostUpdateDto updateDto) {
        Post existing = postRepository.findOne(updateDto.getId());
        if (existing == null) {
            throw new NotFoundException("해당 글을 찾을 수 없습니다.");
        }
        return existing.update(updateDto);
    }

    /**
     * Increments the view counter of the post with the given id.
     */
    @Transactional
    public void increaseViewCount(Long id) {
        postRepository.increaseViewCount(id);
    }
}
|
<reponame>noear/solon_demo<filename>demo03.solon_mvc/src/main/java/webapp/controller/more/CmdService.java
package webapp.controller.more;
/**
 * Demo service contract used by the sample MVC controllers.
 *
 * @author noear 2021/6/28 created
 */
public interface CmdService {
    /**
     * Returns a value derived from the given name; only the signature is
     * visible here — see the implementing class for the actual behavior.
     *
     * @param name the input name
     * @return the computed result
     */
    String name(String name);
}
|
#!/usr/bin/env sh
# This file has been written 2010 by thomas.koch@ymc.ch and is released under
# the following license:
# DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
# Version 2, December 2004
#
# Copyright (C) 2004 Sam Hocevar
# 14 rue de Plaisance, 75014 Paris, France
# Everyone is permitted to copy and distribute verbatim or modified
# copies of this license document, and changing it is allowed as long
# as the name is changed.
#
# DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
# TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
#
# 0. You just DO WHAT THE FUCK YOU WANT TO.
#
# Read more about this license at http://sam.zoy.org/wtfpl/
# You can also use it under any other license that complies to the Debian Free
# Software Guidelines
# This script should be called from a cron job to purge old log and snapshot
# files from zookeeper.
# Number of most-recent snapshots (and their transaction logs) to keep.
KEEPCOUNT=${KEEPCOUNT:-3}
ZKENV=${ZKENV:-"/etc/zookeeper/conf/environment"}

# Source the zookeeper environment; this defines ZOOCFGDIR, CLASSPATH and
# JVMFLAGS used below.
. "$ZKENV"

ZOOCFG=${ZOOCFG:-"$ZOOCFGDIR/zoo.cfg"}

# All path expansions are quoted so directories containing whitespace or
# glob characters do not break the tests (the original left them unquoted).
if [ ! -r "$ZOOCFG" ]
then
    echo "$ZOOCFG is not readable"
    exit 1
fi

# The dataDir/dataLogDir values in zoo.cfg may themselves reference shell
# variables, hence the eval.
eval DATADIR=${DATADIR:-$(grep -e "^dataDir=" "$ZOOCFG" | sed s/.*dataDir.*=//)}
eval DATALOGDIR=${DATALOGDIR:-$(grep -e "^dataLogDir=" "$ZOOCFG" | sed s/.*dataLogDir.*=//)}

if [ ! -w "$DATADIR" ]
then
    echo "DATADIR $DATADIR is not writable"
    exit 1
fi

# dataLogDir is optional in zoo.cfg; fall back to dataDir when it is unset.
if [ -z "$DATALOGDIR" ]
then
    DATALOGDIR=$DATADIR
elif [ ! -w "$DATALOGDIR" ]
then
    echo "DATALOGDIR $DATALOGDIR is not writable"
    exit 1
fi

# JVMFLAGS is intentionally left unquoted: it may hold several flags that
# must undergo word splitting.
java -cp "$CLASSPATH" $JVMFLAGS \
    org.apache.zookeeper.server.PurgeTxnLog "$DATALOGDIR" "$DATADIR" -c "$KEEPCOUNT"
|
#!/bin/bash
# Provisioning script: installs base utilities, pip/kubectl/helm prerequisites,
# then enables root password SSH login for the rest of the bootstrap.
# Everything is appended to /root/install.log.
echo "Started" `date` | tee -a /root/install.log
echo "Checking for Happy Resolver..."
# Block until DNS can resolve the salt repo host (network may come up late).
while ping -c 2 repo.saltstack.com 2>&1 | grep -q "unknown host" ;do echo waiting for network resolution...; done
## fix authorized hosts
##cut -d " " -f 14,15 /root/.ssh/authorized_keys > /root/.ssh/tmp
#mv -f /root/.ssh/tmp /root/.ssh/authorized_keys
(
echo "--> Installing utilities" - `date +%R`
yum install -y bind-utils git ntp sshpass mailx haproxy unzip tmux
# jq is fetched as a static binary (not in the base yum repos).
curl -L -o jq https://github.com/stedolan/jq/releases/download/jq-1.5/jq-linux64
chmod u+x jq
mv jq /usr/bin
systemctl start ntpd
mv /root/haproxy.cfg /etc/haproxy/haproxy.cfg
## downloading and installing pip, kubectl and helm
echo "downloading and installing pip, kubectl and helm..."
curl -o /tmp/get-pip.py https://bootstrap.pypa.io/get-pip.py
python /tmp/get-pip.py
## "uninstalling" some libs that break the pip install otherwise
rm -rf /usr/lib/python2.7/site-packages/requests-2.6.0-py2.7.egg-info
rm -rf /usr/lib64/python2.7/site-packages/PyYAML-3.10-py2.7.egg-info
# NOTE(review): ntpd was already started above — this second start appears
# redundant; confirm before removing.
systemctl start ntpd
echo "Installing utilities <-- done" - `date +%R`
) 2>&1 | tee -a /root/install.log
## Set root password and open up password authentication thru ssh - security hole fixed later in the process
# SECURITY: plaintext root password baked into the image; the comment above
# claims it is remediated later in the pipeline — verify that step exists.
echo `date +%R` " --> Set root password & open up ssh password authentication" 2>&1 | tee -a /root/install.log
(
echo "scality0"
) | passwd --stdin root
sed -i 's/PasswordAuthentication no/PasswordAuthentication yes/' /etc/ssh/sshd_config
systemctl restart sshd
echo `date +%R` " & open up ssh password authentication <-- done"| tee -a /root/install.log
echo "Finished" `date` | tee -a /root/install.log
|
<gh_stars>1-10
# Print the largest n whose triangular number 1 + 2 + ... + n does not exceed s.
target = int(input())
count = 0
total = 0
# Grow the triangular sum one term at a time while the next term still fits.
while total + count + 1 <= target:
    count += 1
    total += count
print(count)
|
<reponame>lgoldstein/communitychest
/*
*
*/
package net.community.chest.jfree.jfreechart.axis.value;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import net.community.chest.util.collection.CollectionsUtils;
import org.jfree.chart.axis.DateTickUnitType;
/**
* <P>Copyright GPLv2</P>
*
* <P>Encapsulates the {@link DateTickUnitType} units as an {@link Enum}</P>
* @author <NAME>.
* @since May 5, 2009 2:54:45 PM
*/
public enum DateTickUnitTypeEnum {
    YEAR(DateTickUnitType.YEAR),
    MONTH(DateTickUnitType.MONTH),
    DAY(DateTickUnitType.DAY),
    HOUR(DateTickUnitType.HOUR),
    MINUTE(DateTickUnitType.MINUTE),
    SECOND(DateTickUnitType.SECOND),
    MSEC(DateTickUnitType.MILLISECOND);

    /** The wrapped JFreeChart unit type. */
    private final DateTickUnitType unitType;

    DateTickUnitTypeEnum (DateTickUnitType u)
    {
        unitType = u;
    }

    public final DateTickUnitType getUnitType ()
    {
        return unitType;
    }

    /** Immutable view over all constants — avoids repeated values() array copies. */
    public static final List<DateTickUnitTypeEnum> VALUES=Collections.unmodifiableList(Arrays.asList(values()));

    public static final DateTickUnitTypeEnum fromString (final String s)
    {
        return CollectionsUtils.fromString(VALUES, s, false);
    }

    /**
     * Resolves the constant wrapping the given unit type.
     *
     * @param u the JFreeChart unit type (may be null)
     * @return the matching constant, or null when u is null or unmatched
     */
    public static final DateTickUnitTypeEnum fromUnitType (final DateTickUnitType u)
    {
        if (u == null)
            return null;

        for (final DateTickUnitTypeEnum candidate : VALUES)
        {
            if ((candidate != null) && u.equals(candidate.getUnitType()))
                return candidate;
        }

        return null;
    }
}
|
impl Surface {
    /// Builds a `Surface` that shares ownership of the wrapped `NSView`
    /// pointer through an `Arc`.
    fn new_surface(nsview: *mut Object, apply_pixel_scale: bool) -> Surface {
        let shared_inner = Arc::new(SurfaceInner { nsview });
        Surface {
            inner: shared_inner,
            apply_pixel_scale,
        }
    }
}
|
import kss
def count_word_occurrences(text, word):
    """Count case-insensitive occurrences of *word* across the sentences of *text*.

    The text is first split into sentences with the Korean Sentence Splitter
    (kss), then the per-sentence counts are summed.
    """
    needle = word.lower()
    return sum(sentence.lower().count(needle)
               for sentence in kss.split_sentences(text))
# Test the function with the given example
# Smoke test: '관대하다' is written twice verbatim in the sample text.
text = '나는 관대하다. 관 씨 집안 3대 독자다. 내 이름은 대하다. 그래서 나는 관대하다'
word = '관대하다'
print(count_word_occurrences(text, word)) # Output: 2
|
import { FilterMultiplePassType } from './filterMultiplePassType.enum';
import { FilterMultiplePassTypeString } from './filterMultiplePassTypeString';
export interface CollectionOption {
  /**
   * Optionally add a blank entry to the beginning of the collection.
   * Useful when we want to return all data by setting an empty filter that might not exist in the original collection.
   */
  addBlankEntry?: boolean;

  /**
   * When the collection is inside an object descendant property,
   * we can optionally pass a dot (.) notation string to pull the collection from an object property.
   * For example if our output data is:
   * myData = { someProperty: { myCollection: [] }, otherProperty: 'something' }
   * We can pass the dot notation string
   * collectionInObjectProperty: 'someProperty.myCollection'
   */
  collectionInObjectProperty?: string;

  /**
   * Defaults to "chain". When using multiple "collectionFilterBy", do we want to "merge" or "chain" the result after each pass?
   * For example if we have 2 filters to pass by, and we start with pass 1 returning 7 items and the last pass returning 5 items:
   * "chain" is the default and will return 5 items, since the result of each pass is used as the input of the next pass;
   * "merge" would return the merge of the 7 items & 5 items (without duplicates); since some items might be the same, the result is anywhere between 5 and 13 items.
   */
  filterResultAfterEachPass?: FilterMultiplePassType | FilterMultiplePassTypeString;

  /** Defaults to empty. When using a label with prefix/suffix, do we want to add a separator between each text (like a white space)? */
  separatorBetweenTextLabels?: string;

  /** Defaults to false. Should the selected value include the prefix/suffix in the output format? */
  includePrefixSuffixToSelectedValues?: boolean;
}
|
<gh_stars>0
/* Copyright (c) 2015, <NAME>. All rights reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>
*/
#ifndef __AVL_NODE__
#define __AVL_NODE__

/*
 * Fully parenthesized so low-precedence argument expressions
 * (e.g. MAX(a & b, c)) expand with the intended meaning; the original
 * ((x < y) ? y : x) mis-parsed such arguments.
 * Note: arguments are still evaluated twice — avoid side effects.
 */
#define MAX(x, y) (((x) < (y)) ? (y) : (x))

/*
 * Implementation of a single node of an AVL tree.<p>
 * Such a node holds a single value and references to both child nodes and
 * to its parent node. Furthermore, such a node has a balance, that depends
 * on the nodes' height in the tree. This balance value is essential for
 * tree rotations.
 *
 * @author <NAME>
 * @version 1.0, 2015-07-29
 * @see AVLTree#rotate(Node, boolean)
 * @see #getBalance()
 */
typedef struct __Node__ {
    /* Single value hold by this node. */
    int value;
    /* Pointer to the left child of this node. */
    struct __Node__ *left;
    /* Pointer to the right child of this node. */
    struct __Node__ *right;
    /* Pointer to the parent node of this node. */
    struct __Node__ *parent;
} Node;

/* For documentation of the below functions please check out 'node.c'. */
Node *newNode();
int getBalance(Node*);
void printNode(Node*);

#endif
|
import Helper from '@ember/component/helper';
export default Helper.extend({
  // Map an index to a CSS state name depending on whether it matches `active`.
  compute([index, active]) {
    if (index === active) {
      return 'active';
    }
    return 'inactive';
  }
});
|
<filename>chromewhip/protocol/layertree.py<gh_stars>10-100
# noinspection PyPep8
# noinspection PyArgumentList
"""
AUTO-GENERATED BY `scripts/generate_protocol.py` using `data/browser_protocol.json`
and `data/js_protocol.json` as inputs! Please do not modify this file.
"""
import logging
from typing import Any, Optional, Union
from chromewhip.helpers import PayloadMixin, BaseEvent, ChromeTypeBase
log = logging.getLogger(__name__)
from chromewhip.protocol import dom as DOM
# LayerId: Unique Layer identifier (plain string alias from the protocol spec).
LayerId = str
# SnapshotId: Unique snapshot identifier (plain string alias from the protocol spec).
SnapshotId = str
# ScrollRect: Rectangle where scrolling happens on the main thread.
class ScrollRect(ChromeTypeBase):
def __init__(self,
rect: Union['DOM.Rect'],
type: Union['str'],
):
self.rect = rect
self.type = type
# StickyPositionConstraint: Sticky position constraints.
class StickyPositionConstraint(ChromeTypeBase):
def __init__(self,
stickyBoxRect: Union['DOM.Rect'],
containingBlockRect: Union['DOM.Rect'],
nearestLayerShiftingStickyBox: Optional['LayerId'] = None,
nearestLayerShiftingContainingBlock: Optional['LayerId'] = None,
):
self.stickyBoxRect = stickyBoxRect
self.containingBlockRect = containingBlockRect
self.nearestLayerShiftingStickyBox = nearestLayerShiftingStickyBox
self.nearestLayerShiftingContainingBlock = nearestLayerShiftingContainingBlock
# PictureTile: Serialized fragment of layer picture along with its offset within the layer.
class PictureTile(ChromeTypeBase):
def __init__(self,
x: Union['float'],
y: Union['float'],
picture: Union['str'],
):
self.x = x
self.y = y
self.picture = picture
# Layer: Information about a compositing layer.
class Layer(ChromeTypeBase):
def __init__(self,
layerId: Union['LayerId'],
offsetX: Union['float'],
offsetY: Union['float'],
width: Union['float'],
height: Union['float'],
paintCount: Union['int'],
drawsContent: Union['bool'],
parentLayerId: Optional['LayerId'] = None,
backendNodeId: Optional['DOM.BackendNodeId'] = None,
transform: Optional['[]'] = None,
anchorX: Optional['float'] = None,
anchorY: Optional['float'] = None,
anchorZ: Optional['float'] = None,
invisible: Optional['bool'] = None,
scrollRects: Optional['[ScrollRect]'] = None,
stickyPositionConstraint: Optional['StickyPositionConstraint'] = None,
):
self.layerId = layerId
self.parentLayerId = parentLayerId
self.backendNodeId = backendNodeId
self.offsetX = offsetX
self.offsetY = offsetY
self.width = width
self.height = height
self.transform = transform
self.anchorX = anchorX
self.anchorY = anchorY
self.anchorZ = anchorZ
self.paintCount = paintCount
self.drawsContent = drawsContent
self.invisible = invisible
self.scrollRects = scrollRects
self.stickyPositionConstraint = stickyPositionConstraint
# PaintProfile: Array of timings, one per paint step (a list of floats).
PaintProfile = [float]
class LayerTree(PayloadMixin):
    """Auto-generated command builders for the DevTools LayerTree domain.

    Each classmethod returns a tuple of (send payload, response conversion
    spec or None) consumed by the chromewhip protocol machinery — the
    methods build messages; they do not perform any I/O themselves.
    """
    @classmethod
    def compositingReasons(cls,
                           layerId: Union['LayerId'],
                           ):
        """Provides the reasons why the given layer was composited.
        :param layerId: The id of the layer for which we want to get the reasons it was composited.
        :type layerId: LayerId
        """
        return (
            cls.build_send_payload("compositingReasons", {
                "layerId": layerId,
            }),
            cls.convert_payload({
                "compositingReasons": {
                    "class": [],
                    "optional": False
                },
            })
        )

    @classmethod
    def disable(cls):
        """Disables compositing tree inspection.
        """
        return (
            cls.build_send_payload("disable", {
            }),
            None
        )

    @classmethod
    def enable(cls):
        """Enables compositing tree inspection.
        """
        return (
            cls.build_send_payload("enable", {
            }),
            None
        )

    @classmethod
    def loadSnapshot(cls,
                     tiles: Union['[PictureTile]'],
                     ):
        """Returns the snapshot identifier.
        :param tiles: An array of tiles composing the snapshot.
        :type tiles: [PictureTile]
        """
        return (
            cls.build_send_payload("loadSnapshot", {
                "tiles": tiles,
            }),
            cls.convert_payload({
                "snapshotId": {
                    "class": SnapshotId,
                    "optional": False
                },
            })
        )

    @classmethod
    def makeSnapshot(cls,
                     layerId: Union['LayerId'],
                     ):
        """Returns the layer snapshot identifier.
        :param layerId: The id of the layer.
        :type layerId: LayerId
        """
        return (
            cls.build_send_payload("makeSnapshot", {
                "layerId": layerId,
            }),
            cls.convert_payload({
                "snapshotId": {
                    "class": SnapshotId,
                    "optional": False
                },
            })
        )

    @classmethod
    def profileSnapshot(cls,
                        snapshotId: Union['SnapshotId'],
                        minRepeatCount: Optional['int'] = None,
                        minDuration: Optional['float'] = None,
                        clipRect: Optional['DOM.Rect'] = None,
                        ):
        """Replays the snapshot and returns its paint timings ("timings", a list of PaintProfile).
        :param snapshotId: The id of the layer snapshot.
        :type snapshotId: SnapshotId
        :param minRepeatCount: The maximum number of times to replay the snapshot (1, if not specified).
        :type minRepeatCount: int
        :param minDuration: The minimum duration (in seconds) to replay the snapshot.
        :type minDuration: float
        :param clipRect: The clip rectangle to apply when replaying the snapshot.
        :type clipRect: DOM.Rect
        """
        return (
            cls.build_send_payload("profileSnapshot", {
                "snapshotId": snapshotId,
                "minRepeatCount": minRepeatCount,
                "minDuration": minDuration,
                "clipRect": clipRect,
            }),
            cls.convert_payload({
                "timings": {
                    "class": [PaintProfile],
                    "optional": False
                },
            })
        )

    @classmethod
    def releaseSnapshot(cls,
                        snapshotId: Union['SnapshotId'],
                        ):
        """Releases layer snapshot captured by the back-end.
        :param snapshotId: The id of the layer snapshot.
        :type snapshotId: SnapshotId
        """
        return (
            cls.build_send_payload("releaseSnapshot", {
                "snapshotId": snapshotId,
            }),
            None
        )

    @classmethod
    def replaySnapshot(cls,
                       snapshotId: Union['SnapshotId'],
                       fromStep: Optional['int'] = None,
                       toStep: Optional['int'] = None,
                       scale: Optional['float'] = None,
                       ):
        """Replays the layer snapshot and returns the resulting bitmap.
        :param snapshotId: The id of the layer snapshot.
        :type snapshotId: SnapshotId
        :param fromStep: The first step to replay from (replay from the very start if not specified).
        :type fromStep: int
        :param toStep: The last step to replay to (replay till the end if not specified).
        :type toStep: int
        :param scale: The scale to apply while replaying (defaults to 1).
        :type scale: float
        """
        return (
            cls.build_send_payload("replaySnapshot", {
                "snapshotId": snapshotId,
                "fromStep": fromStep,
                "toStep": toStep,
                "scale": scale,
            }),
            cls.convert_payload({
                "dataURL": {
                    "class": str,
                    "optional": False
                },
            })
        )

    @classmethod
    def snapshotCommandLog(cls,
                           snapshotId: Union['SnapshotId'],
                           ):
        """Replays the layer snapshot and returns canvas log.
        :param snapshotId: The id of the layer snapshot.
        :type snapshotId: SnapshotId
        """
        return (
            cls.build_send_payload("snapshotCommandLog", {
                "snapshotId": snapshotId,
            }),
            cls.convert_payload({
                "commandLog": {
                    "class": [],
                    "optional": False
                },
            })
        )
class LayerPaintedEvent(BaseEvent):
    js_name = 'Layertree.layerPainted'
    hashable = ['layerId']
    is_hashable = True

    def __init__(self,
                 layerId: Union['LayerId', dict],
                 clip: Union['DOM.Rect', dict],
                 ):
        # NOTE(review): LayerId is an alias of str, so LayerId(**layerId)
        # would raise TypeError if layerId ever arrived as a dict; in
        # practice the protocol sends it as a plain string. Auto-generated —
        # fix in the generator if this ever triggers.
        if isinstance(layerId, dict):
            layerId = LayerId(**layerId)
        self.layerId = layerId
        if isinstance(clip, dict):
            clip = DOM.Rect(**clip)
        self.clip = clip

    @classmethod
    def build_hash(cls, layerId):
        # Builds the stable key "<js_name>:layerId=<value>" used to match
        # incoming events; locals() captures the id parameters by name.
        kwargs = locals()
        kwargs.pop('cls')
        serialized_id_params = ','.join(['='.join([p, str(v)]) for p, v in kwargs.items()])
        h = '{}:{}'.format(cls.js_name, serialized_id_params)
        log.debug('generated hash = %s' % h)
        return h
class LayerTreeDidChangeEvent(BaseEvent):
    """Event fired when the compositing layer tree changes.

    `layers` arrives from the wire as an optional list of per-Layer dicts.
    """
    js_name = 'Layertree.layerTreeDidChange'
    hashable = []
    is_hashable = False

    def __init__(self,
                 layers: Union['[Layer]', dict, None] = None,
                 ):
        # Bug fix: the generated code did `layers = [Layer](**layers)` under
        # `isinstance(layers, dict)` — a list literal is not callable, so that
        # branch always raised TypeError. Convert each dict element of the
        # incoming list into a Layer instead; anything else is kept as-is.
        if isinstance(layers, list):
            layers = [Layer(**layer) if isinstance(layer, dict) else layer
                      for layer in layers]
        self.layers = layers

    @classmethod
    def build_hash(cls):
        # This event carries no identifying parameters, so it cannot be hashed.
        raise ValueError('Unable to build hash for non-hashable type')
|
<filename>src/wanderer.h
#ifndef __WANDERER_AGENT__H
#define __WANDERER_AGENT__H
#include <string>
#include <math.h>
#include "enviro.h"
#include <string.h>
namespace
{
using namespace enviro;
class Forward : public State, public AgentInterface
{
    public:
    // Record baseline range readings when the state is entered.
    void entry(const Event &e) {
        std::cout << "Forward : \n ";
        initialFront = sensor_value(0);
        initialSide = sensor_value(1);
    }
    // Drive straight ahead and emit a steering event chosen from the front
    // (index 0) and side (index 1) range sensors; readings below 11.0 are
    // treated as "wall nearby". Sensor values are sampled once per tick.
    void during()
    {
        track_velocity(1, 0);
        const double front = sensor_value(0);
        const double side = sensor_value(1);
        if (front > 11.0 && side > 11.0) {
            emit(Event("slighty"));
        }
        if (front < 11.0 && side > 11.0) {
            emit(Event("hardy"));
        }
        if (front < 11.0 && side < 11.0) {
            emit(Event("corny"));
        }
        if (front > 11.0 && side < 11.0) {
            emit(Event("lslighty"));
        }
    }
    void exit(const Event &e) {};
    double initialFront;
    double initialSide;
    double initialBack;
    void set_tick_name(std::string s) { tick_name = s; }
    std::string tick_name;
};
class Hardleft : public State, public AgentInterface
{
    public:
    // Record baseline range readings when the state is entered.
    void entry(const Event &e) {
        std::cout << "Hardleft : \n ";
        initialFront = sensor_value(0);
        initialSide = sensor_value(1);
    }
    // Rotate in place (negative angular velocity) until the side sensor
    // drops below the 11.0 threshold, then resume going straight.
    void during()
    {
        std::cout << "Side Sensor: " << sensor_value(1) << "\n";
        track_velocity(0, -0.5);
        if (sensor_value(1) < 11.0) {
            emit(Event("straight"));
        }
    }
    void exit(const Event &e) {}
    double rate;
    double initialFront;
    double initialSide;
    double initialBack;
    void set_tick_name(std::string s) { tick_name = s; }
    std::string tick_name;
};
class Corner : public State, public AgentInterface
{
    public:
    // Record baseline range readings when the state is entered.
    void entry(const Event &e) {
        std::cout << "Corner: \n ";
        initialFront = sensor_value(0);
        initialSide = sensor_value(1);
    }
    // Turn in place until the side sensor reads below the 11.0 threshold,
    // then resume going straight.
    void during()
    {
        track_velocity(0, -.5);
        if (sensor_value(1) < 11.0) {
            emit(Event("straight"));
        }
    }
    void exit(const Event &e) {}
    double rate;
    double initialFront;
    double initialSide;
    double initialBack;
    void set_tick_name(std::string s) { tick_name = s; }
    std::string tick_name;
};
class SlightRight : public State, public AgentInterface
{
    public:
    // Record baseline range readings when the state is entered.
    void entry(const Event &e) {
        std::cout << "Slight Right : \n ";
        initialFront = sensor_value(0);
        initialSide = sensor_value(1);
    }
    // Veer gently right; go back to straight as soon as either sensor
    // sees something closer than the 11.0 threshold.
    void during()
    {
        track_velocity(0.5, 0.15);
        if (sensor_value(0) < 11.0 || sensor_value(1) < 11.0) {
            emit(Event("straight"));
        }
    }
    void exit(const Event &e) {}
    double rate;
    double initialFront;
    double initialSide;
    double initialBack;
    void set_tick_name(std::string s) { tick_name = s; }
    std::string tick_name;
};
class SlightLeft : public State, public AgentInterface
{
    public:
    // Record baseline range readings when the state is entered.
    void entry(const Event &e) {
        std::cout << "Slight Left : \n ";
        initialFront = sensor_value(0);
        initialSide = sensor_value(1);
    }
    // Veer gently left; go back to straight when the front sensor closes in
    // or the side reading approaches its entry baseline (90% of it).
    void during()
    {
        track_velocity(0.5, -0.2);
        std::cout << "Side Sensor: " << sensor_value(1) << "\n";
        if (sensor_value(0) < 11.0 || sensor_value(1) > initialSide * 0.9) {
            emit(Event("straight"));
        }
    }
    void exit(const Event &e) {}
    double rate;
    double initialFront;
    double initialSide;
    double initialBack;
    void set_tick_name(std::string s) { tick_name = s; }
    std::string tick_name;
};
class FindWall : public State, public AgentInterface
{
    public:
    // Record baseline range readings when the state is entered.
    void entry(const Event &e) {
        std::cout << "Find Wall : \n ";
        initialFront = sensor_value(0);
        initialSide = sensor_value(1);
    }
    // Drive straight until the front sensor first detects a wall (below the
    // 11.0 threshold), then hand over to the turning state.
    void during()
    {
        track_velocity(1, 0);
        std::cout << "Front Sensor: " << sensor_value(0) << "\n";
        if (sensor_value(0) < 11.0) {
            emit(Event("rotate"));
        }
    }
    void exit(const Event &e) {}
    double rate;
    double initialFront;
    double initialSide;
    double initialBack;
    void set_tick_name(std::string s) { tick_name = s; }
    std::string tick_name;
};
// State machine wiring for the wall-following wanderer.
// Transition events (emitted by the states above):
//   "rotate"   findwall -> corner        (first wall contact)
//   "slighty"  forward  -> slightright   "lslighty" forward -> slightleft
//   "hardy"    forward  -> hardleft      "corny"    forward -> corner
//   "straight" any steering state -> forward
class WandererController : public StateMachine, public AgentInterface
{
    public:
    WandererController() : StateMachine()
    {
        set_initial(findwall);
        tick_name = "tick_" + std::to_string(rand() % 1000); // use an agent specific generated
                                                             // event name in case there are
                                                             // multiple instances of this class
        add_transition("hardy", forward, hardleft);
        add_transition("corny", forward, corner);
        add_transition("slighty", forward, slightright);
        add_transition("lslighty", forward, slightleft);
        add_transition("rotate",findwall,corner);
        add_transition("straight", hardleft, forward);
        add_transition("straight", corner, forward);
        add_transition("straight", slightright, forward);
        add_transition("straight", slightleft, forward);
    }
    //void update()
    //{
    // if (rand() % 100 <= 5)
    // {
    // emit(Event("tick"));
    // }
    // StateMachine::update();
    //}
    // The state instances driven by the transitions above.
    Forward forward;
    Hardleft hardleft;
    Corner corner;
    SlightRight slightright;
    SlightLeft slightleft;
    FindWall findwall;
    //DoNot donot;
    std::string tick_name;
};
// Agent wrapper that attaches the wanderer state machine to an enviro agent.
class Wanderer : public Agent
{
    public:
    Wanderer(json spec, World &world) : Agent(spec, world)
    {
        add_process(wc);
    }
    WandererController wc;
};
DECLARE_INTERFACE(Wanderer);
}
// Rounds `var` to two decimal places.
//
// Bug fix: the original cast `(int)(var * 100 + 0.5)` truncates toward zero,
// so negative inputs were mis-rounded (e.g. -1.006 became -1.00 instead of
// -1.01); floor() keeps the +0.5 trick correct for both signs.
//
// NOTE(review): this definition shadows ::round declared in <math.h>;
// consider renaming it to avoid colliding with the C library function.
double round(double var)
{
    double scaled = floor(var * 100 + 0.5);
    return scaled / 100;
}
#endif
|
/**
* RELOAD TOOLS
*
* Copyright (c) 2003 <NAME>, <NAME>, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Project Management Contact:
*
* <NAME>
* Bolton Institute of Higher Education
* Deane Road
* Bolton BL3 5AB
* UK
*
* e-mail: <EMAIL>
*
*
* Technical Contact:
*
* <NAME>
* e-mail: <EMAIL>
*
* Web: http://www.reload.ac.uk
*
*/
package org.olat.modules.scorm.server.servermodels;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Iterator;
import org.jdom2.Document;
import org.jdom2.Element;
import org.jdom2.JDOMException;
import org.jdom2.Namespace;
import org.jdom2.input.SAXBuilder;
import org.jdom2.output.Format;
import org.jdom2.output.XMLOutputter;
/**
* Some useful XML Utilities that leverage the JDOM Package<br>
*
* @author <NAME>
* @version $Id: XMLUtils.java,v 1.2 2004/12/09 10:36:56 phillipus Exp $
*/
public final class XMLUtils {

    /**
     * The XSI Namespace
     */
    public static Namespace XSI_Namespace = Namespace.getNamespace("xsi", "http://www.w3.org/2001/XMLSchema-instance");

    /**
     * The Old XSI Namespace
     */
    public static Namespace XSI_NamespaceOLD = Namespace.getNamespace("xsi", "http://www.w3.org/2000/10/XMLSchema-instance");

    /**
     * The schemaLocation String
     */
    public static String XSI_SchemaLocation = "schemaLocation";

    /** Utility class — not instantiable. */
    private XMLUtils() {
    }

    /**
     * Writes a JDOM Document to file.
     *
     * @param doc The JDOM Document to write
     * @param file The file to write to
     * @throws IOException if the file cannot be written
     */
    public static void write2XMLFile(Document doc, File file) throws IOException {
        // This gets rid of junk characters
        Format format = Format.getCompactFormat();
        format.setIndent(" ");
        XMLOutputter outputter = new XMLOutputter(format);
        // Create parent folder if it doesn't exist
        File parent = file.getParentFile();
        if (parent != null) {
            parent.mkdirs();
        }
        // Bug fix: try-with-resources — the original leaked the stream when
        // outputter.output(...) threw before out.close() was reached.
        try (FileOutputStream out = new FileOutputStream(file)) {
            outputter.output(doc, out);
        }
    }

    /**
     * Reads and returns a JDOM Document from file without Schema Validation.
     *
     * @param file The XML File
     * @return The JDOM Document
     * @throws FileNotFoundException if the file does not exist
     * @throws JDOMException on malformed XML
     * @throws IOException on read errors
     */
    public static Document readXMLFile(File file) throws IOException, JDOMException {
        SAXBuilder builder = new SAXBuilder();
        builder.setExpandEntities(false);
        // An explicit stream (rather than the File overload) allows UNC mapped
        // locations to load. Bug fix: the original never closed this stream.
        try (FileInputStream in = new FileInputStream(file)) {
            return builder.build(in);
        }
    }

    /**
     * @return The root Namespace in the Document or null if not found
     */
    public static Namespace getDocumentNamespace(Document doc) {
        Namespace ns = null;
        if (doc.hasRootElement()) {
            ns = doc.getRootElement().getNamespace();
        }
        return ns;
    }

    /**
     * Hunt for a Namespace in the Document searching all additional Namespaces and
     * Elements in case the Namespace is declared "in-line" at the Element level.
     *
     * @param doc the document to search
     * @param ns the namespace to look for
     * @return true if found
     */
    public static boolean containsNamespace(Document doc, Namespace ns) {
        return containsNamespace(doc.getRootElement(), ns);
    }

    /**
     * Hunt for a Namespace in the Element searching all sub-Elements in case the Namespace
     * is declared "in-line" at the Element level.
     *
     * @param element the element to search from
     * @param ns the namespace to look for
     * @return true if found
     */
    private static boolean containsNamespace(Element element, Namespace ns) {
        // The element's own namespace?
        if (ns.equals(element.getNamespace())) {
            return true;
        }
        // Declared as an additional namespace on this element?
        for (Namespace candidate : element.getAdditionalNamespaces()) {
            if (candidate.equals(ns)) {
                return true;
            }
        }
        // Recurse into children for "in-line" declarations.
        for (Element child : element.getChildren()) {
            if (containsNamespace(child, ns)) {
                return true;
            }
        }
        return false;
    }
}
|
/*
* MIT License
*
* Copyright (c) 2021 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package net.jamsimulator.jams.gui.util;
import javafx.scene.control.ScrollPane;
import javafx.scene.control.skin.ScrollPaneSkin;
import javafx.scene.input.ScrollEvent;
import javafx.scene.layout.Region;
import net.jamsimulator.jams.utils.NumericUtils;
/**
* This is the skin that makes a {@link PixelScrollPaneSkin} to behave properly.
*
* @see PixelScrollPane
*/
public class PixelScrollPaneSkin extends ScrollPaneSkin {

    /**
     * The default scroll increment, in pixels.
     */
    public static final double DEFAULT_INCREMENT = 30;

    private double increment;

    /**
     * Creates the skin with the default pixel increment.
     *
     * @param scrollPane the {@link ScrollPane} to handle.
     */
    public PixelScrollPaneSkin(final ScrollPane scrollPane) {
        this(scrollPane, DEFAULT_INCREMENT);
    }

    /**
     * Creates the skin.
     *
     * @param scrollPane the {@link ScrollPane} to handle.
     * @param increment  the amount of pixels to scroll each time.
     */
    public PixelScrollPaneSkin(final ScrollPane scrollPane, double increment) {
        super(scrollPane);
        this.increment = increment;
        interceptScrollEvents();
    }

    /**
     * Returns the amount of pixels to scroll each time.
     *
     * @return the amount of pixels.
     */
    public double getIncrement() {
        return increment;
    }

    /**
     * Sets the amount of pixels to scroll each time.
     *
     * @param increment the amount of pixels.
     */
    public void setIncrement(double increment) {
        this.increment = increment;
    }

    /**
     * Converts the pixel increment into the vertical scroll bar's value range.
     *
     * @return the increment.
     */
    public double getRelativeVerticalIncrement() {
        ScrollPane scrollPane = (ScrollPane) getNode();
        Region scrollable = resolveScrollRegion();
        if (scrollable == null) {
            return 0;
        }
        return increment / (scrollable.getHeight() - scrollPane.getViewportBounds().getHeight());
    }

    /**
     * Converts the pixel increment into the horizontal scroll bar's value range.
     *
     * @return the increment.
     */
    public double getRelativeHorizontalIncrement() {
        ScrollPane scrollPane = (ScrollPane) getNode();
        Region scrollable = resolveScrollRegion();
        if (scrollable == null) {
            return 0;
        }
        return increment / (scrollable.getWidth() - scrollPane.getViewportBounds().getWidth());
    }

    /**
     * Resolves the {@link Region} whose size drives the relative increments:
     * the pane's content itself, or the node wrapped by a {@link ScalableNode}.
     */
    private Region resolveScrollRegion() {
        ScrollPane scrollPane = (ScrollPane) getNode();
        Object content = scrollPane.getContent();
        if (content instanceof Region) {
            return (Region) content;
        }
        if (content instanceof ScalableNode) {
            Object wrapped = ((ScalableNode) content).getNode();
            if (wrapped instanceof Region) {
                return (Region) wrapped;
            }
        }
        return null;
    }

    /**
     * Replaces the default scroll handling with a pixel-based one: every
     * wheel event is consumed and translated into a relative bar movement.
     */
    private void interceptScrollEvents() {
        getSkinnable().addEventFilter(ScrollEvent.SCROLL, event -> {
            double deltaX = event.getDeltaX();
            if (deltaX < 0) {
                scrollHorizontallyBy(getRelativeHorizontalIncrement());
            } else if (deltaX > 0) {
                scrollHorizontallyBy(-getRelativeHorizontalIncrement());
            }
            double deltaY = event.getDeltaY();
            if (deltaY < 0) {
                scrollVerticallyBy(getRelativeVerticalIncrement());
            } else if (deltaY > 0) {
                scrollVerticallyBy(-getRelativeVerticalIncrement());
            }
            event.consume();
        });
    }

    // Moves the vertical bar by the given (signed) relative amount, clamped to its range.
    private void scrollVerticallyBy(double relativeIncrement) {
        var bar = getVerticalScrollBar();
        bar.setValue(NumericUtils.clamp(bar.getMin(), bar.getValue() + relativeIncrement, bar.getMax()));
    }

    // Moves the horizontal bar by the given (signed) relative amount, clamped to its range.
    private void scrollHorizontallyBy(double relativeIncrement) {
        var bar = getHorizontalScrollBar();
        bar.setValue(NumericUtils.clamp(bar.getMin(), bar.getValue() + relativeIncrement, bar.getMax()));
    }
}
|
// Package firesearch is a Go client for Firesearch.
package firesearch
|
<gh_stars>0
import axios from "axios";
// Base URL for all freelancer API calls (trailing slash expected in the env var).
const BASE = process.env.REACT_APP_BASEURL;

// Builds a per-request axios config with the shared retry policy applied.
// A fresh object is returned on every call so no request can mutate a shared
// config, and extra options (headers, params) can be merged in.
const withRetry = (extra = {}) => ({
  ...extra,
  ["axios-retry"]: {
    retries: 5,
  },
});

// Registers a new freelancer account.
export const freelancerSignUp = (data) =>
  axios.post(`${BASE}api/freelancer/Post`, data, withRetry());

// Creates a portfolio entry for a freelancer.
export const freelancerProjectPortfolio = (data) =>
  axios.post(`${BASE}api/FreelancerPortfolio/Post`, data, withRetry());

// Updates an existing portfolio entry.
export const freelancerProjectPortfolioUpdate = (data) =>
  axios.put(`${BASE}api/FreelancerPortfolio/Put`, data, withRetry());

// Deletes a portfolio entry (soft delete via PUT on the backend).
export const freelancerProjectPortfolioDelete = (data) =>
  axios.put(`${BASE}api/FreelancerPortfolio/Delete`, data, withRetry());

// Lists the first page (5 items) of a freelancer's portfolio entries.
export const freelancerProjectPortfoliosList = (freelanceId) =>
  axios.get(
    `${BASE}api/FreelancerPortfolio/Search?limit=5&page=1&FreelancerId=${freelanceId}`,
    withRetry()
  );

// Updates the freelancer's personal details.
export const freelancerPersonalDetailsUpdate = (data) =>
  axios.put(`${BASE}api/freelancer/PersonalDetail`, data, withRetry());

// Fetches a freelancer by id.
export const freelancerGetById = (id) =>
  axios.get(
    `${BASE}api/freelancer/GetById?Id=${id}`,
    withRetry({
      headers: {
        ["Content-Type"]: "application/json",
      },
    })
  );

// Updates the freelancer's main professional details.
// NOTE(review): exported name keeps the historical "freelacer" misspelling —
// renaming it would break existing importers.
export const freelacerProfessionalDetailsUpdate = (data) =>
  axios.put(`${BASE}api/freelancer/UpdateMainProfessionalDetail`, data, withRetry());

// Replaces the freelancer's skill set (bulk endpoint).
export const freelancerSkillUpdate = (data) =>
  axios.post(`${BASE}api/FreelancerSkill/MultiPost`, data, withRetry());

// Deletes a skill (soft delete via PUT on the backend).
export const freelancerSkillDelete = (data) =>
  axios.put(`${BASE}api/FreelancerSkill/Delete`, data, withRetry());

// Adds a qualification.
export const freelancerQualificationUpdate = (data) =>
  axios.post(`${BASE}api/FreelancerQualification/Post`, data, withRetry());

// Edits an existing qualification.
export const freelancerQualificationEdit = (data) =>
  axios.put(`${BASE}api/FreelancerQualification/Put`, data, withRetry());

// Deletes a qualification.
export const freelancerQualificationDelete = (data) =>
  axios.put(`${BASE}api/FreelancerQualification/Delete`, data, withRetry());

// Adds a certificate.
export const freelancerCertificateUpdate = (data) =>
  axios.post(`${BASE}api/FreelancerCertificate/Post`, data, withRetry());

// Edits an existing certificate.
export const freelancerCertificateEdit = (data) =>
  axios.put(`${BASE}api/FreelancerCertificate/Put`, data, withRetry());

// Deletes a certificate.
export const freelancerCertificateDelete = (data) =>
  axios.put(`${BASE}api/FreelancerCertificate/Delete`, data, withRetry());

// Adds a work-experience entry.
export const freelancerExperienceUpdate = (data) =>
  axios.post(`${BASE}api/FreelancerExperience/Post`, data, withRetry());

// Edits a work-experience entry.
export const freelancerExperienceEdit = (data) =>
  axios.put(`${BASE}api/FreelancerExperience/Put`, data, withRetry());

// Deletes a work-experience entry.
export const freelancerExperienceDelete = (data) =>
  axios.put(`${BASE}api/FreelancerExperience/Delete`, data, withRetry());

/**
 * Searches projects with optional filters; only provided (truthy) filters are
 * sent as query params.
 *
 * @param skill array of skill ids; sent comma-joined as `skillId`.
 */
export const freelancerProjects = (
  page,
  limit,
  title,
  skill,
  jobLocation,
  jobLocationLat,
  jobLocationLng,
  dateOfPosting,
  currentDate,
  selectedRadius
) => {
  const params = {};
  if (page) params.page = page;
  if (limit) params.limit = limit;
  if (title) params.title = title;
  if (dateOfPosting) params.dateOfPosting = dateOfPosting;
  if (skill.length > 0) params.skillId = skill.toString();
  if (jobLocation) params.location = jobLocation;
  if (currentDate) params.startDate = currentDate;
  if (selectedRadius) params.radius = selectedRadius;
  if (jobLocationLat) params.latitude = jobLocationLat;
  if (jobLocationLng) params.longitude = jobLocationLng;
  return axios.get(`${BASE}api/project/Search`, withRetry({ params }));
};

// Lightweight project search used on the home page (page/limit/title only).
export const freelancerProjectsForHomePage = (page, limit, title) => {
  const params = {};
  if (page) params.page = page;
  if (limit) params.limit = limit;
  if (title) params.title = title;
  return axios.get(`${BASE}api/project/Search`, withRetry({ params }));
};

// Paged list of projects the freelancer has marked as interesting.
export const getAllInterestedProjectByFreelancerId = (id, page, limit) => {
  const params = {};
  if (id) params.Id = id;
  if (page) params.page = page;
  if (limit) params.limit = limit;
  return axios.get(
    `${BASE}api/ProjectDetail/GetAllInterestedProjectByFreelancerId`,
    withRetry({ params })
  );
};

// Paged list of projects the freelancer has applied to.
export const getAllAppliedProjectByFreelancerId = (id, page, limit) => {
  const params = {};
  if (id) params.Id = id;
  if (page) params.page = page;
  if (limit) params.limit = limit;
  return axios.get(
    `${BASE}api/ProjectDetail/GetAllIsAppliedProjectByFreelancerId`,
    withRetry({ params })
  );
};
|
<filename>src/components/video/Player/index.js
/**
* @module React
*/
import React, { Component } from 'react'
/**
* @module PropTypes
*/
import PropTypes from 'prop-types'
/**
* @module classNames
*/
import classNames from 'utils/classnames'
/**
* @module PlayButton
*/
import PlayButton from 'components/video/Button'
/**
* @module isInViewport
*/
import { isInViewport } from 'utils/imageutils'
/**
* @module debounce
*/
import debounce from 'utils/debounce'
import ReactJWPlayer from 'react-jw-player'
import { PLAYERSCRIPT } from 'data/consts'
/**
* @class
* @name Player
* @extends {Component}
*/
class Player extends Component {
constructor (props) {
super(props)
// Initial state
this.state = {
showPlayButton: true
}
// Local variables
this.videoRef = null
this.mounted = false
// Bind custom fns
this.toggleVideo = this.toggleVideo.bind(this)
this.playVideo = this.playVideo.bind(this)
this.pauseVideo = this.pauseVideo.bind(this)
this.bindScrollEvent = this.bindScrollEvent.bind(this)
this.unBindScrollEvent = this.unBindScrollEvent.bind(this)
this.checkViewport = this.checkViewport.bind(this)
this.debouncedResize = debounce(this.checkViewport)
}
componentDidMount () {
this.mounted = true
this.checkViewport()
this.bindScrollEvent()
}
componentWillUnmount () {
this.mounted = false
// Unbind scroll event
this.unBindScrollEvent()
this.debouncedResize = null
}
componentWillReceiveProps () {
this.checkViewport()
}
/**
* @name bindScrollEvent
*/
bindScrollEvent () {
window.addEventListener('scroll', this.debouncedResize, false)
}
/**
* @name unBindScrollEvent
*/
unBindScrollEvent () {
window.removeEventListener('scroll', this.debouncedResize, false)
}
/**
* @name checkViewport
*/
checkViewport () {
if (!this.videoRef || !this.mounted) {
return false
}
if (!isInViewport(this.videoRef)) {
this.pauseVideo()
} else
if (this.props.autoPlay) {
this.playVideo()
}
return null
}
/**
* toggleVideo
* @param {Object} event
* @description Will toggle the video between playing and pausing
* @return {Void}
*/
toggleVideo (event) {
if (event) {
event.stopPropagation()
}
if (this.state.showPlayButton) {
this.playVideo()
} else {
this.pauseVideo()
}
}
/**
* playVideo
* @description Will play the video
*/
playVideo () {
this.videoRef.play()
this.setState({
showPlayButton: false
})
}
/**
* pauseVideo
* @description Will play the video
*/
pauseVideo () {
this.videoRef.pause()
this.setState({
showPlayButton: true
})
}
handleChildClick (e) {
e.stopPropagation()
}
render () {
const {
children,
className,
modifier,
loop,
muted,
playsInline,
src,
poster,
preload,
playerId
} = this.props
const {
showPlayButton
} = this.state
// Constructs classnames from the base name
const modifiedClassNames = classNames('video', className, modifier)
return (
<div className={modifiedClassNames} onClick={this.handleChildClick}>
<ReactJWPlayer
playerId='unique_id'
playerScript={PLAYERSCRIPT}
file={src}
/>
</div>
)
}
}
/**
* defaultProps
* @type {Object}
*/
Player.defaultProps = {
className: '',
modifier: '',
loop: true,
preload: 'none',
autoPlay: false,
playsInline: true,
poster: ''
}
/**
* propTypes
* @type {Object}
*/
Player.propTypes = {
className: PropTypes.oneOfType([
PropTypes.string,
PropTypes.arrayOf(PropTypes.string)
]),
modifier: PropTypes.oneOfType([
PropTypes.string,
PropTypes.arrayOf(PropTypes.string)
]),
loop: PropTypes.bool,
muted: PropTypes.bool,
autoPlay: PropTypes.bool,
playsInline: PropTypes.bool,
src: PropTypes.string,
poster: PropTypes.string,
preload: PropTypes.oneOf([
'auto', 'metadata', 'none'
])
}
/**
* @module Player
*/
export default Player
|
<filename>src/java/grails/plugin/jesque/JesqueJobArtefactHandler.java
package grails.plugin.jesque;
import org.codehaus.groovy.grails.commons.ArtefactHandlerAdapter;
import java.lang.reflect.Method;
/**
 * Grails artefact handler that recognises Jesque job classes: classes whose
 * name ends with the job suffix and which declare at least one
 * {@code perform} method (any signature).
 */
public class JesqueJobArtefactHandler extends ArtefactHandlerAdapter {

    public static final String TYPE = "JesqueJob";
    public static final String PERFORM = "perform";

    public JesqueJobArtefactHandler() {
        super(TYPE, GrailsJesqueJobClass.class, DefaultGrailsJesqueJobClass.class, null);
    }

    public boolean isArtefactClass(Class clazz) {
        // Guard: null classes and classes without the job suffix are rejected
        // immediately.
        if (clazz == null) {
            return false;
        }
        if (!clazz.getName().endsWith(DefaultGrailsJesqueJobClass.JOB)) {
            return false;
        }
        // Accept the class as soon as any declared method is named "perform";
        // the signature is irrelevant.
        for (Method candidate : clazz.getDeclaredMethods()) {
            if (PERFORM.equals(candidate.getName())) {
                return true;
            }
        }
        return false;
    }
}
|
// Write code to create a function that accepts a string containing only 0s and 1s
// Return true if there are an equal number of 0s and 1s
// Else return false
// Accepts a string containing only '0' and '1' characters and returns true
// when the string holds an equal number of each (an empty string counts as
// balanced), false otherwise.
// Fix: removed leftover debug console.log calls and the intermediate
// split/parseInt round-trip — a single signed counter suffices.
var zeroesAndOnes = function(str) {
  var balance = 0;
  for (var i = 0; i < str.length; i++) {
    if (str[i] === "1") {
      balance += 1;
    } else if (str[i] === "0") {
      balance -= 1;
    }
  }
  return balance === 0;
};
|
<gh_stars>1-10
require('dotenv').config();
const { I18n } = require('i18n');
const { i18nConfig } = require('../../config/i18n.config');
const iconv = require('iconv-lite');
const Scrapper = require('../services/scrapper');
const EmailService = require('../services/email-service');
const SubscriptionService = require('../services/subscription-service');
const SubscriberService = require('../services/subscriber-service');
const { Newsletter } = require('../models');
const { jobLogger } = require('../../config/logger');
class NewsletterSenderJob {
constructor() {
this.subscriptionService = new SubscriptionService();
this.subscriberService = new SubscriberService();
this.emailService = new EmailService(process.env.APPLICATION_LOCALE, false);
this.emailTemplate = {
content: null,
htmlContent: null,
encoding: 'utf8'
};
}
async sendNewsletters() {
const newsletters = await Newsletter.findAll({ where: { active: true } });
for (let newsletter of newsletters) {
try {
jobLogger.info(`Processing newsletter '${newsletter.name}'`);
const subscriptions = await this.subscriptionService.findSubscriptionsToSend(newsletter);
if (subscriptions.length === 0) {
jobLogger.info(`No subscriptions were found to newsletter '${newsletter.name}'`);
continue;
}
const scrapper = new Scrapper(newsletter);
const posts = await scrapper.getPosts();
if (posts.length === 0) {
jobLogger.info('No posts were found');
continue;
}
for (const post of posts) {
jobLogger.info(`Starting of data collect of post '${post.title}'`);
const completePost = await scrapper.scrapPost(post);
jobLogger.info(`Sending post '${completePost.title}' to subscribers`);
const subscribers = await this.subscriberService.findBySubscriptions(subscriptions);
if (subscribers.length === 0) {
continue;
}
const postData = { newsletter, subscriptions, subscribers, post: completePost };
const statistics = await this.sendPostToSubscribers(postData)
jobLogger.info(`Post '${completePost.title}' were sent to ${statistics.sent} subscribers of a total of ${statistics.total} successfully`);
}
} catch (err) {
jobLogger.error(`Error when processing newsletter '${newsletter.name}': ${err}`,
{ errorMessage: err.message, errorStack: err.stack });
console.error(err);
}
}
}
async sendPostToSubscribers(postData) {
const { newsletter, subscriptions, subscribers, post } = postData;
const emailData = this.handleFixedEmailData(post, newsletter);
const handledPost = this.handlePost(post, newsletter);
const statistics = {
sent: 0,
failed: 0,
total: subscribers.length
};
const promises = [];
for (const subscriber of subscribers) {
const subscriptionsFounded = subscriptions.filter(el => el.subscriberId === subscriber.id);
if (subscriptionsFounded.length === 0) continue;
const subscriptionData = { subscription: subscriptionsFounded[0], subscriber };
promises.push(this.sendPost(subscriptionData, emailData, handledPost, statistics));
}
await Promise.allSettled(promises);
return statistics;
}
handleFixedEmailData(post, newsletter) {
const emailData = { ...this.emailTemplate };
emailData.subject = post.title;
const i18n = new I18n(i18nConfig);
i18n.setLocale(newsletter.getLanguage());
const defaultEmailContent = i18n.__('post.default-content');
emailData.content = defaultEmailContent;
emailData.htmlContent = `<html><body><p>${defaultEmailContent}</p></body></html>`;
return emailData;
}
handlePost(post, newsletter) {
const i18n = new I18n(i18nConfig);
i18n.setLocale(newsletter.getLanguage());
post.locale = newsletter.getCurrentLocale();
post.fileName = `(${newsletter.name}) ${post.title}.html`;
post.fileEncoding = newsletter.getEncoding();
post.unsubscriptionLabel = i18n.__('post.label.unsubscription-link')
return post;
}
async sendPost(subscriptionData, emailData, post, statistics) {
const finalEmailData = this.handleDynamicEmailData(subscriptionData, post, emailData);
await this.emailService.sendMail(finalEmailData)
.then(() => statistics.sent++)
.catch((err) => {
statistics.failed++;
console.warn(err);
});
}
handleDynamicEmailData(subscriptionData, post, emailData) {
const { subscription, subscriber } = subscriptionData;
const dynamicEmailData = { ...emailData };
dynamicEmailData.toEmail = subscriber.kindleEmail;
dynamicEmailData.attachments = [];
dynamicEmailData.attachments.push({
filename: post.fileName,
content: this.handleContent(subscription, post)
});
return dynamicEmailData;
}
handleContent(subscription, post) {
const contentSufix = '<br>'
+ '<em>' + post.unsubscriptionLabel + ': ' + this.getUnsubscriptionLink(subscription) + '</em>'
+ '</p></footer></body></html>';
const content = post.htmlContent + contentSufix;
return post.fileEncoding !== 'utf8'
? iconv.encode(content, post.fileEncoding)
: content;
}
getUnsubscriptionLink(subscription) {
const unsubscriptionLink = this.subscriptionService.getUnsubscriptionLink(subscription.token);
return '<a href="' + unsubscriptionLink + '" target="_blank">' + unsubscriptionLink + '</a>';
}
}
module.exports = NewsletterSenderJob;
|
def isPalindrome(s):
    """Return True when *s* reads the same forwards and backwards (case-insensitive)."""
    lowered = s.lower()
    # Compare mirrored character pairs; only the first half needs checking.
    return all(lowered[i] == lowered[-1 - i] for i in range(len(lowered) // 2))


string_input = "madam"
palindrome_status = isPalindrome(string_input)

message = (
    "The given string is a Palindrome"
    if palindrome_status
    else "The given string is not a Palindrome"
)
print(message)
|
package org.slos.battle;
/**
 * Lifecycle states of a battle, from creation through completion.
 */
public enum GameState {
    /** Game created; no setup has happened yet. */
    VIRGIN,
    /** Initial setup in progress. (Spelling kept as-is: renaming the constant would break callers.) */
    INITALIZING_SETUP,
    /** Battle in progress. */
    ATTACKING,
    /** Battle finished. */
    GAME_COMPLETED
}
|
#!/usr/bin/env bash
# Syntax: check-mergeability
# Check whether branch is feature/bugfix and is merged to main/master
# shellcheck disable=SC2155
declare -ir success=0
declare -ir wrong_branch_error=1
case "$GITHUB_BASE_REF" in
master|main)
  # Head branch must be feature/<issue-id> or bugfix/<issue-id>.
  if [[ "$GITHUB_HEAD_REF" =~ ^(feature|bugfix)/([[:digit:]]+)$ ]]; then
    declare owner="${GITHUB_REPOSITORY%/*}"
    declare repository="${GITHUB_REPOSITORY#*/}"
    declare issue="${BASH_REMATCH[2]}"
    # Ask the GitHub API whether the referenced issue exists. The issues
    # endpoint also returns pull requests (which carry a .pull_request
    # field), so those are rejected as well.
    declare response="$(curl --silent --header "Accept: application/vnd.github.v3+json" \
      --write-out '%{http_code}' \
      "https://api.github.com/repos/$owner/$repository/issues/$issue")"
    declare http_body="$(echo "$response" | head --lines -1)"
    declare http_code="$(echo "$response" | tail --lines 1)"
    if [[ "$http_code" -ne 200 || "$(echo "$http_body" | \
      jq --monochrome-output '.pull_request' )" != null ]]; then
      echo "⛔ Source branch must be 'feature/<issue-id>'|'bugfix/<issue-id>' branch \
to be able merged to 'master' or 'main' with real issue id, but now it is '$GITHUB_HEAD_REF'" >&2
      exit "$wrong_branch_error"
    fi
  else
    echo "⛔ Source branch must be 'feature/<number>'|'bugfix/<number>' branch \
to be able merged to 'master' or 'main', but now it is '$GITHUB_HEAD_REF'" >&2
    exit "$wrong_branch_error"
  fi
  ;;
*)
  echo "⛔ Base branch must be 'master'|'main', but now it is '$GITHUB_BASE_REF'." >&2
  exit "$wrong_branch_error"
  ;;
esac
echo "✅ Branches are correct."
exit "$success"
|
"use strict";

var util = require("util");
var EventEmitter = require("events").EventEmitter;

// Application-wide event bus: an EventEmitter subclass exported as a
// singleton, so every module that requires this file shares one instance.
function Events() {
    EventEmitter.call(this);
}

// Classic prototypal inheritance (also exposes Events.super_).
util.inherits(Events, EventEmitter);

module.exports = new Events();
|
<gh_stars>1-10
package org.egovframe.rte.psl.dataaccess.ibatis;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.util.Properties;
import org.egovframe.rte.psl.dataaccess.TestBase;
import org.egovframe.rte.psl.dataaccess.dao.EmpDAO;
import org.egovframe.rte.psl.dataaccess.rowhandler.FileWritingRowHandler;
import javax.annotation.Resource;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.ResourceLoader;
import org.springframework.jdbc.datasource.init.ScriptUtils;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
/**
* == 개정이력(Modification Information) ==
*
* 수정일 수정자 수정내용
* ------- -------- ---------------------------
* 2014.01.22 권윤정 SimpleJdbcTestUtils -> JdbcTestUtils 변경
* 2014.01.22 권윤정 SimpleJdbcTemplate -> JdbcTemplate 변경
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "classpath*:META-INF/spring/context-*.xml" })
@Transactional
public class RowHandlerTest extends TestBase {

    @Resource(name = "schemaProperties")
    Properties schemaProperties;

    @Resource(name = "empDAO")
    EmpDAO empDAO;

    // fileWritingRowHandler is declared with prototype scope
    @Resource(name = "fileWritingRowHandler")
    FileWritingRowHandler rowHandler;

    boolean isHsql = true;

    /**
     * Recreates the sample schema and seed data for the DBMS under test
     * before each test method.
     */
    @Before
    public void onSetUp() throws Exception {
        ScriptUtils.executeSqlScript(dataSource.getConnection(), new ClassPathResource("META-INF/testdata/sample_schema_ddl_" + usingDBMS + ".sql"));
        // init data
        ScriptUtils.executeSqlScript(dataSource.getConnection(), new ClassPathResource("META-INF/testdata/sample_schema_initdata_" + usingDBMS + ".sql"));
    }

    /**
     * Streams a large query result through a RowHandler that writes each row
     * to a file, then verifies the processed row count and the output size.
     * Rollback is disabled because the assertion happens after the handler
     * has flushed its file.
     */
    @SuppressWarnings("deprecation")
    @Rollback(false)
    @Test
    public void testRowHandlerForOutFileWriting() throws Exception {
        // select to outFile using rowHandler
        empDAO.getSqlMapClientTemplate().queryWithRowHandler("selectEmpListToOutFileUsingRowHandler", null, rowHandler);

        // check
        ResourceLoader resourceLoader = new DefaultResourceLoader();
        org.springframework.core.io.Resource resource = resourceLoader.getResource("file:./src/test/resources/META-INF/testdata/" + schemaProperties.getProperty("outResultFile"));

        // flush and close the BufferedOutputStream
        rowHandler.releaseResource();
        assertEquals(38416, rowHandler.getTotalCount());

        File file = resource.getFile();
        assertNotNull(file);
        // size check for the large output file
        assertTrue(1000000 < file.length());
    }
}
|
<filename>mtp_api/apps/core/management/commands/clean_up.py
import datetime
import textwrap
from django.core.management import BaseCommand, call_command
from django.utils.timezone import now
from mtp_common.stack import StackException, is_first_instance
from mtp_auth.models import Login
class Command(BaseCommand):
    """
    Perform periodic clean-up removing expired DB models.
    This is designed to run on only one instance in an auto-scaling group.
    """
    help = textwrap.dedent(__doc__).strip()

    def handle(self, *args, **options):
        verbosity = options['verbosity']

        # Only the first instance in the stack runs clean-up; when the stack
        # cannot be inspected, assume this instance is it.
        try:
            is_primary = is_first_instance()
        except StackException:
            is_primary = True

        if not is_primary:
            if verbosity:
                self.stdout.write('Clean-up tasks do not run on secondary instances')
            return

        if verbosity:
            self.stdout.write('Performing clean-up tasks')
        call_command('clearsessions', verbosity=verbosity)
        call_command('clear_oauth2_tokens', verbosity=verbosity)
        call_command('clear_password_change_requests', verbosity=verbosity)
        call_command('clear_abandoned_payments', age=7, verbosity=verbosity)
        # Drop login records older than a year.
        Login.objects.filter(created__lt=now() - datetime.timedelta(days=365)).delete()
|
#!/bin/sh
# Fetches the TradingView charting_library at the newest commit of the chosen
# branch ("unstable" when passed as $1, otherwise "master") and copies its
# assets into public/ and src/.

# Delete a directory tree, but only when it exists.
remove_if_directory_exists() {
    if [ -d "$1" ]; then
        rm -Rf "$1"
    fi
}

if [ "$1" = "unstable" ]; then
    BRANCH="unstable"
else
    BRANCH="master"
fi

REPOSITORY='https://github.com/tradingview/charting_library/'

# The newest commit hash doubles as the name of the temporary checkout dir.
LATEST_HASH=$(git ls-remote $REPOSITORY $BRANCH | grep -Eo '^[[:alnum:]]+')

remove_if_directory_exists "$LATEST_HASH"
git clone -q --depth 1 -b "$BRANCH" $REPOSITORY "$LATEST_HASH"

# Replace previously copied assets before installing the fresh ones.
remove_if_directory_exists "public/charting_library"
remove_if_directory_exists "public/datafeeds"
remove_if_directory_exists "src/charting_library"

cp -r "$LATEST_HASH/charting_library" public
cp -r "$LATEST_HASH/datafeeds" public
cp -r "$LATEST_HASH/charting_library" src

remove_if_directory_exists "$LATEST_HASH"
|
# Seeds: one qsub job is submitted per experiment directory per seed.
declare -a RNG=(12345 12355 12365 12375)

# Directory-name pattern used to select experiment folders; defaults to "test_".
if [ "X$1" == "X" ]; then
    pattern="test_"
else
    pattern=$1
fi

# Iterate over matching directories (sorted numerically by the field after the
# underscore) and submit a job for each RNG seed.
# Fixes: quoted expansions (pattern/dir names with spaces or glob chars no
# longer word-split), read -r (no backslash mangling), and cd guarded so a
# failed cd cannot submit jobs from the wrong directory.
ls | grep "$pattern" | sort -k2 -t_ -n | while read -r i; do
    cd "$i" || continue
    resolution="$(cat RES)"
    for idx in "${!RNG[@]}"; do
        echo "Submit $i $resolution $idx ${RNG[$idx]}"
        qsub -v "OUTPUT_PREFIX=sim${idx}","SEED=${RNG[$idx]}" -N "dd1_res_${resolution}_$idx" run_job.sh
    done
    cd "$OLDPWD"
done
|
package com.me.DataStructure.BloomFilter;
import java.util.Arrays;
import java.util.BitSet;
/**
* @author zs
* @date 2021/9/5.
布隆过滤器Demo
*/
/**
 * A simple Bloom filter demo backed by a single shared {@link BitSet}.
 * <p>
 * Each key is hashed with several seeds; {@link #add(Object)} sets one bit
 * per seed and {@link #isContain(Object)} checks them. A {@code true} answer
 * may be a false positive, but {@code false} is always definitive.
 */
public class MyBloomFilter {

    /** Capacity of the filter in bits (2 &lt;&lt; 28 = 2^29). */
    private static final int DEFAULT_SIZE = 2 << 28;

    /** Bit array holding membership information for all added keys. */
    private static BitSet bitSet = new BitSet(DEFAULT_SIZE);

    /** Seeds used to derive several independent bit positions per key. */
    private static final int[] ints = {1, 6, 16, 38, 58, 68};

    /** Adds a key by setting one bit per seed. */
    public void add(Object key) {
        Arrays.stream(ints).forEach(i -> bitSet.set(hash(key, i)));
    }

    /**
     * Returns whether the key may be present: {@code true} can be a false
     * positive, {@code false} guarantees the key was never added.
     */
    public boolean isContain(Object key) {
        boolean result = true;
        for (int i : ints) {
            // Short-circuit: a single clear bit proves absence.
            result = result && bitSet.get(hash(key, i));
        }
        return result;
    }

    /**
     * Maps a key and a seed to a bit index in [0, DEFAULT_SIZE).
     * <p>
     * Spreads the high bits of hashCode into the low bits (HashMap's trick),
     * scales by the seed, and applies the size mask LAST.
     * Fix: the original computed {@code (i * (DEFAULT_SIZE - 1)) & spread}
     * ({@code *} binds tighter than {@code &}), so for keys with negative
     * hash codes the index could be negative and {@code BitSet.set} threw
     * {@code IndexOutOfBoundsException}. Masking last keeps the index in range.
     */
    private int hash(Object key, int i) {
        int h;
        return key == null ? 0 : (DEFAULT_SIZE - 1) & (i * ((h = key.hashCode()) ^ (h >>> 16)));
    }
}
|
import {FC} from 'react'
import * as React from 'react'
import styled from 'styled-components'
import {useStore} from "reto";
import {SlideStore} from "@/stores/slide.store";
import {SlidePreview} from '@/components/slide/slide-preview'
import {EditPageStore} from '@/stores/edit-page.store'
import {Box} from '@/components/box'
import {customScrollbar} from '@/utils/custom-scrollbar'
// Vertical layout: large selected-slide preview on top, thumbnail strip below.
const Container = styled.div`
  height: 100%;
  display: flex;
  flex-direction: column;
`

// Horizontally scrollable strip of slide thumbnails.
const PreviewList = styled.div`
  display: flex;
  justify-content: flex-start;
  align-items: center;
  overflow-x: scroll;
  overflow-y: hidden;
  height: 100%;
  padding-right: 30px;
  ${customScrollbar};
  > * {
    flex: none;
  }
`

// Fixed-size gap between thumbnails (and before the first one).
const PreviewSpace = styled.div`
  width: 30px;
  height: 10px;
`

/**
 * Slide previewer: renders the currently selected slide at 0.6 scale and a
 * clickable thumbnail strip (0.2 scale) of every slide; clicking a thumbnail
 * selects it in the edit-page store.
 */
export const Previewer: FC = () => {
  const {slideTexts} = useStore(SlideStore)
  const editorStore = useStore(EditPageStore)
  return (
    <Container>
      <Box role='row' style={{display: 'flex', justifyContent: 'center', alignItems: 'center'}}>
        {/* Guard: selectedPreview may point past the end after deletions. */}
        {slideTexts[editorStore.selectedPreview] && (
          <SlidePreview
            markdown={slideTexts[editorStore.selectedPreview]}
            scale={0.6}
            pageIndex={editorStore.selectedPreview}
          />
        )}
      </Box>
      <Box role='row' style={{height: '200px', flex: 'none'}}>
        <PreviewList>
          <PreviewSpace/>
          {slideTexts.map((text, index) => (
            <React.Fragment key={index}>
              <SlidePreview
                markdown={text}
                refIndex={index}
                pageIndex={index}
                scale={0.2}
                selected={editorStore.selectedPreview === index}
                onClick={() => {
                  editorStore.setSelectedPreview(index)
                }}
              />
              <PreviewSpace/>
            </React.Fragment>
          ))}
        </PreviewList>
      </Box>
    </Container>
  )
}
|
<reponame>shaunakpp/voices-of-consent
# Renames locations.type to locations.location_type — presumably to avoid
# Rails treating `type` as the STI discriminator column (TODO confirm intent).
class RenameLocationTypeToLocationType < ActiveRecord::Migration[5.2]
  def change
    # rename_column is reversible, so `change` covers both migration directions.
    rename_column :locations, :type, :location_type
  end
end
|
#!/bin/bash
# Computes the relative path from the project directory back to GOPATH, based
# on whether the org name in config.yml contains a "/" (one extra level).
source .project/yaml.sh
# create_variables is provided by .project/yaml.sh; it presumably exports
# config.yml keys as $project_* variables — TODO confirm.
create_variables ./config.yml
ORG_NAME=$project_org
# "${ORG_NAME##*/*}" is empty exactly when ORG_NAME contains a slash.
if [ -z "${ORG_NAME##*/*}" ] ;then
export REL_PATH_TO_GOPATH=../../../..
else
#echo "'$ORG_NAME' does not contain: '/'."
export REL_PATH_TO_GOPATH=../../..
fi
echo $REL_PATH_TO_GOPATH
|
import React, { useState } from 'react';
const ReverseApp = () => {
const [text, setText] = useState('');
const [reversedText, setReversedText] = useState('');
const handleTextChange = (event) => {
setText(event.target.value);
};
const handleSubmit = (event) => {
event.preventDefault();
setReversedText(reverseText(text));
};
const reverseText = (text) => {
let reversedText = '';
for (let i = 0; i < text.length; i++) {
reversedText = text[i] + reversedText;
}
return reversedText;
};
return (
<div>
<form onSubmit={handleSubmit}>
<label>
Text:
<input type="text" onChange={handleTextChange} />
</label>
<input type="submit" value="Submit" />
</form>
<p>{reversedText}</p>
</div>
);
};
export default ReverseApp;
|
#!/usr/bin/env bash
# Integration test: verify DUB can fetch packages from a file-system package
# supplier (issue 1401), both pinned and latest versions, in SDL and JSON form.
DIR=$(dirname "${BASH_SOURCE[0]}")
# common.sh provides $DUB and die() — TODO confirm.
. "$DIR"/common.sh

# Start from a clean slate; removal failures are ignored on purpose.
${DUB} remove fs-json-dubpackage --non-interactive 2>/dev/null || true
${DUB} remove fs-sdl-dubpackage --non-interactive 2>/dev/null || true

echo "Trying to get fs-sdl-dubpackage (1.0.5)"
${DUB} fetch fs-sdl-dubpackage --version=1.0.5 --skip-registry=all --registry=file://"$DIR"/issue1401-file-system-pkg-supplier
# A successful remove of the exact version proves the fetch installed it.
if ! ${DUB} remove fs-sdl-dubpackage@1.0.5 2>/dev/null; then
    die $LINENO 'DUB did not install package from file system.'
fi

echo "Trying to get fs-sdl-dubpackage (latest)"
${DUB} fetch fs-sdl-dubpackage --skip-registry=all --registry=file://"$DIR"/issue1401-file-system-pkg-supplier
if ! ${DUB} remove fs-sdl-dubpackage@1.0.6 2>/dev/null; then
    die $LINENO 'DUB did not install latest package from file system.'
fi

echo "Trying to get fs-json-dubpackage (1.0.7)"
${DUB} fetch fs-json-dubpackage@1.0.7 --skip-registry=all --registry=file://"$DIR"/issue1401-file-system-pkg-supplier
if ! ${DUB} remove fs-json-dubpackage@1.0.7 2>/dev/null; then
    die $LINENO 'DUB did not install package from file system.'
fi
|
<reponame>huxi/ratpack
/*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ratpack.path.internal;
import com.google.common.collect.ImmutableMap;
import ratpack.path.PathBinding;
import ratpack.path.PathTokens;
/**
 * Default {@link PathBinding}: records the bound path segment, the remaining
 * ("past binding") path, this binding's own tokens and the tokens accumulated
 * from all parent bindings.
 */
public class DefaultPathBinding implements PathBinding {

    // The path segment this binding matched.
    private final String binding;
    // The remainder of the request path after this binding.
    private final String pastBinding;

    // Tokens contributed by this binding only.
    private final PathTokens tokens;
    // This binding's tokens merged over all ancestors' tokens.
    private final PathTokens allTokens;

    // Equality intentionally ignores `tokens`: allTokens already includes them.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        DefaultPathBinding that = (DefaultPathBinding) o;

        return binding.equals(that.binding) && pastBinding.equals(that.pastBinding) && allTokens.equals(that.allTokens);
    }

    @Override
    public int hashCode() {
        int result = binding.hashCode();
        result = 31 * result + pastBinding.hashCode();
        result = 31 * result + allTokens.hashCode();
        return result;
    }

    /**
     * Creates a binding nested under {@code parent}.
     * <p>
     * The parent's remaining path must either equal {@code binding} exactly
     * (nothing left past this binding) or start with {@code binding + "/"}
     * (the remainder past that prefix becomes this binding's pastBinding).
     *
     * @throws IllegalArgumentException when the parent's remaining path is
     *                                  not under {@code binding}.
     */
    public DefaultPathBinding(String binding, ImmutableMap<String, String> tokens, PathBinding parent) {
        this.binding = binding;
        this.tokens = DefaultPathTokens.of(tokens);
        // Avoid an unnecessary merged map when the parent has no tokens.
        this.allTokens = parent.getAllTokens().isEmpty() ? this.tokens : DefaultPathTokens.of(ImmutableMap.<String, String>builder().putAll(parent.getAllTokens()).putAll(tokens).build());

        String bindingWithSlash = binding.concat("/");
        String path = parent.getPastBinding();
        if (path.equals(binding)) {
            pastBinding = "";
        } else if (path.startsWith(bindingWithSlash)) {
            pastBinding = path.substring(bindingWithSlash.length());
        } else {
            throw new IllegalArgumentException(String.format("Path '%s' is not a child of '%s'", path, binding));
        }
    }

    public String getPastBinding() {
        return pastBinding;
    }

    public String getBoundTo() {
        return binding;
    }

    public PathTokens getTokens() {
        return tokens;
    }

    public PathTokens getAllTokens() {
        return allTokens;
    }
}
|
package main
import (
"bufio"
"errors"
"fmt"
"log"
"os"
"os/user"
"runtime"
"strings"
"github.com/djhworld/gomeboycolor-glfw/saves"
"github.com/djhworld/gomeboycolor/cartridge"
"github.com/djhworld/gomeboycolor/config"
"github.com/djhworld/gomeboycolor/gbc"
"gopkg.in/urfave/cli.v1"
)
const TITLE string = "gomeboycolor"
var VERSION string = "0.0.1"
// main wires up the urfave/cli application: metadata, the flag set and the
// "run" action, then dispatches on os.Args.
func main() {
	app := cli.NewApp()
	app.Name = "gomeboycolor"
	app.Usage = "Gameboy Color emulator"
	app.ArgsUsage = "<path-to-ROM>"
	app.Version = VERSION
	app.UsageText = "gomeboycolor [flags] <path-to-ROM-file>"
	app.Action = run
	// Every flag below maps onto a config.Config field in run().
	app.Flags = []cli.Flag{
		cli.StringFlag{
			Name:  "title",
			Value: TITLE,
			Usage: "Title to use",
		},
		cli.BoolFlag{
			Name:  "showfps",
			Usage: "Calculate and display frames per second",
		},
		cli.BoolFlag{
			Name:  "skipboot",
			Usage: "Skip boot sequence",
		},
		cli.BoolFlag{
			Name:  "no-color",
			Usage: "Disable Gameboy Color Hardware",
		},
		cli.BoolFlag{
			Name:  "headless",
			Usage: "Run emulator without output",
		},
		cli.Int64Flag{
			Name:  "fpslock",
			Value: 59,
			Usage: "Lock framerate to this. Going higher than default might be unstable!",
		},
		cli.IntFlag{
			Name:  "size",
			Value: 1,
			Usage: "Screen size multiplier",
		},
		cli.BoolFlag{
			Name:  "debug",
			Usage: "Enable debugger",
		},
		cli.BoolFlag{
			Name:  "dump",
			Usage: "Print state of machine after each cycle (WARNING - WILL RUN SLOW)",
		},
		cli.StringFlag{
			Name:  "b",
			Value: "0x0000",
			Usage: "Break into debugger when PC equals a given value between 0x0000 and 0xFFFF",
		},
	}
	err := app.Run(os.Args)
	if err != nil {
		log.Fatal(err)
	}
}
// run is the cli action: it validates arguments, assembles the emulator
// configuration from the parsed flags, loads the cartridge and starts the
// emulator core plus its IO loop.
func run(c *cli.Context) error {
	runtime.GOMAXPROCS(runtime.NumCPU())
	fmt.Printf("%s. %s\n", TITLE, VERSION)
	fmt.Println("Copyright (c) 2018. <NAME>.")
	fmt.Println("http://djhworld.github.io/gomeboycolor")
	fmt.Println(strings.Repeat("*", 120))
	if c.NArg() != 1 {
		return errors.New("Please specify the location of a ROM to boot")
	}
	//Parse and validate settings file (if found)
	conf := &config.Config{
		Title:         TITLE,
		ScreenSize:    c.Int("size"),
		SkipBoot:      c.Bool("skipboot"),
		DisplayFPS:    c.Bool("showfps"),
		ColorMode:     !c.Bool("no-color"),
		Debug:         c.Bool("debug"),
		BreakOn:       c.String("b"),
		DumpState:     c.Bool("dump"),
		Headless:      c.Bool("headless"),
		FrameRateLock: c.Int64("fpslock"),
	}
	fmt.Println(conf)
	cart, err := createCartridge(c.Args().Get(0))
	if err != nil {
		return err
	}
	log.Println("Starting emulator")
	emulator, err := gbc.Init(cart, getSaveStore(), conf, NewGlfwIO(conf.FrameRateLock, conf.Headless, conf.DisplayFPS))
	if err != nil {
		return err
	}
	// Each frame is executed directly; no extra throttling here.
	frameRunner := func(doFrame func()) {
		doFrame()
	}
	// The emulator core runs in its own goroutine...
	go emulator.Run(frameRunner)
	// ...while the IO loop stays on the locked main OS thread (required by GLFW).
	runtime.LockOSThread()
	emulator.RunIO()
	log.Println("Goodbye!")
	return nil
}
// getSaveStore returns a save store rooted at ~/.gomeboycolor/saves, creating
// the directory if necessary. Errors are logged rather than fatal so the
// emulator can still start (saves may then fail later).
func getSaveStore() *saves.FileSystemStore {
	// Resolve the home directory; fall back to "." so a lookup failure does
	// not dereference a nil *user.User (the original ignored this error).
	home := "."
	if usr, err := user.Current(); err == nil {
		home = usr.HomeDir
	} else {
		log.Println("Warning: could not determine current user:", err)
	}
	saveDir := home + "/.gomeboycolor/saves"
	// BUGFIX: the original passed os.ModeDir as the permission argument,
	// which creates a directory with no permission bits at all; 0755 gives
	// the owner read/write/execute access.
	if err := os.MkdirAll(saveDir, 0755); err != nil {
		log.Println("Warning: could not create save directory:", err)
	}
	return saves.NewFileSystemStore(saveDir)
}
// createCartridge loads the ROM at romFilename into memory and wraps it in a
// Cartridge, propagating any read error.
func createCartridge(romFilename string) (*cartridge.Cartridge, error) {
	contents, err := retrieveROM(romFilename)
	if err != nil {
		return nil, err
	}
	return cartridge.NewCartridge(romFilename, contents)
}
func retrieveROM(filename string) ([]byte, error) {
file, err := os.Open(filename)
if err != nil {
return nil, err
}
defer file.Close()
stats, statsErr := file.Stat()
if statsErr != nil {
return nil, statsErr
}
var size int64 = stats.Size()
bytes := make([]byte, size)
bufr := bufio.NewReader(file)
_, err = bufr.Read(bytes)
return bytes, err
}
|
<reponame>dinhtuyen/PRML01
import numpy as np
class Tensor(object):
    """
    Base class for nodes in the computation graph.

    Wraps a numeric value (Python scalar, numpy scalar or np.ndarray)
    together with the Function that produced it, so that errors can be
    back-propagated through the graph via ``backward``.
    """
    # Opt out of NumPy's ufunc dispatch so arithmetic involving ndarrays
    # defers to the operators defined on Tensor subclasses.
    __array_ufunc__ = None

    # Value types accepted by the constructor.
    _SUPPORTED = (int, float, np.number, np.ndarray)

    def __init__(self, value, function=None):
        """
        Construct a Tensor.

        Parameters
        ----------
        value : int, float, np.number or np.ndarray
            value of this tensor
        function : Function, optional
            function that produced this tensor (None for leaf nodes)

        Raises
        ------
        TypeError
            if ``value`` is not one of the supported numeric types
        """
        if not isinstance(value, self._SUPPORTED):
            raise TypeError(
                "Unsupported class for Tensor: {}".format(type(value))
            )
        self.value = value
        self.function = function

    def __format__(self, *args, **kwargs):
        # Delegate formatting to repr; any format spec is ignored.
        return repr(self)

    def __repr__(self):
        name = type(self).__name__
        if isinstance(self.value, np.ndarray):
            return "{0}(shape={1.shape}, dtype={1.dtype})".format(name, self.value)
        return "{0}(value={1})".format(name, self.value)

    @property
    def ndim(self):
        # Scalars (plain int/float) report zero dimensions.
        return getattr(self.value, "ndim", 0)

    @property
    def shape(self):
        # Scalars report an empty shape, mirroring numpy conventions.
        return getattr(self.value, "shape", ())

    @property
    def size(self):
        # Scalars count as a single element.
        return getattr(self.value, "size", 1)

    def backward(self, delta=1, **kwargs):
        """
        Back-propagate error through the graph.

        Parameters
        ----------
        delta : array_like
            derivative with respect to this tensor; an ndarray must match
            this tensor's shape, a scalar is only valid for scalar tensors

        Raises
        ------
        ValueError
            on a shape mismatch
        TypeError
            if ``delta`` is not a supported type
        """
        if isinstance(delta, np.ndarray):
            if delta.shape != self.shape:
                raise ValueError(
                    "shapes {} and {} not aligned"
                    .format(delta.shape, self.shape)
                )
        elif isinstance(delta, (int, float, np.number)):
            if self.shape != ():
                raise ValueError(
                    "delta must be np.ndarray"
                )
        else:
            raise TypeError(
                "unsupported class for delta"
            )
        self._backward(delta, **kwargs)

    def _backward(self, delta, **kwargs):
        # Leaf tensors have no producing function; nothing to propagate.
        func = self.function
        if hasattr(func, "backward"):
            func.backward(delta, **kwargs)
|
<filename>google/cloud/websecurityscanner/v1alpha/google-cloud-websecurityscanner-v1alpha-ruby/lib/google/cloud/websecurityscanner/v1alpha/web_security_scanner_pb.rb
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto
require 'google/api/annotations_pb'
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/cloud/websecurityscanner/v1alpha/crawled_url_pb'
require 'google/cloud/websecurityscanner/v1alpha/finding_pb'
require 'google/cloud/websecurityscanner/v1alpha/finding_type_stats_pb'
require 'google/cloud/websecurityscanner/v1alpha/scan_config_pb'
require 'google/cloud/websecurityscanner/v1alpha/scan_run_pb'
require 'google/protobuf/empty_pb'
require 'google/protobuf/field_mask_pb'
require 'google/protobuf'
# Registers the request/response message descriptors for the v1alpha
# WebSecurityScanner service with the shared generated descriptor pool.
# (Generated code — regenerate from the .proto rather than editing by hand.)
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/websecurityscanner/v1alpha/web_security_scanner.proto", :syntax => :proto3) do
    add_message "google.cloud.websecurityscanner.v1alpha.CreateScanConfigRequest" do
      optional :parent, :string, 1
      optional :scan_config, :message, 2, "google.cloud.websecurityscanner.v1alpha.ScanConfig"
    end
    add_message "google.cloud.websecurityscanner.v1alpha.DeleteScanConfigRequest" do
      optional :name, :string, 1
    end
    add_message "google.cloud.websecurityscanner.v1alpha.GetScanConfigRequest" do
      optional :name, :string, 1
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListScanConfigsRequest" do
      optional :parent, :string, 1
      optional :page_token, :string, 2
      optional :page_size, :int32, 3
    end
    add_message "google.cloud.websecurityscanner.v1alpha.UpdateScanConfigRequest" do
      optional :scan_config, :message, 2, "google.cloud.websecurityscanner.v1alpha.ScanConfig"
      optional :update_mask, :message, 3, "google.protobuf.FieldMask"
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListScanConfigsResponse" do
      repeated :scan_configs, :message, 1, "google.cloud.websecurityscanner.v1alpha.ScanConfig"
      optional :next_page_token, :string, 2
    end
    add_message "google.cloud.websecurityscanner.v1alpha.StartScanRunRequest" do
      optional :name, :string, 1
    end
    add_message "google.cloud.websecurityscanner.v1alpha.GetScanRunRequest" do
      optional :name, :string, 1
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListScanRunsRequest" do
      optional :parent, :string, 1
      optional :page_token, :string, 2
      optional :page_size, :int32, 3
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListScanRunsResponse" do
      repeated :scan_runs, :message, 1, "google.cloud.websecurityscanner.v1alpha.ScanRun"
      optional :next_page_token, :string, 2
    end
    add_message "google.cloud.websecurityscanner.v1alpha.StopScanRunRequest" do
      optional :name, :string, 1
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListCrawledUrlsRequest" do
      optional :parent, :string, 1
      optional :page_token, :string, 2
      optional :page_size, :int32, 3
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListCrawledUrlsResponse" do
      repeated :crawled_urls, :message, 1, "google.cloud.websecurityscanner.v1alpha.CrawledUrl"
      optional :next_page_token, :string, 2
    end
    add_message "google.cloud.websecurityscanner.v1alpha.GetFindingRequest" do
      optional :name, :string, 1
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListFindingsRequest" do
      optional :parent, :string, 1
      optional :filter, :string, 2
      optional :page_token, :string, 3
      optional :page_size, :int32, 4
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListFindingsResponse" do
      repeated :findings, :message, 1, "google.cloud.websecurityscanner.v1alpha.Finding"
      optional :next_page_token, :string, 2
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsRequest" do
      optional :parent, :string, 1
    end
    add_message "google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsResponse" do
      repeated :finding_type_stats, :message, 1, "google.cloud.websecurityscanner.v1alpha.FindingTypeStats"
    end
  end
end
# Ruby constants resolving each registered descriptor to its message class.
module Google
  module Cloud
    module Websecurityscanner
      module V1alpha
        CreateScanConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.CreateScanConfigRequest").msgclass
        DeleteScanConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.DeleteScanConfigRequest").msgclass
        GetScanConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.GetScanConfigRequest").msgclass
        ListScanConfigsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListScanConfigsRequest").msgclass
        UpdateScanConfigRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.UpdateScanConfigRequest").msgclass
        ListScanConfigsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListScanConfigsResponse").msgclass
        StartScanRunRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.StartScanRunRequest").msgclass
        GetScanRunRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.GetScanRunRequest").msgclass
        ListScanRunsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListScanRunsRequest").msgclass
        ListScanRunsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListScanRunsResponse").msgclass
        StopScanRunRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.StopScanRunRequest").msgclass
        ListCrawledUrlsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListCrawledUrlsRequest").msgclass
        ListCrawledUrlsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListCrawledUrlsResponse").msgclass
        GetFindingRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.GetFindingRequest").msgclass
        ListFindingsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListFindingsRequest").msgclass
        ListFindingsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListFindingsResponse").msgclass
        ListFindingTypeStatsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsRequest").msgclass
        ListFindingTypeStatsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.websecurityscanner.v1alpha.ListFindingTypeStatsResponse").msgclass
      end
    end
  end
end
|
<reponame>chrishumboldt/rocket-utility
/**
* @author <NAME>
*/
import { RocketString } from './string.utility';
// Unit tests for the RocketString helpers: each case pins the expected output
// of the lowercase/remove/uppercase transforms for a representative input.
describe('Rocket String Utility:', () => {
   // Tests.
   it('Should lowercase an entire string.', () => {
      expect(RocketString.lowercase.all('Some text.')).toEqual('some text.');
   });
   it('Should lowercase the first character of a string.', () => {
      expect(RocketString.lowercase.first('Some text.')).toEqual('some text.');
   });
   it('Should lowercase the last character of a string.', () => {
      expect(RocketString.lowercase.last('some texT')).toEqual('some text');
   });
   it('Should remove the first character of a string.', () => {
      expect(RocketString.remove.first('Some text.')).toEqual('ome text.');
   });
   it('Should remove the first and last character of a string.', () => {
      expect(RocketString.remove.firstAndLast('Some text.')).toEqual('ome text');
   });
   it('Should remove the last character of a string.', () => {
      expect(RocketString.remove.last('Some text.')).toEqual('Some text');
   });
   it('Should remove the spaces in a string.', () => {
      expect(RocketString.remove.spaces('Some cool text.')).toEqual(
         'Somecooltext.'
      );
   });
   it('Should uppercase an entire string.', () => {
      expect(RocketString.uppercase.all('Some text.')).toEqual('SOME TEXT.');
   });
   it('Should uppercase the first character of a string.', () => {
      expect(RocketString.uppercase.first('some text.')).toEqual('Some text.');
   });
   it('Should uppercase the last character of a string.', () => {
      expect(RocketString.uppercase.last('some text')).toEqual('some texT');
   });
});
|
#!/bin/sh
# Installs the Mopidy music server plus the Mopify web client, or updates /
# removes them when invoked with "update" / "remove". The bare "break"
# statements suggest this script is sourced inside a menu loop — confirm
# against the surrounding framework before changing them.
[ $PKG = rpm ] && { whiptail --msgbox "Your $PKG isn't supported yet" 8 32; exit; }
# BUGFIX: "pip install" has no -y flag (only "pip uninstall" does); the
# original "pip install --upgrade -y" aborted the update with a usage error.
[ "$1" = update ] && [ $PKG = deb ] && { apt-get update; $install mopidy; pip install --upgrade Mopidy-Mopify; whiptail --msgbox "Mopidy updated!" 8 32; break; }
[ "$1" = remove ] && { $remove mopidy; pip uninstall -y Mopidy-Mopify; whiptail --msgbox "Mopidy removed." 8 32; break; }
# Defining the ports
MPDport=$(whiptail --title "MPD server port" --inputbox "Set a port number for the MPD server" 8 48 "6600" 3>&1 1>&2 2>&3)
port=$(whiptail --title "Mopify web client port" --inputbox "Set a port number for the Mopify web client" 8 48 "6680" 3>&1 1>&2 2>&3)
if [ $PKG = deb ] ;then
	# Add the archive’s GPG key
	wget -q -O - https://apt.mopidy.com/mopidy.gpg | sudo apt-key add -
	case "$DIST$DIST_VER" in
		# For Debian wheezy or Ubuntu 12.04 LTS
		debian7*|*ubuntu*12.04*) wget -q -O /etc/apt/sources.list.d/mopidy.list https://apt.mopidy.com/wheezy.list;;
		# For any newer Debian/Ubuntu distro
		*) wget -q -O /etc/apt/sources.list.d/mopidy.list https://apt.mopidy.com/jessie.list;;
	esac
	# Install Mopidy and all dependencies
	apt-get update
fi
[ $PKG = rpm ] && whiptail --yesno "You have $DIST. $PKG based OS aren't supported yet." 8 32
[ $PKG != rpm ] || break
$install mopidy
# Install Mopify, a web client for Mopidy
pip install Mopidy-Mopify
# Bind publicly (::) unless the machine's IP is the LAN-local address.
[ $IP = $LOCALIP ] && access=$IP || access=::
cat > /etc/mopidy/mopidy.conf <<EOF
[http]
hostname = $access
port = $port
[mpd]
hostname = $access
port = $MPDport
max_connections = 40
connection_timeout = 120
EOF
# Start the service and enable it to start up at boot
systemctl enable mopidy
systemctl restart mopidy
whiptail --msgbox "Mopidy installed!
The MPD server port is $MPDport
Open http://$URL:$port in your browser" 12 64
|
activity_main.xml
<?xml version="1.0" encoding="utf-8"?>
<!-- Root layout: a full-screen progress bar is shown while data loads; the
     data_layout container is revealed once fetching completes (visibility is
     toggled from the activity). -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:orientation="vertical">
    <!-- Loading indicator, visible by default. -->
    <ProgressBar
        android:id="@+id/progress_bar"
        android:layout_width="match_parent"
        android:layout_height="match_parent" />
    <!-- Content container, hidden until data arrives. -->
    <LinearLayout
        android:id="@+id/data_layout"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:orientation="vertical"
        android:visibility="gone">
        <TextView
            android:id="@+id/data_text_view"
            android:layout_width="match_parent"
            android:layout_height="wrap_content" />
    </LinearLayout>
</LinearLayout>
MainActivity.java
/**
 * Tutorial-style activity that shows a loading spinner while data is fetched,
 * then swaps in the fetched content. NOTE: this snippet is incomplete — the
 * fetch body is elided ("...") and {@code data} is never declared here.
 */
public class MainActivity extends AppCompatActivity {
    // Full-screen spinner shown while data is being fetched.
    private ProgressBar progressBar;
    // Container for the fetched content; hidden until the fetch completes.
    private LinearLayout dataLayout;
    private TextView dataTextView;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        progressBar = findViewById(R.id.progress_bar);
        dataLayout = findViewById(R.id.data_layout);
        dataTextView = findViewById(R.id.data_text_view);
        showLoading();
        fetchData();
    }
    // Shows the spinner and hides the content container.
    private void showLoading() {
        progressBar.setVisibility(View.VISIBLE);
        dataLayout.setVisibility(View.GONE);
    }
    private void fetchData() {
        // Fetch data from API and update UI
        ...
        // NOTE(review): hideLoading() is called synchronously here; for a real
        // network fetch it should run in the request's completion callback.
        hideLoading();
    }
    // Hides the spinner and reveals the fetched content.
    private void hideLoading() {
        progressBar.setVisibility(View.GONE);
        dataLayout.setVisibility(View.VISIBLE);
        // NOTE(review): "data" is not declared in this snippet — presumably it
        // is produced by the elided fetch logic above; confirm before reuse.
        dataTextView.setText(data);
    }
}
|
# Smoke test: the gem exposes a VERSION constant.
RSpec.describe Ducksky do
  it "has a version number" do
    expect(Ducksky::VERSION).not_to be nil
  end
end
# Unit tests for the Dark Sky "time machine" endpoint wrapper. HTTP traffic is
# replayed from a recorded VCR cassette, so no live network access is needed.
RSpec.describe Ducksky::Client do
  describe "#time_machine_request" do
    let(:api_key) { '1234qwer' }
    subject { described_class.new(api_key: api_key) }
    before do
      VCR.insert_cassette("time_machine_request/success")
    end
    after { VCR.eject_cassette }
    context 'without required params' do
      # Pending spec — intentionally has no body yet.
      it 'raises error'
    end
    context 'with required params' do
      it 'sends a GET request to the darksky api' do
        # The path embeds the API key, coordinates and an ISO-8601 timestamp.
        expect(described_class)
          .to receive(:get)
          .with("/forecast/#{api_key}/5.498384,118.280899,2020-04-07T00:00:00+00:00")
          .and_call_original
        subject.time_machine_request(
          lat: 5.498384,
          lon: 118.280899,
          time: DateTime.new(2020, 04, 07)
        )
      end
      it 'converts response to a Ducksky::Response class' do
        response = subject.time_machine_request(
          lat: 5.498384,
          lon: 118.280899,
          time: DateTime.new(2020, 04, 07)
        )
        expect(response).to be_instance_of(Ducksky::Response)
        expect(response.latitude).to eq(5.498384)
      end
      it 'converts weather information to DataPoint instances' do
        response = subject.time_machine_request(
          lat: 5.498384,
          lon: 118.280899,
          time: DateTime.new(2020, 04, 07)
        )
        expect(response.currently).to be_instance_of(Ducksky::DataPoint)
        expect(response.currently.summary).to eq('Humid and Mostly Cloudy')
      end
    end
  end
end
|
/**
* Created by lvjianyao on 16/6/27.
*/
var config = require('./../config');
var mongojs = require('mongojs');
var db = mongojs(config.mongo.databaseUrl);
var getCollection = function (channelUuid){
return db.collection(channelUuid);
}
module.exports = {
logChannels: getCollection('logChannels')
};
db.on('error', function (err) {
if (/ECONNREFUSED/.test(err.message) ||
/no primary server available/.test(err.message)) {
console.error('FATAL: database error', err);
process.exit(1);
}
})
|
def classify_language(text):
    """Guess the language of ``text`` by keyword overlap.

    The text is lowercased and stripped of common punctuation, then scored
    against each language's keyword set; the language with the most matching
    keywords wins.

    Parameters
    ----------
    text : str
        Input text to classify.

    Returns
    -------
    str or None
        The best-matching language name, or None when no keyword matches.
    """
    # Create a dictionary of languages
    languages = {
        'English': set(['hello', 'good', 'great']),
        'French': set(['bonjour', 'comment', 'ça']),
        'Spanish': set(['hola', 'cómo', 'estás'])
    }
    # BUGFIX: the original used set(words).issubset(vocab), which required
    # EVERY word of the text to appear in the tiny keyword set and was
    # case/punctuation sensitive — even the sample text returned None.
    # Normalize case, strip surrounding punctuation, and score by overlap.
    words = set(word.strip('.,!?¿¡;:"\'') for word in text.lower().split())
    language = None
    best_matches = 0
    for key, vocab in languages.items():
        matches = len(words & vocab)
        if matches > best_matches:
            best_matches = matches
            language = key
    return language
# Sample French text used to exercise the classifier.
text = "Bonjour, comment ça va ?"
# Classify the sample text.
lang = classify_language(text)
# Print the detected language.
print("The language of the text is {}".format(lang))
|
<filename>ods-main/src/main/java/cn/stylefeng/guns/onlineaccess/modular/mapper/ApplicationFileMapper.java
package cn.stylefeng.guns.onlineaccess.modular.mapper;
import cn.stylefeng.guns.onlineaccess.modular.entity.ApplicationFile;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis-Plus mapper for {@link ApplicationFile} entities. Inherits the
 * standard CRUD operations from {@link BaseMapper}; no custom queries yet.
 */
@Mapper
public interface ApplicationFileMapper extends BaseMapper<ApplicationFile> {
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -e
# Install build prerequisites, then build and install OpenCV from source with
# GPU/CUDA/FireWire support disabled.
yum groupinstall -y "Development Tools"
yum install -y cmake python27 python27-setuptools
git clone https://github.com/opencv/opencv
cd opencv
mkdir -p build
cd build
# CMAKE_INSTALL_PREFIX controls the install location for CMake builds.
cmake -D BUILD_opencv_gpu=OFF -D WITH_EIGEN=ON -D WITH_TBB=ON -D WITH_CUDA=OFF -D WITH_1394=OFF -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local ..
# Build in parallel first, then install. The original ran
# "make PREFIX=/usr/local install": PREFIX is ignored by CMake-generated
# makefiles (CMAKE_INSTALL_PREFIX above is what matters) and the single
# serial invocation made the build needlessly slow.
make -j"$(nproc)"
make install
|
#!/bin/bash
# Mirror the Arduino IDE, Go and Node.js release tarballs for x86-64,
# ARMv6/7 and ARM64, then record their SHA-256 checksums.
# "wget -N" only re-downloads files newer than the local copies; "-i -"
# reads the URL list from stdin (the heredoc below).
wget -N -i - <<EOF
https://downloads.arduino.cc/arduino-1.8.13-linux64.tar.xz
https://downloads.arduino.cc/arduino-1.8.13-linuxarm.tar.xz
https://downloads.arduino.cc/arduino-1.8.13-linuxaarch64.tar.xz
https://dl.google.com/go/go1.15.5.linux-amd64.tar.gz
https://dl.google.com/go/go1.15.5.linux-armv6l.tar.gz
https://dl.google.com/go/go1.15.5.linux-arm64.tar.gz
https://nodejs.org/dist/v14.15.1/node-v14.15.1-linux-x64.tar.xz
https://nodejs.org/dist/v14.15.1/node-v14.15.1-linux-armv7l.tar.xz
https://nodejs.org/dist/v14.15.1/node-v14.15.1-linux-arm64.tar.xz
EOF
# Both .tar.xz and .tar.gz end in "z", so the *z glob covers every tarball.
sha256sum *z >checksums.txt
|
-- Updates the title of a single book identified by its primary key.
-- BUGFIX: the original parameter was named bookID, identical to the column;
-- inside the procedure "WHERE bookID = bookID" resolved to column = column,
-- which is always true and retitled EVERY row in the table. Prefixing the
-- parameter names removes the ambiguity; callers pass arguments
-- positionally, so the interface is unchanged.
CREATE PROCEDURE update_book(IN in_bookID INT, IN in_bookTitle VARCHAR(255))
BEGIN
    UPDATE book
    SET title = in_bookTitle
    WHERE bookID = in_bookID;
END;
|
<filename>src/main/java/de/semsoft/xfactory/services/file/TransformFile.java
package de.semsoft.xfactory.services.file;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Date;
import java.util.stream.StreamSupport;
import org.apache.commons.io.FileUtils;
import de.semsoft.xfactory.util.FileSemaphore;
/**
 * Wraps a file flowing through the transform pipeline and derives metadata
 * (area/slot directory names, file age) from its absolute path.
 *
 * <p>NOTE(review): the {@code file} field and {@code STATUSLIST} constant are
 * presumably inherited from {@link FileSemaphore} — confirm in that class.
 */
public class TransformFile extends FileSemaphore {
    /** Area directory name (third-from-last path segment); may be null. */
    private String area;
    /** Slot directory name (second-from-last path segment); may be null. */
    private String slot;
    /** Simple file name. */
    private String fileName;
    /** Absolute path of the file. */
    private String fullFileName;
    /** Age of the file in milliseconds, captured at construction time. */
    private long age;

    public TransformFile() {
    }

    public TransformFile(String fileName) {
        this.file = new File(fileName);
        setAttributes();
    }

    public TransformFile(File file) {
        this.file = file;
        setAttributes();
    }

    /**
     * Derives fileName, fullFileName, slot/area and age from {@code file}.
     */
    private void setAttributes() {
        if (this.file != null) {
            this.fileName = this.file.getName();
            this.fullFileName = this.file.getAbsolutePath();
            final String[] dirs = splitPath(this.file.getAbsolutePath());
            // BUGFIX: the original guard was "dirs.length >= 2" but the body
            // reads dirs[dirs.length - 3], throwing
            // ArrayIndexOutOfBoundsException for paths with exactly two
            // segments. Require three segments before reading the area.
            if (dirs.length >= 3) {
                this.slot = dirs[dirs.length - 2];
                this.area = dirs[dirs.length - 3];
            } else if (dirs.length == 2) {
                this.slot = dirs[dirs.length - 2];
            }
            this.age = new Date().getTime() - this.file.lastModified();
        }
    }

    /** Splits an absolute path into its individual segment names. */
    private String[] splitPath(String pathString) {
        final Path path = Paths.get(pathString);
        return StreamSupport.stream(path.spliterator(), false).map(Path::toString).toArray(String[]::new);
    }

    public Path getPath() {
        return Paths.get(this.fullFileName);
    }

    /** Deletes the underlying file; failures are silently ignored. */
    public void delete() {
        new File(this.getFullFileName()).delete();
    }

    /**
     * Strips a trailing status suffix (plus its separator character) from this
     * file's name and returns the corresponding original file, or null when no
     * known status suffix matches.
     */
    public TransformFile getOriginalFile() {
        for (final String status : TransformFile.STATUSLIST) {
            if (this.fullFileName.endsWith(status)) {
                final String orgFileName = this.fullFileName.substring(0,
                        this.fullFileName.length() - status.length() - 1);
                return new TransformFile(orgFileName);
            }
        }
        return null;
    }

    /** Reads the whole file as a UTF-8 string. */
    public String getContent() throws IOException {
        return FileUtils.readFileToString(this.file, StandardCharsets.UTF_8);
    }

    public String getArea() {
        return area;
    }

    public void setArea(String area) {
        this.area = area;
    }

    public String getSlot() {
        return slot;
    }

    public void setSlot(String slot) {
        this.slot = slot;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getFullFileName() {
        return fullFileName;
    }

    public void setFullFileName(String fullFileName) {
        this.fullFileName = fullFileName;
    }

    public File getFile() {
        return file;
    }

    public void setFile(File file) {
        this.file = file;
    }

    /** @return file age in seconds. */
    public long getAgeSecond() {
        // BUGFIX: the original returned the raw millisecond value here and
        // the divided value from getAgeMilliSecond() — the two were swapped.
        return age / 1000;
    }

    /** @return file age in milliseconds. */
    public long getAgeMilliSecond() {
        return age;
    }
}
|
#include <limits.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
/* Defaults for output file naming and command-line options. */
#define DEFAULT_OUTPUT_FILE_PREFIX "ef_out"
#define DEFAULT_OUTPUT_FILE_NAME_SQL_SUFFIX "sql"
#define DEFAULT_OUTPUT_FILE_NAME_LOAD_SUFFIX "load"
#define DEFAULT_ALLELE ""
#define DEFAULT_LENGTH ""
#define DEFAULT_PROTEIN_ID ""
#define DEFAULT_PROTEIN_REFERENCE ""
#define DEFAULT_PROTEIN_FILE_NAME ""
/* "-" conventionally selects standard input. */
#define DEFAULT_INPUT_FILE_NAME "-"
/* C89-style boolean type. */
typedef enum boolean_t
{
  FALSE = 0,
  TRUE = 1
} boolean_t;
/* Fixed-size buffer for a single parsed token (64 characters + NUL). */
#define STRING_MAX_LENGTH (64+1)
typedef char string_t[STRING_MAX_LENGTH];
/* Supported prediction-tool output formats. */
typedef enum model_t
{
  MODEL_NETMHC30C,
  MODEL_NETMHC34A,
  MODEL_NETMHCII22,
  MODEL_NETMHCPAN20C,
  MODEL_NETMHCPAN24A,
  MODEL_NETMHCPAN28A,
  MODEL_NETMHCIIPAN10B,
  MODEL_NETMHCIIPAN20B,
  MODEL_NETMHCIIPAN30C,
  MODEL_NETMHCIIPAN31A,
  MODEL_UNKNOWN
} model_t;
/* Run-time options plus the open input/output streams. */
typedef struct profile_t
{
  boolean_t numeric;  /* when TRUE, non-binders are also emitted */
  FILE* input;
  FILE* output_sql;   /* optional SQL INSERT output; may be NULL */
  FILE* output_load;  /* tab-separated LOAD output */
  char* allele;
  char* length;
  char* protein_id;
  char* protein_reference;
  char* protein_file_name;
} profile_t;
/* Maximum accepted length of one input row. */
#define MAX_ROW_SIZE 1024
typedef char row_t[MAX_ROW_SIZE+1];
/* Prints a formatted error to stderr and aborts the program. */
#define ERROR(format, ...) \
do \
{ \
fprintf(stderr, "Error: " format "\n", ##__VA_ARGS__); \
exit(EXIT_FAILURE); \
} \
while(FALSE) \
/* Opens "<prefix>.<suffix>" for appending; aborts the program on failure.
   Shared helper: the two public open_* functions below were previously
   copy-pasted duplicates of each other. */
static FILE* open_output_file(char* output_file_prefix, const char* suffix)
{
  char name[PATH_MAX];
  FILE* file;

  snprintf(name, PATH_MAX, "%s.%s", output_file_prefix, suffix);
  file = fopen(name, "a");
  if(file == NULL)
    ERROR("Cannot create output file \'%s\'", name);

  return file;
}

/* Opens the SQL INSERT output file for appending. */
FILE* open_sql_file(char* output_file_prefix)
{
  return open_output_file(output_file_prefix, DEFAULT_OUTPUT_FILE_NAME_SQL_SUFFIX);
}

/* Opens the tab-separated LOAD output file for appending. */
FILE* open_load_file(char* output_file_prefix)
{
  return open_output_file(output_file_prefix, DEFAULT_OUTPUT_FILE_NAME_LOAD_SUFFIX);
}
/*
 * Writes one epitope record to the SQL stream (when enabled) and to the
 * tab-separated LOAD stream. Negative pos/rank values and a NULL binding
 * are sentinels meaning "not available" and are emitted as NULL.
 * NOTE(review): string values are interpolated into the INSERT statement
 * without any escaping — acceptable for trusted prediction-tool output,
 * unsafe if the input were ever untrusted.
 */
void dump
(
  profile_t* profile,
  char* name,
  int position,
  char* epitope,
  int pos,
  char* core,
  float aff_1,
  float aff_2,
  float rank,
  char* binding
)
{
  string_t pos_string;
  string_t rank_string;
  string_t binding_string_sql;
  char* table;
  string_t binding_string_load;
  /* Negative sentinels become SQL NULL. */
  if(pos < 0)
    snprintf(pos_string, STRING_MAX_LENGTH, "NULL");
  else
    snprintf(pos_string, STRING_MAX_LENGTH, "%d", pos);
  if(rank < 0.0)
    snprintf(rank_string, STRING_MAX_LENGTH, "NULL");
  else
    snprintf(rank_string, STRING_MAX_LENGTH, "%0.2f", rank);
  if(profile->output_sql != NULL)
  {
    if(binding == NULL)
      snprintf(binding_string_sql, STRING_MAX_LENGTH, "NULL");
    else
      snprintf(binding_string_sql, STRING_MAX_LENGTH, "\'%s\'", binding);
    /* Numeric mode writes into a dedicated table. */
    if(profile->numeric == TRUE)
      table = "epitope_numeric";
    else
      table = "epitope";
    fprintf
    (
      profile->output_sql,
      "INSERT INTO %s(protein_id, protein_reference, protein_file_name, position, epitope, pos, core, aff_log, aff, rank, binding, type, allele, length) VALUES(\'%s\', \'%s\', \'%s\', \'%d\', \'%s\', %s, \'%s\', \'%0.3f\', \'%0.2f\', %s, %s, \'%s\', \'%s\', \'%s\');\n",
      table,
      profile->protein_id,
      profile->protein_reference,
      profile->protein_file_name,
      position,
      epitope,
      pos_string,
      core,
      aff_1,
      aff_2,
      rank_string,
      binding_string_sql,
      name,
      profile->allele,
      profile->length
    );
    /* Flush after every record so partial output survives a crash. */
    fflush(profile->output_sql);
  }
  if(binding == NULL)
    snprintf(binding_string_load, STRING_MAX_LENGTH, "NULL");
  else
    snprintf(binding_string_load, STRING_MAX_LENGTH, "%s", binding);
  fprintf
  (
    profile->output_load,
    "%s\t%s\t%s\t%d\t%s\t%s\t%s\t%0.3f\t%0.2f\t%s\t%s\t%s\t%s\t%s\n",
    profile->protein_id,
    profile->protein_reference,
    profile->protein_file_name,
    position,
    epitope,
    pos_string,
    core,
    aff_1,
    aff_2,
    rank_string,
    binding_string_load,
    name,
    profile->allele,
    profile->length
  );
  fflush(profile->output_load);
}
/*
 * Parses netMHC-3.0c output rows (two row layouts: with and without the
 * second affinity column) and dumps strong (SB) / weak (WB) binders — plus
 * non-binders when numeric mode is on. Fixes vs. the original: the
 * zero-length-format snprintf(bind, ..., "") is replaced by a plain NUL
 * write, and the three near-identical dump() calls per branch are collapsed
 * into one.
 */
void work_netmhc30c(profile_t* profile)
{
  char* name;
  row_t row;
  int row_number;
  string_t epitope;
  float aff_1;
  float aff_2;
  string_t bind_place;
  string_t bind;
  string_t identify;
  string_t hla;
  char* binding;

  name = "netmhc-3.0c";

  while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
    if
    (
      sscanf
      (
        row,
        " %d %64[^ \t] %f %f%11[ a-zA-Z] %64[^ \t] %64s",
        &row_number,
        epitope,
        &aff_1,
        &aff_2,
        bind_place,
        identify,
        hla
      ) == 7
    )
    {
      /* bind_place may contain only whitespace; normalize to "". */
      if(sscanf(bind_place, "%s", bind) != 1)
        bind[0] = '\0';

      if(strcmp(bind, "SB") == 0)
        binding = "SB";
      else if(strcmp(bind, "WB") == 0)
        binding = "WB";
      else
        binding = NULL;

      /* Non-binders (binding == NULL) are only emitted in numeric mode. */
      if(binding != NULL || profile->numeric == TRUE)
        dump(profile, name, row_number+1, epitope, -1, "", aff_1, aff_2, -1.0, binding);
    }
    else if
    (
      sscanf
      (
        row,
        " %d %64[^ \t] %f%11[ a-zA-Z] %64[^ \t] %64s",
        &row_number,
        epitope,
        &aff_1,
        bind_place,
        identify,
        hla
      ) == 6
    )
    {
      if(sscanf(bind_place, "%s", bind) != 1)
        bind[0] = '\0';

      if(strcmp(bind, "SB") == 0)
        binding = "SB";
      else if(strcmp(bind, "WB") == 0)
        binding = "WB";
      else
        binding = NULL;

      /* Single-affinity layout: the second affinity is unavailable (-1.0). */
      if(binding != NULL || profile->numeric == TRUE)
        dump(profile, name, row_number+1, epitope, -1, "", aff_1, -1.0, -1.0, binding);
    }
}
/*
 * Parses netMHC-3.4a output rows and dumps strong (SB) / weak (WB) binders —
 * plus non-binders when numeric mode is on. Fixes vs. the original: the
 * zero-length-format snprintf(bind, ..., "") is replaced by a plain NUL
 * write, and the three near-identical dump() calls are collapsed into one.
 */
void work_netmhc34a(profile_t* profile)
{
  char* name;
  row_t row;
  int row_number;
  string_t epitope;
  float aff_1;
  float aff_2;
  string_t bind_place;
  string_t bind;
  string_t identify;
  string_t hla;
  char* binding;

  name = "netmhc-3.4a";

  while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
    if
    (
      sscanf
      (
        row,
        " %d %64[^ \t] %f %f%11[ a-zA-Z] %64[^ \t] %64s",
        &row_number,
        epitope,
        &aff_1,
        &aff_2,
        bind_place,
        identify,
        hla
      ) == 7
    )
    {
      /* bind_place may contain only whitespace; normalize to "". */
      if(sscanf(bind_place, "%s", bind) != 1)
        bind[0] = '\0';

      if(strcmp(bind, "SB") == 0)
        binding = "SB";
      else if(strcmp(bind, "WB") == 0)
        binding = "WB";
      else
        binding = NULL;

      /* Non-binders (binding == NULL) are only emitted in numeric mode. */
      if(binding != NULL || profile->numeric == TRUE)
        dump(profile, name, row_number+1, epitope, -1, "", aff_1, aff_2, -1.0, binding);
    }
}
/*
 * Parses netMHCII-2.2 output rows (which include a binding core and a rank
 * column) and dumps strong (SB) / weak (WB) binders — plus non-binders when
 * numeric mode is on. Fixes vs. the original: the zero-length-format
 * snprintf(bind, ..., "") is replaced by a plain NUL write, and the three
 * near-identical dump() calls are collapsed into one.
 */
void work_netmhcii22(profile_t* profile)
{
  char* name;
  row_t row;
  string_t hla;
  int row_number;
  string_t epitope;
  string_t core;
  float aff_1;
  float aff_2;
  string_t bind_place;
  string_t bind;
  float rank;
  string_t identify;
  char* binding;

  name = "netmhcii-2.2";

  while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
    if
    (
      sscanf
      (
        row,
        " %64[^ \t] %d %64[^ \t] %64[^ \t] %f %f%11[ a-zA-Z] %f %64s",
        hla,
        &row_number,
        epitope,
        core,
        &aff_1,
        &aff_2,
        bind_place,
        &rank,
        identify
      ) == 9
    )
    {
      /* bind_place may contain only whitespace; normalize to "". */
      if(sscanf(bind_place, "%s", bind) != 1)
        bind[0] = '\0';

      if(strcmp(bind, "SB") == 0)
        binding = "SB";
      else if(strcmp(bind, "WB") == 0)
        binding = "WB";
      else
        binding = NULL;

      /* Non-binders (binding == NULL) are only emitted in numeric mode. */
      if(binding != NULL || profile->numeric == TRUE)
        dump(profile, name, row_number+1, epitope, -1, core, aff_1, aff_2, rank, binding);
    }
}
/*
 * Parses netMHCpan-2.0c output rows (binder label appears after a "<="
 * marker) and dumps strong (SB) / weak (WB) binders — plus non-binders when
 * numeric mode is on. Fixes vs. the original: the zero-length-format
 * snprintf(bind, ..., "") is replaced by a plain NUL write, and the three
 * near-identical dump() calls are collapsed into one.
 */
void work_netmhcpan20c(profile_t* profile)
{
  char* name;
  row_t row;
  int row_number;
  string_t hla;
  string_t epitope;
  string_t identify;
  float aff_1;
  float aff_2;
  string_t bind_place;
  string_t bind;
  char* binding;

  name = "netmhcpan-2.0c";

  while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
    if
    (
      sscanf
      (
        row,
        " %d %64[^ \t] %64[^ \t] %64[^ \t] %f %f%64[ <=a-zA-Z\n]",
        &row_number,
        hla,
        epitope,
        identify,
        &aff_1,
        &aff_2,
        bind_place
      ) == 7
    )
    {
      /* The label follows a "<=" marker; absent marker means no label. */
      if(sscanf(bind_place, " <= %s", bind) != 1)
        bind[0] = '\0';

      if(strcmp(bind, "SB") == 0)
        binding = "SB";
      else if(strcmp(bind, "WB") == 0)
        binding = "WB";
      else
        binding = NULL;

      /* Non-binders (binding == NULL) are only emitted in numeric mode. */
      if(binding != NULL || profile->numeric == TRUE)
        dump(profile, name, row_number+1, epitope, -1, "", aff_1, aff_2, -1.0, binding);
    }
}
/*
 * Parses netMHCpan-2.4a output rows (adds a rank column; binder label
 * follows a "<=" marker) and dumps strong (SB) / weak (WB) binders — plus
 * non-binders when numeric mode is on. Fixes vs. the original: the
 * zero-length-format snprintf(bind, ..., "") is replaced by a plain NUL
 * write, and the three near-identical dump() calls are collapsed into one.
 */
void work_netmhcpan24a(profile_t* profile)
{
  char* name;
  row_t row;
  int row_number;
  string_t hla;
  string_t epitope;
  string_t identify;
  float aff_1;
  float aff_2;
  float rank;
  string_t bind_place;
  string_t bind;
  char* binding;

  name = "netmhcpan-2.4a";

  while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
    if
    (
      sscanf
      (
        row,
        " %d %64[^ \t] %64[^ \t] %64[^ \t] %f %f %f%64[ <=a-zA-Z\n]",
        &row_number,
        hla,
        epitope,
        identify,
        &aff_1,
        &aff_2,
        &rank,
        bind_place
      ) == 8
    )
    {
      /* The label follows a "<=" marker; absent marker means no label. */
      if(sscanf(bind_place, " <= %s", bind) != 1)
        bind[0] = '\0';

      if(strcmp(bind, "SB") == 0)
        binding = "SB";
      else if(strcmp(bind, "WB") == 0)
        binding = "WB";
      else
        binding = NULL;

      /* Non-binders (binding == NULL) are only emitted in numeric mode. */
      if(binding != NULL || profile->numeric == TRUE)
        dump(profile, name, row_number+1, epitope, -1, "", aff_1, aff_2, rank, binding);
    }
}
/*
 * Parse netMHCpan-2.8a output rows from profile->input and emit one record
 * per strong (SB) or weak (WB) binder via dump(); when profile->numeric is
 * set, non-binder rows are emitted too (bind tag = NULL).
 * Same row layout as netMHCpan-2.4a; only the model name differs.
 */
void work_netmhcpan28a(profile_t* profile)
{
    char* name;
    row_t row;
    int row_number;
    string_t hla;
    string_t epitope;
    string_t identify;
    float aff_1;
    float aff_2;
    float rank;
    string_t bind_place;
    string_t bind;
    name = "netmhcpan-2.8a";
    while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
        if(sscanf(row,
                  " %d %64[^ \t] %64[^ \t] %64[^ \t] %f %f %f%64[ <=a-zA-Z\n]",
                  &row_number, hla, epitope, identify,
                  &aff_1, &aff_2, &rank, bind_place) == 8)
        {
            /* Extract the optional "<= SB"/"<= WB" marker. */
            if(sscanf(bind_place, " <= %s", bind) != 1)
                bind[0] = '\0'; /* empty string without a zero-length format */
            if(strcmp(bind, "SB") == 0)
                dump(profile, name, row_number+1, epitope, -1, "",
                     aff_1, aff_2, rank, "SB");
            else if(strcmp(bind, "WB") == 0)
                dump(profile, name, row_number+1, epitope, -1, "",
                     aff_1, aff_2, rank, "WB");
            else if(profile->numeric == TRUE)
                dump(profile, name, row_number+1, epitope, -1, "",
                     aff_1, aff_2, rank, NULL);
        }
}
/*
 * Parse netMHCIIpan-1.0b output rows from profile->input and emit one
 * record per strong (SB) or weak (WB) binder via dump(); when
 * profile->numeric is set, non-binder rows are emitted too (tag NULL).
 * Class II rows carry a binding-core position and sequence; no rank is
 * reported, so -1.0 is passed as a placeholder.
 */
void work_netmhciipan10b(profile_t* profile)
{
    char* name;
    row_t row;
    int row_number;
    string_t hla;
    string_t epitope;
    string_t identify;
    int pos;
    string_t core;
    float aff_1;
    float aff_2;
    string_t bind_place;
    string_t bind;
    name = "netmhciipan-1.0b";
    while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
        if(sscanf(row,
                  " %d %64[^ \t] %64[^ \t] %64[^ \t] %d %64[^ \t] %f %f%64[ <=a-zA-Z\n]",
                  &row_number, hla, epitope, identify, &pos, core,
                  &aff_1, &aff_2, bind_place) == 9)
        {
            /* Extract the optional "<= SB"/"<= WB" marker. */
            if(sscanf(bind_place, " <= %s", bind) != 1)
                bind[0] = '\0'; /* empty string without a zero-length format */
            if(strcmp(bind, "SB") == 0)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, -1.0, "SB");
            else if(strcmp(bind, "WB") == 0)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, -1.0, "WB");
            else if(profile->numeric == TRUE)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, -1.0, NULL);
        }
}
/*
 * Parse netMHCIIpan-2.0b output rows from profile->input and emit one
 * record per strong (SB) or weak (WB) binder via dump(); when
 * profile->numeric is set, non-binder rows are emitted too (tag NULL).
 * Adds a rank column to the 1.0b layout, which is forwarded.
 */
void work_netmhciipan20b(profile_t* profile)
{
    char* name;
    row_t row;
    int row_number;
    string_t hla;
    string_t epitope;
    string_t identify;
    int pos;
    string_t core;
    float aff_1;
    float aff_2;
    float rank;
    string_t bind_place;
    string_t bind;
    name = "netmhciipan-2.0b";
    while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
        if(sscanf(row,
                  " %d %64[^ \t] %64[^ \t] %64[^ \t] %d %64[^ \t] %f %f %f%64[ <=a-zA-Z\n]",
                  &row_number, hla, epitope, identify, &pos, core,
                  &aff_1, &aff_2, &rank, bind_place) == 10)
        {
            /* Extract the optional "<= SB"/"<= WB" marker. */
            if(sscanf(bind_place, " <= %s", bind) != 1)
                bind[0] = '\0'; /* empty string without a zero-length format */
            if(strcmp(bind, "SB") == 0)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, "SB");
            else if(strcmp(bind, "WB") == 0)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, "WB");
            else if(profile->numeric == TRUE)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, NULL);
        }
}
/*
 * Parse netMHCIIpan-3.0c output rows from profile->input and emit one
 * record per strong (SB) or weak (WB) binder via dump(); when
 * profile->numeric is set, non-binder rows are emitted too (tag NULL).
 * Same row layout as netMHCIIpan-2.0b; only the model name differs.
 */
void work_netmhciipan30c(profile_t* profile)
{
    char* name;
    row_t row;
    int row_number;
    string_t hla;
    string_t epitope;
    string_t identify;
    int pos;
    string_t core;
    float aff_1;
    float aff_2;
    float rank;
    string_t bind_place;
    string_t bind;
    name = "netmhciipan-3.0c";
    while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
        if(sscanf(row,
                  " %d %64[^ \t] %64[^ \t] %64[^ \t] %d %64[^ \t] %f %f %f%64[ <=a-zA-Z\n]",
                  &row_number, hla, epitope, identify, &pos, core,
                  &aff_1, &aff_2, &rank, bind_place) == 10)
        {
            /* Extract the optional "<= SB"/"<= WB" marker. */
            if(sscanf(bind_place, " <= %s", bind) != 1)
                bind[0] = '\0'; /* empty string without a zero-length format */
            if(strcmp(bind, "SB") == 0)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, "SB");
            else if(strcmp(bind, "WB") == 0)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, "WB");
            else if(profile->numeric == TRUE)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, NULL);
        }
}
/*
 * Parse netMHCIIpan-3.1a output rows from profile->input and emit one
 * record per strong (SB) or weak (WB) binder via dump(); when
 * profile->numeric is set, non-binder rows are emitted too (tag NULL).
 * The 3.1a layout adds core_rel and exp_bind columns; they are parsed to
 * validate the row but are not forwarded to dump().
 */
void work_netmhciipan31a(profile_t* profile)
{
    char* name;
    row_t row;
    int row_number;
    string_t hla;
    string_t epitope;
    string_t identify;
    int pos;
    string_t core;
    float core_rel;
    float aff_1;
    float aff_2;
    float rank;
    float exp_bind;
    string_t bind_place;
    string_t bind;
    name = "netmhciipan-3.1a";
    while(fgets(row, MAX_ROW_SIZE, profile->input) != NULL)
        if(sscanf(row,
                  " %d %64[^ \t] %64[^ \t] %64[^ \t] %d %64[^ \t] %f %f %f %f %f%64[ <=a-zA-Z\n]",
                  &row_number, hla, epitope, identify, &pos, core,
                  &core_rel, &aff_1, &aff_2, &rank, &exp_bind,
                  bind_place) == 12)
        {
            /* Extract the optional "<= SB"/"<= WB" marker. */
            if(sscanf(bind_place, " <= %s", bind) != 1)
                bind[0] = '\0'; /* empty string without a zero-length format */
            if(strcmp(bind, "SB") == 0)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, "SB");
            else if(strcmp(bind, "WB") == 0)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, "WB");
            else if(profile->numeric == TRUE)
                dump(profile, name, row_number+1, epitope, pos, core,
                     aff_1, aff_2, rank, NULL);
        }
}
/*
 * Print command-line usage to stderr and exit with failure status.
 * Fixes vs. previous text: synopsis now lists -n/-a/-l, the model list
 * includes '1-3.4a' (accepted by main()), and "Lenth" is spelled "Length".
 */
void usage(char* name)
{
    fprintf
    (
        stderr,
        "Usage: %s [-h] -m model [-s] [-n] [-o output_file_prefix] [-a allele] [-l length] [-p protein_id] [-r protein_reference] [-f protein_file_name] [input_file]\n"
        " -h\t\tHelp\n"
        " -m\t\tModel:\n"
        "\t\'1-3.0c\' for netMHC-3.0c\n"
        "\t\'1-3.4a\' for netMHC-3.4a\n"
        "\t\'2-2.2\' for netMHCII-2.2\n"
        "\t\'pan_1-2.0c\' for netMHCpan-2.0c\n"
        "\t\'pan_1-2.4a\' for netMHCpan-2.4a\n"
        "\t\'pan_1-2.8a\' for netMHCpan-2.8a\n"
        "\t\'pan_2-1.0b\' for netMHCIIpan-1.0b\n"
        "\t\'pan_2-2.0b\' for netMHCIIpan-2.0b\n"
        "\t\'pan_2-3.0c\' for netMHCIIpan-3.0c\n"
        "\t\'pan_2-3.1a\' for netMHCIIpan-3.1a\n"
        " -s\t\tGenerate SQL\n"
        " -n\t\tGenerate numeric format\n"
        " -o\t\tOutput file prefix (default \'%s\')\n"
        " -a\t\tAllele (default \'%s\')\n"
        " -l\t\tLength (default \'%s\')\n"
        " -p\t\tProtein ID (default \'%s\')\n"
        " -r\t\tProtein reference (default \'%s\')\n"
        " -f\t\tProtein file name (default \'%s\')\n"
        " input_file\tInput file (default \'%s\' aka stdin)\n",
        name,
        DEFAULT_OUTPUT_FILE_PREFIX,
        DEFAULT_ALLELE,
        DEFAULT_LENGTH,
        DEFAULT_PROTEIN_ID,
        DEFAULT_PROTEIN_REFERENCE,
        DEFAULT_PROTEIN_FILE_NAME,
        DEFAULT_INPUT_FILE_NAME
    );
    exit(EXIT_FAILURE);
}
/*
 * Entry point: parse command-line options, open the input stream and the
 * SQL/load output files, dispatch to the parser for the selected model,
 * then close every stream we opened.
 */
int main(int arguments_number, char* arguments_values[])
{
    int option;
    model_t model;
    boolean_t generate_sql;
    char* output_file_prefix;
    char* input_file_name;
    profile_t profile;
    model = MODEL_UNKNOWN;
    generate_sql = FALSE;
    profile.numeric = FALSE;
    output_file_prefix = DEFAULT_OUTPUT_FILE_PREFIX;
    profile.allele = DEFAULT_ALLELE;
    profile.length = DEFAULT_LENGTH;
    profile.protein_id = DEFAULT_PROTEIN_ID;
    profile.protein_reference = DEFAULT_PROTEIN_REFERENCE;
    profile.protein_file_name = DEFAULT_PROTEIN_FILE_NAME;
    input_file_name = DEFAULT_INPUT_FILE_NAME;
    opterr = 0; /* suppress getopt's own diagnostics; we print usage() instead */
    while(TRUE)
    {
        option = getopt(arguments_number, arguments_values, "hm:sno:a:l:p:r:f:");
        if(option == -1)
            break;
        switch(option)
        {
        case 'h':
            usage(arguments_values[0]);
            break;
        case 'm':
            /* Map the model spelling to its enum; an unrecognized spelling
               leaves model as MODEL_UNKNOWN, rejected below. */
            if(strcasecmp(optarg, "1-3.0c") == 0)
                model = MODEL_NETMHC30C;
            else if(strcasecmp(optarg, "1-3.4a") == 0)
                model = MODEL_NETMHC34A;
            else if(strcasecmp(optarg, "2-2.2") == 0)
                model = MODEL_NETMHCII22;
            else if(strcasecmp(optarg, "pan_1-2.0c") == 0)
                model = MODEL_NETMHCPAN20C;
            else if(strcasecmp(optarg, "pan_1-2.4a") == 0)
                model = MODEL_NETMHCPAN24A;
            else if(strcasecmp(optarg, "pan_1-2.8a") == 0)
                model = MODEL_NETMHCPAN28A;
            else if(strcasecmp(optarg, "pan_2-1.0b") == 0)
                model = MODEL_NETMHCIIPAN10B;
            else if(strcasecmp(optarg, "pan_2-2.0b") == 0)
                model = MODEL_NETMHCIIPAN20B;
            else if(strcasecmp(optarg, "pan_2-3.0c") == 0)
                model = MODEL_NETMHCIIPAN30C;
            else if(strcasecmp(optarg, "pan_2-3.1a") == 0)
                model = MODEL_NETMHCIIPAN31A;
            break;
        case 's':
            generate_sql = TRUE;
            break;
        case 'n':
            profile.numeric = TRUE;
            break;
        case 'o':
            output_file_prefix = optarg;
            break;
        case 'a':
            profile.allele = optarg;
            break;
        case 'l':
            profile.length = optarg;
            break;
        case 'p':
            profile.protein_id = optarg;
            break;
        case 'r':
            profile.protein_reference = optarg;
            break;
        case 'f':
            profile.protein_file_name = optarg;
            break;
        default:
            usage(arguments_values[0]);
        }
    }
    /* Accept at most one positional argument: the input file name. */
    if(optind == arguments_number-1)
        input_file_name = arguments_values[optind];
    else if(optind != arguments_number)
        usage(arguments_values[0]);
    if(model == MODEL_UNKNOWN)
        usage(arguments_values[0]);
    /* "-" selects stdin, matching common Unix convention. */
    if(strcmp(input_file_name, "-") == 0)
        profile.input = stdin;
    else
    {
        profile.input = fopen(input_file_name, "r");
        if(profile.input == NULL)
            ERROR("Cannot open input file \'%s\'", input_file_name);
    }
    if(generate_sql == TRUE)
        profile.output_sql = open_sql_file(output_file_prefix);
    else
        profile.output_sql = NULL;
    profile.output_load = open_load_file(output_file_prefix);
    switch(model)
    {
    case MODEL_NETMHC30C:
        work_netmhc30c(&profile);
        break;
    case MODEL_NETMHC34A:
        work_netmhc34a(&profile);
        break;
    case MODEL_NETMHCII22:
        work_netmhcii22(&profile);
        break;
    case MODEL_NETMHCPAN20C:
        work_netmhcpan20c(&profile);
        break;
    case MODEL_NETMHCPAN24A:
        work_netmhcpan24a(&profile);
        break;
    case MODEL_NETMHCPAN28A:
        work_netmhcpan28a(&profile);
        break;
    case MODEL_NETMHCIIPAN10B:
        work_netmhciipan10b(&profile);
        break;
    case MODEL_NETMHCIIPAN20B:
        work_netmhciipan20b(&profile);
        break;
    case MODEL_NETMHCIIPAN30C:
        work_netmhciipan30c(&profile);
        break;
    case MODEL_NETMHCIIPAN31A:
        work_netmhciipan31a(&profile);
        break;
    }
    /* Only close streams we opened ourselves; leave stdin alone. */
    if(profile.input != stdin)
        fclose(profile.input);
    if(profile.output_sql != NULL)
        fclose(profile.output_sql);
    fclose(profile.output_load);
    return EXIT_SUCCESS;
}
|
<reponame>xeon225/CyanMD
// Side-effect import: registers the shared page-head component.
import '../../src/pageComponent/head/index.js'
// Root Vue instance mounted on #main; data and methods are currently
// empty placeholders to be filled in by this page.
new Vue({
el:'#main',
data:{
},
methods:{
}
})
|
#!/bin/bash
#
# Get an estimate of how CPU-hoggy to be.
#
# Usage: cpus2use.sh
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, you can access it online at
# http://www.gnu.org/licenses/gpl-2.0.html.
#
# Copyright (C) IBM Corporation, 2013
#
# Authors: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
# Total number of CPUs on this system.
ncpus=`grep '^processor' /proc/cpuinfo | wc -l`
# Estimate the number of idle CPUs by scaling the combined percentage of
# the last two mpstat columns by the CPU count.
# NOTE(review): $7 and $NF are presumably %iowait and %idle — the column
# layout depends on the mpstat version; confirm on the target systems.
idlecpus=`mpstat | tail -1 | \
awk -v ncpus=$ncpus '{ print ncpus * ($7 + $NF) / 100 }'`
awk -v ncpus=$ncpus -v idlecpus=$idlecpus < /dev/null '
BEGIN {
	# Use the idle-CPU estimate, but never report fewer than one CPU
	# and never fewer than a tenth of the machine.
	cpus2use = idlecpus;
	if (cpus2use < 1)
		cpus2use = 1;
	if (cpus2use < ncpus / 10)
		cpus2use = ncpus / 10;
	# Round any fractional estimate up to a whole CPU.
	if (cpus2use == int(cpus2use))
		cpus2use = int(cpus2use)
	else
		cpus2use = int(cpus2use) + 1
	print cpus2use;
}'
|
#!/bin/bash
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# NVIDIA Identify version
# reference:
# https://devtalk.nvidia.com/default/topic/1014424/jetson-tx2/identifying-tx1-and-tx2-at-runtime/
# https://devtalk.nvidia.com/default/topic/996988/jetson-tk1/chip-uid/post/5100481/#5100481
# Detect the Jetson board model from the tegra chip-id fuse.
if [ -f /sys/module/tegra_fuse/parameters/tegra_chip_id ]; then
	case $(cat /sys/module/tegra_fuse/parameters/tegra_chip_id) in
		64)
			JETSON_BOARD="TK1" ;;
		33)
			JETSON_BOARD="TX1" ;;
		24)
			JETSON_BOARD="TX2" ;;
		25)
			JETSON_BOARD="AGX Xavier" ;;
		*)
			JETSON_BOARD="UNKNOWN" ;;
	esac
	JETSON_DESCRIPTION="NVIDIA Jetson $JETSON_BOARD"
fi
export JETSON_BOARD
# NVIDIA Jetson version
# reference https://devtalk.nvidia.com/default/topic/860092/jetson-tk1/how-do-i-know-what-version-of-l4t-my-jetson-tk1-is-running-/
if [ -f /etc/nv_tegra_release ]; then
	# L4T string
	JETSON_L4T_STRING=$(head -n 1 /etc/nv_tegra_release)
	# Load release and revision
	JETSON_L4T_RELEASE=$(echo $JETSON_L4T_STRING | cut -f 1 -d ',' | sed 's/\# R//g' | cut -d ' ' -f1)
	JETSON_L4T_REVISION=$(echo $JETSON_L4T_STRING | cut -f 2 -d ',' | sed 's/\ REVISION: //g' )
	# unset variable
	unset JETSON_L4T_STRING
	# Write Jetson description
	JETSON_L4T="$JETSON_L4T_RELEASE.$JETSON_L4T_REVISION"
	# Map the L4T release to the JetPack version that shipped it.
	# https://developer.nvidia.com/embedded/jetpack-archive
	if [ "$JETSON_BOARD" = "AGX Xavier" ] ; then
		case $JETSON_L4T in
			"31.0.1")
				JETSON_JETPACK="4.0 DP" ;;
			"31.0.2")
				JETSON_JETPACK="4.1 DP" ;;
			"31.1.0")
				JETSON_JETPACK="4.1.1 DP" ;;
			*)
				JETSON_JETPACK="UNKNOWN" ;;
		esac
	# NOTE(review): the chip-id mapping above never assigns "TX2i", so this
	# branch looks unreachable — confirm whether TX2i detection is missing.
	elif [ "$JETSON_BOARD" = "TX2i" ] ; then
		case $JETSON_L4T in
			"28.2.1")
				JETSON_JETPACK="3.3 or 3.2.1" ;;
			"28.2")
				JETSON_JETPACK="3.2" ;;
			*)
				JETSON_JETPACK="UNKNOWN" ;;
		esac
	elif [ "$JETSON_BOARD" = "TX2" ] ; then
		case $JETSON_L4T in
			"32.1.0")
				JETSON_JETPACK="4.2" ;;
			"28.2.1")
				JETSON_JETPACK="3.3 or 3.2.1" ;;
			"28.2")
				JETSON_JETPACK="3.2" ;;
			"28.1")
				JETSON_JETPACK="3.1" ;;
			"27.1")
				JETSON_JETPACK="3.0" ;;
			*)
				JETSON_JETPACK="UNKNOWN" ;;
		esac
	elif [ "$JETSON_BOARD" = "TX1" ] ; then
		case $JETSON_L4T in
			"28.2.0")
				JETSON_JETPACK="3.3" ;;
			"28.2")
				JETSON_JETPACK="3.2 or 3.2.1" ;;
			"28.1")
				JETSON_JETPACK="3.1" ;;
			"24.2.1")
				JETSON_JETPACK="3.0 or 2.3.1" ;;
			"24.2")
				JETSON_JETPACK="2.3" ;;
			"24.1")
				JETSON_JETPACK="2.2.1 or 2.2" ;;
			"23.2")
				JETSON_JETPACK="2.1" ;;
			"23.1")
				JETSON_JETPACK="2.0" ;;
			*)
				JETSON_JETPACK="UNKNOWN" ;;
		esac
	# FIX: the comparison was written '="TK1"' (no space after '='), which
	# the test builtin treats as a single non-empty string — the branch was
	# always taken when reached. A binary '=' needs surrounding spaces.
	elif [ "$JETSON_BOARD" = "TK1" ] ; then
		case $JETSON_L4T in
			"21.5")
				JETSON_JETPACK="2.3.1 or 2.3" ;;
			"21.4")
				JETSON_JETPACK="2.2 or 2.1 or 2.0 or DP 1.2" ;;
			"21.3")
				JETSON_JETPACK="DP 1.1" ;;
			"21.2")
				JETSON_JETPACK="DP 1.0" ;;
			*)
				JETSON_JETPACK="UNKNOWN" ;;
		esac
	else
		# Unknown board
		JETSON_JETPACK="UNKNOWN"
	fi
fi
# Read CUDA version
if [ -f /usr/local/cuda/version.txt ]; then
	JETSON_CUDA=$(cat /usr/local/cuda/version.txt | sed 's/\CUDA Version //g')
else
	JETSON_CUDA="NOT INSTALLED"
fi
# Read opencv version
pkg-config --exists opencv
if [ $? == "0" ] ; then
	JETSON_OPENCV=$(pkg-config --modversion opencv)
else
	JETSON_OPENCV="NOT INSTALLED"
fi
export JETSON_BOARD
export JETSON_CUDA
export JETSON_JETPACK
export JETSON_L4T
# TODO Add environment variables:
# - UID -> https://devtalk.nvidia.com/default/topic/996988/jetson-tk1/chip-uid/post/5100481/#5100481
# - GCID, BOARD, EABI
# - cuDNN
# - TensorRT
# - Visionworks
|
#!/usr/bin/env python
# encoding: utf-8
"""
copyright (c) 2016 Earth Advantage.
All rights reserved
"""
# Local Imports
from scourgify.normalize import (
get_geocoder_normalized_addr,
normalize_address_record,
)
|
// NgRx
import { Action } from '@ngrx/store';
// Auth
import { User } from '../models/user.model';
/** String constants identifying each user/auth action type. */
export enum UserActionTypes {
  GetUser = '[User] Get User',
  Authenticated = '[User] Authenticated',
  NotAuthenticated = '[User] Not Authenticated',
  AuthenticationError = '[User] Authentication Error',
  IsLoggedIn = '[User] Is Logged In'
}
/** Action of type GetUser; carries an optional payload. */
export class GetUser implements Action {
  readonly type = UserActionTypes.GetUser;
  constructor(public payload?: any) {}
}
/** Action of type Authenticated; payload carries the authenticated User. */
export class Authenticated implements Action {
  readonly type = UserActionTypes.Authenticated;
  constructor(public payload: { user: User }) {}
}
/** Action of type NotAuthenticated; carries an optional payload. */
export class NotAuthenticated implements Action {
  readonly type = UserActionTypes.NotAuthenticated;
  constructor(public payload?: any) {}
}
/** Action of type AuthenticationError; carries an optional payload (e.g. error details). */
export class AuthenticationError implements Action {
  readonly type = UserActionTypes.AuthenticationError;
  constructor(public payload?: any) {}
}
/** Action of type IsLoggedIn; carries an optional payload. */
export class IsLoggedIn implements Action {
  readonly type = UserActionTypes.IsLoggedIn;
  constructor(public payload?: any) {}
}
/** Discriminated union of all user action classes. */
export type UserActionsUnion =
  | GetUser
  | Authenticated
  | NotAuthenticated
  | AuthenticationError
  | IsLoggedIn;
|
const { existsSync, lstatSync, readdirSync, readFileSync } = require("fs");
const { dirname, join } = require("path");
// True when any segment of the path is hidden (begins with a dot).
const isHidden = path => {
  return path.charAt(0) === "." || path.indexOf("/.") !== -1;
};
// Default stop predicate: halt traversal at hidden directories, any
// node_modules path, or (when walking up) once the directory we came
// from contains a .git folder (i.e. the repository root).
const DEFAULT_STOP = ({ dirpath, from, direction }) => {
  if (isHidden(dirpath)) return true;
  if (dirpath.includes("node_modules")) return true;
  return direction === "up" && existsSync(join(from, ".git"));
};
/**
 * Search the filesystem for `filename`, fanning out both up and down from
 * a starting directory (inferred from the caller's stack frame when
 * `start` is not supplied), and return the file contents via readFileSync.
 * Throws if two matches are found in the same step; returns undefined
 * (optionally warning) when nothing is found within `maxSteps` steps.
 */
const findAndRead = (
  filename,
  {
    start,
    debugLevel = 0,
    encoding = null,
    flag = "r",
    maxSteps = 10,
    stop = DEFAULT_STOP,
    warn = true
  } = {
    start: undefined,
    debugLevel: 0,
    encoding: null,
    stop: DEFAULT_STOP,
    flag: "r",
    maxSteps: 10,
    warn: true
  }
) => {
  if (!start) {
    // Infer the caller's directory from the Error stack trace.
    const stackLines = Error().stack.split(/ *\n\r? */g);
    if (debugLevel >= 1) console.log("stackLines:", stackLines);
    const ln = stackLines[2];
    if (debugLevel >= 1) console.log("ln:", ln);
    const callerPath = (ln.includes("(")
      ? ln.substring(ln.indexOf("(") + 1, ln.lastIndexOf(")"))
      : ln.replace("at ", "")
    ).split(":")[0];
    if (debugLevel >= 1) console.log("callerPath:", callerPath);
    if (callerPath.startsWith("/")) {
      start = dirname(callerPath);
    } else if (process.env.PWD) {
      start = process.env.PWD;
    } else {
      throw new Error(
        "[find-and-read] unable to determine where to start. Please initialize findAndRead with a start parameter"
      );
    }
  }
  if (debugLevel >= 1) console.log("[find-and-read] start:", start);
  // Breadth-first frontier; `ignore` prevents stepping straight back to
  // the directory we just came from.
  let dirpaths = [{ dirpath: start, ignore: null }];
  for (let i = 0; i < maxSteps; i++) {
    if (debugLevel >= 2) console.log("[find-and-read] step:", i);
    let found = [];
    let additions = [];
    for (let ii = 0; ii < dirpaths.length; ii++) {
      const { dirpath, ignore } = dirpaths[ii];
      if (debugLevel >= 3) console.log("[find-and-read] dirpath:", dirpath);
      if (debugLevel >= 3) console.log("\t[find-and-read] ignore:", ignore);
      const filepath = join(dirpath, filename);
      if (existsSync(filepath) && !lstatSync(filepath).isDirectory()) {
        if (debugLevel >= 3) console.log("\t[find-and-read] found:", filepath);
        found.push(filepath);
      } else {
        // Expand upward (parent directory) unless stopped.
        const updirpath = dirname(dirpath);
        if (
          updirpath !== ignore &&
          (typeof stop !== "function" ||
            !stop({ dirpath: updirpath, from: dirpath, direction: "up" }))
        ) {
          additions.push({ dirpath: updirpath, ignore: dirpath });
        }
        // Expand downward into subdirectories unless stopped.
        try {
          readdirSync(dirpath, { withFileTypes: true }).forEach(dirent => {
            if (debugLevel >= 3)
              console.log("\t[find-and-read] dirent.name:", dirent.name);
            if (debugLevel >= 3)
              console.log(
                "\t[find-and-read] dirent.isDirectory():",
                dirent.isDirectory()
              );
            if (dirent.isDirectory()) {
              const subdirpath = join(dirpath, dirent.name);
              if (debugLevel >= 3)
                console.log("\t[find-and-read] subdirpath:", subdirpath);
              if (
                subdirpath !== ignore &&
                (typeof stop !== "function" ||
                  !stop({
                    dirpath: subdirpath,
                    from: dirpath,
                    direction: "down"
                  }))
              ) {
                if (debugLevel >= 3)
                  console.log("\t[find-and-read] adding:", {
                    dirpath: subdirpath,
                    ignore: dirpath
                  });
                additions.push({ dirpath: subdirpath, ignore: dirpath });
              }
            }
          });
        } catch (error) {
          // might not have permission to read
        }
      }
    }
    dirpaths = additions;
    if (found.length === 1) {
      if (debugLevel >= 1)
        console.log("[find-and-read] found in " + (i + 1) + " steps");
      return readFileSync(found[0], { encoding, flag });
    } else if (found.length >= 2) {
      throw new Error(
        `[find-and-read] can't decide between "${found.join('" and "')}"`
      );
    }
  }
  // FIX: previous message used "$(unknown)" — not template interpolation —
  // so the literal text was printed instead of the requested filename.
  if (warn) console.warn(`[find-and-read] unable to find "${filename}"`);
};
module.exports = findAndRead;
|
import AsyncStorage from '@react-native-community/async-storage'
import localForage from 'localforage';
import { createStore, persist } from 'easy-peasy';
import { loadings } from 'src/features/request/models';
import { session } from 'src/features/wk/models';
import { reviews } from 'src/features/reviews/models';
import device from 'src/utils/device';
// AsyncStorage adapter with JSON (de)serialization, shaped like the
// storage interface easy-peasy's persist() expects. Using this wrapper
// instead of AsyncStorage directly avoids a known crash:
// https://github.com/ctrlplusb/easy-peasy/issues/431#issuecomment-590333685
const asyncStorage = {
  getItem: async key => JSON.parse(await AsyncStorage.getItem(key)),
  setItem: (key, data) => {
    AsyncStorage.setItem(key, JSON.stringify(data));
  },
  removeItem: key => {
    AsyncStorage.removeItem(key);
  }
};
// Root model map combined into the store.
const models = {
  session,
  loadings,
  reviews,
};
// Persist the store: AsyncStorage adapter on mobile, localForage on web.
// Only the `session` slice is persisted (whitelist).
const persisted = persist(models, {
  storage: device('mobile')
    ? asyncStorage
    : localForage,
  whitelist: [
    'session'
  ]
});
// Singleton store instance for the app.
export default createStore(persisted, {
  name: 'JukenStore'
});
|
#!/usr/bin/env bash
#######################################################################################################
# This script is designed for use as a deployment script in a template
# https://docs.microsoft.com/en-us/azure/azure-resource-manager/templates/deployment-script-template
#
# It expects the following environment variables
# $DEPLOYMENT_MANIFEST_TEMPLATE_URL - the location of a template of an IoT Edge deployment manifest
# $PROVISIONING_TOKEN - the token used for provisioning the edge module
# $HUB_NAME - the name of the IoT Hub where the edge device is registered
# $DEVICE_ID - the name of the edge device on the IoT Hub
# $VIDEO_OUTPUT_FOLDER_ON_DEVICE - the folder where the file sink will store clips
# $VIDEO_INPUT_FOLDER_ON_DEVICE - the folder where where rtspsim will look for sample clips
# $APPDATA_FOLDER_ON_DEVICE - the folder where Video Analyzer module will store state
# $AZURE_STORAGE_ACCOUNT - the storage where the deployment manifest will be stored
# $AZ_SCRIPTS_OUTPUT_PATH - file to write output (provided by the deployment script runtime)
# $RESOURCE_GROUP - the resource group that you are deploying into
# $REGISTRY_PASSWORD - the password for the container registry
# $REGISTRY_USER_NAME - the user name for the container registry
# $IOT_HUB_CONNECTION_STRING - the IoT Hub connection string
# $IOT_EDGE_MODULE_NAME - the IoT avaedge module name
#
#######################################################################################################
# automatically install any extensions
az config set extension.use_dynamic_install=yes_without_prompt
# download the deployment manifest file
printf "downloading $DEPLOYMENT_MANIFEST_TEMPLATE_URL\n"
curl -s $DEPLOYMENT_MANIFEST_TEMPLATE_URL > deployment.json
# substitute the environment-provided values into the manifest placeholders
printf "replacing value in manifest\n"
sed -i "s@\$AVA_PROVISIONING_TOKEN@${PROVISIONING_TOKEN}@g" deployment.json
sed -i "s@\$VIDEO_OUTPUT_FOLDER_ON_DEVICE@${VIDEO_OUTPUT_FOLDER_ON_DEVICE}@g" deployment.json
sed -i "s@\$VIDEO_INPUT_FOLDER_ON_DEVICE@${VIDEO_INPUT_FOLDER_ON_DEVICE}@g" deployment.json
sed -i "s@\$APPDATA_FOLDER_ON_DEVICE@${APPDATA_FOLDER_ON_DEVICE}@g" deployment.json
# Build env.txt (truncate first) with the values the edge modules need.
>env.txt
echo "SUBSCRIPTION_ID=$SUBSCRIPTION_ID" >> env.txt
# NOTE(review): "RESOUCE_GROUP" looks like a typo for RESOURCE_GROUP, but
# consumers of env.txt may depend on the current key — confirm before renaming.
echo "RESOUCE_GROUP=$RESOURCE_GROUP" >> env.txt
echo "AVA_PROVISIONING_TOKEN=$PROVISIONING_TOKEN">> env.txt
echo "VIDEO_INPUT_FOLDER_ON_DEVICE=$VIDEO_INPUT_FOLDER_ON_DEVICE">> env.txt
echo "VIDEO_OUTPUT_FOLDER_ON_DEVICE=$VIDEO_OUTPUT_FOLDER_ON_DEVICE" >> env.txt
echo "APPDATA_FOLDER_ON_DEVICE=$APPDATA_FOLDER_ON_DEVICE" >> env.txt
echo "CONTAINER_REGISTRY_PASSWORD_myacr=$REGISTRY_PASSWORD" >> env.txt
echo "CONTAINER_REGISTRY_USERNAME_myacr=$REGISTRY_USER_NAME" >> env.txt
# Build appsettings.json (truncate first) for the sample application.
>appsettings.json
echo "{" >> appsettings.json
echo "\"IoThubConnectionString\": \"$IOT_HUB_CONNECTION_STRING\"," >> appsettings.json
echo "\"deviceId\": \"$DEVICE_ID\"," >> appsettings.json
echo "\"moduleId\": \"$IOT_EDGE_MODULE_NAME\"" >> appsettings.json
echo "}" >> appsettings.json
# deploy the manifest to the iot hub
printf "deploying manifest to $DEVICE_ID on $HUB_NAME\n"
az iot edge set-modules --device-id $DEVICE_ID --hub-name $HUB_NAME --content deployment.json --only-show-error -o table
# store the manifest and generated files for later reference
printf "storing manifest for reference\n"
az storage share create --name deployment-output --account-name $AZURE_STORAGE_ACCOUNT
az storage file upload --share-name deployment-output --source deployment.json --account-name $AZURE_STORAGE_ACCOUNT
az storage file upload --share-name deployment-output --source env.txt --account-name $AZURE_STORAGE_ACCOUNT
az storage file upload --share-name deployment-output --source appsettings.json --account-name $AZURE_STORAGE_ACCOUNT
|
#!/bin/bash
# Copyright 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
# Abort on any command failure.
set -e
# Default download location is data/corenlp unless the user supplies one.
read -p "Specify download path or enter to use default (data/corenlp): " path
DOWNLOAD_PATH="${path:-data/corenlp}"
echo "Will download to: $DOWNLOAD_PATH"
# Download the CoreNLP distribution into /tmp, unzip, and remove the zip.
pushd "/tmp"
wget -O "stanford-corenlp-full-2017-06-09.zip" "http://nlp.stanford.edu/software/stanford-corenlp-full-2017-06-09.zip"
unzip "stanford-corenlp-full-2017-06-09.zip"
rm "stanford-corenlp-full-2017-06-09.zip"
popd
# Move just the jars into DOWNLOAD_PATH
mkdir -p "$DOWNLOAD_PATH"
mv "/tmp/stanford-corenlp-full-2017-06-09/"*".jar" "$DOWNLOAD_PATH/"
# Optionally append the jars to CLASSPATH in ~/.bashrc.
while read -p "Add to ~/.bashrc CLASSPATH (recommended)? [yes/no]: " choice; do
case "$choice" in
yes )
echo "export CLASSPATH=\$CLASSPATH:$DOWNLOAD_PATH/*" >> ~/.bashrc;
break ;;
no )
break ;;
* ) echo "Please answer yes or no." ;;
esac
done
printf "\n*** NOW RUN: ***\n\nexport CLASSPATH=\$CLASSPATH:$DOWNLOAD_PATH/*\n\n****************\n"
|
<reponame>makoru-hikage/sasquare-python
from functools import partial
from math import floor
from single_array_square.parts import *
from single_array_square.helpers import *
def get_line_indices (base):
    """Return the 1-based positions along one line of the square: 1..base."""
    return range(1, base + 1)
def get_row_nth_cell (base, row_index, nth_cell):
    """Index of the nth cell within the given row (row/column intersection)."""
    return get_intersection_index(base, row_index, nth_cell)
def get_column_nth_cell (base, column_index, nth_cell):
    """Index of the nth cell within the given column (row/column intersection)."""
    return get_intersection_index(base, nth_cell, column_index)
def select_row(base, row_index):
    """Return a tuple of the indices of every cell in the given row."""
    return tuple(
        get_row_nth_cell(base, row_index, n)
        for n in get_line_indices(base)
    )
def select_column(base, column_index):
    """Return a tuple of the indices of every cell in the given column."""
    return tuple(
        get_column_nth_cell(base, column_index, n)
        for n in get_line_indices(base)
    )
def get_even_square_center_q1(base):
    """First (reference) cell of an even square's 2x2 center block."""
    return get_intersection_index(
        base,
        get_center_index(base),
        get_center_index(base)
    )
def get_even_square_center_q2(base):
    """Cell immediately after q1 (next cell in the same row)."""
    return get_even_square_center_q1(base) + 1
def get_even_square_center_q3(base):
    """Cell one full row below q1."""
    return get_even_square_center_q1(base) + base
def get_even_square_center_q4(base):
    """Cell one row below and one cell after q1 (diagonal of the 2x2 block)."""
    return get_even_square_center_q1(base) + base + 1
def select_center(base):
    """Center cell(s): a 1-tuple for odd bases, the four middle cells for even."""
    if base % 2 == 0:
        return (
            get_even_square_center_q1(base),
            get_even_square_center_q2(base),
            get_even_square_center_q3(base),
            get_even_square_center_q4(base),
        )
    center = get_center_index(base)
    return (get_intersection_index(base, center, center),)
def reverse_descending_index(base, descending_index):
    """Mirror a descending-slant index within the total slant count."""
    return get_opposite_index(
        count_square_slants(base),
        descending_index
    )
def get_desc_nth_cell(b, d, n):
    """
    Get the nth cell of a descending slant.

    @param int b: the base of the square
    @param int d: the descending slant index
    @param int n: the nth cell along that slant
    """
    # Start from the main diagonal cell (n, n) and shift by a per-slant
    # offset; the b**(2 + floor(-d/b)) factor selects a row-sized or
    # unit-sized step depending on which side of the diagonal d lies.
    return get_intersection_index(b,n,n) \
    + b**(2 + floor(-(d)/b))*abs(b-d)
def get_asc_nth_cell(b, a, n):
    """
    Get the nth cell of an ascending slant.

    @param int b: the base of the square
    @param int a: the ascending slant index
    @param int n: the nth cell along that slant
    """
    offset = abs(b - a) * (b - 1) + b ** floor(a / b) * (b - a)
    return b * b - b * n + n - offset
def select_descending_slant(base, descending_index):
    """Return a tuple of the indices of every cell on the descending slant."""
    n_cells = count_slant_cells(base, descending_index)
    return tuple(
        get_desc_nth_cell(base, descending_index, n)
        for n in get_line_indices(n_cells)
    )
def select_ascending_slant(base, ascending_index):
    """Return a tuple of the indices of every cell on the ascending slant."""
    n_cells = count_slant_cells(base, ascending_index)
    return tuple(
        get_asc_nth_cell(base, ascending_index, n)
        for n in get_line_indices(n_cells)
    )
def select_topright_corner(base):
    """Index of the top-right corner cell (always 1)."""
    return 1
def select_topleft_corner(base):
    """Index of the top-left corner cell."""
    return base
def select_bottomright_corner(base):
    """Index of the bottom-right corner cell (first cell of the last row)."""
    return base * (base - 1) + 1
def select_bottomleft_corner(base):
    """Index of the bottom-left corner cell (the last cell)."""
    return base ** 2
def select_all_corners(base):
    """All four corner indices: (top-right, top-left, bottom-right, bottom-left)."""
    corners = (
        select_topright_corner,
        select_topleft_corner,
        select_bottomright_corner,
        select_bottomleft_corner,
    )
    return tuple(pick(base) for pick in corners)
|
import { Logger } from "pino";
import { Store } from "@walletconnect/core";
import { ICore, PairingTypes } from "@walletconnect/types";
import { SIGN_CLIENT_STORAGE_PREFIX, PAIRING_CONTEXT } from "../constants";
/**
 * Key-value store of pairing records, constructed with the pairing
 * context and persisted under the sign-client storage prefix.
 */
export class Pairing extends Store<string, PairingTypes.Struct> {
  constructor(public core: ICore, public logger: Logger) {
    super(core, logger, PAIRING_CONTEXT, SIGN_CLIENT_STORAGE_PREFIX);
  }
}
|
#!/usr/bin/env bash
# Git pre-commit hook to check staged Python files for formatting issues with
# yapf.
#
# INSTALLING: Copy this script into `.git/hooks/pre-commit`, and mark it as
# executable.
#
# This requires that yapf is installed and runnable in the environment running
# the pre-commit hook.
#
# When running, this first checks for unstaged changes to staged files, and if
# there are any, it will exit with an error. Files with unstaged changes will be
# printed.
#
# If all staged files have no unstaged changes, it will run yapf against them,
# leaving the formatting changes unstaged. Changed files will be printed.
#
# BUGS: This does not leave staged changes alone when used with the -a flag to
# git commit, due to the fact that git stages ALL unstaged files when that flag
# is used.
# Find all staged Python files, and exit early if there aren't any.
PYTHON_FILES=()
while IFS=$'\n' read -r line; do PYTHON_FILES+=("$line"); done \
  < <(git diff --name-only --cached --diff-filter=AM | grep --color=never '.py$')
if [ ${#PYTHON_FILES[@]} -eq 0 ]; then
  exit 0
fi
########## PIP VERSION #############
# Verify that yapf is installed; if not, warn and exit.
#if ! command -v yapf >/dev/null; then
#  echo 'yapf not on path; can not format. Please install yapf:'
#  echo '    pip install yapf'
#  exit 2
#fi
######### END PIP VERSION ##########
########## PIPENV VERSION ##########
# FIX: was '2>/dev/null 2>&1' — the second redirect overrides the first and
# stdout (the version text) leaked into the hook output; discard both streams.
if ! pipenv run yapf --version >/dev/null 2>&1; then
  echo 'yapf not on path; can not format. Please install yapf:'
  echo '    pipenv install yapf'
  exit 2
fi
###### END PIPENV VERSION ##########
# Check for unstaged changes to files in the index.
CHANGED_FILES=()
while IFS=$'\n' read -r line; do CHANGED_FILES+=("$line"); done \
  < <(git diff --name-only "${PYTHON_FILES[@]}")
if [ ${#CHANGED_FILES[@]} -gt 0 ]; then
  echo 'You have unstaged changes to some files in your commit; skipping '
  echo 'auto-format. Please stage, stash, or revert these changes. You may '
  echo 'find `git stash -k` helpful here.'
  echo 'Files with unstaged changes:' "${CHANGED_FILES[@]}"
  exit 1
fi
# Format all staged files, then exit with an error code if any have uncommitted
# changes.
echo 'Formatting staged Python files . . .'
########## PIP VERSION #############
#yapf -i -r "${PYTHON_FILES[@]}"
######### END PIP VERSION ##########
########## PIPENV VERSION ##########
pipenv run yapf -i -r "${PYTHON_FILES[@]}"
###### END PIPENV VERSION ##########
CHANGED_FILES=()
while IFS=$'\n' read -r line; do CHANGED_FILES+=("$line"); done \
  < <(git diff --name-only "${PYTHON_FILES[@]}")
if [ ${#CHANGED_FILES[@]} -gt 0 ]; then
  echo 'Reformatted staged files. Please review and stage the changes.'
  echo 'Files updated: ' "${CHANGED_FILES[@]}"
  exit 1
else
  exit 0
fi
|
'use strict';

var app = require('../..');
var request = require('supertest');

// Shared across suites: set by the POST suite, read by GET/PUT/DELETE below.
var newTree;

describe('Tree API:', function() {
  describe('GET /api/trees', function() {
    var trees;

    beforeEach(function(done) {
      request(app)
        .get('/api/trees')
        .expect(200)
        .expect('Content-Type', /json/)
        .end(function(err, res) {
          if (err) {
            return done(err);
          }
          trees = res.body;
          done();
        });
    });

    it('should respond with JSON array', function() {
      expect(trees).to.be.instanceOf(Array);
    });
  });

  describe('POST /api/trees', function() {
    beforeEach(function(done) {
      request(app)
        .post('/api/trees')
        .send({
          name: 'New Tree',
          info: 'This is the brand new tree!!!'
        })
        .expect(201)
        .expect('Content-Type', /json/)
        .end(function(err, res) {
          if (err) {
            return done(err);
          }
          newTree = res.body;
          done();
        });
    });

    // Without at least one `it`, Mocha skips this suite's beforeEach hook
    // entirely, leaving `newTree` undefined for every suite below.
    it('should respond with the newly created tree', function() {
      expect(newTree.name).to.equal('New Tree');
      expect(newTree.info).to.equal('This is the brand new tree!!!');
    });
  });

  describe('GET /api/trees/:id', function() {
    var tree;

    beforeEach(function(done) {
      request(app)
        .get('/api/trees/' + newTree._id)
        .expect(200)
        .expect('Content-Type', /json/)
        .end(function(err, res) {
          if (err) {
            return done(err);
          }
          tree = res.body;
          done();
        });
    });

    afterEach(function() {
      tree = {};
    });

    it('should respond with the requested tree', function() {
      expect(tree.name).to.equal('New Tree');
      expect(tree.info).to.equal('This is the brand new tree!!!');
    });
  });

  describe('PUT /api/trees/:id', function() {
    var updatedTree;

    beforeEach(function(done) {
      request(app)
        .put('/api/trees/' + newTree._id)
        .send({
          name: 'Updated Tree',
          info: 'This is the updated tree!!!'
        })
        .expect(200)
        .expect('Content-Type', /json/)
        .end(function(err, res) {
          if (err) {
            return done(err);
          }
          updatedTree = res.body;
          done();
        });
    });

    afterEach(function() {
      updatedTree = {};
    });

    it('should respond with the updated tree', function() {
      expect(updatedTree.name).to.equal('Updated Tree');
      expect(updatedTree.info).to.equal('This is the updated tree!!!');
    });
  });

  describe('DELETE /api/trees/:id', function() {
    it('should respond with 204 on successful removal', function(done) {
      request(app)
        .delete('/api/trees/' + newTree._id)
        .expect(204)
        .end(function(err) {
          if (err) {
            return done(err);
          }
          done();
        });
    });

    it('should respond with 404 when the tree does not exist', function(done) {
      request(app)
        .delete('/api/trees/' + newTree._id)
        .expect(404)
        .end(function(err) {
          if (err) {
            return done(err);
          }
          done();
        });
    });
  });
});
|
package ch.raiffeisen.openbank.beneficiary.controller.api;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.Set;

import org.springframework.hateoas.core.Relation;

import ch.raiffeisen.openbank.common.repository.model.AddressType;

import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
/**
 * Information that locates and identifies a specific address, as defined by
 * postal services.
 *
 * <p>Mutable API data holder; every field is optional. Individual field
 * semantics are documented via the Swagger {@code @ApiModelProperty}
 * annotations below.</p>
 */
@ApiModel(description = "Information that locates and identifies a specific address, as defined by postal services.")
@Relation(value = "postalAddress", collectionRelation = "postalAddresses")
public class PostalAddress {

    @ApiModelProperty(notes = "Identifies the nature of the postal address.")
    private AddressType addressType;

    @ApiModelProperty(notes = "Identification of a division of a large organisation or building.")
    private String department;

    @ApiModelProperty(notes = "Identification of a sub-division of a large organisation or building.")
    private String subDepartment;

    @ApiModelProperty(notes = "Name of a street or thoroughfare.")
    private String streetName;

    @ApiModelProperty(notes = "Number that identifies the position of a building on a street.")
    private String buildingNumber;

    @ApiModelProperty(notes = "Identifier consisting of a group of letters and/or numbers that is added to a postal address to assist the sorting of mail.")
    private String postCode;

    @ApiModelProperty(notes = "Name of a built-up area, with defined boundaries, and a local government.")
    private String townName;

    @ApiModelProperty(notes = "Identifies a subdivision of a country such as state, region, county.")
    private String countrySubDivision;

    @ApiModelProperty(notes = "Nation with its own government.")
    private String country;

    // LinkedHashSet (rather than HashSet) preserves insertion order: the
    // address lines are the lines of a free-format postal address, and a
    // hash-ordered set would scramble them when rendered.
    @ApiModelProperty(notes = "Information that locates and identifies a specific address, as defined by postal services, presented in free format text.")
    private Set<String> addressLines = new LinkedHashSet<>();

    public AddressType getAddressType() {
        return addressType;
    }

    public void setAddressType(AddressType addressType) {
        this.addressType = addressType;
    }

    public String getDepartment() {
        return department;
    }

    public void setDepartment(String department) {
        this.department = department;
    }

    public String getSubDepartment() {
        return subDepartment;
    }

    public void setSubDepartment(String subDepartment) {
        this.subDepartment = subDepartment;
    }

    public String getStreetName() {
        return streetName;
    }

    public void setStreetName(String streetName) {
        this.streetName = streetName;
    }

    public String getBuildingNumber() {
        return buildingNumber;
    }

    public void setBuildingNumber(String buildingNumber) {
        this.buildingNumber = buildingNumber;
    }

    public String getPostCode() {
        return postCode;
    }

    public void setPostCode(String postCode) {
        this.postCode = postCode;
    }

    public String getTownName() {
        return townName;
    }

    public void setTownName(String townName) {
        this.townName = townName;
    }

    public String getCountrySubDivision() {
        return countrySubDivision;
    }

    public void setCountrySubDivision(String countrySubDivision) {
        this.countrySubDivision = countrySubDivision;
    }

    public String getCountry() {
        return country;
    }

    public void setCountry(String country) {
        this.country = country;
    }

    /** Returns the live set of address lines (insertion-ordered). */
    public Set<String> getAddressLines() {
        return addressLines;
    }

    public void setAddressLines(Set<String> addressLines) {
        this.addressLines = addressLines;
    }
}
|
#!/usr/bin/env bash
set -euo pipefail

# Push every non-test .nupkg under the current directory to the feedz.io feed.
# Usage: <script> <api-key>
if [ $# -lt 1 ]; then
    echo "${0##*/}: usage: ${0##*/} <api-key>" >&2
    exit 1
fi

# NOTE: word-splitting the find output is fine here because nupkg file names
# contain no whitespace; the explicit '.' path makes find portable (bare
# `find -name` is a GNU extension).
for package in $(find . -name "*.nupkg" | grep -v "test"); do
    echo "${0##*/}": Pushing "$package"...
    dotnet nuget push "$package" --source https://f.feedz.io/carter/carter/nuget/index.json --api-key "$1"
done
|
#!/usr/bin/env bash

# Time compilation of the current tree and of PREV_COMMIT, writing the two
# timing logs to the given files.
if [ -z "$1" ] || [ -z "$2" ] || [ -z "$3" ]
then
    # Print help. The heredoc delimiter is quoted ('EOF') so the backticked
    # command names below (`make`, `git checkout`, ...) are shown literally
    # instead of being executed via command substitution.
    cat <<'EOF'
This is a helper script for make-pretty-timed-diff-tip-only-diff.sh.

USAGE: make-each-time-file-tip-only-diff.sh MAKE-COMMAND NEW_FILENAME OLD_FILENAME

MAKE-COMMAND:  The command which is used to make the library, such as
               `make` or `make -j2`
NEW_FILENAME:  The name of the file to store the output of timing the
               compilation of the current state of the library
OLD_FILENAME:  The name of the file to store the output of timing the
               compilation of the state of PREV_COMMIT.  The command
               `git checkout` is used to obtain that state.  If this
               script is interrupted or finishes, `git checkout` is
               used to restore the current HEAD.  If there are staged
               but uncommitted changes, this script will exit
               immediately.  PREV_COMMIT defaults to HEAD^, and is read
               from the environment.
EOF
    # The original fell through and kept running with empty arguments;
    # abort after showing usage instead.
    exit 1
fi

# in case we're run from out of git repo
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
source "$DIR/../pushd-root.sh"

MAKECMD="$1"
NEW_FILE="$2"
OLD_FILE="$3"

# PREV_COMMIT is read from the environment; default to the parent commit.
# ${PREV_COMMIT:-} keeps this safe even when the variable is unset.
if [ -z "${PREV_COMMIT:-}" ]
then
    PREV_COMMIT="HEAD^"
fi

# ensure that we have no staged changes
if [ ! -z "$(git status | grep '^# Changes to be committed:$')" ]
then
    git status
    echo 'ERROR: You have staged but uncommitted changes.'
    echo '       Either `git stash` them, or `git commit` them, or remove them.'
    exit 1
fi

# Work out where HEAD is so we can come back to it afterwards. BRANCH_MOV is
# what we pass to `git checkout`; BRANCH_DISP is what we show the user.
BRANCH=$(git symbolic-ref -q HEAD || git rev-parse -q --verify HEAD)
BRANCH_DISP="$(git branch | grep '^*' | sed s'/* //')"
if [ "$BRANCH_DISP" = "(no branch)" ] || [[ "$BRANCH_DISP" == "(HEAD detached at "* ]]
then
    # Detached HEAD: recover a display name from the most recent reflog entry.
    BRANCH_DISP="$(git reflog | head -n 1 | grep -o 'moving from .* to .*' | sed s'/moving from .* to \(.*\)/\1/g')"
    BRANCH_MOV="$BRANCH"
else
    BRANCH_MOV="$BRANCH_DISP"
fi
echo "Tip is $BRANCH_DISP ($BRANCH)"
echo 'If this is wrong, break immediately with ^C'

# make the new version so we only get diffs
trap "exit 1" SIGHUP SIGINT SIGTERM
$MAKECMD -k

# make the old version
# if we're interrupted, first run `git checkout` back to the tip to clean up
trap "git checkout '$BRANCH_MOV' && exit 1" SIGHUP SIGINT SIGTERM
git checkout "$PREV_COMMIT" || exit 1
# run the given `make` command, passing `TIMED=1` to get timing and
# `-k` to continue even if files fail
$MAKECMD TIMED=1 -k 2>&1 | tee "$OLD_FILE"

# there is a diff, so restore the changes
git checkout "$BRANCH_MOV" || exit 1
# now if we're interrupted, we should only exit immediately
trap "exit 1" SIGHUP SIGINT SIGTERM
# run the given `make` command, passing `TIMED=1` to get timing and
# `-k` to continue even if files fail
$MAKECMD TIMED=1 -k 2>&1 | tee "$NEW_FILE"

popd 1>/dev/null
|
# Run the named hook, either by calling the function with that name or
# by evaluating the variable with that name. This allows convenient
# setting of hooks both from Nix expressions (as attributes /
# environment variables) and from shell scripts (as functions).
runHook() {
    local hookName="$1"
    # Dispatch on how the hook name is currently defined in this shell.
    case "$(type -t $hookName)" in
        # Directly callable in the shell: invoke it.
        (function|alias|builtin) $hookName;;
        # A file on disk: source it so it can modify this shell's environment.
        (file) source $hookName;;
        # A shell keyword: nothing sensible to run, so no-op.
        (keyword) :;;
        # Otherwise treat $hookName as the name of a variable holding shell
        # code and evaluate its contents (unset/empty evaluates to a no-op).
        (*) eval "${!hookName}";;
    esac
}
# EXIT-trap handler: prints optional build statistics, runs the failure or
# exit hook, and honours $succeedOnFailure by recording the failure in
# $out/nix-support/failed instead of propagating the exit code.
exitHandler() {
    exitCode=$?
    # Don't let errors during cleanup recursively abort the handler.
    set +e
    closeNest
    if [ -n "$showBuildStats" ]; then
        # `times` is a shell builtin, so its output must be captured through
        # a file rather than a subshell to report on THIS shell's children.
        times > "$NIX_BUILD_TOP/.times"
        local -a times=($(cat "$NIX_BUILD_TOP/.times"))
        # Print the following statistics:
        # - user time for the shell
        # - system time for the shell
        # - user time for all child processes
        # - system time for all child processes
        echo "build time elapsed: " ${times[*]}
    fi
    if [ $exitCode != 0 ]; then
        runHook failureHook
        # If the builder had a non-zero exit code and
        # $succeedOnFailure is set, create the file
        # `$out/nix-support/failed' to signal failure, and exit
        # normally. Otherwise, return the original exit code.
        if [ -n "$succeedOnFailure" ]; then
            echo "build failed with exit code $exitCode (ignored)"
            mkdir -p "$out/nix-support"
            echo -n $exitCode > "$out/nix-support/failed"
            exit 0
        fi
    else
        runHook exitHook
    fi
    exit $exitCode
}
trap "exitHandler" EXIT
######################################################################
# Helper functions that might be useful in setup hooks.
# Append $dir to the search-path variable named $varName, separated by
# $delimiter. Directories that don't exist are skipped, and the delimiter is
# only inserted when the variable already has a value (${!varName:+...}).
addToSearchPathWithCustomDelimiter() {
    local delimiter=$1
    local varName=$2
    local dir=$3
    if [ -d "$dir" ]; then
        eval export ${varName}=${!varName}${!varName:+$delimiter}${dir}
    fi
}
PATH_DELIMITER=':'
# Convenience wrapper for PATH-like (colon-separated) variables.
addToSearchPath() {
    addToSearchPathWithCustomDelimiter "${PATH_DELIMITER}" "$@"
}
######################################################################
# Initialisation.
set -e
[ -z $NIX_GCC ] && NIX_GCC=@gcc@
# Wildcard expansions that don't match should expand to an empty list.
# This ensures that, for instance, "for i in *; do ...; done" does the
# right thing.
shopt -s nullglob
# Set up the initial path.
PATH=
for i in $NIX_GCC @initialPath@; do
if [ "$i" = / ]; then i=; fi
addToSearchPath PATH $i/bin
done
if [ "$NIX_DEBUG" = 1 ]; then
echo "initial path: $PATH"
fi
# Execute the pre-hook.
export SHELL=@shell@
if [ -z "$shell" ]; then export shell=@shell@; fi
runHook preHook
# Check that the pre-hook initialised SHELL.
if [ -z "$SHELL" ]; then echo "SHELL not set"; exit 1; fi
# Hack: run gcc's setup hook.
envHooks=()
crossEnvHooks=()
if [ -f $NIX_GCC/nix-support/setup-hook ]; then
source $NIX_GCC/nix-support/setup-hook
fi
# Ensure that the given directories exist, creating them (with parents) if
# missing.
ensureDir() {
    local dir
    for dir in "$@"; do
        # Test with -d (directory exists) rather than -x: -x only checks
        # execute permission, so an existing but search-restricted directory
        # was previously re-fed to mkdir. mkdir -p is a no-op for existing
        # directories anyway, so -d is both correct and sufficient.
        if ! [ -d "$dir" ]; then mkdir -p "$dir"; fi
    done
}
# Copy the given files into $out/bin, creating the directory first.
installBin() {
    # Quote $out: an output path containing whitespace would otherwise be
    # word-split into multiple arguments.
    mkdir -p "$out/bin"
    cp "$@" "$out/bin"
}
# Allow the caller to augment buildInputs (it's not always possible to
# do this before the call to setup.sh, since the PATH is empty at that
# point; here we have a basic Unix environment).
runHook addInputsHook
# Recursively find all build inputs.
# Recursively collect $pkg and its propagated inputs into the accumulator
# variable named $var (a space-separated list of store paths), sourcing each
# package's setup hook along the way.
findInputs() {
    local pkg=$1
    local var=$2
    local propagatedBuildInputsFile=$3
    # Already seen? The accumulator stores entries padded with spaces
    # (" pkg ") precisely so this glob match is unambiguous.
    case ${!var} in
        *\ $pkg\ *)
            return 0
            ;;
    esac
    # Record this package in the accumulator (same padded format).
    eval $var="'${!var} $pkg '"
    # Let the package hook into the build environment.
    if [ -f $pkg/nix-support/setup-hook ]; then
        source $pkg/nix-support/setup-hook
    fi
    # Recurse into the package's propagated inputs, if any.
    if [ -f $pkg/nix-support/$propagatedBuildInputsFile ]; then
        for i in $(cat $pkg/nix-support/$propagatedBuildInputsFile); do
            findInputs $i $var $propagatedBuildInputsFile
        done
    fi
}
crossPkgs=""
for i in $buildInputs $propagatedBuildInputs; do
findInputs $i crossPkgs propagated-build-inputs
done
nativePkgs=""
for i in $nativeBuildInputs $propagatedNativeBuildInputs; do
findInputs $i nativePkgs propagated-native-build-inputs
done
# Set the relevant environment variables to point to the build inputs
# found above.
addToNativeEnv() {
local pkg=$1
if [ -d $1/bin ]; then
addToSearchPath _PATH $1/bin
fi
# Run the package-specific hooks set by the setup-hook scripts.
for i in "${envHooks[@]}"; do
$i $pkg
done
}
for i in $nativePkgs; do
addToNativeEnv $i
done
addToCrossEnv() {
local pkg=$1
# Some programs put important build scripts (freetype-config and similar)
# into their crossDrv bin path. Intentionally these should go after
# the nativePkgs in PATH.
if [ -d $1/bin ]; then
addToSearchPath _PATH $1/bin
fi
# Run the package-specific hooks set by the setup-hook scripts.
for i in "${crossEnvHooks[@]}"; do
$i $pkg
done
}
for i in $crossPkgs; do
addToCrossEnv $i
done
# Add the output as an rpath.
if [ "$NIX_NO_SELF_RPATH" != 1 ]; then
export NIX_LDFLAGS="-rpath $out/lib $NIX_LDFLAGS"
if [ -n "$NIX_LIB64_IN_SELF_RPATH" ]; then
export NIX_LDFLAGS="-rpath $out/lib64 $NIX_LDFLAGS"
fi
if [ -n "$NIX_LIB32_IN_SELF_RPATH" ]; then
export NIX_LDFLAGS="-rpath $out/lib32 $NIX_LDFLAGS"
fi
fi
# Set the TZ (timezone) environment variable, otherwise commands like
# `date' will complain (e.g., `Tue Mar 9 10:01:47 Local time zone must
# be set--see zic manual page 2004').
export TZ=UTC
# Set the prefix. This is generally $out, but it can be overriden,
# for instance if we just want to perform a test build/install to a
# temporary location and write a build report to $out.
if [ -z "$prefix" ]; then
prefix="$out";
fi
if [ "$useTempPrefix" = 1 ]; then
prefix="$NIX_BUILD_TOP/tmp_prefix";
fi
PATH=$_PATH${_PATH:+:}$PATH
if [ "$NIX_DEBUG" = 1 ]; then
echo "final path: $PATH"
fi
# Make GNU Make produce nested output.
export NIX_INDENT_MAKE=1
# Normalize the NIX_BUILD_CORES variable. The value might be 0, which
# means that we're supposed to try and auto-detect the number of
# available CPU cores at run-time.
if [ -z "${NIX_BUILD_CORES:-}" ]; then
NIX_BUILD_CORES="1"
elif [ "$NIX_BUILD_CORES" -le 0 ]; then
NIX_BUILD_CORES=$(nproc 2>/dev/null || true)
if expr >/dev/null 2>&1 "$NIX_BUILD_CORES" : "^[0-9][0-9]*$"; then
:
else
NIX_BUILD_CORES="1"
fi
fi
export NIX_BUILD_CORES
######################################################################
# Misc. helper functions.
stripDirs() {
local dirs="$1"
local stripFlags="$2"
local dirsNew=
for d in ${dirs}; do
if [ -d "$prefix/$d" ]; then
dirsNew="${dirsNew} $prefix/$d "
fi
done
dirs=${dirsNew}
if [ -n "${dirs}" ]; then
header "stripping (with flags $stripFlags) in $dirs"
find $dirs -type f -print0 | xargs -0 ${xargsFlags:--r} strip $commonStripFlags $stripFlags || true
stopNest
fi
}
######################################################################
# Textual substitution functions.
substitute() {
local input="$1"
local output="$2"
local -a params=("$@")
local n p pattern replacement varName content
# a slightly hacky way to keep newline at the end
content="$(cat $input; echo -n X)"
content="${content%X}"
for ((n = 2; n < ${#params[*]}; n += 1)); do
p=${params[$n]}
if [ "$p" = --replace ]; then
pattern="${params[$((n + 1))]}"
replacement="${params[$((n + 2))]}"
n=$((n + 2))
fi
if [ "$p" = --subst-var ]; then
varName="${params[$((n + 1))]}"
pattern="@$varName@"
replacement="${!varName}"
n=$((n + 1))
fi
if [ "$p" = --subst-var-by ]; then
pattern="@${params[$((n + 1))]}@"
replacement="${params[$((n + 2))]}"
n=$((n + 2))
fi
content="${content//"$pattern"/$replacement}"
done
# !!! This doesn't work properly if $content is "-n".
echo -n "$content" > "$output".tmp
if [ -x "$output" ]; then chmod +x "$output".tmp; fi
mv -f "$output".tmp "$output"
}
substituteInPlace() {
local fileName="$1"
shift
substitute "$fileName" "$fileName" "$@"
}
# Run `substitute` on $input -> $output with a --subst-var for every
# environment variable whose name starts with a lowercase letter.
substituteAll() {
    local input="$1"
    local output="$2"
    # Declare and reset the flag list locally: previously `args` was a global
    # that was never cleared, so successive calls accumulated stale
    # --subst-var flags from earlier invocations.
    local args=""
    # Select all environment variables that start with a lowercase character.
    for envVar in $(env | sed "s/^[^a-z].*//" | sed "s/^\([^=]*\)=.*/\1/"); do
        if [ "$NIX_DEBUG" = "1" ]; then
            echo "$envVar -> ${!envVar}"
        fi
        args="$args --subst-var $envVar"
    done
    # $args is intentionally unquoted: it is a flat list of flags.
    substitute "$input" "$output" $args
}
substituteAllInPlace() {
local fileName="$1"
shift
substituteAll "$fileName" "$fileName" "$@"
}
######################################################################
# What follows is the generic builder.
nestingLevel=0
startNest() {
nestingLevel=$(($nestingLevel + 1))
echo -en "\033[$1p"
}
stopNest() {
nestingLevel=$(($nestingLevel - 1))
echo -en "\033[q"
}
header() {
startNest "$2"
echo "$1"
}
# Make sure that even when we exit abnormally, the original nesting
# level is properly restored.
closeNest() {
while [ $nestingLevel -gt 0 ]; do
stopNest
done
}
# This function is useful for debugging broken Nix builds. It dumps
# all environment variables to a file `env-vars' in the build
# directory. If the build fails and the `-K' option is used, you can
# then go to the build directory and source in `env-vars' to reproduce
# the environment used for building.
dumpVars() {
    # `export` with no args prints the environment as re-sourceable shell
    # declarations; $noDumpEnvVars=1 opts out.
    if [ "$noDumpEnvVars" != 1 ]; then
        export > "$NIX_BUILD_TOP/env-vars"
    fi
}
# Utility function: set $strippedName to the base name of the given path,
# with the 32-character store-hash prefix `HASH-' removed, if present.
stripHash() {
    # Quote "$1" so paths containing whitespace survive word splitting.
    strippedName=$(basename "$1")
    if echo "$strippedName" | grep -q '^[a-z0-9]\{32\}-'; then
        # Drop the hash (32 chars) plus the '-' separator: keep from col 34.
        strippedName=$(echo "$strippedName" | cut -c34-)
    fi
}
unpackFile() {
curSrc="$1"
local cmd
header "unpacking source archive $curSrc" 3
case "$curSrc" in
*.tar.xz | *.tar.lzma)
# Don't rely on tar knowing about .xz.
xz -d < $curSrc | tar xf -
;;
*.tar | *.tar.* | *.tgz | *.tbz2)
# GNU tar can automatically select the decompression method
# (info "(tar) gzip").
tar xf $curSrc
;;
*.zip)
unzip -qq $curSrc
;;
*)
if [ -d "$curSrc" ]; then
stripHash $curSrc
cp -prd --no-preserve=timestamps $curSrc $strippedName
else
if [ -z "$unpackCmd" ]; then
echo "source archive $curSrc has unknown type"
exit 1
fi
runHook unpackCmd
fi
;;
esac
stopNest
}
unpackPhase() {
runHook preUnpack
if [ -z "$srcs" ]; then
if [ -z "$src" ]; then
echo 'variable $src or $srcs should point to the source'
exit 1
fi
srcs="$src"
fi
# To determine the source directory created by unpacking the
# source archives, we record the contents of the current
# directory, then look below which directory got added. Yeah,
# it's rather hacky.
local dirsBefore=""
for i in *; do
if [ -d "$i" ]; then
dirsBefore="$dirsBefore $i "
fi
done
# Unpack all source archives.
for i in $srcs; do
unpackFile $i
done
# Find the source directory.
if [ -n "$setSourceRoot" ]; then
runHook setSourceRoot
elif [ -z "$sourceRoot" ]; then
sourceRoot=
for i in *; do
if [ -d "$i" ]; then
case $dirsBefore in
*\ $i\ *)
;;
*)
if [ -n "$sourceRoot" ]; then
echo "unpacker produced multiple directories"
exit 1
fi
sourceRoot="$i"
;;
esac
fi
done
fi
if [ -z "$sourceRoot" ]; then
echo "unpacker appears to have produced no directories"
exit 1
fi
echo "source root is $sourceRoot"
# By default, add write permission to the sources. This is often
# necessary when sources have been copied from other store
# locations.
if [ "$dontMakeSourcesWritable" != 1 ]; then
chmod -R u+w "$sourceRoot"
fi
runHook postUnpack
}
patchPhase() {
runHook prePatch
for i in $patches; do
header "applying patch $i" 3
local uncompress=cat
case $i in
*.gz)
uncompress="gzip -d"
;;
*.bz2)
uncompress="bzip2 -d"
;;
*.xz)
uncompress="xz -d"
;;
*.lzma)
uncompress="lzma -d"
;;
esac
# "2>&1" is a hack to make patch fail if the decompressor fails (nonexistent patch, etc.)
$uncompress < $i 2>&1 | patch ${patchFlags:--p1}
stopNest
done
runHook postPatch
}
fixLibtool() {
    # Blank out libtool's hard-coded system library search paths so it can't
    # pick up impure host libraries ('^' is used as the sed delimiter here).
    sed -i -e 's^eval sys_lib_.*search_path=.*^^' "$1"
}
configurePhase() {
runHook preConfigure
if [ -z "$configureScript" ]; then
configureScript=./configure
if ! [ -x $configureScript ]; then
echo "no configure script, doing nothing"
return
fi
fi
if [ -z "$dontFixLibtool" ]; then
find . -iname "ltmain.sh" | while read i; do
echo "fixing libtool script $i"
fixLibtool $i
done
fi
if [ -z "$dontAddPrefix" ]; then
configureFlags="${prefixKey:---prefix=}$prefix $configureFlags"
fi
# Add --disable-dependency-tracking to speed up some builds.
if [ -z "$dontAddDisableDepTrack" ]; then
if grep -q dependency-tracking $configureScript; then
configureFlags="--disable-dependency-tracking $configureFlags"
fi
fi
# By default, disable static builds.
if [ -z "$dontDisableStatic" ]; then
if grep -q enable-static $configureScript; then
configureFlags="--disable-static $configureFlags"
fi
fi
echo "configure flags: $configureFlags ${configureFlagsArray[@]}"
$configureScript $configureFlags "${configureFlagsArray[@]}"
runHook postConfigure
}
buildPhase() {
runHook preBuild
if [ -z "$makeFlags" ] && ! [ -n "$makefile" -o -e "Makefile" -o -e "makefile" -o -e "GNUmakefile" ]; then
echo "no Makefile, doing nothing"
return
fi
# See https://github.com/NixOS/nixpkgs/pull/1354#issuecomment-31260409
makeFlags="SHELL=$SHELL $makeFlags"
echo "make flags: $makeFlags ${makeFlagsArray[@]} $buildFlags ${buildFlagsArray[@]}"
make ${makefile:+-f $makefile} \
${enableParallelBuilding:+-j${NIX_BUILD_CORES} -l${NIX_BUILD_CORES}} \
$makeFlags "${makeFlagsArray[@]}" \
$buildFlags "${buildFlagsArray[@]}"
runHook postBuild
}
checkPhase() {
runHook preCheck
echo "check flags: $makeFlags ${makeFlagsArray[@]} $checkFlags ${checkFlagsArray[@]}"
make ${makefile:+-f $makefile} \
${enableParallelBuilding:+-j${NIX_BUILD_CORES} -l${NIX_BUILD_CORES}} \
$makeFlags "${makeFlagsArray[@]}" \
${checkFlags:-VERBOSE=y} "${checkFlagsArray[@]}" ${checkTarget:-check}
runHook postCheck
}
patchELF() {
# Patch all ELF executables and shared libraries.
header "patching ELF executables and libraries"
if [ -e "$prefix" ]; then
find "$prefix" \( \
\( -type f -a -name "*.so*" \) -o \
\( -type f -a -perm +0100 \) \
\) -print -exec patchelf --shrink-rpath '{}' \;
fi
stopNest
}
patchShebangs() {
# Rewrite all script interpreter file names (`#! /path') under the
# specified directory tree to paths found in $PATH. E.g.,
# /bin/sh will be rewritten to /nix/store/<hash>-some-bash/bin/sh.
# /usr/bin/env gets special treatment so that ".../bin/env python" is
# rewritten to /nix/store/<hash>/bin/python.
# Interpreters that are already in the store are left untouched.
header "patching script interpreter paths"
local dir="$1"
local f
local oldPath
local newPath
local arg0
local args
local oldInterpreterLine
local newInterpreterLine
find "$dir" -type f -perm +0100 | while read f; do
if [ "$(head -1 "$f" | head -c +2)" != '#!' ]; then
# missing shebang => not a script
continue
fi
oldInterpreterLine=$(head -1 "$f" | tail -c +3)
read -r oldPath arg0 args <<< "$oldInterpreterLine"
if $(echo "$oldPath" | grep -q "/bin/env$"); then
# Check for unsupported 'env' functionality:
# - options: something starting with a '-'
# - environment variables: foo=bar
if $(echo "$arg0" | grep -q -- "^-.*\|.*=.*"); then
echo "unsupported interpreter directive \"$oldInterpreterLine\" (set dontPatchShebangs=1 and handle shebang patching yourself)"
exit 1
fi
newPath="$(command -v "$arg0" || true)"
else
if [ "$oldPath" = "" ]; then
# If no interpreter is specified linux will use /bin/sh. Set
# oldpath="/bin/sh" so that we get /nix/store/.../sh.
oldPath="/bin/sh"
fi
newPath="$(command -v "$(basename "$oldPath")" || true)"
args="$arg0 $args"
fi
newInterpreterLine="$newPath $args"
if [ -n "$oldPath" -a "${oldPath:0:${#NIX_STORE}}" != "$NIX_STORE" ]; then
if [ -n "$newPath" -a "$newPath" != "$oldPath" ]; then
echo "$f: interpreter directive changed from \"$oldInterpreterLine\" to \"$newInterpreterLine\""
# escape the escape chars so that sed doesn't interpret them
escapedInterpreterLine=$(echo "$newInterpreterLine" | sed 's|\\|\\\\|g')
sed -i -e "1 s|.*|#\!$escapedInterpreterLine|" "$f"
fi
fi
done
stopNest
}
installPhase() {
runHook preInstall
mkdir -p "$prefix"
installTargets=${installTargets:-install}
echo "install flags: $installTargets $makeFlags ${makeFlagsArray[@]} $installFlags ${installFlagsArray[@]}"
make ${makefile:+-f $makefile} $installTargets \
$makeFlags "${makeFlagsArray[@]}" \
$installFlags "${installFlagsArray[@]}"
runHook postInstall
}
# The fixup phase performs generic, package-independent, Nix-related
# stuff, like running patchelf and setting the
# propagated-build-inputs. It should rarely be overriden.
fixupPhase() {
runHook preFixup
# Make sure everything is writable so "strip" et al. work.
if [ -e "$prefix" ]; then chmod -R u+w "$prefix"; fi
# Put man/doc/info under $out/share.
forceShare=${forceShare:=man doc info}
if [ -n "$forceShare" ]; then
for d in $forceShare; do
if [ -d "$prefix/$d" ]; then
if [ -d "$prefix/share/$d" ]; then
echo "both $d/ and share/$d/ exists!"
else
echo "fixing location of $d/ subdirectory"
mkdir -p $prefix/share
if [ -w $prefix/share ]; then
mv -v $prefix/$d $prefix/share
ln -sv share/$d $prefix
fi
fi
fi
done;
fi
if [ -z "$dontGzipMan" ]; then
echo "gzipping man pages"
GLOBIGNORE=.:..:*.gz:*.bz2
for f in "$out"/share/man/*/* "$out"/share/man/*/*/*; do
if [ -f "$f" -a ! -L "$f" ]; then
if gzip -c -n "$f" > "$f".gz; then
rm "$f"
else
rm "$f".gz
fi
fi
done
for f in "$out"/share/man/*/* "$out"/share/man/*/*/*; do
if [ -L "$f" -a -f `readlink -f "$f"`.gz ]; then
ln -sf `readlink "$f"`.gz "$f".gz && rm "$f"
fi
done
unset GLOBIGNORE
fi
# TODO: strip _only_ ELF executables, and return || fail here...
if [ -z "$dontStrip" ]; then
stripDebugList=${stripDebugList:-lib lib32 lib64 libexec bin sbin}
if [ -n "$stripDebugList" ]; then
stripDirs "$stripDebugList" "${stripDebugFlags:--S}"
fi
stripAllList=${stripAllList:-}
if [ -n "$stripAllList" ]; then
stripDirs "$stripAllList" "${stripAllFlags:--s}"
fi
fi
if [ "$havePatchELF" = 1 -a -z "$dontPatchELF" ]; then
patchELF "$prefix"
fi
if [ -z "$dontPatchShebangs" ]; then
patchShebangs "$prefix"
fi
if [ -n "$propagatedBuildInputs" ]; then
mkdir -p "$out/nix-support"
echo "$propagatedBuildInputs" > "$out/nix-support/propagated-build-inputs"
fi
if [ -n "$propagatedNativeBuildInputs" ]; then
mkdir -p "$out/nix-support"
echo "$propagatedNativeBuildInputs" > "$out/nix-support/propagated-native-build-inputs"
fi
if [ -n "$propagatedUserEnvPkgs" ]; then
mkdir -p "$out/nix-support"
echo "$propagatedUserEnvPkgs" > "$out/nix-support/propagated-user-env-packages"
fi
if [ -n "$setupHook" ]; then
mkdir -p "$out/nix-support"
substituteAll "$setupHook" "$out/nix-support/setup-hook"
fi
runHook postFixup
}
installCheckPhase() {
runHook preInstallCheck
echo "installcheck flags: $makeFlags ${makeFlagsArray[@]} $installCheckFlags ${installCheckFlagsArray[@]}"
make ${makefile:+-f $makefile} \
${enableParallelBuilding:+-j${NIX_BUILD_CORES} -l${NIX_BUILD_CORES}} \
$makeFlags "${makeFlagsArray[@]}" \
$installCheckFlags "${installCheckFlagsArray[@]}" ${installCheckTarget:-installcheck}
runHook postInstallCheck
}
distPhase() {
runHook preDist
echo "dist flags: $distFlags ${distFlagsArray[@]}"
make ${makefile:+-f $makefile} $distFlags "${distFlagsArray[@]}" ${distTarget:-dist}
if [ "$dontCopyDist" != 1 ]; then
mkdir -p "$out/tarballs"
# Note: don't quote $tarballs, since we explicitly permit
# wildcards in there.
cp -pvd ${tarballs:-*.tar.gz} $out/tarballs
fi
runHook postDist
}
showPhaseHeader() {
local phase="$1"
case $phase in
unpackPhase) header "unpacking sources";;
patchPhase) header "patching sources";;
configurePhase) header "configuring";;
buildPhase) header "building";;
checkPhase) header "running tests";;
installPhase) header "installing";;
fixupPhase) header "post-installation fixup";;
installCheckPhase) header "running install tests";;
*) header "$phase";;
esac
}
genericBuild() {
header "building $out"
if [ -n "$buildCommand" ]; then
eval "$buildCommand"
return
fi
if [ -z "$phases" ]; then
phases="$prePhases unpackPhase patchPhase $preConfigurePhases \
configurePhase $preBuildPhases buildPhase checkPhase \
$preInstallPhases installPhase $preFixupPhases fixupPhase installCheckPhase \
$preDistPhases distPhase $postPhases";
fi
for curPhase in $phases; do
if [ "$curPhase" = buildPhase -a -n "$dontBuild" ]; then continue; fi
if [ "$curPhase" = checkPhase -a -z "$doCheck" ]; then continue; fi
if [ "$curPhase" = installPhase -a -n "$dontInstall" ]; then continue; fi
if [ "$curPhase" = fixupPhase -a -n "$dontFixup" ]; then continue; fi
if [ "$curPhase" = installCheckPhase -a -z "$doInstallCheck" ]; then continue; fi
if [ "$curPhase" = distPhase -a -z "$doDist" ]; then continue; fi
if [ -n "$tracePhases" ]; then
echo
echo "@ phase-started $out $curPhase"
fi
showPhaseHeader "$curPhase"
dumpVars
# Evaluate the variable named $curPhase if it exists, otherwise the
# function named $curPhase.
eval "${!curPhase:-$curPhase}"
if [ "$curPhase" = unpackPhase ]; then
cd "${sourceRoot:-.}"
fi
if [ -n "$tracePhases" ]; then
echo
echo "@ phase-succeeded $out $curPhase"
fi
stopNest
done
stopNest
}
# Execute the post-hooks.
for i in "${postHooks[@]}"; do $i; done
runHook postHook
# Execute the global user hook (defined through the Nixpkgs
# configuration option ‘stdenv.userHook’). This can be used to set
# global compiler optimisation flags, for instance.
runHook userHook
dumpVars
|
#! /usr/bin/env bash
# Install the vampyr-grub theme: fetch it from GitHub, optionally switch its
# language, copy it into the GRUB themes directory and regenerate the config.
THEME='vampyr-grub'
LANG='English'

# Change to temporary directory (quoted in case TMPDIR contains spaces)
cd "$(mktemp -d)"

# Pre-authorise sudo: `sudo -v` refreshes the cached credentials without
# running a throwaway command (replaces the `sudo echo` idiom).
sudo -v

# Select language, optional
declare -A LANGS=(
  [Chinese]=zh_CN
  [English]=EN
  [French]=FR
  [German]=DE
  [Italian]=IT
  [Norwegian]=NO
  [Portuguese]=PT
  [Russian]=RU
  [Spanish]=ES
  [Ukrainian]=UA
)
LANG_NAMES=($(echo ${!LANGS[*]} | tr ' ' '\n' | sort -n))
PS3='Please select language #: '
select l in "${LANG_NAMES[@]}"
do
  if [[ -v LANGS[$l] ]]; then
    LANG=$l
    break
  else
    echo 'No such language, try again'
  fi
done < /dev/tty

# Fetch and unpack the theme BEFORE distro detection: the BLS icon copy in
# the detection step below reads from ${THEME}-master, which previously did
# not exist yet, so the copy silently never happened.
echo 'Fetching theme archive'
wget -O ${THEME}.zip https://github.com/BUFU1610/${THEME}/archive/master.zip
echo 'Unpacking theme'
unzip ${THEME}.zip

# Detect distro and set GRUB location and update method
GRUB_DIR='grub'
UPDATE_GRUB=''
BOOT_MODE='legacy'
if [[ -d /boot/efi && -d /sys/firmware/efi ]]; then
  BOOT_MODE='UEFI'
fi
echo "Boot mode: ${BOOT_MODE}"
if [[ -e /etc/os-release ]]; then
  source /etc/os-release
  if [[ "$ID" =~ (debian|ubuntu|solus|void) || \
        "$ID_LIKE" =~ (debian|ubuntu) ]]; then
    UPDATE_GRUB='update-grub'
  elif [[ "$ID" =~ (arch|gentoo) || \
          "$ID_LIKE" =~ (archlinux|gentoo) ]]; then
    UPDATE_GRUB='grub-mkconfig -o /boot/grub/grub.cfg'
  elif [[ "$ID" =~ (centos|fedora|opensuse) || \
          "$ID_LIKE" =~ (fedora|rhel|suse) ]]; then
    GRUB_DIR='grub2'
    GRUB_CFG='/boot/grub2/grub.cfg'
    if [[ "$BOOT_MODE" = "UEFI" ]]; then
      GRUB_CFG="/boot/efi/EFI/${ID}/grub.cfg"
    fi
    UPDATE_GRUB="grub2-mkconfig -o ${GRUB_CFG}"
    # BLS entries have 'kernel' class, copy corresponding icon
    if [[ -d /boot/loader/entries && -e ${THEME}-master/icons/${ID}.png ]]; then
      cp ${THEME}-master/icons/${ID}.png ${THEME}-master/icons/kernel.png
    fi
  fi
fi

if [[ "$LANG" != "English" ]]; then
  echo "Changing language to ${LANG}"
  # Comment out the English block and uncomment the selected language block
  # in theme.txt (each block is tagged with a "# XX" language marker line).
  sed -i -r -e '/^\s+# EN$/{n;s/^(\s*)/\1# /}' \
    -e '/^\s+# '"${LANGS[$LANG]}"'$/{n;s/^(\s*)#\s*/\1/}' ${THEME}-master/theme.txt
fi
echo 'Creating GRUB themes directory'
sudo mkdir -p /boot/${GRUB_DIR}/themes/${THEME}
echo 'Copying theme to GRUB themes directory'
sudo cp -r ${THEME}-master/* /boot/${GRUB_DIR}/themes/${THEME}
echo 'Removing other themes from GRUB config'
sudo sed -i '/^GRUB_THEME=/d' /etc/default/grub
echo 'Making sure GRUB uses graphical output'
sudo sed -i 's/^\(GRUB_TERMINAL\w*=.*\)/#\1/' /etc/default/grub
echo 'Removing empty lines at the end of GRUB config' # optional
sudo sed -i -e :a -e '/^\n*$/{$d;N;};/\n$/ba' /etc/default/grub
echo 'Adding new line to GRUB config just in case' # optional
echo | sudo tee -a /etc/default/grub
echo 'Adding theme to GRUB config'
echo "GRUB_THEME=/boot/${GRUB_DIR}/themes/${THEME}/theme.txt" | sudo tee -a /etc/default/grub
echo 'Removing theme installation files'
rm -rf ${THEME}.zip ${THEME}-master
echo 'Updating GRUB'
if [[ $UPDATE_GRUB ]]; then
  eval sudo "$UPDATE_GRUB"
else
  cat << '  EOF'
  --------------------------------------------------------------------------------
  Cannot detect your distro, you will need to run `grub-mkconfig` (as root) manually.
  Common ways:
  - Debian, Ubuntu, Solus and derivatives: `update-grub` or `grub-mkconfig -o /boot/grub/grub.cfg`
  - RHEL, CentOS, Fedora, SUSE and derivatives: `grub2-mkconfig -o /boot/grub2/grub.cfg`
  - Arch, Gentoo and derivatives: `grub-mkconfig -o /boot/grub/grub.cfg`
  --------------------------------------------------------------------------------
  EOF
fi
|
<reponame>payhawk/travelperk-integration
// tslint:disable-next-line: no-namespace
export namespace SCHEMA {
    // Database schema that holds all TravelPerk-integration tables.
    export const NAME = 'travelperk_integration';
    // Physical table names inside the schema.
    export enum TABLE_NAMES {
        ACCESS_TOKENS = 'access_tokens',
        PAYHAWK_API_KEYS = 'payhawk_api_keys',
        INVOICES_SYNC_HISTORY = 'invoices_sync_history',
    }
}
|
#!/bin/bash
# Run the CATO lit test suite with pass debug output disabled, then restore it.
sed -i 's/#define DEBUG_CATO_PASS 1/#define DEBUG_CATO_PASS 0/g' ${CATO_ROOT}/src/cato/debug.h
# Rebuild the pass so the changed debug flag takes effect.
${CATO_ROOT}/src/scripts/build_pass.sh
cd ${CATO_ROOT}/src/test-suite
# Execute every lit test verbosely.
llvm-lit -v .
# Re-enable debug output for development builds.
sed -i 's/#define DEBUG_CATO_PASS 0/#define DEBUG_CATO_PASS 1/g' ${CATO_ROOT}/src/cato/debug.h
|
package common
import (
"encoding/hex"
"fmt"
"github.com/spacemeshos/ed25519"
gosmtypes "github.com/spacemeshos/go-spacemesh/common/types"
)
// LocalAccount is a named ed25519 key pair held in the local store.
type LocalAccount struct {
	Name    string
	PrivKey ed25519.PrivateKey // the pub & private key
	PubKey  ed25519.PublicKey  // only the pub key part
}

// Address derives the account's on-chain address from its public key bytes.
func (a *LocalAccount) Address() gosmtypes.Address {
	return gosmtypes.BytesToAddress(a.PubKey[:])
}
// AccountState holds an account's confirmed counters (Nonce, Balance) along
// with their projected values once pending transactions apply.
type AccountState struct {
	Nonce            uint64
	Balance          uint64
	ProjectedNonce   uint64
	ProjectedBalance uint64
}
// GetAccount looks up the named entry in the store and decodes its
// hex-encoded key material into a LocalAccount. An unknown name yields an
// "account not found" error; malformed hex propagates the decode error.
func (s Store) GetAccount(name string) (*LocalAccount, error) {
	entry, found := s[name]
	if !found {
		return nil, fmt.Errorf("account not found")
	}
	privateKey, err := hex.DecodeString(entry.PrivKey)
	if err != nil {
		return nil, err
	}
	publicKey, err := hex.DecodeString(entry.PubKey)
	if err != nil {
		return nil, err
	}
	return &LocalAccount{Name: name, PrivKey: privateKey, PubKey: publicKey}, nil
}
// ListAccounts returns the names of every stored account.
// Map iteration order is unspecified, so the slice order is not stable.
func (s Store) ListAccounts() []string {
	names := make([]string, 0, len(s))
	for name := range s {
		names = append(names, name)
	}
	return names
}
|
/*
Run.
Along with C and Windows libraries
Remarks:
Refer to fn. cli_io_beta, fn. cli_in, fn. cli_ctrl_at_beta and fn. cli_support_meta_keys.
*/
# define CBR
# define CLI_W32
# include <conio.h>
# include <stdio.h>
# include <stdlib.h>
# include "../../../incl/config.h"
/*
 * Handle a meta-key event against the console state in *argp.
 * Returns 0x00 when argp is NULL, otherwise 0x01.
 * NOTE(review): `meta` and the locals p/i/r/flag are currently unused — the
 * body only validates argp. Presumably a stub awaiting the real meta-key
 * dispatch; confirm before relying on the return codes.
 */
signed(__cdecl cli_run_meta_beta(signed short(meta),CLI_W32_STAT(*argp))) {
    /* **** DATA, BSS and STACK */
    auto signed char *p;
    auto signed i,r;
    auto signed short flag;
    /* **** CODE/TEXT */
    if(!argp) return(0x00); /* guard: no console state supplied */
    return(0x01);
}
|
<filename>spec/cf/cli/app/help_spec.rb
require "spec_helper"
module CF
  module App
    # Specs for `cf help app`: checks the help text, the options listing,
    # and that the help command is reachable without being logged in.
    describe "Help" do
      let(:global) { {} }
      let(:given) { {} }

      # Capture whatever Mothership prints when help is requested for "app".
      subject do
        capture_output { Mothership.new.invoke(:help, :command => "app") }
      end

      it "describes the command" do
        subject
        stdout.rewind
        expect(stdout.readlines.first).to match /Show app information/
      end

      it "prints the options" do
        subject
        stdout.rewind
        expect(stdout.readlines.any? {|line| line =~ /Options:/ }).to be_true
      end

      context "when the user is not logged in" do
        before do
          capture_output { Mothership.new.invoke(:logout) }
        end

        it "does not require login" do
          subject
          stdout.rewind
          expect(stdout.readlines.first).to_not match /Please log in first to proceed/
        end
      end
    end
  end
end
|
<reponame>OSADP/C2C-RI<filename>C2CRIBuildDir/projects/C2C-RI/src/jameleon-test-suite-3_3-RC1-C2CRI/jameleon-core/tst/java/net/sf/jameleon/function/AttributeBrokerTest.java
package net.sf.jameleon.function;
import net.sf.jameleon.bean.Attribute;
import net.sf.jameleon.util.JameleonUtility;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.commons.jelly.JellyContext;
import org.apache.commons.jelly.MissingAttributeException;
import java.io.File;
import java.lang.reflect.Field;
import java.util.LinkedList;
import java.util.List;
public class AttributeBrokerTest extends TestCase {
protected JellyContext context;
protected AttributeBroker broker;
protected AttributeConsumer consumer;
protected static String[] types = {
"primLongAttr","primShortAttr","primCharAttr",
"primBooleanAttr","primByteAttr","primDoubleAttr",
"primFloatAttr"};
class AttributeConsumer implements Attributable {
protected String stringAttr;
protected Boolean booleanAttr;
protected List listAttr;
protected File fileAttr;
protected Double doubleAttr;
protected final String cantSetMeImFinal = new String("Hi");
public int numDescribeAttributesCalls;
protected boolean primBooleanAttr;
protected int primIntAttr;
protected long primLongAttr;
protected double primDoubleAttr;
protected float primFloatAttr;
protected short primShortAttr;
protected byte primByteAttr;
protected char primCharAttr;
public void describeAttributes(AttributeBroker broker) {
numDescribeAttributesCalls++;
}
}
public static void main(String args[]) {
junit.textui.TestRunner.run(suite());
}
public static Test suite() {
return new TestSuite(AttributeBrokerTest.class);
}
public AttributeBrokerTest(String name) {
super(name);
}
public void setUp() {
context = new JellyContext();
consumer = new AttributeConsumer();
broker = new AttributeBroker(consumer);
}
public void testGetAttributeValueFromInstance(){
//Test Object values
consumer.stringAttr = "Another Value";
Attribute attr = createAttribute("stringAttr", "" , false, "Some Value");
Object actual = broker.getAttributeValueFromInstance(attr);
assertNotNull("stringAttr's value should not be null", actual);
assertEquals("stringAttr", "Another Value", (String)actual);
//Test primitive non-zero value
consumer.primIntAttr = 2;
attr = createAttribute("primIntAttr", "" , false, null);
actual = broker.getAttributeValueFromInstance(attr);
assertNotNull("primIntAttr's value should not be null", actual);
assertEquals("primIntAttr", 2, ((Integer)actual).intValue());
//Test primitive (double) non-zero value
consumer.primFloatAttr = 2.0f;
attr = createAttribute("primFloatAttr", "" , false, null);
actual = broker.getAttributeValueFromInstance(attr);
assertNotNull("primFloatAttr's value should not be null", actual);
assertEquals("primFloatAttr", Float.valueOf(2.0f), ((Float)actual));
//Test primitive very large value
consumer.primIntAttr = Integer.MAX_VALUE;
attr = createAttribute("primIntAttr", "" , false, null);
actual = broker.getAttributeValueFromInstance(attr);
assertNotNull("primIntAttr's value should not be null", actual);
assertEquals("primIntAttr", Integer.MAX_VALUE, ((Integer)actual).intValue());
//Test primitive int zero value
consumer.primIntAttr = 0;
attr = createAttribute("primIntAttr", "" , false, null);
actual = broker.getAttributeValueFromInstance(attr);
assertNull("primIntAttr's value should not be null", actual);
//Test primitive byte zero value
consumer.primByteAttr = 0;
attr = createAttribute("primByteAttr", "" , false, null);
actual = broker.getAttributeValueFromInstance(attr);
assertNull("primByteAttr's value should not be null", actual);
//Test primitive long zero value
consumer.primLongAttr = 0;
attr = createAttribute("primLongAttr", "" , false, null);
actual = broker.getAttributeValueFromInstance(attr);
assertNull("primLongAttr's value should not be null", actual);
//Test primitive char zero value
consumer.primCharAttr = 0;
attr = createAttribute("primCharAttr", "" , false, null);
actual = broker.getAttributeValueFromInstance(attr);
assertNull("primCharAttr's value should not be null", actual);
//Test primitive boolean zero value
consumer.primBooleanAttr = false;
attr = createAttribute("primBooleanAttr", "" , false, null);
actual = broker.getAttributeValueFromInstance(attr);
assertNull("primBooleanAttr's value should not be null", actual);
}
public void testSetUpCallsDescribeAttributes() {
consumer.numDescribeAttributesCalls = 0;
broker.setUp();
assertEquals(1, consumer.numDescribeAttributesCalls);
}
public void testRegisterAttribute() {
broker.registerAttribute(createAttribute("stringAttr", "testStringAttribute"));
assertEquals("Number of attributes", 1, broker.attributes.size());
}
public void testTransferStringAttribute() {
broker.registerAttribute(createAttribute("stringAttr", "testStringAttribute"));
broker.transferAttributes(context);
assertNull(consumer.stringAttr);
context.setVariable("testStringAttribute", "testString");
broker.transferAttributes(context);
assertEquals("testString", consumer.stringAttr);
}
public void testTransferBooleanAttribute() {
broker.registerAttribute(createAttribute("booleanAttr", "functionTagTestBooleanAttribute"));
context.setVariable("functionTagTestBooleanAttribute", "true");
broker.transferAttributes(context);
assertEquals(Boolean.TRUE, consumer.booleanAttr);
}
public void testTransferListAttribute() {
broker.registerAttribute(createAttribute("listAttr", "functionTagTestListAttribute"));
LinkedList list = new LinkedList();
list.add("Hello");
list.add("World");
context.setVariable("functionTagTestListAttribute", list);
broker.transferAttributes(context);
assertEquals(2, consumer.listAttr.size());
assertEquals("Hello", consumer.listAttr.get(0));
assertEquals("World", consumer.listAttr.get(1));
}
public void testTransferPrimitiveIntAttributeNull() {
broker.registerAttribute(createAttribute("primIntAttr", "functionTagTestIntAttribute"));
context.setVariable("functionTagTestNoAttribute", "1");
broker.transferAttributes(context);
assertEquals(0, consumer.primIntAttr);
}
public void testTransferPrimitiveIntAttribute() {
broker.registerAttribute(createAttribute("primIntAttr", "functionTagTestIntAttribute"));
context.setVariable("functionTagTestIntAttribute", "1");
broker.transferAttributes(context);
assertEquals(1, consumer.primIntAttr);
}
public void testTransferPrimitiveBooleanAttributeNull() {
broker.registerAttribute(createAttribute("primBooleanAttr", "functionTagTestBoolAttribute"));
context.setVariable("functionTagTestNoAttribute", "true");
broker.transferAttributes(context);
assertEquals(false, consumer.primBooleanAttr);
}
public void testTransferPrimitiveBooleanAttribute() {
broker.registerAttribute(createAttribute("primBooleanAttr", "functionTagTestBoolAttribute"));
context.setVariable("functionTagTestBoolAttribute", "true");
broker.transferAttributes(context);
assertEquals(true, consumer.primBooleanAttr);
}
public void testTransferPrimitiveByteAttributeNull() {
broker.registerAttribute(createAttribute("primByteAttr", "functionTagTestByteAttribute"));
context.setVariable("functionTagTestNoAttribute", "1");
broker.transferAttributes(context);
assertEquals(0, consumer.primByteAttr);
}
public void testTransferPrimitiveByteAttribute() {
broker.registerAttribute(createAttribute("primByteAttr", "functionTagTestByteAttribute"));
context.setVariable("functionTagTestByteAttribute", "1");
broker.transferAttributes(context);
assertEquals(1, consumer.primByteAttr);
}
public void testTransferPrimitiveCharAttributeNull() {
broker.registerAttribute(createAttribute("primCharAttr", "functionTagTestCharAttribute"));
context.setVariable("functionTagTestNoAttribute", "a");
broker.transferAttributes(context);
assertEquals(0, consumer.primCharAttr);
}
public void testTransferPrimitiveCharAttribute() {
broker.registerAttribute(createAttribute("primCharAttr", "functionTagTestCharAttribute"));
context.setVariable("functionTagTestCharAttribute", "a");
broker.transferAttributes(context);
assertEquals('a', consumer.primCharAttr);
}
public void testTransferFile() {
broker.registerAttribute(createAttribute("fileAttr", "fileAttribute"));
context.setVariable("fileAttribute", "file.txt");
broker.transferAttributes(context);
assertEquals("file.txt", consumer.fileAttr.getPath());
broker.attributes.clear();
broker.registerAttribute(createAttribute("fileAttr", "fileAttribute"));
context.setVariable("fileAttribute", new File("file.txt"));
broker.transferAttributes(context);
assertEquals("file.txt", consumer.fileAttr.getPath());
}
public void testTransferDouble() {
broker.registerAttribute(createAttribute("doubleAttr", "doubleAttribute"));
context.setVariable("doubleAttribute", "20.33");
broker.transferAttributes(context);
assertEquals("20.33", ""+consumer.doubleAttr);
broker.attributes.clear();
broker.registerAttribute(createAttribute("doubleAttr", "fileAttribute"));
context.setVariable("fileAttribute", new Double("21.33"));
broker.transferAttributes(context);
assertEquals("21.33", ""+consumer.doubleAttr);
}
public void testTransferNull() {
consumer.stringAttr = "Hello World";
broker.registerAttribute(createAttribute("stringAttr", "functionTagTestStringAttribute"));
broker.transferAttributes(context);
assertEquals("Hello World",consumer.stringAttr);
consumer.primIntAttr = 10;
broker.registerAttribute(createAttribute("primIntAttr", "functionTagTestStringAttribute"));
broker.transferAttributes(context);
assertEquals(10,consumer.primIntAttr);
}
public void testValidateOptionalAttribute() {
broker.registerAttribute(createAttribute("stringAttr", "functionTagTestStringAttribute", AttributeBroker.OPTIONAL));
MissingAttributeException exception = null;
try {
broker.transferAttributes(context);
broker.validate(context);
} catch (MissingAttributeException e) {
exception = e;
}
assertNull("The attribute is not required, so the exception should not have been thrown.", exception);
}
public void testValidateRequiredContextAttribute() {
MissingAttributeException exception = null;
broker.registerAttribute(createAttribute("stringAttr", "functionTagTestStringAttribute", AttributeBroker.REQUIRED));
try {
broker.transferAttributes(context);
broker.validate(context);
} catch (MissingAttributeException e) {
exception = e;
}
assertNotNull("Required attribute is missing, so an exception should have been thrown.", exception);
assertTrue("Exception should say something about attribute functionTagTestStringAttribute",
exception.getMessage().indexOf("functionTagTestStringAttribute") > -1);
}
public void testValidateRequiredInstanceAttribute() {
MissingAttributeException exception = null;
broker.registerAttribute(createAttribute("stringAttr", "", AttributeBroker.REQUIRED));
try {
broker.transferAttributes(context);
broker.validate(context);
} catch (MissingAttributeException e) {
exception = e;
}
assertNotNull("Required attribute is missing, so an exception should have been thrown.", exception);
assertTrue("Exception should say something about attribute stringAttr",
exception.getMessage().indexOf("stringAttr") > -1);
}
public void testNoSuchAttribute() {
RuntimeException exception = null;
broker.registerAttribute(createAttribute("noSuchAttr", "functionTagTestNonexistentAttribute"));
try {
broker.transferAttributes(context);
} catch (RuntimeException e) {
exception = e;
}
assertNotNull("Exception should have been thrown", exception);
assertTrue("Exception should say something about attribute does not exist",
exception.getMessage().indexOf("Instance variable noSuchAttr does not exist") > -1);
}
public void testSetToInstanceVariableOnlyWithDefault() {
Attribute attr = createAttribute("stringAttr", "" , true, "Some Value");
attr.setInstanceVariable(true);
broker.registerAttribute(attr);
broker.transferAttributes(context);
assertEquals("Consumer stringAttr", "Some Value", consumer.stringAttr);
try {
broker.validate(context);
} catch (MissingAttributeException mae) {
fail("No exception should have been thrown because a default value is specified");
}
broker.setConsumerAttribute(attr, "another value");
assertEquals("Consumer stringAttr before transferAttributes", "another value", consumer.stringAttr);
broker.transferAttributes(context);
// assertEquals("Consumer stringAttr", "another value", consumer.stringAttr);
}
public void testNotSettableAttribute() {
String javaVersion = System.getProperty("java.version");
if (javaVersion != null && !javaVersion.startsWith("1.5.") && !javaVersion.startsWith("1.6.")) {
RuntimeException exception = null;
broker.registerAttribute(createAttribute("cantSetMeImFinal", "functionTagTestNonexistentAttribute"));
context.setVariable("functionTagTestNonexistentAttribute","some value");
try {
broker.transferAttributes(context);
} catch (RuntimeException e) {
exception = e;
}
assertNotNull("Exception should have been thrown", exception);
assertTrue("Exception should say something about attribute is not settable",
exception.getMessage().indexOf("Instance variable cantSetMeImFinal is not settable") > -1);
}else{
System.out.println("Skipping testNotSettableAttribute due to bug in JDK 1.5 and 1.6");
}
}
public void testGetValueFromContext() {
assertEquals("default", (String)broker.getValueFromContext(context, "key", "default"));
context.setVariable("key", "value");
assertEquals("value", (String)broker.getValueFromContext(context, "key", "default"));
assertEquals("null contextName & emptyDefault", "", broker.getValueFromContext(context,null,""));
assertNull("null contextName", broker.getValueFromContext(context,null,null));
}
public void testDefaultObject() {
Attribute attr = createAttribute("stringAttr", "functionTagTestStringAttribute", true, "default string value");
broker.registerAttribute(attr);
assertFalse("Value should not be set", attr.isValueSet());
broker.transferAttributes(context);
attr.setValue(null);
assertEquals("default string value",consumer.stringAttr);
context.setVariable("functionTagTestStringAttribute", "this is not the default value");
broker.transferAttributes(context);
assertEquals("this is not the default value",consumer.stringAttr);
}
public void testDefaultPrimitiveNotRequired() {
Attribute attr = createAttribute("primIntAttr", "functionTagTestPrimIntAttribute", false, "0");
broker.registerAttribute(attr);
broker.transferAttributes(context);
assertEquals(0,consumer.primIntAttr);
attr.setValue(null);
context.setVariable("functionTagTestPrimIntAttribute", "2");
broker.transferAttributes(context);
assertEquals(2,consumer.primIntAttr);
}
public void testDefaultPrimitiveRequired() {
Attribute attr = createAttribute("primIntAttr", "functionTagTestPrimIntAttribute", true, "0");
attr.setContextName(null);
broker.registerAttribute(attr);
broker.transferAttributes(context);
try {
broker.validate(context);
} catch (MissingAttributeException mae) {
fail("No exception should have been thrown because a default value is specified");
}
assertEquals(0,consumer.primIntAttr);
attr.setValue(null);
attr.setContextName("functionTagTestPrimIntAttribute");
context.setVariable("functionTagTestPrimIntAttribute", "2");
broker.transferAttributes(context);
assertEquals(2,consumer.primIntAttr);
}
public void testDefaultPrimitiveRequiredNoDefault() {
Attribute attr = createAttribute("primIntAttr", "functionTagTestPrimIntAttribute", true);
context.setVariable("functionTagTestPrimIntAttribute", "0");
broker.registerAttribute(attr);
broker.transferAttributes(context);
try {
broker.validate(context);
} catch (MissingAttributeException mae) {
fail("No exception should have been thrown because a value was set");
}
assertEquals(0,consumer.primIntAttr);
}
public void testDefaultPrimitiveRequiredSetDirectly() {
Attribute attr = createAttribute("primIntAttr", "", true);
broker.registerAttribute(attr);
consumer.primIntAttr = 2;
broker.transferAttributes(context);
try {
broker.validate(context);
} catch (MissingAttributeException mae) {
fail("No exception should have been thrown because a value was set");
}
}
public void testTransferAttributesContextOnly(){
context.setVariable("stringAttr","Some value");
Attribute attr = createAttribute("stringAttr", "", true);
attr.setInstanceVariable(false);
broker.registerAttribute(attr);
broker.transferAttributes(context);
try {
broker.validate(context);
} catch (MissingAttributeException mae) {
fail("The property was set, but is reported as not being set.");
}
}
public void testGetAttributeValue(){
Attribute attr = createAttribute("stringAttr", "",false);
assertNull("stringAttr should be null", broker.getAttributeValue(attr, context));
attr.setDefaultValue("Some value");
assertNotNull("stringAttr should not be null", broker.getAttributeValue(attr,context));
assertEquals("stringAttr", "Some value", (String)broker.getAttributeValue(attr,context));
doAttrTest("primIntAttr");
attr.setDefaultValue("4");
assertNotNull("primIntAttr should not be null", broker.getAttributeValue(attr,context));
for (int i = 0; i < types.length; i++) {
doAttrTest(types[i]);
}
}
public void testGetConsumerField()throws Exception{
Attribute attr = createAttribute("primIntAttr", "7");
Field f = broker.getConsumerField(attr);
consumer.primIntAttr = 10;
assertNotNull("primIntAttr", f);
assertEquals(f.getType().toString(), "int");
assertEquals(10, f.getInt(consumer));
attr = createAttribute("stringAttr", "value");
f = broker.getConsumerField(attr);
consumer.stringAttr = "value";
assertNotNull("stringAttr", f);
assertEquals(f.getType().toString(), "class java.lang.String");
assertEquals("value", f.get(consumer));
attr = createAttribute("nonExistentVariable", "value");
f = broker.getConsumerField(attr);
assertNull("stringAttr", f);
}
public void testSetConsumerAttributeAsPrimitive(){
Attribute attr = createAttribute("primBooleanAttr", "true");
Field f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "true");
assertTrue("primBooleanAttr as 'true'", consumer.primBooleanAttr);
attr = createAttribute("primBooleanAttr", "false");
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "false");
assertFalse("primBooleanAttr as 'false'", consumer.primBooleanAttr);
attr = createAttribute("primByteAttr", "2");
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "2");
assertEquals("primByteAttr as '2'", 2, consumer.primByteAttr);
attr = createAttribute("primCharAttr", "a");
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "a");
assertEquals("primCharAttr as 'a'", 'a', consumer.primCharAttr);
broker.setConsumerAttributeAsPrimitive(f, null);
assertEquals("primCharAttr as 'null'", 0, consumer.primCharAttr);
attr = createAttribute("primDoubleAttr", "7.7");
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "7.7");
assertEquals("primDoubleAttr as '7.7'", 7.7, consumer.primDoubleAttr, 0.0);
attr = createAttribute("primFloatAttr", "7.7");
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "7.7");
assertEquals("primFloatAttr as '7.7'", 7.7f, consumer.primFloatAttr, 0.0f);
attr = createAttribute("primIntAttr", "7");
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "7");
assertEquals("primIntAttr as '7'", 7, consumer.primIntAttr);
attr = createAttribute("primLongAttr", "7");
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "7");
assertEquals("primLongAttr as '7'", 7, consumer.primLongAttr);
attr = createAttribute("primShortAttr", "7");
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsPrimitive(f, "7");
assertEquals("primShortAttr as '7'", 7, consumer.primShortAttr);
}
public void testSetConsumerAttributeAsObject(){
String value = "value";
Attribute attr = createAttribute("stringAttr", value);
Field f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertEquals(value, consumer.stringAttr);
value = null;
attr = createAttribute("listAttr", null);
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertNull(consumer.listAttr);
value = "value";
attr = createAttribute("listAttr", value);
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertEquals(value, (String)consumer.listAttr.get(0));
value = JameleonUtility.fixFileSeparators("some/path");
attr = createAttribute("fileAttr", value);
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertEquals(value, (String)consumer.fileAttr.getPath());
value = null;
attr = createAttribute("fileAttr", value);
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertNull(consumer.fileAttr);
value = "true";
attr = createAttribute("booleanAttr", value);
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertEquals(value, consumer.booleanAttr.toString());
value = null;
attr = createAttribute("booleanAttr", value);
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertNull(consumer.booleanAttr);
value = "7.7";
attr = createAttribute("doubleAttr", value);
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertEquals(value, consumer.doubleAttr.toString());
value = null;
attr = createAttribute("doubleAttr", value);
f = broker.getConsumerField(attr);
broker.setConsumerAttributeAsObject(f, value);
assertNull(consumer.doubleAttr);
}
protected void doAttrTest(String name){
Attribute attr = createAttribute(name, "",false);
assertNull(name+" should be null ", broker.getAttributeValue(attr, context));
}
//Helper methods
protected Attribute createAttribute(String name, String contextName) {
Attribute attr = new Attribute();
attr.setName(name); //change to ContextName?
attr.setContextName(contextName);
attr.setInstanceVariable(true);
return attr;
}
protected Attribute createAttribute(String name, String contextName, boolean required) {
Attribute attr = createAttribute(name, contextName);
attr.setRequired(required);
return attr;
}
protected Attribute createAttribute(String name, String contextName, boolean required, String defaultValue) {
Attribute attr = createAttribute(name, contextName, required);
attr.setDefaultValue(defaultValue);
return attr;
}
}
|
import os
import shlex
import subprocess
# Step 1: Subdomain enumeration using subfinder
def subdomain_enumeration(domain_file):
    """Enumerate subdomains for every domain listed in ``domain_file``.

    Results are written to ``domains.txt``.
    """
    # -dL takes a file of domains; -d expects a literal domain name, so
    # passing the file path with -d would query the wrong target.
    subprocess.run(['subfinder', '-silent', '-dL', domain_file, '-o', 'domains.txt'])
# Step 2: HTTP/HTTPS probing using httpx
def http_probing(domain_file):
    """Probe hosts in ``domain_file`` for live HTTP/HTTPS, appending to hosts.txt."""
    # shell=True requires a single command string: with a list, only the
    # first element is handed to the shell and the pipe is never applied.
    cmd = f'cat {shlex.quote(domain_file)} | httpx -silent | tee -a hosts.txt'
    subprocess.run(cmd, shell=True)
# Step 3: Screenshot capturing using aquatone
def capture_screenshots(domain_file):
    """Capture screenshots of hosts in ``domain_file`` with aquatone (output in screens/)."""
    # aquatone reads targets from stdin, so the pipeline must be a real
    # shell command string — a list with shell=True would only run `cat`.
    cmd = (f'cat {shlex.quote(domain_file)} | aquatone -out screens '
           f'-scan-timeout 200 -screenshot-timeout 60000 -ports xlarge')
    subprocess.run(cmd, shell=True)
# Step 4: Port scanning using naabu
def port_scanning(domain_file):
    """Port-scan every host in ``domain_file``; results go to portscan.txt."""
    # Redirect stdout directly instead of relying on a '>' token, which is
    # inert when passed inside an argument list.
    with open('portscan.txt', 'w') as out:
        subprocess.run(['naabu', '-silent', '-iL', domain_file], stdout=out)
# Step 5: Subdomain takeover check using subjack
def subdomain_takeover_check(domain_file):
    """Check the domains in ``domain_file`` for takeover candidates with subjack."""
    # Without a shell, '~' is not expanded; resolve the fingerprints path here.
    fingerprints = os.path.expanduser('~/fingerprints.json')
    subprocess.run(['subjack', '-w', domain_file, '-t', '100', '-timeout', '20',
                    '-o', 'subjack_out.txt', '--ssl', '-c', fingerprints])
# Step 6: Vulnerability scanning using nuclei
def vulnerability_scanning(hosts_file):
    """Run nuclei CVE templates against hosts in ``hosts_file``, appending to vuln.txt."""
    # A real shell string is needed for the pipe to take effect (see above).
    cmd = f'nuclei -l {shlex.quote(hosts_file)} -t cves/ | tee -a vuln.txt'
    subprocess.run(cmd, shell=True)
# Main function to orchestrate the reconnaissance tasks
def main():
    # 'domains' is the seed file of root domains; each stage consumes the
    # output file produced by the previous one (domains.txt, then hosts.txt).
    domain_file = 'domains'
    subdomain_enumeration(domain_file)
    http_probing('domains.txt')
    capture_screenshots('domains.txt')
    port_scanning('domains.txt')
    subdomain_takeover_check('domains.txt')
    vulnerability_scanning('hosts.txt')

if __name__ == "__main__":
    main()
|
import "../styles/globals.css";
import { RouteGuard } from "../components/RouteGuard";
import Axios from "axios";
import type { AppProps } from "next/app";
import { UserProvider } from "../context/user.context";
import { CustomMessageProvider } from "../context/error.context";
import Head from "next/head";
// Configure axios once at module load; doing this inside the component body
// re-ran the assignment on every render.
Axios.defaults.withCredentials = true;

/**
 * Custom Next.js App: sets the default <head> (title + viewport) and wraps
 * every page with the user context, the message context and the route guard.
 */
function MyApp({ Component, pageProps }: AppProps) {
  return (
    <>
      <Head>
        <title>Reminderse</title>
        <meta name="viewport" content="width=device-width, initial-scale=1" />
      </Head>
      <UserProvider>
        <CustomMessageProvider>
          <RouteGuard>
            <Component {...pageProps} />
          </RouteGuard>
        </CustomMessageProvider>
      </UserProvider>
    </>
  );
}
export default MyApp;
|
<filename>src/structures/command/arguments/types/EmojiArgument.js
'use strict';
const CommandError = require('../../CommandError.js');
const Argument = require('./Argument.js');
const EMOJI_REGEX = /^<(a)?:(\w+):(\d{16,18})>$/;
/**
 * Argument type that resolves a custom emoji, either from a `<:name:id>`
 * mention or by name lookup. With `sameGuildOnly`, only emojis belonging to
 * the invoking guild are accepted.
 */
class EmojiArgument extends Argument {
  static parseOptions(options = {}) {
    const base = super.parseOptions(options);
    return { ...base, sameGuildOnly: Boolean(options.sameGuildOnly) };
  }

  static parse(arg, { t, client, guild }) {
    const match = EMOJI_REGEX.exec(arg);
    if (match) {
      // Group 3 of the regex holds the emoji's snowflake ID.
      const id = match[3];
      const mentioned = client.emojis.cache.get(id);
      if (!mentioned) throw new CommandError(t('errors:invalidEmoji'), this.showUsage);
      if (this.sameGuildOnly && mentioned.guild.id !== guild.id) {
        throw new CommandError(t('errors:emojiNotFromSameGuild'));
      }
      return mentioned;
    }
    // Not a mention — fall back to a name lookup, scoped to the guild when
    // sameGuildOnly is set, otherwise across the whole client cache.
    const source = this.sameGuildOnly ? guild : client;
    const byName = source.emojis.cache.find((emoji) => emoji.name === arg);
    if (!byName) throw new CommandError(t('errors:invalidEmoji'), this.showUsage);
    return byName;
  }
}
module.exports = EmojiArgument;
|
#!/bin/bash
# Launch the sirhill/hardhat dev container with the current project mounted.
CURRENT_DIR=`pwd`
echo $CURRENT_DIR
# Expose app ports (33000/33001 -> 3000/3001), a web UI (8080) and the
# Hardhat/Ganache JSON-RPC endpoints (8545/9545); mount the working tree
# at /home/node/project inside the container.
sudo docker run -it \
    -p 33000:3000 -p 33001:3001 -p 8080:8080 \
    -p 8545:8545 -p 9545:9545 \
    -v $CURRENT_DIR:/home/node/project \
    sirhill/hardhat
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2530-1
#
# Security announcement date: 2012-08-15 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:26 UTC
#
# Operating System: Debian 6 (Squeeze)
# Architecture: i386
#
# Vulnerable packages fix on version:
# - rssh:2.3.2-13squeeze1
#
# Last versions recommended by security team:
# - rssh:2.3.2-13squeeze1
#
# CVE List:
# - CVE-2012-3478
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Pin the fixed version; --only-upgrade avoids installing rssh where absent.
sudo apt-get install --only-upgrade rssh=2.3.2-13squeeze1 -y
|
#!/bin/bash
# Image build script: install the app, strip unused services, clean up.
set -e
source /pd_build/buildconfig
# Install the application.
run /pd_build/release.sh
# Remove cron and sshd entirely, unless we use them
# NOTE(review): the removal is unconditional despite the comment, and the
# second rm is not wrapped in `run` — confirm both are intended.
run rm -r /etc/service/sshd && rm /etc/my_init.d/00_regen_ssh_host_keys.sh
# Clean up after ourselves.
run /pd_build/finalize.sh
|
def total_stars_for_checked_repositories(repos_list, check_function) -> int:
    """Sum the ``"stars"`` counts of every repository accepted by ``check_function``.

    :param repos_list: iterable of repo dicts, each with a ``"stars"`` key
    :param check_function: predicate deciding whether a repo is counted
    :return: total star count over the accepted repositories
    """
    return sum(repo["stars"] for repo in repos_list if check_function(repo))
|
<filename>discovery-plugin-admin-center/src/main/java/com/nepxion/discovery/plugin/admincenter/endpoint/VersionEndpoint.java
package com.nepxion.discovery.plugin.admincenter.endpoint;
/**
* <p>Title: Nepxion Discovery</p>
* <p>Description: Nepxion Discovery</p>
* <p>Copyright: Copyright (c) 2017-2050</p>
* <p>Company: Nepxion</p>
* @author <NAME>
* @version 1.0
*/
import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.endpoint.web.annotation.RestControllerEndpoint;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.jmx.export.annotation.ManagedOperation;
import org.springframework.jmx.export.annotation.ManagedResource;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import com.nepxion.discovery.common.constant.DiscoveryConstant;
import com.nepxion.discovery.plugin.framework.adapter.PluginAdapter;
import com.nepxion.discovery.plugin.framework.context.PluginContextAware;
import com.nepxion.discovery.plugin.framework.event.PluginEventWapper;
import com.nepxion.discovery.plugin.framework.event.VersionClearedEvent;
import com.nepxion.discovery.plugin.framework.event.VersionUpdatedEvent;
@RestController
@RequestMapping(path = "/version")
@Api(tags = { "版本接口" })
@RestControllerEndpoint(id = "version")
@ManagedResource(description = "Version Endpoint")
public class VersionEndpoint {
@Autowired
private PluginContextAware pluginContextAware;
@Autowired
private PluginAdapter pluginAdapter;
@Autowired
private PluginEventWapper pluginEventWapper;
    /** Asynchronously updates the service's dynamic version (delegates to {@code update}). */
    @RequestMapping(path = "/update-async", method = RequestMethod.POST)
    @ApiOperation(value = "异步更新服务的动态版本", notes = "根据指定的localVersion更新服务的dynamicVersion。如果输入的localVersion不匹配服务的localVersion,则忽略;如果如果输入的localVersion为空,则直接更新服务的dynamicVersion", response = ResponseEntity.class, httpMethod = "POST")
    @ResponseBody
    @ManagedOperation
    public ResponseEntity<?> updateAsync(@RequestBody @ApiParam(value = "版本号,格式为[dynamicVersion]或者[dynamicVersion];[localVersion]", required = true) String version) {
        return update(version, true);
    }

    /** Synchronously updates the service's dynamic version (delegates to {@code update}). */
    @RequestMapping(path = "/update-sync", method = RequestMethod.POST)
    @ApiOperation(value = "同步更新服务的动态版本", notes = "根据指定的localVersion更新服务的dynamicVersion。如果输入的localVersion不匹配服务的localVersion,则忽略;如果如果输入的localVersion为空,则直接更新服务的dynamicVersion", response = ResponseEntity.class, httpMethod = "POST")
    @ResponseBody
    @ManagedOperation
    public ResponseEntity<?> updateSync(@RequestBody @ApiParam(value = "版本号,格式为[dynamicVersion]或者[dynamicVersion];[localVersion]", required = true) String version) {
        return update(version, false);
    }

    /** Asynchronously clears the service's dynamic version (delegates to {@code clear}). */
    @RequestMapping(path = "/clear-async", method = RequestMethod.POST)
    @ApiOperation(value = "异步清除服务的动态版本", notes = "根据指定的localVersion清除服务的dynamicVersion。如果输入的localVersion不匹配服务的localVersion,则忽略;如果如果输入的localVersion为空,则直接清除服务的dynamicVersion", response = ResponseEntity.class, httpMethod = "POST")
    @ResponseBody
    @ManagedOperation
    public ResponseEntity<?> clearAsync(@RequestBody(required = false) @ApiParam(value = "版本号,指localVersion,可以为空") String version) {
        return clear(version, true);
    }

    /** Synchronously clears the service's dynamic version (delegates to {@code clear}). */
    @RequestMapping(path = "/clear-sync", method = RequestMethod.POST)
    @ApiOperation(value = "同步清除服务的动态版本", notes = "根据指定的localVersion清除服务的dynamicVersion。如果输入的localVersion不匹配服务的localVersion,则忽略;如果如果输入的localVersion为空,则直接清除服务的dynamicVersion", response = ResponseEntity.class, httpMethod = "POST")
    @ResponseBody
    @ManagedOperation
    public ResponseEntity<?> clearSync(@RequestBody(required = false) @ApiParam(value = "版本号,指localVersion,可以为空") String version) {
        return clear(version, false);
    }

    /** Returns the service's local and dynamic version strings. */
    @RequestMapping(path = "/view", method = RequestMethod.GET)
    @ApiOperation(value = "查看服务的本地版本和动态版本", notes = "", response = ResponseEntity.class, httpMethod = "GET")
    @ResponseBody
    @ManagedOperation
    public ResponseEntity<List<String>> view() {
        return view(false);
    }
private ResponseEntity<?> update(String version, boolean async) {
Boolean discoveryControlEnabled = pluginContextAware.isDiscoveryControlEnabled();
if (!discoveryControlEnabled) {
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("Discovery control is disabled");
}
if (StringUtils.isEmpty(version)) {
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("Version can't be null or empty");
}
String dynamicVersion = null;
String localVersion = null;
String[] versionArray = StringUtils.split(version, DiscoveryConstant.SEPARATE);
if (versionArray.length == 2) {
dynamicVersion = versionArray[0];
localVersion = versionArray[1];
} else if (versionArray.length == 1) {
dynamicVersion = versionArray[0];
} else {
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("Invalid version format, it must be '[dynamicVersion]' or '[dynamicVersion];[localVersion]'");
}
pluginEventWapper.fireVersionUpdated(new VersionUpdatedEvent(dynamicVersion, localVersion), async);
return ResponseEntity.ok().body(DiscoveryConstant.OK);
}
private ResponseEntity<?> clear(String version, boolean async) {
Boolean discoveryControlEnabled = pluginContextAware.isDiscoveryControlEnabled();
if (!discoveryControlEnabled) {
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body("Discovery control is disabled");
}
// 修复Swagger的一个Bug,当在Swagger界面不输入版本号的时候,传到后端变成了“{}”
if (StringUtils.isNotEmpty(version) && StringUtils.equals(version.trim(), "{}")) {
version = null;
}
pluginEventWapper.fireVersionCleared(new VersionClearedEvent(version), async);
return ResponseEntity.ok().body(DiscoveryConstant.OK);
}
private ResponseEntity<List<String>> view(boolean async) {
List<String> versionList = new ArrayList<String>(2);
String localVersion = pluginAdapter.getLocalVersion();
String dynamicVersion = pluginAdapter.getDynamicVersion();
versionList.add(StringUtils.isNotEmpty(localVersion) ? localVersion : StringUtils.EMPTY);
versionList.add(StringUtils.isNotEmpty(dynamicVersion) ? dynamicVersion : StringUtils.EMPTY);
return ResponseEntity.ok().body(versionList);
}
}
|
#!/bin/bash
# Continuously recompile on source changes via sbt's ~compile trigger.
# Fix: quote $(dirname "$0") so the script works from a path containing spaces.
echo "Entering continuous compilation loop ..."
"$(dirname "$0")/sbt.sh" --loop --no-jrebel "$@" ~compile
|
<reponame>JLLeitschuh/movingcode<gh_stars>1-10
/**
* Copyright (C) 2012 52°North Initiative for Geospatial Open Source Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.n52.movingcode.runtime.coderepository;
import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.HashSet;
import org.apache.xmlbeans.XmlException;
import org.n52.movingcode.runtime.codepackage.Constants;
import org.n52.movingcode.runtime.codepackage.MovingCodePackage;
import de.tudresden.gis.geoprocessing.movingcode.schema.PackageDescriptionDocument;
/**
* This class implements an {@link MovingCodeRepository} for local plain (unzipped) packages, stored
* in a nested folder structure. This folder structure shall have the following appearance:
*
* <absPath>-<folder1>-<packagedescription.xml>
* | \<workspacefolder1>
* |
* -<folder2>-<packagedescription.xml>
* | \<workspacefolder2>
* |
* -<folder3>-<packagedescription.xml>
* | \<workspacefolder3>
* |
* ...
* |
* -<folderN>-<processdescriptionN>
* \<workspacefolderN>
*
* For any sub-folder found in <absPath> it will be assumed that it potentially contains a plain (unzipped)
* Code Package. Thus, if the parser encounters any <packagedescription.xml> file, it will attempt an
* interpretation as a package description.
*
* This Repo performs occasional checks for updated content.
* (Interval for periodical checks is given by {@link MovingCodeRepository#localPollingInterval})
*
* @author <NAME>, <NAME>
*
*/
public final class LocalPlainRepository extends AbstractRepository {
private final File directory;
private final Thread updateThread;
/**
*
* Constructor for file system based repositories. Scans all sub-directories of a given sourceDirectory
* for valid workspaces and attempt to interpret them as MovingCodePackages.
* Incomplete or malformed packages will be ignored.
*
* @param sourceDirectory {@link File} - the directory to be scanned for Moving Code Packages.
*
*/
public LocalPlainRepository(File sourceDirectory) {
this.directory = sourceDirectory;
// load packages from folder
updateContent();
// start update thread
updateThread = new UpdateInventoryThread();
updateThread.start();
}
private synchronized void updateContent(){
PackageInventory newInventory = new PackageInventory();
// recursively obtain all folders in sourceDirectory
Path repoRoot = FileSystems.getDefault().getPath(directory.getAbsolutePath());
Collection<Path> potentialPackageFolders = listSubdirs(repoRoot);
LOGGER.info("Scanning directory: " + directory.getAbsolutePath());
for (Path currentFolder : potentialPackageFolders) {
// attempt to read packageDescription XML
File packageDescriptionFile = new File(currentFolder.toFile(), Constants.PACKAGE_DESCRIPTION_XML);
if (!packageDescriptionFile.exists()){
continue; // skip this and immediately jump to the next iteration
}
PackageDescriptionDocument pd = null;
try {
pd = PackageDescriptionDocument.Factory.parse(packageDescriptionFile);
} catch (XmlException e) {
// silently skip this and immediately jump to the next iteration
continue;
} catch (IOException e) {
// silently skip this and immediately jump to the next iteration
continue;
}
// attempt to access workspace root folder
String workspace = pd.getPackageDescription().getWorkspace().getWorkspaceRoot();
if (workspace.startsWith("./")){
workspace = workspace.substring(2); // remove leading "./" if it exists
}
File workspaceDir = new File(currentFolder.toFile(), workspace);
if (!workspaceDir.exists()){
continue; // skip this and immediately jump to the next iteration
}
MovingCodePackage mcPackage = new MovingCodePackage(workspaceDir, pd);
// validate
// and add to package map
// and add current file to zipFiles map
if (mcPackage.isValid()) {
newInventory.add(mcPackage);
LOGGER.info("Found package: " + currentFolder + "; using ID: " + mcPackage.getPackageId().toString());
}
else {
LOGGER.error(currentFolder + " is an invalid package.");
}
}
this.updateInventory(newInventory);
}
private static final Collection<Path> listSubdirs(Path path) {
Collection<Path> dirs = new HashSet<Path>();
DirectoryStream<Path> stream;
try {
stream = Files.newDirectoryStream(path);
for (Path entry : stream) {
if (Files.isDirectory(entry)) {
dirs.add(entry);
dirs.addAll(listSubdirs(entry));
}
// files.add(entry);
}
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
return dirs;
}
/**
* A thread that occasionally updates the repo's inventory
* triggers a content reload if required.
*
* @author <NAME>
*
*/
private final class UpdateInventoryThread extends Thread {
private static final long updateInterval = MovingCodeRepository.localPollingInterval;
@Override
public void run() {
while(true){ // spin forever
LOGGER.debug("Update thread started."
+"\nDirectory: " + directory.getAbsolutePath()
+ "\nUpdate interval: " + updateInterval + " milliseconds"
);
try {
Thread.sleep(updateInterval);
} catch (InterruptedException e1) {
LOGGER.debug("Interrupt received. Update thread stopped.");
this.interrupt();
}
updateContent();
}
}
}
@Override
protected void finalize() throws Throwable {
updateThread.interrupt();
super.finalize();
}
}
|
/**
* @license
* Copyright (c) 2014 The Polymer Project Authors. All rights reserved.
* This code may only be used under the BSD style license found at
* http://polymer.github.io/LICENSE.txt The complete set of authors may be found
* at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
* be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
* Google as part of the polymer project is also subject to an additional IP
* rights grant found at http://polymer.github.io/PATENTS.txt
*/
// Establish scope.
const extendedWindow = window;
// Ensure window.WebComponents exists so later code can attach flags to it.
extendedWindow['WebComponents'] = extendedWindow['WebComponents'] || {
  'flags': {},
};
// loading script
const file = 'webcomponents-bundle';
// Find the <script> tag that loaded this bundle, so its attributes can be
// read as flags (any attribute other than "src" becomes a flag below).
const script = document.querySelector('script[src*="' + file + '"]');
// Matches URL query parameters of the form "wc-<flagName>".
const flagMatcher = /wc-(.+)/;
// Note(rictic): a lot of this code looks wrong. Should we be pulling
// the flags local variable off of window.WebComponents.flags? If not
// then why check for noOpts, which can't possibly have been set?
// Flags. Convert url arguments to flags
const flags = {};
// NOTE: `flags` was created empty just above, so this condition is always
// true (see the note above) — kept as-is to preserve behavior.
if (!flags['noOpts']) {
  // from url: every "wc-<name>=<value>" query parameter becomes a flag;
  // a parameter without a value is stored as boolean true.
  location.search
    .slice(1)
    .split('&')
    .forEach(function (option) {
      const parts = option.split('=');
      let match;
      if (parts[0] && (match = parts[0].match(flagMatcher))) {
        flags[match[1]] = parts[1] || true;
      }
    });
  // from script: every non-"src" attribute on the loading script tag is a flag
  if (script) {
    for (let i = 0, a; (a = script.attributes[i]); i++) {
      if (a.name !== 'src') {
        flags[a.name] = a.value || true;
      }
    }
  }
  // log flags: "log" may be a comma-separated list; convert to a lookup object
  const log = {};
  if (flags['log'] && flags['log']['split']) {
    const parts = flags['log'].split(',');
    parts.forEach(function (f) {
      log[f] = true;
    });
  }
  flags['log'] = log;
}
// exports: publish the parsed flags and force the polyfills they request
extendedWindow['WebComponents']['flags'] = flags;
// "shadydom" forces the ShadyDOM polyfill; "noPatch" is passed through
// (the string 'true' is normalized to boolean true).
const forceShady = flags['shadydom'];
if (forceShady) {
  extendedWindow['ShadyDOM'] = extendedWindow['ShadyDOM'] || {};
  extendedWindow['ShadyDOM']['force'] = forceShady;
  const noPatch = flags['noPatch'];
  extendedWindow['ShadyDOM']['noPatch'] = noPatch === 'true' ? true : noPatch;
}
// "register"/"ce" forces the Custom Elements polyfill even when native
// customElements exist.
const forceCE = (flags['register'] || flags['ce']);
if (forceCE && window['customElements']) {
  extendedWindow['customElements']['forcePolyfill'] = forceCE;
}
export {};
//# sourceMappingURL=flag-parser.js.map
|
<filename>pkg/deviceclaimingserver/deviceclaimingserver.go
// Copyright © 2021 The Things Network Foundation, The Things Industries B.V.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package deviceclaimingserver
import (
"context"
pbtypes "github.com/gogo/protobuf/types"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"go.thethings.network/lorawan-stack/v3/pkg/component"
"go.thethings.network/lorawan-stack/v3/pkg/log"
"go.thethings.network/lorawan-stack/v3/pkg/ttnpb"
"go.thethings.network/lorawan-stack/v3/pkg/types"
"go.thethings.network/lorawan-stack/v3/pkg/web"
"google.golang.org/grpc"
)
// EndDeviceClaimingUpstream provides upstream methods.
type EndDeviceClaimingUpstream interface {
	// SupportsJoinEUI returns whether this EndDeviceClaimingServer is configured to a Join Server that supports this Join EUI.
	SupportsJoinEUI(types.EUI64) bool
	// RegisterRoutes registers web routes.
	RegisterRoutes(server *web.Server)
	// Claim claims an end device on the upstream and returns its identifiers.
	Claim(ctx context.Context, req *ttnpb.ClaimEndDeviceRequest) (ids *ttnpb.EndDeviceIdentifiers, err error)
	// AuthorizeApplication authorizes an application for claiming on the upstream.
	AuthorizeApplication(ctx context.Context, req *ttnpb.AuthorizeApplicationRequest) (*pbtypes.Empty, error)
	// UnauthorizeApplication revokes a previously granted claiming authorization.
	UnauthorizeApplication(ctx context.Context, ids *ttnpb.ApplicationIdentifiers) (*pbtypes.Empty, error)
}
// DeviceClaimingServer is the Device Claiming Server.
type DeviceClaimingServer struct {
	*component.Component
	ctx    context.Context
	config Config

	// endDeviceClaimingUpstreams maps an upstream type name to its
	// implementation; the "default" entry is a no-op fallback (see New).
	endDeviceClaimingUpstreams    map[string]EndDeviceClaimingUpstream
	gatewayClaimingServerUpstream ttnpb.GatewayClaimingServerServer

	// grpc holds the gRPC service implementations registered by RegisterServices.
	grpc struct {
		endDeviceClaimingServer *endDeviceClaimingServer
		gatewayClaimingServer   *gatewayClaimingServer
	}
}

const (
	// defaultType is the upstream map key used for the fallback upstream.
	defaultType = "default"
)
// New returns a new Device Claiming component.
func New(c *component.Component, conf *Config, opts ...Option) (*DeviceClaimingServer, error) {
	ctx := log.NewContextWithField(c.Context(), "namespace", "deviceclaimingserver")
	dcs := &DeviceClaimingServer{
		Component:                  c,
		ctx:                        ctx,
		config:                     *conf,
		endDeviceClaimingUpstreams: make(map[string]EndDeviceClaimingUpstream),
	}
	// Apply caller-provided options before the defaults are wired in.
	// NOTE(review): the no-op defaults below overwrite any "default" upstream
	// that an option may have set — confirm this ordering is intended.
	for _, opt := range opts {
		opt(dcs)
	}
	dcs.endDeviceClaimingUpstreams[defaultType] = noopEDCS{}
	dcs.gatewayClaimingServerUpstream = noopGCLS{}
	// TODO: Implement JS Clients (https://github.com/TheThingsNetwork/lorawan-stack/issues/4841#issuecomment-998294988)
	// Switch on the API type defined for a Join EUI and instantiate clients and add to `dcs.endDeviceClaimingUpstreams`.
	dcs.grpc.endDeviceClaimingServer = &endDeviceClaimingServer{
		DCS: dcs,
	}
	dcs.grpc.gatewayClaimingServer = &gatewayClaimingServer{
		DCS: dcs,
	}
	// Register the gRPC services and web routes with the parent component.
	c.RegisterGRPC(dcs)
	c.RegisterWeb(dcs)
	return dcs, nil
}
// Option configures DeviceClaimingServer.
type Option func(*DeviceClaimingServer)

// Context returns the context of the Device Claiming Server.
func (dcs *DeviceClaimingServer) Context() context.Context {
	return dcs.ctx
}

// Roles returns the roles that the Device Claiming Server fulfills.
func (dcs *DeviceClaimingServer) Roles() []ttnpb.ClusterRole {
	return []ttnpb.ClusterRole{ttnpb.ClusterRole_DEVICE_CLAIMING_SERVER}
}

// RegisterServices registers services provided by dcs at s.
func (dcs *DeviceClaimingServer) RegisterServices(s *grpc.Server) {
	ttnpb.RegisterEndDeviceClaimingServerServer(s, dcs.grpc.endDeviceClaimingServer)
	ttnpb.RegisterGatewayClaimingServerServer(s, dcs.grpc.gatewayClaimingServer)
}

// RegisterHandlers registers gRPC-gateway handlers for both claiming services at s.
func (dcs *DeviceClaimingServer) RegisterHandlers(s *runtime.ServeMux, conn *grpc.ClientConn) {
	ttnpb.RegisterEndDeviceClaimingServerHandler(dcs.Context(), s, conn)
	ttnpb.RegisterGatewayClaimingServerHandler(dcs.Context(), s, conn)
}

// RegisterRoutes implements web.Registerer. It registers the routes of every
// configured end device claiming upstream at the web server.
func (dcs *DeviceClaimingServer) RegisterRoutes(server *web.Server) {
	for _, edcs := range dcs.endDeviceClaimingUpstreams {
		edcs.RegisterRoutes(server)
	}
}
|
#ifndef DVDSHOP_MOVIE_H
#define DVDSHOP_MOVIE_H
#include <string>
// Abstract base class for a movie title in the DVD shop.
// Concrete subclasses define the rental charge (determine_amount) and the
// renter points (determine_points) for their price category.
class Movie
{
private:
    std::string title;  // display title of the movie

public:
    // Takes the title by value so callers can pass temporaries cheaply.
    explicit Movie(std::string title);

    // Bug fix: the class is polymorphic (pure virtual members) but had no
    // virtual destructor, so deleting a subclass through a Movie* was
    // undefined behaviour.
    virtual ~Movie() = default;

    // Returns the movie's title.
    [[nodiscard]] const std::string& get_title() const;

    // Charge for renting this movie for `days_rented` days.
    [[nodiscard]] virtual double determine_amount(int days_rented) const = 0;

    // Renter points earned for a rental of `days_rented` days.
    [[nodiscard]] virtual int determine_points(int days_rented) const = 0;
};
#endif//DVDSHOP_MOVIE_H
|
# coding: utf-8
from __future__ import unicode_literals
from admitad.items.base import Item
__all__ = [
'Coupons',
'CouponsForWebsite',
'CouponsCategories',
]
class CouponsBase(Item):
    """Shared ordering/filtering configuration for the coupon API endpoints."""

    # Fields the API allows results to be ordered by.
    ORDERING = ('name', 'date_start', 'date_end', 'rating',)

    # Maps each supported filter name to the sanitizer applied to its value;
    # all filters accept a blank value.
    FILTERING = {
        'campaign': lambda x: Item.sanitize_integer_array(x, 'campaign', blank=True),
        'campaign_category': lambda x: Item.sanitize_integer_array(x, 'campaign_category', blank=True),
        'category': lambda x: Item.sanitize_integer_array(x, 'category', blank=True),
        'type': lambda x: Item.sanitize_string_value(x, 'type', blank=True),
        'is_exclusive': lambda x: Item.sanitize_bool_value(x, 'is_exclusive', blank=True),
        'is_unique': lambda x: Item.sanitize_bool_value(x, 'is_unique', blank=True),
        'is_personal': lambda x: Item.sanitize_bool_value(x, 'is_personal', blank=True),
        'is_planned': lambda x: Item.sanitize_bool_value(x, 'is_planned', blank=True),
        'search': lambda x: Item.sanitize_string_value(x, 'search', blank=True),
    }
class Coupons(CouponsBase):
    """
    List of coupons
    """

    SCOPE = 'coupons'

    URL = Item.prepare_url('coupons')
    SINGLE_URL = Item.prepare_url('coupons/%(coupon_id)s')

    def get(self, **kwargs):
        """
        Fetch a filtered, ordered, paginated list of coupons.

        Args:
            campaign (list of int)
            campaign_category (list of int)
            category (list of int)
            type (str)
            limit (int)
            offset (int)
            order_by (str)
        """
        ordering = {'order_by': kwargs.get('order_by'), 'available': self.ORDERING}
        filtering = {'filter_by': kwargs, 'available': self.FILTERING}
        return (self.transport.get()
                .set_pagination(**kwargs)
                .set_ordering(ordering)
                .set_filtering(filtering)
                .request(url=self.URL))

    def getOne(self, _id, **kwargs):
        """
        Fetch a single coupon.

        Args:
            _id (int)
        """
        return self.transport.get().request(
            url=self.SINGLE_URL,
            coupon_id=Item.sanitize_id(_id)
        )
class CouponsForWebsite(CouponsBase):
    """
    List of the website coupons
    """

    SCOPE = 'coupons_for_website'

    URL = Item.prepare_url('coupons/website/%(website_id)s')
    SINGLE_URL = Item.prepare_url('coupons/%(campaign_id)s/website/%(website_id)s')

    def get(self, _id, **kwargs):
        """
        Fetch the coupons of a website. Here _id is a websites id.

        Args:
            _id (int)
            campaign (list of int)
            campaign_category (list of int)
            category (list of int)
            type (str)
            limit (int)
            offset (int)
            order_by (str)
        """
        ordering = {'order_by': kwargs.get('order_by'), 'available': self.ORDERING}
        filtering = {'filter_by': kwargs, 'available': self.FILTERING}
        return (self.transport.get()
                .set_pagination(**kwargs)
                .set_ordering(ordering)
                .set_filtering(filtering)
                .request(url=self.URL, website_id=Item.sanitize_id(_id)))

    def getOne(self, _id, c_id, **kwargs):
        """
        Fetch one coupon of a website. Here _id is a websites id
        and c_id is a coupon id.

        Args:
            _id (int)
            c_id (int)
        """
        return self.transport.get().request(
            url=self.SINGLE_URL,
            website_id=Item.sanitize_id(_id),
            campaign_id=Item.sanitize_id(c_id)
        )
class CouponsCategories(CouponsBase):
    """Coupon categories (public data)."""

    SCOPE = 'public_data'

    URL = Item.prepare_url('coupons/categories')
    SINGLE_URL = Item.prepare_url('coupons/categories/%(coupon_category_id)s')

    def get(self, **kwargs):
        """
        Fetch the paginated list of coupon categories.

        Args:
            limit (int)
            offset (int)
        """
        request = self.transport.get()
        return request.set_pagination(**kwargs).request(url=self.URL)

    def getOne(self, coupon_category_id):
        """
        Fetch a single coupon category.

        Args:
            coupon_category_id (int)
        """
        return self.transport.get().request(
            url=self.SINGLE_URL,
            coupon_category_id=Item.sanitize_id(coupon_category_id)
        )
|
def Fibonacci(n):
    """Return the n-th Fibonacci number (F(0)=0, F(1)=1).

    Iterative two-variable implementation: O(n) time, O(1) space.
    Fixes the original, which raised IndexError for n == 0 because it
    wrote output[1] into a list of length 1, and allocated O(n) memory.

    Args:
        n: non-negative index into the Fibonacci sequence.

    Returns:
        The n-th Fibonacci number.

    Raises:
        ValueError: if n is negative.
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
|
def createGrid(data, verbose):
    """
    Variables for all board related data such as
    food, height, width, and snakes
    """
    # Board geometry and occupants, straight from the request payload.
    height = data['board']['height']
    width = data['board']['width']
    foodPos = data['board']['food']
    enemySnakes = data['board']['snakes']
    # NOTE(review): only the FIRST snake in the list is taken here, and only
    # its body is plotted below — other snakes get just their heads marked.
    # Also raises IndexError when the snakes list is empty. Verify intent.
    enemySnakeHeads = data['board']['snakes'][0]
    # Cell codes written into the grid (head/snake/tail all share value 1).
    head = 1
    snake = 1
    tail = 1
    # NOTE(review): foods == 0, the same value as an empty cell, so food is
    # effectively invisible on the grid — confirm the intended cell code.
    foods = 0
    """ If verbose is true, debugging print statements will be printed """
    if verbose:
        # Python 2 print statements; `json` must be imported at module level.
        print "Height: ", height, '\n'
        print "Width: ", width, '\n'
        print "Food Position: ", json.dumps(foodPos, indent=4, sort_keys=True), '\n'
        print "enemySnakes: ", json.dumps(enemySnakes, indent=4, sort_keys=True), '\n'
        print "enemySnakeHeads: ", json.dumps(enemySnakeHeads, indent=4, sort_keys=True), '\n'
    """
    Our snake information such as id, name, position,
    and health
    """
    id = data['you']['id']
    name = data['you']['name']
    body = data['you']['body']
    xHeadPos = body[0]['x']
    yHeadPos = body[0]['y']
    xTailPos = body[-1]['x']
    yTailPos = body[-1]['y']
    """ Grid Creation """
    # Row-major grid addressed as grid[y][x]; 0 means an empty cell.
    grid = [[0 for col in range(width)] for row in range(height)]
    """ Plotting out the food in the board """
    for food in foodPos:
        grid[food['y']][food['x']] = foods
    """ Plotting snakes that are on the board in the grid """
    for cords in enemySnakeHeads['body']:
        grid[cords['y']][cords['x']] = snake
    for snakeHeads in enemySnakes:
        x = snakeHeads['body'][0]['x']
        y = snakeHeads['body'][0]['y']
        grid[y][x] = head
    """ Our snakes head and body positions """
    for cords in body:
        grid[cords['y']][cords['x']] = snake
    grid[yHeadPos][xHeadPos] = head
    grid[yTailPos][xTailPos] = tail
    return grid
|
import { ServiceFactory } from "../factories/serviceFactory";
import { IJsonStorageService } from "../models/services/IJsonStorageService";
import { IStorageService } from "../models/services/IStorageService";
/**
 * Storage service implementation for Local storage.
 */
export class JsonStorageService implements IJsonStorageService {
    /**
     * Underlying byte-oriented storage service resolved from the factory.
     */
    private readonly _storageService: IStorageService;

    /**
     * Create a new instance of JsonStorageService.
     */
    constructor() {
        this._storageService = ServiceFactory.get<IStorageService>("storage");
    }

    /**
     * Store an object in the storage system.
     * @param name The name of the item to store.
     * @param obj The object to serialize and store.
     * @returns Promise.
     */
    public async set<T>(name: string, obj: T): Promise<void> {
        const serialized = JSON.stringify(obj, undefined, "\t");
        await this._storageService.set(name, Buffer.from(serialized));
    }

    /**
     * Get an object from the storage system.
     * @param name The name of the item to retrieve.
     * @returns The object retrieved, or undefined when absent.
     */
    public async get<T>(name: string): Promise<T | undefined> {
        const buffer = await this._storageService.get(name);
        if (!buffer) {
            return undefined;
        }
        return JSON.parse(buffer.toString()) as T;
    }

    /**
     * Remove an object from the storage system.
     * @param name The name of the item to remove.
     * @returns Promise.
     */
    public async remove(name: string): Promise<void> {
        await this._storageService.remove(name);
    }
}
|
# Termux build recipe for lftp.
TERMUX_PKG_HOMEPAGE=https://lftp.tech/
TERMUX_PKG_DESCRIPTION="FTP/HTTP client and file transfer program"
TERMUX_PKG_LICENSE="GPL-3.0"
TERMUX_PKG_MAINTAINER="@termux"
TERMUX_PKG_VERSION=4.9.2
TERMUX_PKG_SRCURL=https://lftp.tech/ftp/lftp-${TERMUX_PKG_VERSION}.tar.xz
TERMUX_PKG_SHA256=c517c4f4f9c39bd415d7313088a2b1e313b2d386867fe40b7692b83a20f0670d
TERMUX_PKG_REVISION=2
TERMUX_PKG_DEPENDS="libandroid-support, libc++, libexpat, libiconv, openssl, readline, libidn2, zlib"
# (1) Android has dn_expand, but lftp assumes that dn_skipname then exists, which it does not on android.
# (2) Use --with-openssl to use openssl instead of gnutls.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
ac_cv_header_glob_h=no
ac_cv_func_dn_expand=no
--with-openssl
--with-expat=$TERMUX_PREFIX
--with-readline=$TERMUX_PREFIX
--with-zlib=$TERMUX_PREFIX
"

# Hook run by the Termux build system before ./configure.
termux_step_pre_configure() {
	# NOTE(review): presumably disables lftp's inline getpass implementation
	# because it does not build/work on Android — confirm before removing.
	CXXFLAGS+=" -DNO_INLINE_GETPASS=1"
}
|
<filename>src/entrypoints/main/router/pages/main-page/index.tsx<gh_stars>0
import React, { memo, FC, useEffect } from 'react';
import { compose } from 'redux';
import { Link as RouterLink, RouteComponentProps } from 'react-router-dom';
import { InView } from 'react-intersection-observer';
import Root from '../../../../../components/root';
import Container from '../../../../../components/container';
import ParallaxContainer from '../../../../../components/parallax-container';
import {
ContentBox,
ContentBoxHeader,
ContextBoxBody,
SC as ContentBoxSC
} from '../../../../../components/content-box';
import Link, { Variant } from '../../../../../components/link';
import {
InfoBox,
InfoBoxBody,
InfoBoxTitle,
SC as InfoBoxSC
} from '../../../../../components/info-box';
import Translation from '../../../../../components/translation';
import withNews, {
Props as CmsNewsProps
} from '../../../../../components/with-cms-news';
import withPage, {
Props as CmsPageProps
} from '../../../../../components/with-cms-page';
import { PATHNAME } from '../../../../../enums';
import { dateStringToDate, formatDate } from '../../../../../utils/date';
import { convertToSanitizedHtml } from '../../../../../utils/markdown-converter';
import SC from './styled';
// Feature toggle read once at module load; any stored (truthy) value enables the full page.
const isFeatureToggleActive = localStorage.getItem('DF_TOGGLE');
// CMS article id that provides this page's content modules.
const articleId = 'a8ba0c51-4693-401c-b2c8-61dfe144cc83';

interface Props extends RouteComponentProps, CmsNewsProps, CmsPageProps {}

/**
 * Main landing page: renders CMS content modules (the first as a banner)
 * plus the latest news entries; falls back to a bare title when the
 * DF_TOGGLE feature toggle is off.
 */
const MainPage: FC<Props> = ({
  cmsNews,
  cmsNewsActions: { getNewsRequested: getNews },
  cmsPage,
  cmsPageActions: { getCmsPageRequested: getCmsPage, resetCmsPage }
}) => {
  useEffect(() => {
    getNews({ pageLimit: 2 });
    getCmsPage(articleId);
    return () => {
      resetCmsPage();
    };
  }, []);

  // Fix: the previous `modules.shift()` mutated the field_modules array held
  // in the CMS page state; destructuring leaves the source array untouched.
  const [firstElement, ...modules] = cmsPage?.field_modules ?? [];

  return isFeatureToggleActive ? (
    <ParallaxContainer>
      <Root>
        <Container>
          <SC.Banner>
            <SC.Row animate>
              <ContentBox>
                <ContentBoxHeader>
                  <ContentBoxSC.ContentBoxHeader.Title>
                    {firstElement?.field_title}
                  </ContentBoxSC.ContentBoxHeader.Title>
                </ContentBoxHeader>
                <ContextBoxBody>
                  <div
                    // eslint-disable-next-line react/no-danger
                    dangerouslySetInnerHTML={{
                      __html: convertToSanitizedHtml(
                        firstElement?.field_body?.processed
                      )
                    }}
                  />
                  {firstElement?.field_link && (
                    <Link
                      variant={Variant.PRIMARY}
                      as={RouterLink}
                      to={firstElement?.field_link?.uri.replace(
                        'internal:',
                        ''
                      )}
                    >
                      {firstElement?.field_link?.title}
                    </Link>
                  )}
                </ContextBoxBody>
              </ContentBox>
            </SC.Row>
          </SC.Banner>
          <SC.MainContent>
            {/* NOTE(review): unlike the banner above, ContextBoxBody is nested
                inside ContentBoxHeader here — confirm this markup is intended. */}
            {modules.map((module: any) => (
              <InView key={module.id} triggerOnce threshold={0.1}>
                {({ inView, ref }) => (
                  <SC.Row ref={ref} animate={inView}>
                    <ContentBox>
                      <ContentBoxHeader>
                        <ContentBoxSC.ContentBoxHeader.Title>
                          {module?.field_title}
                        </ContentBoxSC.ContentBoxHeader.Title>
                        <ContextBoxBody>
                          <div
                            // eslint-disable-next-line react/no-danger
                            dangerouslySetInnerHTML={{
                              __html: convertToSanitizedHtml(
                                module?.field_body?.processed
                              )
                            }}
                          />
                          {module?.field_link && (
                            <Link
                              as={RouterLink}
                              to={module?.field_link?.uri.replace(
                                'internal:',
                                ''
                              )}
                            >
                              {module?.field_link?.title}
                            </Link>
                          )}
                        </ContextBoxBody>
                      </ContentBoxHeader>
                    </ContentBox>
                  </SC.Row>
                )}
              </InView>
            ))}
            <InView triggerOnce threshold={0.1}>
              {({ inView, ref }) => (
                <SC.NewsRow ref={ref} animate={inView}>
                  {cmsNews?.map(
                    ({ id, created, title, field_ingress: ingress }) => (
                      <InfoBox
                        key={id}
                        as={RouterLink}
                        to={`${PATHNAME.NEWS}/${id}`}
                      >
                        <InfoBoxSC.InfoBox.Date>
                          {created && formatDate(dateStringToDate(created))}
                        </InfoBoxSC.InfoBox.Date>
                        <InfoBoxTitle>
                          <h2>{title}</h2>
                        </InfoBoxTitle>
                        <InfoBoxBody>{ingress}</InfoBoxBody>
                      </InfoBox>
                    )
                  )}
                </SC.NewsRow>
              )}
            </InView>
          </SC.MainContent>
        </Container>
      </Root>
    </ParallaxContainer>
  ) : (
    <SC.MainPageFeatureToggleOff>
      <SC.Title>
        <Translation id='title' />
      </SC.Title>
    </SC.MainPageFeatureToggleOff>
  );
};

export default compose<FC>(memo, withNews, withPage)(MainPage);
|
<filename>src/role.harvester.ts
import { log } from "./lib/logger/log";
/**
 * Harvester state machine: flip `working` at the full/empty boundaries,
 * then either deliver energy (_work) or go harvest (_moveToHarvest).
 * Note: only an explicit `working === false` counts as "not working",
 * matching the original semantics for an uninitialised memory flag.
 */
export function run(creep: Creep): void {
  if (creep.memory.working === false) {
    if (creep.store.getFreeCapacity() === 0) {
      // Fully loaded: switch to delivery mode.
      creep.memory.working = true;
      _work(creep);
      return;
    }
    _moveToHarvest(creep);
    return;
  }
  if (creep.store.getUsedCapacity() === 0) {
    // Empty: switch back to harvesting mode.
    creep.memory.working = false;
    _moveToHarvest(creep);
    return;
  }
  _work(creep);
}
/**
 * Deliver the creep's energy, in priority order: spawn, extensions,
 * containers, then towers.
 */
function _work(creep: Creep) {
  const spawn = creep.room.find<Spawn>(FIND_MY_SPAWNS)[0];
  if (spawn.store.getFreeCapacity(RESOURCE_ENERGY) !== 0) {
    _moveToDropEnergy(creep, spawn);
    return;
  }
  const extensions = creep.room.find<FIND_MY_STRUCTURES>(FIND_MY_STRUCTURES).filter(s => s.structureType === STRUCTURE_EXTENSION) as StructureExtension[];
  const extensionsToFill = extensions.filter(e => e.store.getFreeCapacity(RESOURCE_ENERGY) !== 0);
  if (extensionsToFill.length > 0) {
    _moveToDropEnergy(creep, creep.pos.findClosestByPath(extensionsToFill));
    return;
  }
  const containers = creep.room.find<FIND_STRUCTURES>(FIND_STRUCTURES).filter(s => s.structureType === STRUCTURE_CONTAINER) as StructureContainer[];
  const containersToFill = containers.filter(e => e.store.getFreeCapacity(RESOURCE_ENERGY) !== 0);
  if (containersToFill.length > 0) {
    _moveToDropEnergy(creep, creep.pos.findClosestByPath(containersToFill));
    return;
  }
  const towers = creep.room.find(FIND_MY_STRUCTURES).filter(s => s.structureType === STRUCTURE_TOWER);
  const tower = creep.pos.findClosestByRange(towers);
  if (tower) {
    // Bug fix: previously only moved to the tower without ever transferring
    // energy; use the same drop-off helper as every other target.
    _moveToDropEnergy(creep, tower);
  }
}
/** Attempt to harvest from `target`; returns the Screeps API result code. */
function _tryHarvest(creep: Creep, target: Source): number {
  return creep.harvest(target);
}

/** Attempt to withdraw energy from `target`; returns the Screeps API result code. */
function _tryFillUp(creep: Creep, target: StructureContainer): number {
  return creep.withdraw(target, RESOURCE_ENERGY);
}
/**
 * Head for an energy source: prefer the active source that is NOT closest
 * to our spawn; when that source is unavailable or empty, fall back to the
 * nearest container that still holds energy.
 */
function _moveToHarvest(creep: Creep): void {
  const spawn = creep.room.find<Spawn>(FIND_MY_SPAWNS)[0];
  const close = spawn.pos.findClosestByRange<FIND_SOURCES_ACTIVE>(FIND_SOURCES_ACTIVE);
  const far = creep.room.find<FIND_SOURCES_ACTIVE>(FIND_SOURCES_ACTIVE).filter(s => s !== close)[0];
  if (far && far.energy !== 0) {
    if (_tryHarvest(creep, far) === ERR_NOT_IN_RANGE) {
      creep.moveTo(far.pos);
    }
    return;
  }
  const stockedContainers = (creep.room.find<FIND_STRUCTURES>(FIND_STRUCTURES)
    .filter(s => s.structureType === STRUCTURE_CONTAINER) as StructureContainer[])
    .filter(c => c.store.getUsedCapacity(RESOURCE_ENERGY) > 0);
  const closestContainer = creep.pos.findClosestByRange(stockedContainers);
  if (closestContainer && _tryFillUp(creep, closestContainer) === ERR_NOT_IN_RANGE) {
    creep.moveTo(closestContainer.pos);
  }
}
/** Attempt to transfer energy into `target`; returns the Screeps API result code. */
function _tryEnergyDropOff(creep: Creep, target: Spawn | Structure): number {
  return creep.transfer(target, RESOURCE_ENERGY);
}

/** Transfer energy to `target`, walking towards it first when out of range. */
function _moveToDropEnergy(creep: Creep, target?: Spawn | Structure | null): void {
  if (target) {
    if (_tryEnergyDropOff(creep, target) === ERR_NOT_IN_RANGE) {
      creep.moveTo(target.pos);
    }
  }
}
|
import React, { useState } from 'react';
import { BrowserRouter as Router, Route, Link } from 'react-router-dom';
// Renders a clickable list of users; clicking one selects it and shows a
// link/route for that user.
//
// NOTE(review): mounting a <Router> conditionally inside a component is
// almost certainly wrong — a single Router normally wraps the whole app.
// The <Route path> is also built from the currently selected id, so the
// route can only ever match its own link, and `users.find(...)` is assumed
// to always succeed. Confirm intent before relying on this component.
const Users = ({ users }) => {
  // id of the currently selected user, or null when none is selected
  const [selectedUser, setSelectedUser] = useState(null);
  return (
    <>
      <ul>
        {users.map(user => (
          <li key={user.id} onClick={() => setSelectedUser(user.id)}>
            {user.name}
          </li>
        ))}
      </ul>
      {selectedUser && (
        <Router>
          <Route exact path={`/user/${selectedUser}`}>
            {users.find(user => user.id === selectedUser).name}
          </Route>
          <Link to={`/user/${selectedUser}`}>View User</Link>
        </Router>
      )}
    </>
  );
};

export default Users;
|
def diff(self, other):
    """Return the leaf-value differences between two Merkle trees.

    ``other`` has to be a Merkle tree of the same height as ``self``;
    the comparison is optimized by walking both trees in lockstep
    (see ``_diffHelper``).

    Returns:
        A list of ``(self_value, other_value)`` pairs for differing leaves.

    Raises:
        Exception: if ``other`` is not an instance of the same class,
            or if the two trees do not have the same height.
    """
    if not isinstance(other, self.__class__):
        raise Exception("You have to compare Merkle Tree with Merkle Tree")
    if self.getHeight() != other.getHeight():
        raise Exception("Trees must have the same height for optimized comparison")
    return self._diffHelper(other)
def _diffHelper(self, other):
    """Recursively collect (self_value, other_value) pairs where leaves differ."""
    # Both leaves: report the pair only when the stored values disagree.
    if self.isLeaf() and other.isLeaf():
        mine, theirs = self.getRootValue(), other.getRootValue()
        return [] if mine == theirs else [(mine, theirs)]
    # Both internal nodes: recurse into matching children and concatenate.
    if not (self.isLeaf() or other.isLeaf()):
        return (self.getLeftChild()._diffHelper(other.getLeftChild())
                + self.getRightChild()._diffHelper(other.getRightChild()))
    # One side is a leaf while the other is not: shapes diverge.
    raise Exception("Trees must have the same structure for optimized comparison")
|
#!/usr/bin/env bash
#TEST: Use correct PIN plus one additional character while PIN is needed, expect failure
#TODO: Merge with 10-080 (fail_incorrect_pin)?
# NOTE(review): the ods_*/log_*/syslog_* helpers and the top-level `return`
# imply this script is sourced by an OpenDNSSEC test framework, not executed
# directly — confirm against the harness.

# Switch to the MySQL config when the harness provides a MySQL backend.
if [ -n "$HAVE_MYSQL" ]; then
	ods_setup_conf conf.xml conf-mysql.xml
fi &&

# Reset the environment without starting the enforcer.
ods_reset_env_noenforcer &&

# Every step below must FAIL with the bad PIN; the leading `!` inverts the
# expected exit status, and the greps confirm the failure was PIN-related.
! log_this ods-hsmutil-purge ods-hsmutil purge SoftHSM &&
log_grep ods-hsmutil-purge stderr 'Incorrect PIN for repository SoftHSM' &&

! ods_start_enforcer &&
syslog_waitfor 10 'ods-enforcerd: .*Incorrect PIN for repository SoftHSM' &&

! ods_start_signer &&
syslog_waitfor 10 'ods-signerd: .* Incorrect PIN for repository SoftHSM' &&
syslog_waitfor 10 'ods-signerd: \[engine\] opening hsm failed' &&

# Neither daemon may be left running for the current user.
! pgrep -u `id -u` '(ods-enforcerd|ods-signerd)' >/dev/null 2>/dev/null &&
return 0

# Any failure in the chain above falls through to cleanup + failure.
ods_kill
return 1
|
<filename>webtools.py
#
# Copyright 2012 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from elementtree.ElementTree import Element, SubElement
from elementtree.ElementTree import dump, tostring, fromstring
import requests
class WebToolsRequest():
    """Client for the USPS WebTools shipping APIs.

    Builds the XML payloads for the Verify, ZipCodeLookup and CityStateLookup
    endpoints and submits them via HTTP GET.  ``data`` arguments are sequences
    of dicts whose lower-cased keys mirror ``address_fields``.
    """

    def __init__(self, user_id):
        # USPS-issued WebTools USERID, stamped on the root element of every request.
        self.user_id = user_id
        self.api_url = 'https://secure.shippingapis.com/ShippingAPI.dll'
        self.test_api_url = 'https://secure.shippingapis.com/ShippingAPITest.dll'
        # Child-element tag names the API expects, in the order it expects them.
        self.address_fields = ('FirmName', 'Address1', 'Address2',
                               'City', 'State', 'Zip5', 'Zip4')
        # Root tag differs per endpoint.
        self.verify_root_tag = 'AddressValidateRequest'
        self.zipcode_lookup_root_tag = 'ZipCodeLookupRequest'
        self.citystate_lookup_root_tag = 'CityStateLookupRequest'
        # Sample addresses used by the *_test helper methods below.
        self.test_data = [
            {
                'address2':'6406 Ivy Lane',
                'city':'Greenbelt',
                'state':'MD'
            },
            {
                'address2':'8 Wildwood Drive',
                'city':'Old Lyme',
                'state':'CT',
                'zip5':'06371'
            }]

    def build_request_xml(self, data, root_tag):
        """Serialize ``data`` (a sequence of address dicts) into request XML.

        Each address becomes an ``<Address ID="i">`` element; fields missing
        from the dict are emitted as empty elements (dict.get returns None).
        """
        root = Element(root_tag, USERID=self.user_id)
        for i, address in enumerate(data):
            address_element = SubElement(root, 'Address', ID=str(i))
            for field in self.address_fields:
                SubElement(
                    address_element, field).text = address.get(field.lower())
        return tostring(root)

    def request(self, api_name, xml, test=False):
        """GET ``xml`` to the live endpoint, or the test endpoint when ``test``."""
        # Single call site: only the URL differs between live and test mode.
        url = self.test_api_url if test else self.api_url
        response = requests.get(url, params={'API': api_name, 'XML': xml})
        return response

    def verify(self, data):
        """Run the address-standardization (Verify) API over ``data``."""
        api_name = 'Verify'
        xml = self.build_request_xml(data, self.verify_root_tag)
        response = self.request(api_name, xml)
        return Response(response)

    def zipcode_lookup(self, data):
        """Look up ZIP codes for the given city/state addresses."""
        api_name = 'ZipCodeLookup'
        xml = self.build_request_xml(data, self.zipcode_lookup_root_tag)
        response = self.request(api_name, xml)
        return Response(response)

    def citystate_lookup(self, data):
        """Look up city/state for the given ZIP codes."""
        api_name = 'CityStateLookup'
        # FIX: use the configured root tag instead of a duplicated string
        # literal, consistent with verify() and zipcode_lookup().
        xml = self.build_request_xml(data, self.citystate_lookup_root_tag)
        response = self.request(api_name, xml)
        return Response(response)

    def verify_test(self):
        """Exercise the Verify API with the built-in sample data and print the body."""
        api_name = 'Verify'
        xml = self.build_request_xml(self.test_data, self.verify_root_tag)
        response = self.request(api_name, xml)
        print(response.content)

    def zipcode_lookup_test(self):
        """Exercise the ZipCodeLookup API with the built-in sample data."""
        api_name = 'ZipCodeLookup'
        xml = self.build_request_xml(
            self.test_data, self.zipcode_lookup_root_tag)
        response = self.request(api_name, xml)
        print(response.content)

    def citystate_lookup_test(self):
        """Exercise the CityStateLookup API with the built-in sample data."""
        api_name = 'CityStateLookup'
        xml = self.build_request_xml(
            self.test_data, self.citystate_lookup_root_tag)
        response = self.request(api_name, xml)
        print(response.content)

    def make_all_test_requests(self):
        """Run every *_test helper in sequence."""
        self.verify_test()
        self.zipcode_lookup_test()
        self.citystate_lookup_test()
class Response():
    """Parsed wrapper around a WebTools HTTP response.

    Exposes the returned ``<Address>`` records as ``WebToolsAddress`` objects
    keyed by their string ID (via ``[]``) and supports reverse-order
    iteration over them.
    """

    def __init__(self, response):
        # Tag names copied out of each <Address> element.
        self.address_fields = (
            'FirmName',
            'Address1',
            'Address2',
            'City',
            'State',
            'Zip5',
            'Zip4')
        self.response = response
        self.et = self.response_to_et(self.response)
        self.check_et_errors(self.et)
        self.dict = self.build_address_dict(self.et)
        # Iteration cursor; next() walks ids address_count-1 .. 0.
        self.index = self.address_count

    def __iter__(self):
        return self

    def __getitem__(self, key):
        # Addresses are keyed by the string form of their numeric ID.
        if self.dict.get(str(key)):
            return self.dict[str(key)]
        else:
            raise IndexError

    def next(self):
        """Yield addresses in descending ID order; raises StopIteration at 0."""
        if self.index == 0:
            raise StopIteration
        self.index = self.index - 1
        # BUG FIX: the original returned self.data[...] but no ``data``
        # attribute exists (AttributeError on first iteration).  Route the
        # lookup through __getitem__, which handles the string keys.
        return self[self.index]

    # Python 3 iterator protocol compatibility (harmless under Python 2).
    __next__ = next

    def dump(self):
        """Print status code, raw body and (when parsed) the element tree — debug aid."""
        print(self.response.status_code)
        print(self.response.content)
        if self.et:
            dump(self.et)

    def check_respone_errors(self, response):
        """Raise on any non-200 HTTP status.

        NOTE: the method name keeps its historical misspelling for API
        compatibility with existing callers.
        """
        # BUG FIX: compare with ``!=`` instead of ``is not`` — identity
        # comparison on ints is implementation-dependent and unreliable.
        if response.status_code != 200:
            self.dump()
            raise Exception

    def response_to_et(self, response):
        """Parse the raw response body into an ElementTree element."""
        return fromstring(response.content)

    def check_et_errors(self, et):
        """Raise if the API returned a top-level <Error> document; else pass it through."""
        if et.tag == 'Error':
            self.dump()
            raise Exception
        else:
            return et

    def build_address_dict(self, et):
        """Map each <Address ID=...> element to a WebToolsAddress, keyed by its ID."""
        addresses = {}
        for address_element in et.getiterator('Address'):
            address = {}
            id = address_element.get('ID')
            address['id'] = id
            for key in self.address_fields:
                address[str(key).lower()] = address_element.findtext(key)
            addresses[id] = WebToolsAddress(address)
        return addresses

    @property
    def address_count(self):
        """Number of <Address> elements in the parsed response."""
        # list(...) guards against getiterator() returning a generator
        # (newer ElementTree versions) rather than a list.
        return len(list(self.et.getiterator('Address')))
class WebToolsAddress():
    """Read-only convenience view over a single parsed address dict.

    BUG FIX: the original stored the dict on ``self.address`` while also
    defining an ``address`` property returning ``self.address`` — on
    new-style classes ``__init__`` raises AttributeError (the property has
    no setter) and the getter recurses infinitely.  The dict now lives in
    ``self._address`` and the public ``address`` property exposes it
    unchanged, so the external interface is preserved.
    """

    def __init__(self, address):
        # Keys are lower-cased WebTools field names ('firmname', 'address1', ...).
        self._address = address

    def __str__(self):
        # Firm name and street lines (when present), then "City, ST Zip5-Zip4".
        fields = ('firmname', 'address1', 'address2')
        add_string = ''
        for field in fields:
            if self._address[field]:
                add_string += '%s\n' % self._address[field]
        add_string += self.last_line
        return add_string

    @property
    def address(self):
        """The underlying address dict."""
        return self._address

    @property
    def address1(self):
        return self._address['address1']

    @property
    def address2(self):
        return self._address['address2']

    @property
    def city(self):
        return self._address['city']

    @property
    def state(self):
        return self._address['state']

    @property
    def zip4(self):
        return self._address['zip4']

    @property
    def zip5(self):
        return self._address['zip5']

    @property
    def address_lines(self):
        """Street lines joined with a newline; address1 omitted when falsy."""
        if self.address1:
            return '%s\n%s' % (self.address1, self.address2)
        else:
            return '%s' % self.address2

    @property
    def zipcode(self):
        """ZIP+4 form, e.g. '20770-1441'."""
        return '%s-%s' % (self.zip5, self.zip4)

    @property
    def citystate(self):
        return '%s, %s' % (self.city, self.state)

    @property
    def last_line(self):
        """Final postal line: 'City, ST Zip5-Zip4'."""
        return '%s %s' % (self.citystate, self.zipcode)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.