text
stringlengths 1
1.05M
|
|---|
import { FunctionComponent } from "react"
import * as React from "react"
import { css } from "@emotion/react"
// Centered, bold conclusion block for the end of an article.
// The class string uses Tachyons utilities (f5 = font size, tc = text
// center, fw6 = semi-bold); the emotion `css` prop adds serif typography
// and strips list indentation/bullets so embedded lists render flush.
// NOTE(review): `FunctionComponent` with no props type relies on the
// implicit `children` prop — under React 18 typings this needs
// FunctionComponent<PropsWithChildren>; confirm the React version in use.
export const Conclusion: FunctionComponent = ({ children }) => (
  <div
    className="f5 tc fw6"
    css={css`
      line-height: 1.5em;
      font-family: "Playfair Display", serif;
      margin-bottom: 1.45rem;
      ul,
      li {
        margin-left: 0;
        list-style-type: none;
      }
    `}
  >
    {children}
  </div>
)
|
#zcc +cpm -vn -SO3 -clib=sdcc_iy --max-allocs-per-node200000 sudoku.c -o sudoku -create-app
zcc +zx -vn -startup=1 -clib=sdcc_ix -SO3 --max-allocs-per-node200000 --reserve-regs-iy sudoku.c -o sudoku -create-app
|
<filename>src/test/java/seoul/democracy/features/E_06_제안/S_6_8_사용자는_제안의견을_수정_및_삭제할_수_있다.java
package seoul.democracy.features.E_06_제안;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.MethodSorters;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.security.access.AccessDeniedException;
import org.springframework.security.test.context.support.WithUserDetails;
import org.springframework.test.annotation.Rollback;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import seoul.democracy.common.exception.NotFoundException;
import seoul.democracy.opinion.domain.Opinion;
import seoul.democracy.opinion.dto.OpinionDto;
import seoul.democracy.opinion.dto.OpinionUpdateDto;
import seoul.democracy.opinion.service.OpinionService;
import seoul.democracy.proposal.dto.ProposalDto;
import seoul.democracy.proposal.predicate.ProposalPredicate;
import seoul.democracy.proposal.service.ProposalService;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static seoul.democracy.opinion.dto.OpinionDto.projection;
import static seoul.democracy.opinion.predicate.OpinionPredicate.equalId;
/**
 * epic : 6. Proposals
 * story : 6.8 A user can edit and delete proposal opinions.
 * (Javadoc translated from Korean; test-method names are part of the
 * code and are kept as-is.)
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = {
        "file:src/test/resources/egovframework/spring-test/context-*.xml",
        "file:src/main/webapp/WEB-INF/config/egovframework/springmvc/egov-com-*.xml"
})
@Transactional
@Rollback
@FixMethodOrder(MethodSorters.NAME_ASCENDING)
public class S_6_8_사용자는_제안의견을_수정_및_삭제할_수_있다 {

    // Minute-resolution formatter: timestamp assertions compare only down to
    // the minute, so they tolerate sub-minute drift during the test run.
    private final static DateTimeFormatter dateTimeFormatter = DateTimeFormatter.ofPattern("yyyy-MM-dd HH-mm");

    // Client IP recorded on modified rows (installed on the mock request below).
    private final static String ip = "127.0.0.2";

    private MockHttpServletRequest request;

    @Autowired
    private OpinionService opinionService;
    @Autowired
    private ProposalService proposalService;

    // Fixture ids — presumably seeded by the test context; verify against the
    // test dataset if assertions start failing.
    private Long opinionId = 1L;            // live opinion owned by the signed-in user
    private Long deletedOpinionId = 2L;     // opinion already in DELETE status
    private Long blockedOpinionId = 3L;     // opinion in blocked status
    private Long notExistsId = 999L;        // no opinion with this id
    private Long multiOpinionId = 31L;      // one of several opinions on a single proposal

    @Before
    public void setUp() throws Exception {
        // Bind a mock HTTP request so the service layer can read the client IP.
        request = new MockHttpServletRequest();
        request.setRemoteAddr(ip);
        RequestContextHolder.setRequestAttributes(new ServletRequestAttributes(request));
    }

    /**
     * 1. A user can edit their own opinion.
     */
    @Test
    @WithUserDetails("<EMAIL>")
    public void T_01_사용자는_본인의견을_수정할_수_있다() {
        final String now = LocalDateTime.now().format(dateTimeFormatter);
        OpinionUpdateDto updateDto = OpinionUpdateDto.of(opinionId, "제안의견 수정합니다.");
        Opinion opinion = opinionService.updateOpinion(updateDto);
        OpinionDto opinionDto = opinionService.getOpinion(equalId(opinion.getId()), projection);
        assertThat(opinionDto.getModifiedDate().format(dateTimeFormatter), is(now));
        assertThat(opinionDto.getModifiedBy().getEmail(), is("<EMAIL>"));
        assertThat(opinionDto.getModifiedIp(), is(ip));
        assertThat(opinionDto.getContent(), is(updateDto.getContent()));
        // Editing must not change the proposal's aggregate counters.
        ProposalDto proposalDto = proposalService.getProposal(ProposalPredicate.equalId(opinion.getIssue().getId()), ProposalDto.projection);
        assertThat(proposalDto.getStats().getOpinionCount(), is(1L));
        assertThat(proposalDto.getStats().getApplicantCount(), is(1L));
    }

    /**
     * 2. A user can delete their own opinion.
     */
    @Test
    @WithUserDetails("<EMAIL>")
    public void T_02_사용자는_본인의견을_삭제할_수_있다() {
        final String now = LocalDateTime.now().format(dateTimeFormatter);
        Opinion opinion = opinionService.deleteOpinion(opinionId);
        OpinionDto opinionDto = opinionService.getOpinion(equalId(opinion.getId()), projection);
        assertThat(opinionDto.getModifiedDate().format(dateTimeFormatter), is(now));
        assertThat(opinionDto.getModifiedBy().getEmail(), is("<EMAIL>"));
        assertThat(opinionDto.getModifiedIp(), is(ip));
        assertThat(opinionDto.getStatus(), is(Opinion.Status.DELETE));
        // Deleting the only opinion drops both proposal counters to zero.
        ProposalDto proposalDto = proposalService.getProposal(ProposalPredicate.equalId(opinion.getIssue().getId()), ProposalDto.projection);
        assertThat(proposalDto.getStats().getOpinionCount(), is(0L));
        assertThat(proposalDto.getStats().getApplicantCount(), is(0L));
    }

    /**
     * 3. A user cannot edit another user's opinion.
     */
    @Test(expected = AccessDeniedException.class)
    @WithUserDetails("<EMAIL>")
    public void T_03_다른_사용자의_의견을_수정할_수_없다() {
        OpinionUpdateDto updateDto = OpinionUpdateDto.of(opinionId, "다른 사용자가 제안의견을 수정합니다.");
        opinionService.updateOpinion(updateDto);
    }

    /**
     * 4. A user cannot delete another user's opinion.
     */
    @Test(expected = AccessDeniedException.class)
    @WithUserDetails("<EMAIL>")
    public void T_04_다른_사용자의_의견을_삭제할_수_없다() {
        opinionService.deleteOpinion(opinionId);
    }

    /**
     * 5. A non-existent opinion cannot be edited.
     */
    @Test(expected = NotFoundException.class)
    @WithUserDetails("<EMAIL>")
    public void T_05_없는_의견을_수정할_수_없다() {
        OpinionUpdateDto updateDto = OpinionUpdateDto.of(notExistsId, "없는 제안의견 수정합니다.");
        opinionService.updateOpinion(updateDto);
    }

    /**
     * 6. A non-existent opinion cannot be deleted.
     */
    @Test(expected = NotFoundException.class)
    @WithUserDetails("<EMAIL>")
    public void T_06_없는_의견을_삭제할_수_없다() {
        opinionService.deleteOpinion(notExistsId);
    }

    /**
     * 7. A deleted opinion cannot be edited.
     */
    @Test(expected = NotFoundException.class)
    @WithUserDetails("<EMAIL>")
    public void T_07_삭제된_의견을_수정할_수_없다() {
        OpinionUpdateDto updateDto = OpinionUpdateDto.of(deletedOpinionId, "삭제된 의견은 수정할 수 없다.");
        opinionService.updateOpinion(updateDto);
    }

    /**
     * 8. A deleted opinion cannot be deleted again.
     */
    @Test(expected = NotFoundException.class)
    @WithUserDetails("<EMAIL>")
    public void T_08_삭제된_의견을_삭제할_수_없다() {
        opinionService.deleteOpinion(deletedOpinionId);
    }

    /**
     * 9. A blocked opinion cannot be edited.
     */
    @Test(expected = NotFoundException.class)
    @WithUserDetails("<EMAIL>")
    public void T_09_블럭된_의견을_수정할_수_없다() {
        OpinionUpdateDto updateDto = OpinionUpdateDto.of(blockedOpinionId, "블럭된 의견은 수정할 수 없다.");
        opinionService.updateOpinion(updateDto);
    }

    /**
     * 10. A blocked opinion cannot be deleted.
     */
    @Test(expected = NotFoundException.class)
    @WithUserDetails("<EMAIL>")
    public void T_10_블럭된_의견을_삭제할_수_없다() {
        opinionService.deleteOpinion(blockedOpinionId);
    }

    /**
     * 11. A user can delete one of several proposal opinions.
     */
    @Test
    @WithUserDetails("<EMAIL>")
    public void T_11_사용자는_여러_제안의견_중_하나를_삭제할_수_있다() {
        final String now = LocalDateTime.now().format(dateTimeFormatter);
        Opinion opinion = opinionService.deleteOpinion(multiOpinionId);
        OpinionDto opinionDto = opinionService.getOpinion(equalId(opinion.getId()), OpinionDto.projection);
        assertThat(opinionDto.getModifiedDate().format(dateTimeFormatter), is(now));
        assertThat(opinionDto.getModifiedBy().getEmail(), is("<EMAIL>"));
        assertThat(opinionDto.getModifiedIp(), is(ip));
        assertThat(opinionDto.getStatus(), is(Opinion.Status.DELETE));
        // Opinion count drops by one, but the author still has other opinions,
        // so the applicant count stays at 1.
        ProposalDto proposalDto = proposalService.getProposal(ProposalPredicate.equalId(opinion.getIssue().getId()), ProposalDto.projection);
        assertThat(proposalDto.getStats().getOpinionCount(), is(8L));
        assertThat(proposalDto.getStats().getApplicantCount(), is(1L));
    }
}
|
<filename>C2CRIBuildDir/projects/C2C-RI/src/RI_Utilities/src/org/fhwa/c2cri/utilities/ProgressReporter.java
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package org.fhwa.c2cri.utilities;
/**
 * The Interface ProgressReporter supplies methods to report progress to the
 * progress monitor. Implementations receive a {@link #start()} call once,
 * any number of {@link #update(String)} calls, and a final {@link #done()}.
 *
 * @author TransCore ITS, LLC
 * Last Updated: 1/3/2012
 */
public interface ProgressReporter {

    /**
     * Signals that the monitored operation has begun.
     */
    public void start();

    /**
     * Reports an intermediate progress message.
     *
     * @param updateText the update text to display on the progress monitor
     */
    public void update(String updateText);

    /**
     * Signals that the monitored operation has finished.
     */
    public void done();
}
|
# Re-export everything from the sibling module `y` (wildcard import; the
# exposed names depend on y's __all__ — TODO confirm the intended surface).
from .y import *
# Import-time side effect: prints a marker when this module is loaded.
print('x')
|
package com.h8.nh.nhoodengine.matrix.workers;
import com.h8.nh.nhoodengine.core.DataResource;
import com.h8.nh.nhoodengine.core.DataResourceKey;
import com.h8.nh.nhoodengine.matrix.DataDoesNotExistException;
import com.h8.nh.nhoodengine.matrix.DataMatrixRepository;
import com.h8.nh.nhoodengine.matrix.DataMatrixRepositoryFailedException;
import java.util.UUID;
import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.TimeUnit;
/**
 * Worker that drains a queue of resource UUIDs and verifies that each one can
 * (or, when configured, cannot) be found in the backing
 * {@link DataMatrixRepository}. Any verification failure is recorded via
 * {@link #hasErrors()} so the driving test can assert on it.
 *
 * @param <K> resource key type
 * @param <D> resource data type
 */
public final class ResourcesFindWorker<K extends DataResourceKey, D> implements Runnable, AssertableWorker {

    private final DataMatrixRepository<K, D> repository;

    /** Queue of UUIDs to look up; drained until it stays empty for 200 ms. */
    private final SynchronousQueue<UUID> toFind;

    /** When true, a DataDoesNotExistException is the expected outcome. */
    private final Boolean notFoundExpected;

    private boolean hasErrors = false;

    private ResourcesFindWorker(
            final DataMatrixRepository<K, D> repository,
            final SynchronousQueue<UUID> toFind,
            final Boolean notFoundExpected) {
        this.repository = repository;
        this.toFind = toFind;
        this.notFoundExpected = notFoundExpected;
    }

    /** Factory for a worker that expects every UUID to be found. */
    public static <K extends DataResourceKey, D> ResourcesFindWorker<K, D> of(
            final DataMatrixRepository<K, D> repository,
            final SynchronousQueue<UUID> toFind) {
        return new ResourcesFindWorker<>(repository, toFind, false);
    }

    /** Factory for a worker that expects every lookup to miss. */
    public static <K extends DataResourceKey, D> ResourcesFindWorker<K, D> notFoundExpected(
            final DataMatrixRepository<K, D> repository,
            final SynchronousQueue<UUID> toFind) {
        return new ResourcesFindWorker<>(repository, toFind, true);
    }

    @Override
    public boolean hasErrors() {
        return hasErrors;
    }

    @Override
    public void run() {
        try {
            while (true) {
                // A null poll after 200 ms means the producers are done.
                UUID uuid = toFind.poll(200, TimeUnit.MILLISECONDS);
                if (uuid == null) {
                    return;
                }
                findResource(uuid);
            }
        } catch (Exception e) {
            // Fixed message: it previously said "remove data" (copy-pasted
            // from the remove worker) although this worker performs finds.
            System.err.println(Thread.currentThread().getName()
                    + " : Could not find data in matrix repository because of"
                    + " an exception: " + e.getClass().getSimpleName() + " : " + e.getMessage());
            hasErrors = true;
        }
    }

    /**
     * Looks up a single resource and validates the result.
     *
     * @param uuid the resource identifier to find
     */
    private void findResource(final UUID uuid) {
        try {
            DataResource<K, D> resource = repository.find(uuid);
            if (resource == null) {
                throw new RuntimeException(
                        "Returned data is null");
            }
            // Bug fix: the original used '!=', which compares object identity.
            // Two value-equal UUID instances are usually distinct objects, so
            // the check produced spurious failures. Compare by value instead.
            if (!uuid.equals(resource.getUuid())) {
                throw new RuntimeException(
                        "Returned data has different UUID than requested");
            }
        } catch (DataDoesNotExistException e) {
            if (!notFoundExpected) {
                throw new RuntimeException(e);
            }
        } catch (DataMatrixRepositoryFailedException e) {
            throw new RuntimeException(e);
        }
    }
}
|
#!/usr/local/bin/bash
# Show a dzen2 popup titled "Example Files" listing ~/.dzen2/knoxbug for
# 20 seconds. -l 8 keeps 8 slave lines visible; the -x/-y/-tw/-w flags set
# the popup's position, title width, and window width.
(echo "Example Files"; ls -lah ~/.dzen2/knoxbug; sleep 20) | dzen2 -fn 'Roboto:Medium:15px' -x 550 -y 250 -tw 175 -w 775 -l 8
|
package cn.stylefeng.roses.kernel.auth.starter;
import cn.hutool.cache.CacheUtil;
import cn.hutool.cache.impl.TimedCache;
import cn.stylefeng.roses.kernel.auth.api.constants.LoginCacheConstants;
import cn.stylefeng.roses.kernel.auth.cache.LoginMemoryCache;
import cn.stylefeng.roses.kernel.cache.api.CacheOperatorApi;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * Auto-configuration for the login cache.
 * (Javadoc translated from the original Chinese.)
 *
 * @author xixiaowei
 * @date 2022/1/22 17:40
 */
@Configuration
public class GunsLoginCacheAutoConfiguration {

    /**
     * Cache used for frozen (locked-out) login accounts. Only registered when
     * no other bean named "loginCacheOperatorApi" is present, so deployments
     * can swap in e.g. a Redis-backed implementation.
     *
     * @author xixiaowei
     * @date 2022/1/22 17:45
     */
    @Bean
    @ConditionalOnMissingBean(name = "loginCacheOperatorApi")
    public CacheOperatorApi<String> loginCacheOperatorApi() {
        // The constant is in seconds; TimedCache expects milliseconds.
        TimedCache<String, String> loginTimeCache = CacheUtil.newTimedCache(LoginCacheConstants.LOGIN_CACHE_TIMEOUT_SECONDS * 1000);
        return new LoginMemoryCache(loginTimeCache);
    }
}
|
#ifndef _BUTTON_H
#define _BUTTON_H

#include "commonassets.h"
#include "guibase.h"

/* A clickable GUI button.  The function pointers act as a per-widget
 * vtable; the event handlers take struct GUIBase * so buttons share the
 * generic event dispatch used by other widgets. */
struct Button
{
	float x, y;                     /* position — presumably screen/ortho units; confirm with guibase */
	float w, h;                     /* width and height */
	float color[4];                 /* presumably RGBA — confirm against the renderer */
	int depressed;                  /* nonzero while the pointer holds the button down */
	int clicked;
	int focused;
	struct GUIWindow * wp;          /* owning window */
	void (*Prerender)( struct Button * b );
	void (*Render)( struct Button * b );
	int (*CheckClick)( struct GUIBase * b, float screenx, float screeny, int button, int down ); //Return 0 if received event.
	void (*HandleMove)( struct GUIBase * b, float screenx, float screeny, int buttonmask ); //NOTE(review): returns void; the original "Return 0" comment was copy-pasted from CheckClick.
	void (*HandleKeyboard)( struct GUIBase * b, char c, int down, int focused ); //If unfocused, treat as a hotkey only.
	void (*DestroyMe)( struct Button * b );
	void (*ProcessClick)( struct Button * b );
	char * text;                    /* current label */
	char * oldtext;                 /* previous label — presumably used to detect label changes; verify in ButtonRender */
	struct GPUGeometry * geotext;   /* cached geometry for the rendered label */
};

struct GUIBase * CreateButton( struct GUIWindow * parent, const char * text );

//Below here are kind of private-ish.
//Assumes you are in ortho or an ortho-like mode.
void ButtonRender( struct Button * b );
void ButtonDestroyMe( struct Button * b );
void ButtonProcessClick( struct Button * b );

#endif
|
<gh_stars>0
import React from 'react';
import styled from 'styled-components';
const Styled = styled.div``;
const Templates = () => {
return <Styled />;
};
export default React.memo(Templates);
|
# Define a function to crawl a website
def web_crawler(url):
    """Fetch `url` and return the inner text of every
    ``<div class="data-item">...</div>`` found in the response.

    Args:
        url: Any URL scheme urllib supports (http, https, file, data, ...).

    Returns:
        List of captured strings, in document order; empty if none match.

    Raises:
        urllib.error.URLError: if the request fails.
    """
    # Local imports keep this function self-contained; the original body
    # used Request/urlopen/re without importing them anywhere.
    from urllib.request import Request, urlopen
    import re

    request = Request(url)
    response = urlopen(request)
    # Bug fix: read() returns bytes, but the pattern below is a str —
    # the original raised TypeError on Python 3. Decode first, using the
    # charset advertised by the server when available.
    charset = response.headers.get_content_charset() or 'utf-8'
    html = response.read().decode(charset, errors='replace')
    # DOTALL lets a data item span multiple lines; non-greedy keeps each
    # match to a single div.
    data_regex = re.compile(r'<div class="data-item">(.*?)</div>', re.DOTALL)
    return [match.group(1) for match in data_regex.finditer(html)]
# Make a request to a website
# NOTE(review): this runs at import time and performs a live network
# request; consider guarding with `if __name__ == '__main__':`.
url = 'http://www.example.com'
data = web_crawler(url)
|
-- Counts orders placed within the last 30 days.
CREATE PROCEDURE GetTotalNumberOfOrdersInPastThirtyDays()
BEGIN
-- BETWEEN is inclusive on both ends; the NOW() upper bound also excludes
-- any order rows timestamped in the future.
SELECT COUNT(*) FROM Orders
WHERE order_date BETWEEN NOW() - INTERVAL 30 DAY AND NOW();
END
|
#
# Copyright 2011-2016 Asakusa Framework Team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Resolve the hadoop executable into HADOOP_CMD unless the caller already
# set it: prefer $HADOOP_HOME/bin/hadoop, then fall back to `which hadoop`.
if [ "$HADOOP_CMD" = "" ]
then
    if [ "$HADOOP_HOME" != "" ]
    then
        HADOOP_CMD="$HADOOP_HOME/bin/hadoop"
    else
        HADOOP_CMD="$(which hadoop)"
        # $? here is the exit status of `which` (command substitution in an
        # assignment propagates the substituted command's status).
        _RET=$?
        if [ $_RET -ne 0 ]
        then
            echo 'hadoop command is not found' 1>&2
            exit 1
        fi
    fi
fi
# Whatever was resolved (or supplied) must actually be executable.
if [ ! -x "$HADOOP_CMD" ]
then
    echo "$HADOOP_CMD is not executable" 1>&2
    exit 1
fi
|
<gh_stars>0
import React from 'react';
import Layout from '../components/layout';
import { css } from 'emotion';
class IndexPage extends React.Component {
render() {
return (
<Layout>
<div
className={css`
min-height: 300vh;
`}
>
Hello
</div>
</Layout>
);
}
}
export default IndexPage;
|
#!/bin/bash -e
# Interactive bulk installer for Kali pentest tooling on Debian/Ubuntu.
clear
RED='\033[1;31m'
WHITE='\033[1;37m'
ORANGE='\033[01;38;5;166m'
GREEN='\033[01;38;5;118m'
echo "Task Completed Thanks For Using"
printf "${ORANGE}"
echo -e " ███████╗ ███████╗ ██████╗ ██╗ ██╗ █████╗ ██╗"
echo -e " ███╔══██╗ ██╔════╝ ██╔══██╗ ██║ ██╔╝ ██╔══██╗ ██║"
echo -e "${WHITE} ███║ ██║ █████╗ ██████╔╝ █████╔╝ ███████║ ██║"
echo -e " ███║ ██║ ██╔══╝ ██╔══██╗ ██╔═██╗ ██╔══██║ ██║"
echo -e "${GREEN} ███████╔╝ ███████╗ ██████╔╝ ██║ ██╗ ██║ ██║ ███████╗"
echo -e " ╚═════╝ ╚══════╝ ╚═════╝ ╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚══════╝"
echo -e " ${RED} (Version - 1.O) "
printf "${WHITE}"
echo -e "----------------Contributer :- Anurag Batra -------------------------------------------"
echo -e "---------------------------${ORANGE} Hindustani ${WHITE} Tech ${GREEN} Solutions ${WHITE}-------------------------------"
echo -e "============================================================================================================================="
echo -e " Welcome to the troubleshooter This will install all the Tools in Kali"
echo -e " (Tested on Debian and Ubuntu)"
echo -e "============================================================================================================================="
echo -e "Are you Ready to Proceed (y/n) :- "
# Bug fix: -r stops `read` from treating backslashes as escapes.
read -r sure
if [ "$sure" == y ] ; then
# NOTE(review): only the first apt-get runs under sudo; the rest assume the
# script itself runs as root — confirm intended usage.
sudo apt-get update && apt-get upgrade --allow-unauthenticated -y && apt-get dist-upgrade -y --allow-unauthenticated
# Bug fix: the package list was split across two lines with no trailing
# backslash, so the second half was executed as a separate, bogus command.
# The list is now a single continued command line.
apt-get -f install acccheck ace-voip amap automater braa casefile cdpsnarf cisco-torch cookie-cadger copy-router-config dmitry dnmap dnsenum dnsmap dnsrecon dnstracer dnswalk dotdotpwn enum4linux enumiax exploitdb fierce firewalk fragroute fragrouter ghost-phisher golismero goofile lbd maltego-teeth masscan metagoofil miranda nmap p0f parsero recon-ng set smtp-user-enum snmpcheck sslcaudit sslsplit sslstrip sslyze thc-ipv6 theharvester tlssled twofi urlcrazy wireshark wol-e xplico ismtp intrace hping3 bbqsql bed cisco-auditing-tool cisco-global-exploiter cisco-ocs cisco-torch copy-router-config doona dotdotpwn greenbone-security-assistant hexorbase jsql lynis nmap ohrwurm openvas-cli openvas-manager openvas-scanner oscanner powerfuzzer sfuzz sidguesser siparmyknife sqlmap sqlninja sqlsus thc-ipv6 tnscmd10g unix-privesc-check yersinia aircrack-ng asleap bluelog blueranger bluesnarfer bully cowpatty crackle eapmd5pass fern-wifi-cracker ghost-phisher giskismet gqrx kalibrate-rtl killerbee kismet mdk3 mfcuk mfoc mfterm multimon-ng pixiewps reaver redfang spooftooph wifi-honey wifitap wifite apache-users arachni bbqsql blindelephant burpsuite cutycapt davtest deblaze dirb dirbuster fimap funkload grabber jboss-autopwn joomscan jsql maltego-teeth padbuster paros parsero plecost powerfuzzer proxystrike recon-ng skipfish sqlmap sqlninja sqlsus ua-tester uniscan vega webscarab websploit wfuzz wpscan xsser zaproxy burpsuite dnschef fiked hamster-sidejack hexinject iaxflood inviteflood ismtp mitmproxy ohrwurm protos-sip rebind responder rtpbreak rtpinsertsound rtpmixsound sctpscan siparmyknife sipp sipvicious sniffjoke sslsplit sslstrip thc-ipv6 voiphopper webscarab wifi-honey wireshark xspy yersinia zaproxy cryptcat cymothoa dbd dns2tcp http-tunnel httptunnel intersect nishang polenum powersploit pwnat ridenum sbd u3-pwn webshells weevely casefile cutycapt dos2unix dradis keepnote magictree metagoofil nipper-ng pipal armitage backdoor-factory cisco-auditing-tool \
cisco-global-exploiter cisco-ocs cisco-torch crackle jboss-autopwn linux-exploit-suggester maltego-teeth set shellnoob sqlmap thc-ipv6 yersinia beef-xss binwalk bulk-extractor chntpw cuckoo dc3dd ddrescue dumpzilla extundelete foremost galleta guymager iphone-backup-analyzer p0f pdf-parser pdfid pdgmail peepdf volatility xplico dhcpig funkload iaxflood inviteflood ipv6-toolkit mdk3 reaver rtpflood slowhttptest t50 termineter thc-ipv6 thc-ssl-dos acccheck burpsuite cewl chntpw cisco-auditing-tool cmospwd creddump crunch findmyhash gpp-decrypt hash-identifier hexorbase john johnny keimpx maltego-teeth maskprocessor multiforcer ncrack oclgausscrack pack patator polenum rainbowcrack rcracki-mt rsmangler statsprocessor thc-pptp-bruter truecrack webscarab wordlists zaproxy apktool dex2jar python-distorm3 edb-debugger jad javasnoop jd ollydbg smali valgrind yara android-sdk apktool arduino dex2jar sakis3g smali && wget http://www.morningstarsecurity.com/downloads/bing-ip2hosts-0.4.tar.gz && tar -xzvf bing-ip2hosts-0.4.tar.gz && cp bing-ip2hosts-0.4/bing-ip2hosts /usr/local/bin/
clear
cat /etc/*-release
echo -e "==================================================================================="
echo -e "now check if an error occures just raise a issue and it will be solved"
echo -e "######### For Qurries Contact me at : hindustanitechsolutions@gmail.com #############"
echo -e "===================================================================================="
else
echo -e "==================================================================================="
echo -e "So on your Wish Installation has Been Aborded"
echo -e "===================================================================================="
fi
# Bug fix: removed a stray trailing `done` — there was no matching loop, so
# bash rejected the whole script with a syntax error.
|
<filename>src/index.test.ts
import { Clock } from '.'
// 5 minutes in ms — matches the "5|…" presets exercised below.
const TIME = 5 * 60_000
// The mocked Date.now() advances by this much on EVERY call, simulating the
// clock's internal tick granularity; several tests assert exact call counts.
const UPDATE_INTERVAL = 100
jest.useFakeTimers()
const dateNow = jest.spyOn(Date, 'now')
let time = 0
dateNow.mockImplementation(() => (time += UPDATE_INTERVAL))
const callback = jest.fn()
// Named presets — non-null asserted because these names are known to exist.
const fischer = Clock.getConfig('Fischer Rapid 5|5')!
const noIncrement = Clock.getConfig('Fischer Blitz 5|0')!
const bronstein = Clock.getConfig('Bronstein Bullet 1|2')!
const simpleDelay = Clock.getConfig('Tournament 40/120|5, 60|5')!
const hourglass = Clock.getConfig('Hourglass 1')!
describe('Clock', () => {
  beforeEach(() => {
    // Reset mocks, pending timers, and the mocked wall clock before each test;
    // many assertions below depend on exact Date.now() call counts.
    jest.clearAllMocks()
    jest.clearAllTimers()
    time = 0
  })

  // State tracking, reset, and time accounting fundamentals.
  describe('Basic', () => {
    it('can track moves of two players', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state.move).toStrictEqual([0, 0])
    })
    it('can return white', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state.white).toBe(undefined)
      clock.push(1)
      clock.push(0)
      expect(clock.state.white).toBe(0)
      clock.reset()
      expect(clock.state.white).toBe(undefined)
      clock.push(0)
      expect(clock.state.white).toBe(1)
    })
    it('can track times of two players', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state.remainingTime).toStrictEqual([TIME, TIME])
    })
    it('tracks history of two players', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state.log).toStrictEqual([[], []])
    })
    it('can have different initial times', () => {
      const clock = new Clock({
        ...noIncrement,
        stages: [{ time: [4, 2], mode: 'Fischer', increment: 0 }],
      })
      expect(clock.state.remainingTime).toStrictEqual([4, 2])
    })
    it('can add a move', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      expect(clock.state.move).toStrictEqual([1, 0])
    })
    it('stage defined by move', () => {
      const clock = new Clock({
        ...noIncrement,
        stages: [
          { time: [1, 1], mode: 'Fischer', increment: 0 },
          { time: [1, 1], move: 1, mode: 'Fischer', increment: 0 },
        ],
      })
      clock.push(0)
      expect(clock.state.stage.map((e) => e.i)).toStrictEqual([1, 0])
    })
    it('can add time', () => {
      const clock = new Clock(noIncrement)
      clock.addTime(0, 42)
      expect(clock.state.remainingTime).toStrictEqual([TIME + 42, TIME])
    })
    it('will add time on new stage', () => {
      const clock = new Clock({
        ...noIncrement,
        stages: [
          { time: [42, 21], mode: 'Fischer', increment: 0 },
          { time: [42, 21], move: 1, mode: 'Fischer', increment: 0 },
        ],
      })
      clock.push(0)
      expect(clock.state.remainingTime).toStrictEqual([84, 21])
    })
    it('can reset to initial game parameters', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      clock.addTime(0, 42)
      expect(clock.state.move).toStrictEqual([1, 0])
      expect(clock.state.remainingTime).toStrictEqual([TIME + 42, TIME])
      expect(clock.state.log).not.toStrictEqual([[], []])
      clock.reset()
      expect(clock.state.move).toStrictEqual([0, 0])
      expect(clock.state.remainingTime).toStrictEqual([TIME, TIME])
      expect(clock.state.log).toStrictEqual([[], []])
    })
    it('can reset to new game parameters', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state.stages[0].mode).toBe('Fischer')
      clock.reset(hourglass)
      expect(clock.state.stages[0].mode).toBe('Hourglass')
    })
    it('provides state', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state).not.toBe(undefined)
    })
    it('status is ready initially and after reset', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state.status).toBe('ready')
      clock.push(0)
      expect(clock.state.status).toBe('live')
      clock.reset()
      expect(clock.state.status).toBe('ready')
    })
    it('status is done when remainingTime has expired', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      jest.runTimersToTime(TIME)
      expect(clock.state.status).toBe('done')
    })
    it('status is done when remainingTime has expired on push', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      dateNow.mockReturnValueOnce(TIME + 100)
      clock.push(1)
      expect(clock.state.status).toBe('done')
    })
    it('remainingTime can not be lower than 0', () => {
      const clock = new Clock({ ...noIncrement, callback })
      clock.push(0)
      jest.runTimersToTime(2 * TIME)
      expect(clock.state.remainingTime).toStrictEqual([TIME, 0])
    })
  })

  // push() semantics: starting the clock, turn handoff, and no-op cases.
  describe('Push', () => {
    it('can starts timing', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state.status).toBe('ready')
      clock.push(0)
      expect(clock.state.status).toBe('live')
    })
    it('can set last player', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      expect(clock.state.lastPlayer).toBe(0)
    })
    it('can add a move to player', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      expect(clock.state.move).toStrictEqual([1, 0])
    })
    it('has no effect if twice in a row by the same player', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      const state = clock.state
      clock.push(0)
      expect(clock.state).toStrictEqual(state)
    })
    it('has no effect if status is done', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      jest.runTimersToTime(TIME)
      clock.push(1)
      const state = clock.state
      clock.push(0)
      expect(clock.state).toStrictEqual(state)
    })
    it('can update elapsed time (without increment)', () => {
      // Each push samples Date.now() once; the mock advances 100 ms per call.
      const clock = new Clock(noIncrement)
      clock.push(0)
      expect(clock.state.timestamp).toBe(100)
      clock.push(1)
      expect(clock.state.timestamp).toBe(200)
      expect(dateNow).toBeCalledTimes(2)
    })
    it('can log time on non opening moves (without increment)', () => {
      const clock = new Clock(noIncrement)
      expect(clock.state.status).toBe('ready')
      clock.push(0)
      expect(clock.state.log[0][0]).toBe(undefined)
      expect(clock.state.remainingTime).toStrictEqual([TIME, TIME])
      jest.runTimersToTime(1000)
      expect(clock.state.remainingTime).toStrictEqual([TIME, TIME - 1000])
      clock.push(1)
      expect(clock.state.log[1][0]).toBe(1100)
      expect(clock.state.remainingTime).toStrictEqual([TIME, TIME - 1100])
      jest.runTimersToTime(450)
      clock.push(0)
      expect(clock.state.log[0][0]).toBe(500)
      expect(clock.state.remainingTime).toStrictEqual([TIME - 500, TIME - 1100])
      expect(clock.state.status).toBe('live')
      expect(dateNow).toBeCalledTimes(17)
    })
  })

  // pause() only acts while the clock is live.
  describe('Pause', () => {
    it('has effect if status is live', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      expect(clock.state.status).toBe('live')
      clock.pause()
      jest.runTimersToTime(4200)
      expect(clock.state.remainingTime).toStrictEqual([TIME, TIME - 100])
      expect(clock.state.log[1][0]).toBe(100)
      expect(dateNow).toBeCalledTimes(2)
      expect(clock.state.status).toBe('paused')
    })
    it('has no effect if status is not live', () => {
      const clock = new Clock(noIncrement)
      // ready
      clock.pause()
      jest.runTimersToTime(4200)
      expect(clock.state.status).toBe('ready')
      clock.push(0)
      jest.runTimersToTime(TIME)
      expect(clock.state.status).toBe('done')
      // done
      clock.pause()
      jest.runTimersToTime(4200)
      expect(clock.state.status).toBe('done')
      expect(dateNow).toBeCalledTimes(TIME / UPDATE_INTERVAL + 1)
    })
  })

  // resume() only acts while the clock is paused.
  describe('Resume', () => {
    it('has effect if status is paused', () => {
      const clock = new Clock(noIncrement)
      clock.push(0)
      clock.pause()
      jest.runTimersToTime(1000)
      clock.resume()
      jest.runTimersToTime(1000)
      expect(clock.state.remainingTime).toStrictEqual([TIME, TIME - 1100])
      expect(clock.state.log[1][0]).toBe(1100)
      expect(dateNow).toBeCalledTimes(13)
      expect(clock.state.status).toBe('live')
    })
    it('has no effect if status is not live', () => {
      const clock = new Clock(noIncrement)
      // ready
      clock.resume()
      jest.runTimersToTime(1000)
      expect(clock.state.status).toBe('ready')
      clock.push(0)
      jest.runTimersToTime(1000)
      // live
      clock.resume()
      jest.runTimersToTime(1000)
      expect(clock.state.remainingTime).toStrictEqual([TIME, TIME - 2000])
      expect(dateNow).toBeCalledTimes(21)
      expect(clock.state.status).toBe('live')
      jest.runTimersToTime(TIME)
      // done
      clock.resume()
      expect(clock.state.status).toBe('done')
    })
    it('Resume has no effect if status is ready or done', () => {
      const clock = new Clock(noIncrement)
      // ready
      clock.resume()
      expect(clock.state.status).toBe('ready')
      clock.push(0)
      jest.runTimersToTime(TIME)
      expect(clock.state.status).toBe('done')
      expect(clock.state.remainingTime).toStrictEqual([TIME, 0])
      // done
      clock.resume()
      jest.runTimersToTime(1000)
      expect(clock.state.status).toBe('done')
      expect(clock.state.remainingTime).toStrictEqual([TIME, 0])
    })
  })

  // The user callback fires on every state-changing operation and each tick.
  describe('Callback', () => {
    it('is called on push', () => {
      const clock = new Clock({ ...noIncrement, callback })
      clock.push(0)
      expect(callback).toBeCalledTimes(1)
    })
    it('is called on pause', () => {
      const clock = new Clock({ ...noIncrement, callback })
      clock.push(0)
      clock.pause()
      clock.pause()
      expect(callback).toBeCalledTimes(2)
    })
    it('is called on resume', () => {
      const clock = new Clock({ ...noIncrement, callback })
      clock.push(0)
      clock.resume()
      clock.pause()
      clock.resume()
      clock.resume()
      expect(callback).toBeCalledTimes(3)
    })
    it('is called on addTime', () => {
      const clock = new Clock({ ...noIncrement, callback })
      clock.addTime(0, 0)
      expect(callback).toBeCalledTimes(1)
    })
    it('is called on ticks', () => {
      const clock = new Clock({ ...noIncrement, callback })
      clock.push(0)
      jest.runTimersToTime(1000)
      expect(callback).toBeCalledTimes(11)
    })
  })

  // Per-mode increment semantics (Fischer / Bronstein / Hourglass / Delay).
  describe('Increments', () => {
    describe('Fischer', () => {
      it('will add increment at end of each turn', () => {
        const clock = new Clock(fischer)
        clock.push(0)
        expect(clock.state.remainingTime).toStrictEqual([
          TIME + clock.state.stage[0].increment,
          TIME,
        ])
      })
    })
    describe('Bronstein', () => {
      it('will add spent increment at end of each turn', () => {
        const clock = new Clock(bronstein)
        const time = clock.state.stages[0].time
        clock.push(0)
        expect(clock.state.remainingTime).toStrictEqual(time)
        jest.advanceTimersByTime(clock.state.stage[1]?.increment / 2)
        expect(clock.state.remainingTime).toStrictEqual([
          time[0],
          time[1] - clock.state.stage[1].increment / 2,
        ])
        clock.push(1)
        expect(clock.state.remainingTime).toStrictEqual(time)
      })
      it('will add whole increment if it is spent at end of each turn', () => {
        const clock = new Clock(bronstein)
        const time = clock.state.stages[0].time
        clock.push(0)
        expect(clock.state.remainingTime).toStrictEqual(time)
        jest.advanceTimersByTime(clock.state.stage[1]?.increment || 0)
        clock.push(1)
        // The extra 100 comes from the Date.now() sample taken on push.
        expect(clock.state.remainingTime).toStrictEqual([
          time[0],
          time[1] - 100,
        ])
      })
    })
    describe('Hourglass', () => {
      it('will add spent time to opponent', () => {
        const clock = new Clock(hourglass)
        const time = clock.state.stages[0].time
        clock.push(0)
        expect(clock.state.remainingTime).toStrictEqual(time)
        jest.advanceTimersByTime(1000)
        clock.push(1)
        expect(clock.state.remainingTime).toStrictEqual([
          time[0] + 1100,
          time[1] - 1100,
        ])
      })
    })
    describe('Delay', () => {
      it('will not start decreasing remainingTime before delay', () => {
        const clock = new Clock(simpleDelay)
        const time = clock.state.stages[0].time
        const delay = clock.state.stage[1]?.increment
        clock.push(0)
        expect(clock.state.remainingTime).toStrictEqual(time)
        jest.advanceTimersByTime(delay / 2)
        expect(clock.state.remainingTime).toStrictEqual(time)
        clock.push(1)
        jest.advanceTimersByTime(delay - 100)
        clock.push(0)
        expect(clock.state.remainingTime).toStrictEqual(time)
      })
      it('will decrease remainingTime after delay', () => {
        const clock = new Clock(simpleDelay)
        const time = clock.state.stages[0].time
        const delay = clock.state.stage[1]?.increment
        clock.push(0)
        jest.advanceTimersByTime(delay + 100)
        clock.push(1)
        expect(clock.state.remainingTime).toStrictEqual([
          time[0],
          time[1] - 200,
        ])
      })
      it('will immediately decrease remainingTime after delay', () => {
        const clock = new Clock(simpleDelay)
        const time = clock.state.stages[0].time
        const delay = clock.state.stage[1]?.increment
        clock.push(0)
        jest.advanceTimersByTime(delay)
        clock.push(1)
        expect(clock.state.remainingTime).toStrictEqual([
          time[0],
          time[1] - 100,
        ])
      })
    })
  })

  // Static config registry: list / get / set / delete.
  describe('Configs', () => {
    it('can list config names', () => {
      const names = Clock.listConfigNames()
      expect(names.length).not.toBe(0)
      expect(names.every((n) => typeof n === 'string')).toBe(true)
    })
    it('can list config entries', () => {
      const entries = Clock.listConfigEntries()
      expect(entries.length).not.toBe(0)
      expect(
        entries.every(
          ([n, s]) => typeof n === 'string' && typeof s === 'object'
        )
      ).toBe(true)
    })
    it('can add/set a config', () => {
      const length = Clock.listConfigNames().length
      Clock.setConfig('test', [
        {
          time: [60_000, 60_000],
          mode: 'Hourglass',
          increment: 0,
        },
      ])
      expect(Clock.listConfigNames().length).toBe(length + 1)
      expect(Clock.listConfigNames().includes('test')).toBeTruthy()
      Clock.setConfig('test', [
        {
          time: [60_000, 60_000],
          mode: 'Fischer',
          increment: 0,
        },
      ])
      expect(Clock.listConfigNames().length).toBe(length + 1)
      expect(Clock.getConfig('test')?.stages[0].mode).toBe('Fischer')
    })
    it('can get a config', () => {
      expect(Clock.getConfig('Hourglass 1')).not.toBeUndefined()
    })
    it('returns undefined if no config', () => {
      expect(Clock.getConfig('Does not exist')).toBeUndefined()
    })
    it('can delete a config', () => {
      // NOTE(review): the final assertion queries 'Does not exist' rather
      // than 'test', so it does not actually verify the deletion — confirm
      // whether getConfig('test') was intended.
      Clock.setConfig('test', [
        {
          time: [60_000, 60_000],
          mode: 'Fischer',
          increment: 0,
        },
      ])
      Clock.deleteConfig('test')
      expect(Clock.getConfig('Does not exist')).toBeUndefined()
    })
  })
})
|
from projectq import MainEngine
from projectq.ops import H, X, Y, Z, Measure, CNOT, Swap
class ProjectqQuantumSimulator:
    """Thin wrapper around a ProjectQ ``MainEngine`` for a fixed-size register.

    Fixes the original implementation, which called
    ``engine.allocate_qubit()`` (a fresh one-qubit register) inside every
    gate method: gates never acted on the same physical qubits, and any
    target index greater than 0 raised ``IndexError``. The register is now
    allocated once and reused.
    """

    def __init__(self, num_qubits):
        # Number of qubits in the register.
        self.num_qubits = num_qubits
        self.engine = MainEngine()
        # Allocate the whole register up front; all gate methods index it.
        self.qubits = self.engine.allocate_qureg(num_qubits)

    def apply_single_qubit_gate(self, gate, target_qubit):
        """Apply 'H', 'X', 'Y' or 'Z' to ``self.qubits[target_qubit]``.

        Unknown gate names are silently ignored (matching the original
        if/elif chain, which had no else branch).
        """
        gates = {'H': H, 'X': X, 'Y': Y, 'Z': Z}
        if gate in gates:
            gates[gate] | self.qubits[target_qubit]

    def apply_two_qubit_gate(self, gate, control_qubit, target_qubit):
        """Apply 'CNOT' or 'SWAP' between two qubits of the register."""
        if gate == 'CNOT':
            CNOT | (self.qubits[control_qubit], self.qubits[target_qubit])
        elif gate == 'SWAP':
            Swap | (self.qubits[control_qubit], self.qubits[target_qubit])

    def measure_qubits(self, qubits):
        """Measure the qubits at the given indices and flush the engine.

        Generalized (backward-compatibly) from the original, which only
        measured ``qubits[0]`` and ``qubits[1]``; any iterable of indices
        now works, including the original two-element list.
        """
        for index in qubits:
            Measure | self.qubits[index]
        self.engine.flush()
# Example usage
# NOTE: this runs at import time and needs a working ProjectQ backend.
simulator = ProjectqQuantumSimulator(2)
simulator.apply_single_qubit_gate('H', 0)
simulator.apply_two_qubit_gate('CNOT', 0, 1)
simulator.measure_qubits([0, 1])
|
export { BaseStyle as Base } from './style';
|
<reponame>rebase-network/ckb-cache-layer<gh_stars>0
import { Module } from '@nestjs/common';
import { TypeOrmModule } from '@nestjs/typeorm';
import { ScheduleModule } from 'nest-schedule';
import { AppController } from './app.controller';
import { AppService } from './app.service';
import { configService } from './config/config.service';
import { BlockModule } from './block/block.module';
import { CellModule } from './cell/cell.module';
import { CkbModule } from './ckb/ckb.module';
import { AddressModule } from './address/address.module';
import { SyncstatModule } from './syncstat/syncstat.module';
// Root application module: wires the config-driven TypeORM connection,
// nest-schedule (used by the sync jobs), and the domain feature modules
// (blocks, cells, CKB RPC access, addresses, sync status).
@Module({
imports: [
TypeOrmModule.forRoot(configService.getTypeOrmConfig()),
ScheduleModule.register(),
BlockModule,
CellModule,
CkbModule,
AddressModule,
SyncstatModule
],
controllers: [AppController],
providers: [AppService],
})
export class AppModule {}
|
#!/bin/sh
# auto repeat command (for example xdotool command)
# Running the script once starts a repeat loop for the given command;
# running it again with the same command stops that loop (toggle below).
# Script name as invoked, interpolated into the help text.
bname=$(basename "$0")
USAGE=$(printf "%s" "\
Usage: $bname [OPTION...]
OPTIONS
-c, --command Command to execute
-h, --help Display help
-s, --sleep Seconds to sleep between executions (default 0.5)
EXAMPLE
$bname -c 'xdotool click 1' -s 1.0
")
get_opt() {
    # Parse command-line options into the globals used by the main body:
    #   command_string - command to repeat (-c/--command)
    #   sleep_time     - delay between runs (-s/--sleep, default 0.5)
    SHORT=c:hs:
    LONG=command:,help,sleep:
    # Abort with usage on malformed options; the original ignored getopt's
    # exit status and then looped over whatever partial output it produced.
    OPTIONS=$(getopt --options $SHORT --long $LONG --name "$0" -- "$@") || {
        echo "$USAGE" >&2
        exit 1
    }
    # PLACE FOR OPTION DEFAULTS
    sleep_time=0.5
    eval set -- "$OPTIONS"
    while true; do
        case "$1" in
            -c|--command)
                shift
                command_string="$1"
                ;;
            -h|--help)
                echo "$USAGE"
                exit 0
                ;;
            -s|--sleep)
                shift
                sleep_time=$1
                ;;
            --)
                shift
                break
                ;;
        esac
        shift
    done
}
get_opt "$@"

# A command is required; without this check the script toggled a
# meaningless "_repeat" cache entry and looped over an empty command.
if [ -z "$command_string" ]; then
    echo "error: no command specified (-c)" >&2
    echo "$USAGE" >&2
    exit 1
fi

# Fall back to ~/.cache when XDG_CACHE_HOME is unset; the original would
# try to create /repeat in that case.
DIR="${XDG_CACHE_HOME:-$HOME/.cache}/repeat"
mkdir -p "$DIR"
NAME=$(echo "$command_string" | sed "s/ /_/g")
CACHE="$DIR/$NAME"'_repeat'
# Toggle: if the marker file exists, another instance is already looping on
# this command — remove the marker (stopping that instance) and exit.
if [ -f "$CACHE" ]; then
    rm -f "$CACHE"
    notify-send -u low -t 1000 "[OFF] REPEAT" "'$command_string'"
    exit 0
else
    echo "$command_string" > "$CACHE"
    notify-send -u low -t 1000 "[ON] REPEAT" "'$command_string'"
fi
# Repeat until the marker file disappears (toggled off by a second run).
while [ -f "$CACHE" ]; do
    command_cached=$(cat "$CACHE")
    sh -c "$command_cached"
    sleep "$sleep_time"
done
|
package com.example.opensorcerer.ui.signup.fragments;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.text.Editable;
import android.view.KeyEvent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.MultiAutoCompleteTextView;
import androidx.annotation.NonNull;
import androidx.appcompat.widget.AppCompatMultiAutoCompleteTextView;
import androidx.fragment.app.Fragment;
import androidx.navigation.fragment.NavHostFragment;
import com.example.opensorcerer.databinding.FragmentSignupTagsBinding;
import com.example.opensorcerer.models.Tools;
import com.example.opensorcerer.models.User;
import com.example.opensorcerer.ui.main.MainActivity;
import org.jetbrains.annotations.NotNull;
import java.util.Arrays;
import java.util.List;
/**
* Fragment for adding interested categories and languages
*/
public class SignupTagsFragment extends Fragment {
/**
* Binder for View Binding
*/
private FragmentSignupTagsBinding mApp;
/**
* Fragment's context
*/
private Context mContext;
/**
* Newly created user for signup
*/
private User mNewUser;
/**
* Spanned length of the languages text view
*/
private int spannedLengthLanguages = 0;
/**
* Spanned length of the tags text view
*/
private int spannedLengthTags = 0;
/**
* Inflates the fragment's layout
*/
@Override
public View onCreateView(@NotNull LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
mApp = FragmentSignupTagsBinding.inflate(inflater, container, false);
return mApp.getRoot();
}
/**
* Sets up the fragment's methods
*/
public void onViewCreated(@NonNull View view, Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
mContext = getContext();
//Get the user created in the Role fragment
mNewUser = SignupTagsFragmentArgs.fromBundle(getArguments()).getUser();
setupButtonListeners();
setupChipInput(mApp.chipInputLanguages, Arrays.asList(Tools.getLanguages()));
setupChipInput(mApp.chipInputTags, Arrays.asList(Tools.getLanguages()));
}
/**
* Sets up the click listeners for the buttons
*/
private void setupButtonListeners() {
mApp.buttonFinish.setOnClickListener(v -> {
mNewUser.setLanguages(Arrays.asList(mApp.chipInputLanguages.getText().toString().split(",")));
mNewUser.setInterests(Arrays.asList(mApp.chipInputTags.getText().toString().split(",")));
navigateToMain();
});
mApp.buttonSkip.setOnClickListener(v -> navigateToMain());
mApp.buttonBack.setOnClickListener(v -> navigateBackward());
}
/**
* Navigates to the corresponding home activity depending on the user's role
*/
private void navigateToMain() {
Intent i = new Intent(mContext, MainActivity.class);
//Navigate to the selected home activity
startActivity(i);
requireActivity().finish();
}
/**
* Goes to the account details fragment
*/
private void navigateBackward() {
SignupTagsFragmentDirections.TagsToDetailsAction tagsToDetailsAction = SignupTagsFragmentDirections.tagsToDetailsAction(mNewUser);
NavHostFragment.findNavController(this)
.navigate(tagsToDetailsAction);
}
/**
* Sets up the text input to behave like a chip group
*/
private void setupChipInput(AppCompatMultiAutoCompleteTextView chipInput, List<String> recommendationItems) {
//Set the adapter
ArrayAdapter<String> adapter = new ArrayAdapter<>(mContext,
android.R.layout.simple_dropdown_item_1line, recommendationItems);
chipInput.setAdapter(adapter);
//Set the tokenizer to separate items by commas
chipInput.setTokenizer(new MultiAutoCompleteTextView.CommaTokenizer());
//Create a new token when a recommended item is selected
chipInput.setOnItemClickListener((parent, arg1, pos, id) -> tokenize(chipInput));
//Create a new token when a comma is typed
chipInput.setOnKeyListener((v, keyCode, event) -> {
if (keyCode == KeyEvent.KEYCODE_COMMA) {
tokenize(chipInput);
}
return true;
});
}
/**
* Creates a new chip from the last imputed word and adds it to the group
*/
private void tokenize(AppCompatMultiAutoCompleteTextView chipInput) {
//Get the spanned length depending on which chip input is being tokenized
int spannedLength = chipInput == mApp.chipInputLanguages ? spannedLengthLanguages : spannedLengthTags;
//Add a new chip to the input
Editable editable = Tools.addChip(mContext, chipInput.getEditableText(), spannedLength);
//Update the current length of the selected input
if (chipInput == mApp.chipInputLanguages) {
spannedLengthLanguages = editable.length();
} else {
spannedLengthTags = editable.length();
}
}
/**
* Resets the ViewBinder
*/
@Override
public void onDestroyView() {
super.onDestroyView();
mApp = null;
}
}
|
require File.dirname(__FILE__) + '/spec_helper'
# Specs for ReconnectingPid: a pid wrapper that buffers outgoing messages
# while (re)connecting, bounded by :max_backlog, :max_attempts and :timeout.
describe ReconnectingPid do
extend MockDescribe
before(:each) do
# Generous shared defaults; each example overrides only what it exercises.
@options = {
:max_backlog => 666,
:max_attempts => 666,
:timeout => 666,
:timer => Timers::NOOP
}
@connection = mock('connection')
@remote = mock('remote pid')
# We use allocate because initialize fires #connect before it is stubbed.
# See init_pid helper.
@pid = ReconnectingPid.allocate
@pid.stub!(:connect).and_return(@connection)
end
# Completes the two-phase construction: invokes #initialize on the
# pre-allocated pid with the merged options (after #connect is stubbed).
def init_pid(overrides = {})
params = {:local_pid => @pid, :remote_pid => @remote }.merge(@options).merge(overrides)
local = params.delete(:local_pid)
remote = params.delete(:remote_pid)
local.__send__(:initialize, remote, params)
end
# NOTE(review): @pid.send(...) below is presumably ReconnectingPid's own
# message-send API shadowing Object#send — confirm, since the examples rely
# on it queueing messages rather than invoking methods named :quick etc.
mock_describe "with normal connection" do
# Queued messages are flushed to the remote once connected, despite two
# intermediate connection failures.
@remote.should_receive(:send).with(:quick, :brown).ordered
@remote.should_receive(:send).with(:fox, :jumped).ordered
@pid.should_receive(:connect).exactly(3).times.with(@remote).and_return{|p| @connection }
init_pid(:max_backlog => 2, :max_attempts => 3)
@pid.send(:quick, :brown)
@pid.send(:fox, :jumped)
@pid.connection_failed(@connection)
@pid.connection_failed(@connection)
@pid.connected(@remote)
end
mock_describe "with backlog error" do
# A third queued message exceeds max_backlog (2) and raises BacklogError.
@remote.should_not_receive(:send)
@pid.should_receive(:connect).once.with(@remote).and_return{|p| @connection }
@pid.should_receive(:on_raise).once.with(@pid, an_instance_of(ReconnectingPid::BacklogError))
init_pid(:max_backlog => 2)
@pid.send(:quick, :brown)
@pid.send(:fox, :jumped)
@pid.send(:this_should_overflow_the_backlog)
end
mock_describe "with exceeding max. attempts" do
# Two failures exhaust max_attempts (2) and raise AttemptsError.
@remote.should_not_receive(:send)
@pid.should_receive(:connect).exactly(3).times.with(@remote).and_return{|p| @connection }
@pid.should_receive(:on_raise).once.with(@pid, an_instance_of(ReconnectingPid::AttemptsError))
init_pid(:max_attempts => 2)
@pid.send(:quick, :brown)
@pid.connection_failed(@connection)
@pid.connection_failed(@connection)
end
mock_describe "with exceeding timeout" do
# Timer ticks past the timeout while reconnecting and raise TimeoutError.
@remote.should_not_receive(:send)
@pid.should_receive(:connect).exactly(3).times.with(@remote).and_return{|p| @connection }
@pid.should_receive(:on_raise).once.with(@pid, an_instance_of(ReconnectingPid::TimeoutError))
init_pid(:max_attempts => 3)
@pid.send(:quick, :brown)
@pid.connection_failed(@connection)
@pid.connection_failed(@connection)
@pid.timer_action
@pid.timer_action
end
end
|
#!/bin/bash
set -e
# Print a message in bold dark-gray.
function echoc {
echo -e "\033[1;30m$1\033[0m"
}
NETWORK=https://net.ton.dev
COMPILE_DIR=./build/compile
DEPLOY_DIR=./build/deploy
SERVICE_DISCOVERY_APP=App
# The contract's raw address is parsed from the genaddr log written at deploy time.
# shellcheck disable=SC2002
SERVICE_DISCOVERY_ADDR=$(cat "$DEPLOY_DIR/${SERVICE_DISCOVERY_APP}_genaddr.log" | grep "Raw address:" | cut -d ' ' -f 3)
# Register a service (address + JSON tag array) in the discovery contract.
# Usage: add <service_addr> <json_tag_array>
function add {
SERVICE_ADDR=$1
SERVICE_TAGS=$2
echoc "Add service $SERVICE_ADDR to $SERVICE_DISCOVERY_ADDR"
# shellcheck disable=SC2002
tonos-cli --url $NETWORK call "$SERVICE_DISCOVERY_ADDR" add "{\"service\":{\"addr\":\"$SERVICE_ADDR\",\"tags\":$SERVICE_TAGS}}" --sign "$DEPLOY_DIR/$SERVICE_DISCOVERY_APP.keys.json" --abi "$COMPILE_DIR/$SERVICE_DISCOVERY_APP.abi.json"
echoc "Done."
}
# Fetch and print the full service index (read-only `run`, no signing).
function all {
echoc "Fetch index"
# shellcheck disable=SC2002
tonos-cli --url $NETWORK run "$SERVICE_DISCOVERY_ADDR" all {} --abi "$COMPILE_DIR/$SERVICE_DISCOVERY_APP.abi.json"
echoc "Done."
}
# Query the discovery contract for services matching the given JSON tag array.
# Usage: find <json_tag_array>
function find {
    SERVICE_TAGS=$1
    # Fixed copy-pasted message: the original printed "Add service
    # $SERVICE_ADDR ..." with SERVICE_ADDR unset in this function.
    echoc "Find services with tags $SERVICE_TAGS in $SERVICE_DISCOVERY_ADDR"
    # shellcheck disable=SC2002
    tonos-cli --url $NETWORK run "$SERVICE_DISCOVERY_ADDR" find "{\"tags\":$SERVICE_TAGS}" --abi "$COMPILE_DIR/$SERVICE_DISCOVERY_APP.abi.json"
    echoc "Done."
}
# Remove a service from the discovery contract by address.
# Usage: remove <service_addr>
function remove {
    SERVICE_ADDR=$1
    # Fixed copy-pasted message (the original said "Add service ... to ...").
    echoc "Remove service $SERVICE_ADDR from $SERVICE_DISCOVERY_ADDR"
    # shellcheck disable=SC2002
    tonos-cli --url $NETWORK call "$SERVICE_DISCOVERY_ADDR" remove "{\"addr\":\"$SERVICE_ADDR\"}" --sign "$DEPLOY_DIR/$SERVICE_DISCOVERY_APP.keys.json" --abi "$COMPILE_DIR/$SERVICE_DISCOVERY_APP.abi.json"
    echoc "Done."
}
# Dispatch on the first CLI argument.
case $1 in
    add)
        add "$2" "$3"
        ;;
    all)
        all
        ;;
    find)
        find "$2"
        ;;
    remove)
        remove "$2"
        ;;
    *)
        # Fail loudly on an unknown or missing command instead of silently
        # doing nothing (the original case had no default branch).
        echoc "Usage: $0 {add <addr> <tags>|all|find <tags>|remove <addr>}"
        exit 1
        ;;
esac
|
#!/usr/bin/env bash
source .config
# Print CLI help. The heredoc body is user-facing text — keep comments
# outside of it.
usage() {
cat << EOF
usage: $ aoc.sh <command> [<args>]
Commands:
help - display this help
$ aoc.sh help
gen - fetch input and generate boilerplate
$ aoc.sh gen [YEAR] [DAY]
if no YEAR or DAY are specified then today is used
$ aoc.sh gen
to fetch input and generate boilerplate for a specific challenge run:
$ aoc.sh [YEAR] [DAY]
for example, day 4 of 2020
$ aoc.sh 2020 4
lb - display a leaderboard
$ aoc.sh lb [LEADERBOARD] [YEAR]
show leaderboard DEFAULT_LEADERBOARD from .config or
$ aoc.sh lb
show a specfic leaderboard
$ aoc.sh lb 1234567
show a specfic leaderboard for a specific year
$ aoc.sh lb 1234567 2021
by default the current year is used
EOF
}
# Render a private leaderboard as a TSV table (score, stars, name).
# Responses are cached under $AOC_DIR for $AOC_CACHE_TIME seconds to avoid
# hammering the AoC servers.
# Usage: show_leaderboard [LEADERBOARD_ID] [YEAR]
show_leaderboard() {
    leaderboard="$1"
    if [[ -z "$leaderboard" ]]; then
        leaderboard="$AOC_DEFAULT_LEADERBOARD"
    fi
    year="$2"
    if [[ -z "$year" ]]; then
        year="$(date +%Y)"
    fi
    leaderboard_url="https://adventofcode.com/${year}/leaderboard/private/view/${leaderboard}.json"
    leaderboard_file="${AOC_DIR}/${year}-${leaderboard}.json"
    mkdir -p "$AOC_DIR"
    if [ ! -f "$leaderboard_file" ]; then
        curl --cookie "session=${AOC_COOKIE}" -s "$leaderboard_url" > "$leaderboard_file"
    else
        # Re-fetch only when the cached copy is older than AOC_CACHE_TIME.
        # Quoted the filename so cache paths with spaces don't break `date -r`.
        last_updated="$(($(date +%s) - $(date -r "$leaderboard_file" +%s)))"
        if [[ "$last_updated" -gt "$AOC_CACHE_TIME" ]]; then
            curl --cookie "session=${AOC_COOKIE}" -s "$leaderboard_url" > "$leaderboard_file"
        fi
    fi
    # Sort members by local score (descending) and emit a TSV table.
    jq -r '["SCORE", "STARS", "NAME"],
    ([
    .members[]]
    | sort_by(.local_score)
    | reverse[]
    | [
    .local_score,
    .stars,
    if .name then .name else ("(anonymous user #" + .id + ")") end
    ]
    )
    | @tsv' "$leaderboard_file"
}
# Fetch the puzzle page and input for a given year/day and create a Python
# solution file from the boilerplate template below.
# Usage: generate [auto | YEAR DAY]   (no args behaves like "auto")
generate() {
    tmpfile=$(mktemp)
    # No arguments means "today's puzzle", as promised by the usage text.
    # The original shipped this validation inside a heredoc piped to
    # /dev/null, i.e. dead code, and then failed with a misleading error.
    if [[ -z "$1" ]]; then
        set -- auto
    fi
    if [[ "$1" == "auto" ]]; then
        if [[ "$(date +%m)" == "12" ]]; then
            year=$(date +%Y)
            day=$(date +%-d)
            # Compare the day of the month, not the month: the original
            # tested `date +%m -gt 25`, which can never be true.
            if [[ "$day" -gt "25" ]]; then
                >&2 echo "error: cannot use auto when it is after 25th december"
                exit
            fi
        else
            >&2 echo "error: cannot use auto when it isn't december"
            exit
        fi
    else
        year="$1"
        day="$2"
        if [[ -z "$day" ]]; then
            # Fixed inverted message: this branch means the day is missing.
            >&2 echo "error: year specified, but no day"
            >&2 usage
            exit
        fi
    fi
    day_fmt=$(printf "%02d" "${day}")
    challenge_url="https://adventofcode.com/${year}/day/${day}"
    input_url="${challenge_url}/input"
    input_dir="./inputs/${year}"
    sol_dir="./solutions/${year}"
    input_file="./${input_dir}/${day_fmt}-input"
    sol_file="./${sol_dir}/day_${day_fmt}.py"
    curl -s "${challenge_url}" > "${tmpfile}"
    title=$(grep day-desc "${tmpfile}" | sed -r 's/.+--- (Day [0-9]+: .+) ---.+/\1/g')
    if [ -z "${title}" ]; then
        # The original interpolated an undefined ${url} here and then kept
        # going, generating a bogus file; report the real URL and stop.
        >&2 echo "error: challenge at URL ${challenge_url} not found"
        exit
    fi
    if [ -z "$AOC_COOKIE" ]; then
        >&2 echo "no AOC_COOKIE set. cannot download input automatically"
    else
        mkdir -p "${input_dir}"
        curl -s --cookie "session=${AOC_COOKIE}" "${input_url}" > "${input_file}"
    fi
    if test -f "$sol_file"; then
        >&2 echo "error: file ${sol_file} already exists. i won't override it"
        exit
    fi
    mkdir -p "${sol_dir}"
    cat << EOF > "${sol_file}"
#!/usr/bin/env python
"""
Puzzle Title: AoC ${year} ${title}
Puzzle Link: ${challenge_url}
Solution Author: ${AOC_AUTHOR}
Solution License: MIT
"""
import fileinput
def parse_input():
    data = []
    for line in fileinput.input():
        if line.strip().isnumeric():
            data.append(int(line))
    return data
def solve_part1(data):
    ...
def solve_part2(data):
    ...
def main():
    data = parse_input()
    part1_ans = solve_part1(data)
    print(f"Part 1: {part1_ans}")
    part2_ans = solve_part2(data)
    print(f"Part 2: {part2_ans}")
if __name__ == "__main__":
    main()
EOF
    chmod +x "${sol_file}"
}
# Entry point: first argument selects the command, the rest are forwarded.
cmd="$1"
if [[ -z "$cmd" ]]; then
    >&2 echo "error: no command specified"
    >&2 usage
    exit
fi
shift
case "$cmd" in
    "help")
        usage
        exit
        ;;
    "lb")
        # Quote "$@" so arguments survive word splitting/globbing.
        show_leaderboard "$@"
        ;;
    "gen")
        generate "$@"
        ;;
    *)
        >&2 echo "error: invalid command ${cmd}"
        >&2 usage
        exit
        ;;
esac
|
# Process one audio file into a Kaldi data directory:
# decode to 8 kHz 16-bit signed raw, re-encode as a 16 kHz WAV, then fix
# and validate the data dir.
# Usage: process_audio <input_audio> <data_dir>
process_audio() {
    input_path=$1
    output_path=$2
    # Step 1: convert the input to 16-bit signed raw audio at 8000 Hz.
    # Step 2: re-encode the raw stream as a 16 kHz WAV file.
    # The original first command ended in `- -`; sox parses the stray second
    # "-" as an (invalid) effect name. Paths are quoted so spaces work.
    sox "$input_path" -t raw -r 8000 -e signed-integer -b 16 - | \
        sox -r 8000 -e signed-integer -b 16 -t raw - -r 16000 -t wav "$output_path/processed_audio.wav"
    # Kaldi bookkeeping: rebuild spk2utt from utt2spk, then fix + validate.
    utils/utt2spk_to_spk2utt.pl "$output_path/utt2spk" > "$output_path/spk2utt"
    utils/fix_data_dir.sh "$output_path"
    utils/validate_data_dir.sh --no-text --no-feats "$output_path"
}
|
package com.waflo.cooltimediaplattform.backend.beans;
/**
 * Kinds of media handled by the platform, each with a German display label.
 */
public enum MediaType {
    MOVIE("Film"), AUDIO("Audio"), DOCUMENT("Dokument");

    /** Human-readable display label; fixed at construction time. */
    private final String label;

    MediaType(String l) {
        this.label = l;
    }

    public String getLabel() {
        return label;
    }

    /**
     * No-op, as in the original: enum labels are immutable. The setter is
     * kept only so bean-style callers keep compiling.
     */
    public void setLabel(String label) {
        // no-op
    }
}
|
<gh_stars>0
// Handler for a table's rows-per-page selector: store the numeric page
// size, reset to the first page, then reload the data once state is set.
export default function handleChangeRowsPerPage(event) {
  const pageSize = Number(event.target.value);
  this.setState({ pageSize, pageNumber: 1 }, () => {
    this.getData();
  });
}
|
import { Component,ViewContainerRef,OnInit, AfterViewInit } from '@angular/core';
import {ChangeService} from '../service/change.service';
import {AuthenticateService} from '../service/authenticate.service';
import { Modal } from 'ngx-modialog/plugins/bootstrap';
import {AngularFireDatabase} from 'angularfire2/database';
import { ToastsManager } from 'ng2-toastr/ng2-toastr';
import { Ng4LoadingSpinnerService } from 'ng4-loading-spinner';
import { AmChartsService, AmChart } from "@amcharts/amcharts3-angular";
import { NotificationChange } from './NotificationChange';
import { AngularFirestore, AngularFirestoreCollection, AngularFirestoreDocument } from 'angularfire2/firestore';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/operator/map';
@Component({
moduleId: module.id,
selector: 'change',
templateUrl: 'change.component.html'
})
export class ChangeComponent implements OnInit {
//--ATTRBIUTS
Employe : {codePersonne : null,username:null,agence: {codeAgence:0}};
From = 'TND';
To = 'EUR';
Amount = 1;
AmountConverted = 0;
notifCol: AngularFirestoreCollection<NotificationChange>;
notifications: Observable<NotificationChange[]>;
dataCurrency = [
{code:'AED', name:'United Arab Emirates Dirham'},
{code : 'AFN', name: 'Afghan Afghani'},
{code : 'ALL', name: 'Albanian Lek'},
{code : 'AMD' , name : 'Armenian Dram' },
{code : 'ANG' , name : 'Netherlands Antillean Guilder'},
{code : 'AOA' , name: 'Angolan Kwanza'},
{code : 'ARS', name : 'Argentine Peso'},
{code : 'AUD', name : 'Australian Dollar'},
{code : 'AWG', name : 'Aruban Florin'},
{code : 'AZN', name : 'Azerbaijani Manat'},
{code : 'BAM', name : 'Bosnia-Herzegovina Convertible Mark'},
{code : 'BBD', name : 'Barbadian Dollar'},
{code : 'BDT', name : 'Bangladeshi Taka'},
{code : 'BGN', name : 'Bulgarian Lev'},
{code : 'BHD', name : 'Bahraini Dinar'},
{code : 'BIF', name : 'Burundian Franc'},
{code : 'BITGOLD', name : 'BitGOLD'},
{code : 'BMD', name : 'Bermudan Dollar'},
{code : 'BND', name : 'Brunei Dollar'},
{code : 'BOB', name : 'Bolivian Boliviano'},
{code : 'BRL', name : 'Brazilian Real'},
{code : 'BSD', name : 'Bahamian Dollar'},
{code : 'BTN', name : 'Bhutanese Ngultrum'},
{code : 'BWP', name : 'Botswanan Pula'},
{code : 'BYR', name : 'Belarusian Ruble (pre-2016)'},
{code : 'BZD', name : 'Belize Dollar'},
{code : 'CAD', name : 'Canadian Dollar'},
{code : 'CDF', name : 'Congolese Franc'},
{code : 'CHF', name : 'Swiss Franc'},
{code : 'CLF', name : 'Chilean Unit of Account (UF)'},
{code : 'CLP', name : 'Chilean Peso'},
{code : 'CNY', name : 'Chinese Yuan'},
{code : 'COP', name : 'Colombian Peso'},
{code : 'CRC', name : 'Costa Rican Colon'},
{code : 'CUP', name : 'Cuban Peso'},
{code : 'CVE', name : 'Cape Verdean Escudo'},
{code : 'CZK', name : 'Czech Republic Koruna'},
{code : 'DJF', name : 'Djiboutian Franc'},
{code : 'DKK', name : 'Danish Krone'},
{code : 'DOP', name : 'Dominican Peso'},
{code : 'DZD', name : 'Algerian Dinar'},
{code : 'EEK', name : 'Estonian Kroon'},
{code : 'EGP', name : 'Egyptian Pound'},
{code : 'ERN', name : 'Eritrean Nakfa'},
{code : 'ETB', name : 'Ethiopian Birr'},
{code : 'EUR', name : 'Euro'},
{code : 'FJD', name : 'Fijian Dollar'},
{code : 'FKP', name : 'Falkland Islands Pound'},
{code : 'GBP', name : 'British Pound Sterling'},
{code : 'GEL', name : 'Georgian Lari'},
{code : 'GHS', name : 'Ghanaian Cedi'},
{code : 'GIP', name : 'Gibraltar Pound'},
{code : 'GMD', name : 'Gambian Dalasi'},
{code : 'GNF', name : 'Guinean Franc'},
{code : 'GTQ', name : 'Guatemalan Quetzal'},
{code : 'GYD', name : 'Guyanaese Dollar'},
{code : 'HKD', name : 'Hong Kong Dollar'},
{code : 'HNL', name : 'Honduran Lempira'},
{code : 'HRK', name : 'Croatian Kuna'},
{code : 'HTG', name : 'Haitian Gourde'},
{code : 'HUF', name : 'Hungarian Forint'},
{code : 'IDR', name : 'Indonesian Rupiah'},
{code : 'ILS', name : 'Israeli New Sheqel'},
{code : 'INR', name : 'Indian Rupee'},
{code : 'IQD', name : 'Iraqi Dinar'},
{code : 'IRR', name : 'Iranian Rial'},
{code : 'ISK', name : 'Icelandic Krona'},
{code : 'JEP', name : 'Jersey Pound'},
{code : 'JMD', name : 'Jamaican Dollar'},
{code : 'JOD', name : 'Jordanian Dinar'},
{code : 'JPY', name : 'Japanese Yen'},
{code : 'KES', name : 'Kenyan Shilling'},
{code : 'KGS', name : 'Kyrgystani Som'},
{code : 'KHR', name : 'Cambodian Riel'},
{code : 'KMF', name : 'Comorian Franc'},
{code : 'KPW', name : 'North Korean Won'},
{code : 'KRW', name : 'South Korean Won'},
{code : 'KWD', name : 'Kuwaiti Dinar'},
{code : 'KYD', name : 'Cayman Islands Dollar'},
{code : 'KZT', name : 'Kazakhstani Tenge'},
{code : 'LAK', name : 'Laotian Kip'},
{code : 'LBP', name : 'Lebanese Pound'},
{code : 'LKR', name : 'Sri Lankan Rupee'},
{code : 'LRD', name : 'Liberian Dollar'},
{code : 'LSL', name : 'Lesotho Loti'},
{code : 'LTL', name : 'Lithuanian Litas'},
{code : 'LVL', name : 'Latvian Lats'},
{code : 'LYD', name : 'Libyan Dinar'},
{code : 'MAD', name : '<NAME>'},
{code : 'MDL', name : 'Moldovan Leu'},
{code : 'MGA', name : 'Malagasy Ariary'},
{code : 'MKD', name : 'Macedonian Denar'},
{code : 'MMK', name : 'Myanma Kyat'},
{code : 'MNT', name : 'Mongolian Tugrik'},
{code : 'MOP', name : 'Macanese Pataca'},
{code : 'MRO', name : 'Mauritanian Ouguiya'},
{code : 'MTL', name : 'Maltese Lira'},
{code : 'MUR', name : 'Mauritian Rupee'},
{code : 'MVR', name : 'Maldivian Rufiyaa'},
{code : 'MWK', name : 'Malawian Kwacha'},
{code : 'MXN', name : 'Mexican Peso'},
{code : 'MZN', name : 'Mozambican Metical'},
{code : 'NAD', name : 'Namibian Dollar'},
{code : 'NGN', name : 'Nigerian Naira'},
{code : 'NIO', name : 'Nicaraguan Cordoba'},
{code : 'NOK', name : 'Norwegian Krone'},
{code : 'NPR', name : 'Nepalese Rupee'},
{code : 'NZD', name : 'New Zealand Dollar'},
{code : 'OMR', name : 'Omani Rial'},
{code : 'PAB', name : 'Panamanian Balboa'},
{code : 'PEN', name : 'Peruvian Nuevo Sol'},
{code : 'PGK', name : 'Papua New Guinean Kina'},
{code : 'PHP', name : 'Philippine Peso'},
{code : 'PKR', name : 'Pakistani Rupee'},
{code : 'PLN', name : 'Polish Zloty'},
{code : 'PYG', name : 'Paraguayan Guarani'},
{code : 'QAR', name : 'Qatari Rial'},
{code : 'RON', name : 'Romanian Leu'},
{code : 'RSD', name : 'Serbian Dinar'},
{code : 'RUB', name : 'Russian Ruble'},
{code : 'RUR', name : 'Old Russian Ruble'},
{code : 'RWF', name : 'Rwandan Franc'},
{code : 'SAR', name : 'Saudi Riyal'},
{code : 'SBDf', name : 'Solomon Islands Dollar'},
{code : 'SCR', name : 'Seychellois Rupee'},
{code : 'SDG', name : 'Sudanese Pound'},
{code : 'SEK', name : 'Swedish Krona'},
{code : 'SGD', name : 'Singapore Dollar'},
{code : 'SHP', name : 'Saint Helena Pound'},
{code : 'SLL', name : 'Sierra Leonean Leone'},
{code : 'SOS', name : 'Somali Shilling'},
{code : 'SRD', name : 'Surinamese Dollar'},
{code : 'STD', name : 'Sao Tome and Principe Dobra'},
{code : 'SVC', name : 'Salvadoran Colon'},
{code : 'SYP', name : 'Syrian Pound'},
{code : 'SZL', name : 'Swazi Lilangeni'},
{code : 'THB', name : 'Thai Baht'},
{code : 'TJS', name : 'Tajikistani Somoni'},
{code : 'TMT', name : 'Turkmenistani Manat'},
{code : 'TND', name : 'Tunisian Dinar'},
{code : 'TOP', name : "Tongan Tongan Pa'anga"},
{code : 'TRY', name : 'Turkish Lira'},
{code : 'TTD', name : 'Trinidad and Tobago Dollar'},
{code : 'TWD', name : 'New Taiwan Dollar'},
{code : 'TZS', name : 'Tanzanian Shilling'},
{code : 'UAH', name : 'Ukrainian Hryvnia'},
{code : 'UGX', name : 'Ugandan Shilling'},
{code : 'USD', name : 'United States Dollar'},
{code : 'USDE', name : 'USDe'},
{code : 'UYU', name : 'Uruguayan Peso'},
{code : 'UZS', name : 'Uzbekistan Som'},
{code : 'VEF', name : 'Venezuelan Bolivar Fuerte'},
{code : 'VND', name : 'Vietnamese Dong'},
{code : 'VUV', name : 'Vanuatu Vatu'},
{code : 'WST', name : 'Samoan Tala'},
{code : 'XAF', name : 'CFA Franc BEAC'},
{code : 'XAG', name : 'Silver Ounce'},
{code : 'XAU', name : 'Gold Ounce'},
{code : 'XCD', name : 'East Caribbean Dollar'},
{code : 'XDR', name : 'Special Drawing Rights'},
{code : 'XOF', name : 'CFA Franc BCEAO'},
{code : 'XPF', name : 'CFP Franc'},
{code : 'YER', name : 'Yemeni Rial'},
{code : 'ZAR', name : 'South African Rand'},
{code : 'ZMK', name : 'Zambian Kwacha (pre-2013)'},
{code : 'ZMW', name : 'Zambian Kwacha'},
{code : 'ZWL', name : 'Zimbabwean Dollar'}
]
From_Currency_Code = null;
From_Currency_Name = null;
To_Currency_Code = null;
To_Currency_Name = null;
Exchange_Rate = null;
SelectedTypeChange = 0;
SelectedCodeChange = null;
Change = {
Identif:null,
Montant:0,
AdresseP:null,
Destination:null,
NomP:null,
PrenomP:null,
FromCurrencyCode :null,
FromCurrencyName :null,
ToCurrencyCode:null,
ToCurrencyName:null,
ExchangeRate :0,
MontantConverted:0,
ChangeType :0,
CodeEmploye:0,
IdAgence:0
};
ChangeTable = [];
chartChangeD : AmChart;
//-- END ATTRIBUTS
//-- CONSTRUCTOR
// Injects the services used by the component (change REST API, auth,
// modal dialogs, Firebase RTDB, toasts, spinner, amCharts, Firestore) and
// immediately shows the loading spinner until ngOnInit loads the employee.
constructor(
private chService: ChangeService,
private authService: AuthenticateService,
private modal: Modal,
private firebase:AngularFireDatabase,
private toastr: ToastsManager,
private spinnerService: Ng4LoadingSpinnerService,
private vcr: ViewContainerRef,
private AmCh: AmChartsService,
private afs: AngularFirestore
) {
// toastr needs the root view container to render its toasts.
this.toastr.setRootViewContainerRef(vcr);
this.spinnerService.show();
}
//-- END CONSTRUCTOR
//-- INITIALIZING EMPLOYE DATA
ngOnInit() {
// Resolve the logged-in username, then load the full employee record;
// the change list and currency graph both depend on Employe being set.
this.authService.getUsernameInfo$().subscribe(
res => {
this.authService.getUserInfo$(res.data.userName).subscribe(
resp => {
this.Employe = resp;
this.GetListChange();
this.getCurrencyGraph();
}
);
});
}
//-- END INITIALIZING EMPLOYE DATA
/*async ngAfterViewInit() {
await this.loadScript('../../../assets/js/plugins/jquery/jquery.min.js');
await this.loadScript("../../../assets/js/plugins/jquery/jquery-ui.min.js");
await this.loadScript("../../../assets/js/plugins/bootstrap/bootstrap.min.js");
await this.loadScript("../../../assets/js/plugins/icheck/icheck.min.js");
await this.loadScript("../../../assets/js/plugins/mcustomscrollbar/jquery.mCustomScrollbar.min.js");
await this.loadScript("../../../assets/js/plugins/smartwizard/jquery.smartWizard-2.0.min.js");
await this.loadScript("../../../assets/js/plugins/scrolltotop/scrolltopcontrol.js");
await this.loadScript("../../../assets/js/plugins/rickshaw/d3.v3.js");
await this.loadScript("../../../assets/js/plugins/rickshaw/rickshaw.min.js");
await this.loadScript("../../../assets/js/plugins/bootstrap/bootstrap-datepicker.js");
await this.loadScript("../../../assets/js/plugins/bootstrap/bootstrap-timepicker.min.js");
await this.loadScript("../../../assets/js/plugins/bootstrap/bootstrap-colorpicker.js");
await this.loadScript("../../../assets/js/plugins/bootstrap/bootstrap-file-input.js");
await this.loadScript("../../../assets/js/plugins/bootstrap/bootstrap-select.js");
await this.loadScript("../../../assets/js/plugins/tagsinput/jquery.tagsinput.min.js");
await this.loadScript("../../../assets/js/plugins/owl/owl.carousel.min.js");
await this.loadScript("../../../assets/js/plugins/knob/jquery.knob.min.js");
await this.loadScript("../../../assets/js/plugins/moment.min.js");
await this.loadScript("../../../assets/js/plugins/daterangepicker/daterangepicker.js");
await this.loadScript("../../../assets/js/plugins/summernote/summernote.js");
await this.loadScript("../../../assets/js/plugins.js");
await this.loadScript("../../../assets/js/actions.js");
await this.loadScript("../../../assets/js/demo_dashboard.js");
}
private loadScript(scriptUrl: string) {
return new Promise((resolve, reject) => {
const scriptElement = document.createElement('script')
scriptElement.src = scriptUrl
scriptElement.onload = resolve
document.body.appendChild(scriptElement)
})
}*/
//-- NG CHANGE CHECKBOX
// Toggles the exchange-direction flag between 0 and 1 when the checkbox
// state changes.
SelectedTypeChangeFunction(){
this.SelectedTypeChange = this.SelectedTypeChange == 0 ? 1 : 0;
}
//-- END NG CHANGE CHECKBOX
//-- ADD CHANGE OPERATION
// Asks for confirmation, then persists the currency operation via the
// change service and pushes a notification document to Firestore so other
// clients see the operation in real time. Cancelling the dialog lands in
// the .catch branch and shows "Transaction Canceled".
AddDeviseOperation(){
// NOTE(review): dialogRef is never used — the dialog is driven entirely
// through the promise chain below.
const dialogRef = this.modal.confirm()
.size('sm')
.cancelBtn("No")
.okBtn("Yes")
.isBlocking(true)
.title('Confirmation Alert')
.body(`
<p>Are you sure you want to confirm this Currency Operation ?</p>
`)
.open().result.then((dialog: any) =>
{
// Stamp the operation with the current employee and agency.
this.Change.CodeEmploye = this.Employe.codePersonne;
this.Change.IdAgence = this.Employe.agence.codeAgence;
this.chService.AddDeviseOperation(this.Change).subscribe(
data => {
// idChange == 0 signals a backend failure with a message to show.
if(data.idChange == 0){
this.showError(data.messageResult);
}else{
this.showValid(data.messageResult);
this.GetListChange();
//-- PUSHING DATA INTO FIREBASE
var dateN = new Date();
var dateString = dateN.getUTCHours()+':'+dateN.getUTCMinutes()+':'+dateN.getUTCSeconds();
this.afs.collection('notification').add({'typeChange':this.Change.ChangeType,'nomUtilisateur':this.Employe.username,'montant':this.Change.Montant,'montantConvertie':this.Change.MontantConverted,'fromCurrCode':this.Change.FromCurrencyCode,'toCurrCode':this.Change.ToCurrencyCode,'typeOperation':'Change','typeNotification' : 'EMPLOYE', 'idAgence': this.Employe.agence.codeAgence,'date':dateString});
//-- END PUSHING DATA INTO FIREBASE
}
}
)
})
.catch((err: any) => {
this.showError("Transaction Canceled");
});
}
//-- END ADD CHANGE OPERATION
// Converts MontantC using the live exchange-rate service and fills the
// Change model. SelectedTypeChange == 1 means TND -> selected currency
// (a "sell"), otherwise selected currency -> TND (a "buy", which also
// clears the address/destination fields only used for sells).
// NOTE(review): the response keys ("Realtime Currency Exchange Rate",
// "1. From_Currency Code", ...) match the Alpha Vantage CURRENCY_EXCHANGE_RATE
// payload — confirm the backing service really proxies that API.
GetAmountConverted(MontantC){
if(MontantC == null || MontantC <= 0 || this.SelectedCodeChange == null){
this.showError("Make sure all Fields are not empty !");
}else{
if(this.SelectedTypeChange == 1){
this.chService.ConvertisseurDevise("TND",this.SelectedCodeChange).subscribe(
res => {
this.Change.ChangeType = 1;
this.Change.FromCurrencyCode = res["Realtime Currency Exchange Rate"]["1. From_Currency Code"];;
this.Change.FromCurrencyName = res["Realtime Currency Exchange Rate"]["2. From_Currency Name"];
this.Change.ToCurrencyCode = res["Realtime Currency Exchange Rate"]["3. To_Currency Code"];
this.Change.ToCurrencyName = res["Realtime Currency Exchange Rate"]["4. To_Currency Name"];
this.Change.ExchangeRate = res["Realtime Currency Exchange Rate"]["5. Exchange Rate"];
this.Change.MontantConverted = parseFloat(res["Realtime Currency Exchange Rate"]["5. Exchange Rate"])*MontantC;
}
)
}else{
this.chService.ConvertisseurDevise(this.SelectedCodeChange,"TND").subscribe(
res => {
this.Change.ChangeType = 0;
this.Change.FromCurrencyCode = res["Realtime Currency Exchange Rate"]["1. From_Currency Code"];;
this.Change.FromCurrencyName = res["Realtime Currency Exchange Rate"]["2. From_Currency Name"];
this.Change.ToCurrencyCode = res["Realtime Currency Exchange Rate"]["3. To_Currency Code"];
this.Change.ToCurrencyName = res["Realtime Currency Exchange Rate"]["4. To_Currency Name"];
this.Change.ExchangeRate = res["Realtime Currency Exchange Rate"]["5. Exchange Rate"];
this.Change.MontantConverted = parseFloat(res["Realtime Currency Exchange Rate"]["5. Exchange Rate"])*MontantC;
// Buy direction: the sell-only destination fields do not apply.
this.Change.AdresseP = null;
this.Change.Destination = null;
}
)
}
}
}
// Places-autocomplete callback: store the selected formatted address as the
// operation's destination.
handleAddressChange(address) {
this.Change.Destination = address.formatted_address;
}
//-- CONVERTING CURRENCY
ConvertDevise(){
  // Converts this.Amount from this.From to this.To and shows the result
  // in a blocking modal dialog. Rejects empty/non-positive amounts.
  if (this.Amount <= 0 || this.Amount == null) {
    this.showError("Empty Amount !");
    return;
  }
  this.chService.ConvertisseurDevise(this.From, this.To).subscribe(res => {
    const rate = res["Realtime Currency Exchange Rate"];
    this.From_Currency_Code = rate["1. From_Currency Code"];
    this.From_Currency_Name = rate["2. From_Currency Name"];
    this.To_Currency_Code = rate["3. To_Currency Code"];
    this.To_Currency_Name = rate["4. To_Currency Name"];
    this.Exchange_Rate = rate["5. Exchange Rate"];
    this.AmountConverted = parseFloat(rate["5. Exchange Rate"]) * this.Amount;
    // Summary dialog listing the conversion details.
    const dialogRef = this.modal.alert()
      .size('sm')
      .okBtn("Close")
      .isBlocking(true)
      .title('Realtime Currency Exchange Rate')
      .body(`
<!-- LIST GROUP WITH BADGES -->
<div class="panel panel-default">
<div class="panel-body">
<ul class="list-group border-bottom">
<li class="list-group-item">From Code<span class="badge badge-info">${this.From_Currency_Code}</span></li>
<li class="list-group-item">From Name<span class="badge badge-danger">${this.From_Currency_Name}</span></li>
<li class="list-group-item">To Code<span class="badge badge-info">${this.To_Currency_Code}</span></li>
<li class="list-group-item">To Name<span class="badge badge-danger">${this.To_Currency_Name}</span></li>
<li class="list-group-item">Exchange Rate<span class="badge badge-danger">${this.Exchange_Rate}</span></li>
<li class="list-group-item">Amount <span class="badge badge-danger">${this.AmountConverted}</span></li>
</ul>
</div>
</div>
<!-- END LIST GROUP WITH BADGES -->
`)
      .open().result.then((dialog: any) => {
      })
      .catch((err: any) => {
      });
  });
}
//-- END CONVERTING CURRENCY
_keyPress(event: any) {
  // Keypress filter: accept only letters (a-z, A-Z), space, 'é' and 'è'.
  // The original pattern /[a-z\A-Z\ \é\è]/ used redundant escapes (\A, \ ,
  // \é, \è are just the literal characters), so the accepted set is unchanged.
  const pattern = /[a-zA-Z éè]/;
  const inputChar = String.fromCharCode(event.charCode);
  if (!pattern.test(inputChar)) {
    // invalid character, prevent input
    event.preventDefault();
  }
}
//-- PUSHING DATA INTO FIREBASE
SetDataCurrencyF(rateD,dateD){
  // Append one { rate, date } sample to the 'currency' node
  // of the Firebase Realtime Database.
  const sample = { rate: rateD, date: dateD };
  this.firebase.database.ref('currency').push(sample);
}
//-- END PUSHING DATA INTO FIREBASE
//-- Currency Statistical Graph
getCurrencyGraph(){
// Builds the TND->EUR rate history chart:
// 1) fetch the current rate, 2) append it to Firebase ('currency' node),
// 3) reload all stored samples into localStorage("chartData"),
// 4) render an amCharts stock chart from localStorage.
// NOTE(review): the Firebase .once('value') read is asynchronous, while
// makeChart() below runs immediately after scheduling it — the chart most
// likely renders the PREVIOUS localStorage snapshot, not the sample just
// pushed. Confirm this ordering is intentional.
//var returnArrDate = [];
//var returnArrRate = [];
var chartData = [];
this.chService.ConvertisseurDevise("TND","EUR").subscribe(
res => {
// Persist the freshly fetched rate + timestamp to Firebase.
this.SetDataCurrencyF(res["Realtime Currency Exchange Rate"]["5. Exchange Rate"],res["Realtime Currency Exchange Rate"]["6. Last Refreshed"]);
// Read back the whole 'currency' history and cache it for the chart.
this.firebase.database.ref("currency").once('value').then(function(snapshot){
snapshot.forEach(function(childSnapshot) {
var item = childSnapshot.val();
item.key = childSnapshot.key;
chartData.push( {
"date": item.date,
"value": item.rate,
"volume": item.rate
});
});
localStorage.setItem("chartData",JSON.stringify(chartData));
});
// amCharts stock chart: value panel (70%) + volume panel (30%).
this.chartChangeD = this.AmCh.makeChart( "chartdivD", {
"type": "stock",
"theme": "light",
"categoryAxesSettings": {
"minPeriod": "mm"
},
"dataSets": [ {
"color": "#b0de09",
"fieldMappings": [ {
"fromField": "value",
"toField": "value"
}, {
"fromField": "volume",
"toField": "volume"
} ],
"dataProvider": JSON.parse(localStorage.getItem("chartData")),
"categoryField": "date"
} ],
"panels": [ {
"showCategoryAxis": false,
"title": "Value",
"percentHeight": 70,
"stockGraphs": [ {
"id": "g1",
"valueField": "value",
"type": "smoothedLine",
"lineThickness": 2,
"bullet": "round"
} ],
"stockLegend": {
"valueTextRegular": " ",
"markerType": "none"
}
}, {
"title": "Volume",
"percentHeight": 30,
"stockGraphs": [ {
"valueField": "volume",
"type": "column",
"cornerRadiusTop": 2,
"fillAlphas": 1
} ],
"stockLegend": {
"valueTextRegular": " ",
"markerType": "none"
}
} ],
"chartScrollbarSettings": {
"graph": "g1",
"usePeriod": "10mm",
"position": "top"
},
"chartCursorSettings": {
"valueBalloonsEnabled": true
},
"periodSelector": {
"position": "top",
"dateFormat": "YYYY-MM-DD JJ:NN",
"inputFieldWidth": 150,
"periods": [ {
"period": "hh",
"count": 1,
"label": "1 hour"
}, {
"period": "hh",
"count": 2,
"label": "2 hours"
}, {
"period": "hh",
"count": 5,
"selected": true,
"label": "5 hour"
}, {
"period": "hh",
"count": 12,
"label": "12 hours"
}, {
"period": "MAX",
"label": "MAX"
} ]
},
"panelsSettings": {
"usePrefixes": true
},
"export": {
"enabled": true,
"position": "bottom-right"
}
} );
}
);
// NOTE(review): hide() runs right after subscribe() is wired, i.e. before the
// HTTP response arrives — the spinner may disappear too early; confirm.
this.spinnerService.hide();
}
//-- END CURRENCY Statistical Graph
//-- Get List Change
GetListChange(){
  // Load this employee's change operations into the table model.
  this.chService
    .GetListChanges(this.Employe.codePersonne)
    .subscribe(changes => { this.ChangeTable = changes; });
}
//-- END GET LIST CHANGE
//-- CONTROLLING
showError(message) {
  // Surface a red toast with the standard error title.
  this.toastr.error(message, "Error Message");
}
showValid(message) {
  // Surface a green toast with the standard success title.
  this.toastr.success(message, 'Confirming message!');
}
//-- END CONTROLLING
}
|
#pragma once
#include <iostream>
#include <random>
#include <ctime>
#include <array>
#include <Windows.h>
#include <d3d11_1.h>
#include <directxcolors.h>
#include <D3DX11.h>
#include <xnamath.h>
#include <d3dcompiler.h>
#include "Effects.h"
#include <vector>
#include <SpriteBatch.h>
#include <SpriteFont.h>
#pragma comment(lib, "d3d11.lib")
#pragma comment(lib, "d3dx11.lib")
#pragma comment(lib,"DirectXTK.lib")
using namespace std;
// Per-vertex layout consumed by the drawing pipeline: position + RGBA color.
// Must match the input layout created for the vertex shader.
struct VertexTemplate
{
XMFLOAT3 Pos;   // object-space position
XMFLOAT4 color; // RGBA vertex color
};
// Thin D3D11 rendering helper: owns a window, device-independent render
// state (RTV, shaders, depth/raster state) and sprite text resources, and
// exposes primitive draw calls (string, rect, line, circle).
class DrawingClass
{
public:
// Randomly generated window-class name (8 chars + NUL), see GenerateClassName.
wchar_t ClassName[9]{ 0 };
// Compile an HLSL shader held in memory into a blob.
HRESULT CompileShaderFromMemory(const char* szdata, SIZE_T len, LPCSTR szEntryPoint, LPCSTR szShaderModel, ID3DBlob** ppBlobOut);
// Register the window class and create the window (g_hWnd).
HRESULT InitWindow();
// Create device/context/swap chain and dependent resources.
HRESULT InitDevice(bool Multisampling, ID3D11Device* g_pd3dDevice, ID3D11DeviceContext* g_pImmediateContext, IDXGISwapChain* g_pSwapChain);
// Fill ClassName with Length random characters.
void GenerateClassName(wchar_t* ClassName, int Length);
void ClearRenderTarget();
// Present the back buffer.
void Render(IDXGISwapChain* g_pSwapChain);
HRESULT DrawString(const char* Text, DirectX::XMFLOAT2 Pos, DirectX::XMVECTOR Color);
HRESULT DrawRect(ID3D11DeviceContext* g_pImmediateContext, XMFLOAT2 From, XMFLOAT2 To);
HRESULT DrawLine(ID3D11DeviceContext* g_pImmediateContext, XMFLOAT2 pos1, XMFLOAT2 pos2);
HRESULT DrawCircle(ID3D11DeviceContext* g_pImmediateContext,XMFLOAT2 center, UINT radius, UINT numVertices);
// NOTE(review): 'Multisampling' is currently unused — the init calls are
// commented out below, so callers must invoke InitWindow/InitDevice
// themselves. Confirm whether the ctor should initialize.
DrawingClass(int width, int height, bool Multisampling) :Width{ width }, Height{ height }{}
/*{
InitWindow();
InitDevice(Multisampling);
}*/
HWND g_hWnd;
WNDPROC lpfnWndProc;
private:
HINSTANCE g_hInst = NULL;
D3D_DRIVER_TYPE g_driverType = D3D_DRIVER_TYPE_HARDWARE;
D3D_FEATURE_LEVEL g_featureLevel = D3D_FEATURE_LEVEL_11_0;
//ID3D11Device* g_pd3dDevice = nullptr;
//ID3D11DeviceContext* g_pImmediateContext = nullptr;
//IDXGISwapChain* g_pSwapChain = nullptr;
ID3D11RenderTargetView* g_pRenderTargetView = nullptr;
ID3D11VertexShader* g_pVertexShader = NULL;
ID3D11PixelShader* g_pPixelShader = NULL;
ID3D11InputLayout* g_pVertexLayout = NULL;
ID3D11Buffer* g_pVertexBuffer = NULL;
ID3D11Texture2D* g_pDepthStencilBuffer = NULL;
ID3D11DepthStencilView* g_pDepthView = NULL;
ID3D11DepthStencilState* g_pDepthState = NULL;
ID3D11RasterizerState* g_pRasterizerState = NULL;
// Sprite text rendering (DirectXTK); raw pointers, owner unclear from here.
DirectX::SpriteFont* m_pFont;
DirectX::SpriteBatch* m_pBatch;
int Width;
int Height;
};
|
#!/bin/sh
. ./trace.sh
. ./sendtobitcoinnode.sh
. ./bitcoin.sh
# Create (or update, keyed by label) a batcher row and echo a JSON-RPC-style
# response containing the batcher id.
# NOTE(review): ${label} comes straight from the request and is interpolated
# into SQL unescaped — a label containing a single quote breaks/injects the
# statement; confirm upstream sanitization.
createbatcher() {
trace "Entering createbatcher()..."
# POST http://192.168.111.152:8080/createbatcher
#
# Will UPDATE the batcher if it already exists (as per label)
#
# args:
# - batcherLabel, optional, id can be used to reference the batcher
# - confTarget, optional, overriden by batchspend's confTarget, default Bitcoin Core conf_target will be used if not supplied
# NOTYET - feeRate, sat/vB, optional, overrides confTarget if supplied, overriden by batchspend's feeRate, default Bitcoin Core fee policy will be used if not supplied
#
# response:
# - batcherId, the batcher id
#
# BODY {"batcherLabel":"lowfees","confTarget":32}
# NOTYET BODY {"batcherLabel":"highfees","feeRate":231.8}
local request=${1}
local response
local returncode
# jq -r yields the literal string "null" when the key is absent.
local label=$(echo "${request}" | jq -r ".batcherLabel")
trace "[createbatcher] label=${label}"
# Without -r, absent keys yield "null" which SQL reads as NULL — intended here.
local conf_target=$(echo "${request}" | jq ".confTarget")
trace "[createbatcher] conf_target=${conf_target}"
local feerate=$(echo "${request}" | jq ".feeRate")
trace "[createbatcher] feerate=${feerate}"
# if [ "${feerate}" != "null" ]; then
#   # If not null, let's nullify conf_target since feerate overrides it
#   conf_target="null"
#   trace "[createbatcher] Overriding conf_target=${conf_target}"
# fi
local batcher_id
# Upsert on label; the second statement is the fallback SELECT used by the
# sql() helper to fetch the id when RETURNING isn't available.
batcher_id=$(sql "INSERT INTO batcher (label, conf_target, feerate)"\
" VALUES ('${label}', ${conf_target}, ${feerate})"\
" ON CONFLICT (label) DO"\
" UPDATE SET conf_target=${conf_target}, feerate=${feerate}"\
" RETURNING id" \
"SELECT id FROM batcher WHERE label='${label}'")
returncode=$?
trace_rc ${returncode}
if [ "${returncode}" -ne "0" ]; then
trace "[createbatcher] Could not insert"
response='{"result":null,"error":{"code":-32700,"message":"Could not create/update batcher","data":'${request}'}}'
else
trace "[createbatcher] Inserted or updated, response=${batcher_id}"
response='{"result":{"batcherId":'${batcher_id}'},"error":null}'
fi
echo "${response}"
}
# Update an existing batcher's label/confTarget/feeRate and echo a
# JSON-RPC-style response with the batcher id.
#
# Fix: the previous version always wrote every column, so any field absent
# from the request was clobbered (label became the literal string 'null',
# conf_target/feerate became SQL NULL). The SET clause is now built only
# from the fields actually supplied.
updatebatcher() {
  trace "Entering updatebatcher()..."
  # POST http://192.168.111.152:8080/updatebatcher
  #
  # args:
  # - batcherId, optional, batcher id to update, will update default batcher if not supplied
  # - batcherLabel, optional, id can be used to reference the batcher, will update default batcher if not supplied, if id is present then change the label with supplied text
  # - confTarget, optional, new confirmation target for the batcher
  # NOTYET - feeRate, sat/vB, optional, new feerate for the batcher
  #
  # response:
  # - batcherId, the batcher id
  #
  # BODY {"batcherId":5,"confTarget":12}
  # BODY {"batcherLabel":"fast","confTarget":2}
  local request=${1}
  local response
  local whereclause
  local setclause
  local returncode
  local id=$(echo "${request}" | jq ".batcherId")
  trace "[updatebatcher] id=${id}"
  local label=$(echo "${request}" | jq -r ".batcherLabel")
  trace "[updatebatcher] label=${label}"
  local conf_target=$(echo "${request}" | jq ".confTarget")
  trace "[updatebatcher] conf_target=${conf_target}"
  local feerate=$(echo "${request}" | jq ".feeRate")
  trace "[updatebatcher] feerate=${feerate}"
  if [ "${id}" = "null" ] && [ "${label}" = "null" ]; then
    # If id and label are null, use default batcher
    trace "[updatebatcher] Using default batcher 1"
    id=1
  fi
  if [ "${id}" = "null" ]; then
    # Addressed by label: the label selects the row, it is not renamed.
    whereclause="label='${label}'"
  else
    whereclause="id = ${id}"
    # Addressed by id: a supplied label is a rename.
    if [ "${label}" != "null" ]; then
      setclause="label='${label}'"
    fi
  fi
  # Only update the columns the caller actually supplied.
  if [ "${conf_target}" != "null" ]; then
    setclause="${setclause:+${setclause}, }conf_target=${conf_target}"
  fi
  if [ "${feerate}" != "null" ]; then
    setclause="${setclause:+${setclause}, }feerate=${feerate}"
  fi
  if [ -z "${setclause}" ]; then
    trace "[updatebatcher] Nothing to update"
    response='{"result":null,"error":{"code":-32700,"message":"Nothing to update","data":'${request}'}}'
  else
    sql "UPDATE batcher SET ${setclause} WHERE ${whereclause}"
    returncode=$?
    trace_rc ${returncode}
    if [ "${returncode}" -ne 0 ]; then
      response='{"result":null,"error":{"code":-32700,"message":"Could not update batcher","data":'${request}'}}'
    else
      response='{"result":{"batcherId":'${id}'},"error":null}'
    fi
  fi
  echo "${response}"
}
# Validate a destination address and queue an output (address, amount) in a
# batcher's pending batch; echoes batch stats on success.
addtobatch() {
trace "Entering addtobatch()..."
# POST http://192.168.111.152:8080/addtobatch
#
# args:
# - address, required, desination address
# - amount, required, amount to send to the destination address
# - outputLabel, optional, if you want to reference this output
# - batcherId, optional, the id of the batcher to which the output will be added, default batcher if not supplied, overrides batcherLabel
# - batcherLabel, optional, the label of the batcher to which the output will be added, default batcher if not supplied
# - webhookUrl, optional, the webhook to call when the batch is broadcast
#
# response:
# - batcherId, the id of the batcher
# - outputId, the id of the added output
# - nbOutputs, the number of outputs currently in the batch
# - oldest, the timestamp of the oldest output in the batch
# - total, the current sum of the batch's output amounts
#
# BODY {"address":"2N8DcqzfkYi8CkYzvNNS5amoq3SbAcQNXKp","amount":0.00233}
# BODY {"address":"2N8DcqzfkYi8CkYzvNNS5amoq3SbAcQNXKp","amount":0.00233,"batcherId":34,"webhookUrl":"https://myCypherApp:3000/batchExecuted"}
# BODY {"address":"2N8DcqzfkYi8CkYzvNNS5amoq3SbAcQNXKp","amount":0.00233,"batcherLabel":"lowfees","webhookUrl":"https://myCypherApp:3000/batchExecuted"}
# BODY {"address":"2N8DcqzfkYi8CkYzvNNS5amoq3SbAcQNXKp","amount":0.00233,"batcherId":34,"webhookUrl":"https://myCypherApp:3000/batchExecuted"}
local request=${1}
local response
local returncode=0
local inserted_id
local row
# NOTE(review): these jq-extracted strings are interpolated into SQL
# unescaped; the address is validated by the node below, but label and
# webhook_url are not — confirm upstream sanitization.
local address=$(echo "${request}" | jq -r ".address")
trace "[addtobatch] address=${address}"
local amount=$(echo "${request}" | jq ".amount")
trace "[addtobatch] amount=${amount}"
local label=$(echo "${request}" | jq -r ".outputLabel")
trace "[addtobatch] label=${label}"
local batcher_id=$(echo "${request}" | jq ".batcherId")
trace "[addtobatch] batcher_id=${batcher_id}"
local batcher_label=$(echo "${request}" | jq -r ".batcherLabel")
trace "[addtobatch] batcher_label=${batcher_label}"
local webhook_url=$(echo "${request}" | jq -r ".webhookUrl")
trace "[addtobatch] webhook_url=${webhook_url}"
# Let's lowercase bech32 addresses
address=$(lowercase_if_bech32 "${address}")
# Reject anything the node does not consider a valid address.
local isvalid
isvalid=$(validateaddress "${address}" | jq ".result.isvalid")
if [ "${isvalid}" != "true" ]; then
response='{"result":null,"error":{"code":-32700,"message":"Invalid address","data":'${request}'}}'
trace "[addtobatch] Invalid address"
trace "[addtobatch] responding=${response}"
echo "${response}"
return 1
fi
if [ "${batcher_id}" = "null" ] && [ "${batcher_label}" = "null" ]; then
# If batcher_id and batcher_label are null, use default batcher
trace "[addtobatch] Using default batcher 1"
batcher_id=1
fi
if [ "${batcher_id}" = "null" ]; then
# Using batcher_label
batcher_id=$(sql "SELECT id FROM batcher WHERE label='${batcher_label}'")
returncode=$?
trace_rc ${returncode}
fi
if [ -z "${batcher_id}" ]; then
# batcherLabel not found
response='{"result":null,"error":{"code":-32700,"message":"batcher not found","data":'${request}'}}'
else
# Check if address already pending for this batcher...
# (tx_id IS NULL means "not yet spent", i.e. still in the pending batch)
inserted_id=$(sql "SELECT id FROM recipient WHERE LOWER(address)=LOWER('${address}') AND tx_id IS NULL AND batcher_id=${batcher_id}")
returncode=$?
trace_rc ${returncode}
if [ -n "${inserted_id}" ]; then
response='{"result":null,"error":{"code":-32700,"message":"Duplicated address","data":'${request}'}}'
trace "[addtobatch] Duplicated address"
trace "[addtobatch] responding=${response}"
echo "${response}"
return 1
fi
# Insert the new destination
inserted_id=$(sql "INSERT INTO recipient (address, amount, webhook_url, batcher_id, label)"\
" VALUES ('${address}', ${amount}, '${webhook_url}', ${batcher_id}, '${label}')"\
" RETURNING id")
returncode=$?
trace_rc ${returncode}
if [ "${returncode}" -ne 0 ]; then
response='{"result":null,"error":{"code":-32700,"message":"Could not add to batch","data":'${request}'}}'
else
# Gather the pending-batch stats reported back to the caller.
row=$(sql "SELECT COUNT(id), MIN(inserted_ts), SUM(amount) FROM recipient WHERE tx_id IS NULL AND batcher_id=${batcher_id}")
returncode=$?
trace_rc ${returncode}
local count=$(echo "${row}" | cut -d '|' -f1)
trace "[addtobatch] count=${count}"
local oldest=$(echo "${row}" | cut -d '|' -f2)
trace "[addtobatch] oldest=${oldest}"
local total=$(echo "${row}" | cut -d '|' -f3)
trace "[addtobatch] total=${total}"
response='{"result":{"batcherId":'${batcher_id}',"outputId":'${inserted_id}',"nbOutputs":'${count}',"oldest":"'${oldest}'","total":'${total}'},"error":null}'
fi
fi
echo "${response}"
}
# Remove a not-yet-spent output from its batch and echo the updated batch
# stats; refuses to touch outputs already attached to a transaction.
removefrombatch() {
trace "Entering removefrombatch()..."
# POST http://192.168.111.152:8080/removefrombatch
#
# args:
# - outputId, required, id of the output to remove
#
# response:
# - batcherId, the id of the batcher
# - outputId, the id of the removed output if found
# - nbOutputs, the number of outputs currently in the batch
# - oldest, the timestamp of the oldest output in the batch
# - total, the current sum of the batch's output amounts
#
# BODY {"id":72}
local request=${1}
local response
local returncode=0
local row
local batcher_id
local id=$(echo "${request}" | jq ".outputId")
trace "[removefrombatch] id=${id}"
if [ "${id}" = "null" ]; then
# id is required
trace "[removefrombatch] id missing"
response='{"result":null,"error":{"code":-32700,"message":"outputId is required","data":'${request}'}}'
else
# We don't want to remove an already spent output
# (tx_id IS NULL guards the lookup; also fetches the batcher for the stats)
batcher_id=$(sql "SELECT batcher_id FROM recipient WHERE id=${id} AND tx_id IS NULL")
returncode=$?
trace_rc ${returncode}
if [ -n "${batcher_id}" ]; then
sql "DELETE FROM recipient WHERE id=${id}"
returncode=$?
trace_rc ${returncode}
if [ "${returncode}" -ne 0 ]; then
response='{"result":null,"error":{"code":-32700,"message":"Output was not removed","data":'${request}'}}'
else
# COALESCE gives sane defaults when the batch is now empty.
row=$(sql "SELECT COUNT(id), COALESCE(MIN(inserted_ts), DATE '0001-01-01'), COALESCE(SUM(amount), 0.00000000) FROM recipient WHERE tx_id IS NULL AND batcher_id=${batcher_id}")
returncode=$?
trace_rc ${returncode}
local count=$(echo "${row}" | cut -d '|' -f1)
trace "[removefrombatch] count=${count}"
local oldest=$(echo "${row}" | cut -d '|' -f2)
trace "[removefrombatch] oldest=${oldest}"
local total=$(echo "${row}" | cut -d '|' -f3)
trace "[removefrombatch] total=${total}"
response='{"result":{"batcherId":'${batcher_id}',"outputId":'${id}',"nbOutputs":'${count}',"oldest":"'${oldest}'","total":'${total}'},"error":null}'
fi
else
response='{"result":null,"error":{"code":-32700,"message":"Output not found or already spent","data":'${request}'}}'
fi
fi
echo "${response}"
}
# Execute a batcher: gather all pending outputs, spend them in one Bitcoin
# Core sendmany, record the tx, mark outputs spent, and fire webhooks.
batchspend() {
trace "Entering batchspend()..."
# POST http://192.168.111.152:8080/batchspend
#
# args:
# - batcherId, optional, id of the batcher to execute, overrides batcherLabel, default batcher will be spent if not supplied
# - batcherLabel, optional, label of the batcher to execute, default batcher will be executed if not supplied
# - confTarget, optional, overrides default value of createbatcher, default to value of createbatcher, default Bitcoin Core conf_target will be used if not supplied
# NOTYET - feeRate, optional, overrides confTarget if supplied, overrides default value of createbatcher, default to value of createbatcher, default Bitcoin Core value will be used if not supplied
#
# response:
# - batcherId, id of the executed batcher
# - confTarget, conf_target used for the spend
# - nbOutputs, the number of outputs spent in the batch
# - oldest, the timestamp of the oldest output in the spent batch
# - total, the sum of the spent batch's output amounts
# - txid, the batch transaction id
# - hash, the transaction hash
# - tx details: firstseen, size, vsize, replaceable, fee
# - outputs
#
# BODY {}
# BODY {"batcherId":"34","confTarget":12}
# NOTYET BODY {"batcherLabel":"highfees","feeRate":233.7}
# BODY {"batcherId":"411","confTarget":6}
local request=${1}
local response
local returncode=0
local row
local whereclause
local batcher_id=$(echo "${request}" | jq ".batcherId")
trace "[batchspend] batcher_id=${batcher_id}"
local batcher_label=$(echo "${request}" | jq -r ".batcherLabel")
trace "[batchspend] batcher_label=${batcher_label}"
local conf_target=$(echo "${request}" | jq ".confTarget")
trace "[batchspend] conf_target=${conf_target}"
local feerate=$(echo "${request}" | jq ".feeRate")
trace "[batchspend] feerate=${feerate}"
if [ "${batcher_id}" = "null" ] && [ "${batcher_label}" = "null" ]; then
# If batcher_id and batcher_label are null, use default batcher
trace "[batchspend] Using default batcher 1"
batcher_id=1
fi
if [ "${batcher_id}" = "null" ]; then
# Using batcher_label
whereclause="label='${batcher_label}'"
else
whereclause="id=${batcher_id}"
fi
local batcher=$(sql "SELECT id, conf_target, feerate FROM batcher WHERE ${whereclause}")
returncode=$?
trace_rc ${returncode}
if [ -z "${batcher}" ]; then
# batcherLabel not found
response='{"result":null,"error":{"code":-32700,"message":"batcher not found","data":'${request}'}}'
else
# All good, let's try to batch spend!
# NOTYET
# We'll use supplied feerate
#  If not supplied, we'll use supplied conf_target
#   If not supplied, we'll use batcher default feerate
#    If not set, we'll use batcher default conf_target
#     If not set, default Bitcoin Core fee policy will be used
# We'll use the supplied conf_target
#  If not supplied, we'll use the batcher default conf_target
#   If not set, default Bitcoin Core fee policy will be used
#    if [ "${feerate}" != "null" ]; then
#      # If not null, let's nullify conf_target since feerate overrides it
#      conf_target=
#      trace "[batchspend] Overriding conf_target=${conf_target}"
#    else
#      if [ "${conf_target}" = "null" ]; then
#        feerate=$(echo "${batcher}" | cut -d '|' -f3)
#        if [ -z "${feerate}" ]; then
#          # If null, let's use batcher conf_target
#          conf_target=$(echo "${batcher}" | cut -d '|' -f2)
#        fi
#      fi
#    fi
if [ "${conf_target}" = "null" ]; then
conf_target=$(echo "${batcher}" | cut -d '|' -f2)
trace "[batchspend] Using batcher default conf_target=${conf_target}"
fi
batcher_id=$(echo "${batcher}" | cut -d '|' -f1)
# All pending (unspent) outputs of this batcher.
local batching=$(sql "SELECT address, amount, id, webhook_url FROM recipient WHERE tx_id IS NULL AND batcher_id=${batcher_id}")
trace "[batchspend] batching=${batching}"
local data
local recipientsjson
local webhooks_data
local id_inserted
local tx_details
local tx_raw_details
local address
local amount
# Iterate the psql-style rows (one per line, '|'-separated fields),
# accumulating the sendmany recipients JSON, the id list for the later
# UPDATE, and the per-output webhook payloads.
local IFS=$'\n'
for row in ${batching}
do
trace "[batchspend] row=${row}"
address=$(echo "${row}" | cut -d '|' -f1)
trace "[batchspend] address=${address}"
amount=$(echo "${row}" | cut -d '|' -f2)
trace "[batchspend] amount=${amount}"
recipient_id=$(echo "${row}" | cut -d '|' -f3)
trace "[batchspend] recipient_id=${recipient_id}"
webhook_url=$(echo "${row}" | cut -d '|' -f4)
trace "[batchspend] webhook_url=${webhook_url}"
if [ -z "${recipientsjson}" ]; then
whereclause="${recipient_id}"
recipientsjson="\"${address}\":${amount}"
webhooks_data="{\"outputId\":${recipient_id},\"address\":\"${address}\",\"amount\":${amount},\"webhookUrl\":\"${webhook_url}\"}"
else
whereclause="${whereclause},${recipient_id}"
recipientsjson="${recipientsjson},\"${address}\":${amount}"
webhooks_data="${webhooks_data},{\"outputId\":${recipient_id},\"address\":\"${address}\",\"amount\":${amount},\"webhookUrl\":\"${webhook_url}\"}"
fi
done
# sendmany positional params: "" (dummy), amounts, minconf, comment,
# subtractfeefrom, replaceable, conf_target.
local bitcoincore_args="{\"method\":\"sendmany\",\"params\":[\"\", {${recipientsjson}}"
if [ -n "${conf_target}" ]; then
bitcoincore_args="${bitcoincore_args}, 1, \"\", null, null, ${conf_target}"
fi
bitcoincore_args="${bitcoincore_args}]}"
data=$(send_to_spender_node "${bitcoincore_args}")
returncode=$?
trace_rc ${returncode}
trace "[batchspend] data=${data}"
if [ "${returncode}" -eq 0 ]; then
local txid=$(echo "${data}" | jq -r ".result")
trace "[batchspend] txid=${txid}"
# Let's get transaction details on the spending wallet so that we have fee information
tx_details=$(get_transaction ${txid} "spender")
tx_raw_details=$(get_rawtransaction ${txid} | tr -d '\n')
# Amounts and fees are negative when spending so we absolute those fields
local tx_hash=$(echo "${tx_raw_details}" | jq -r '.result.hash')
local tx_ts_firstseen=$(echo "${tx_details}" | jq '.result.timereceived')
local tx_amount=$(echo "${tx_details}" | jq '.result.amount | fabs' | awk '{ printf "%.8f", $0 }')
local tx_size=$(echo "${tx_raw_details}" | jq '.result.size')
local tx_vsize=$(echo "${tx_raw_details}" | jq '.result.vsize')
local tx_replaceable=$(echo "${tx_details}" | jq -r '.result."bip125-replaceable"')
trace "[batchspend] tx_replaceable=${tx_replaceable}"
tx_replaceable=$([ "${tx_replaceable}" = "yes" ] && echo "true" || echo "false")
trace "[batchspend] tx_replaceable=${tx_replaceable}"
local fees=$(echo "${tx_details}" | jq '.result.fee | fabs' | awk '{ printf "%.8f", $0 }')
# Get the info on the batch before setting it to done
row=$(sql "SELECT COUNT(id), COALESCE(MIN(inserted_ts), DATE '0001-01-01'), COALESCE(SUM(amount), 0.00000000) FROM recipient WHERE tx_id IS NULL AND batcher_id=${batcher_id}")
returncode=$?
trace_rc ${returncode}
# Let's insert the txid in our little DB -- then we'll already have it when receiving confirmation
id_inserted=$(sql "INSERT INTO tx (txid, hash, confirmations, timereceived, fee, size, vsize, is_replaceable, conf_target)"\
" VALUES ('${txid}', '${tx_hash}', 0, ${tx_ts_firstseen}, ${fees}, ${tx_size}, ${tx_vsize}, ${tx_replaceable}, ${conf_target})"\
" RETURNING id" \
"SELECT id FROM tx WHERE txid='${txid}'")
returncode=$?
trace_rc ${returncode}
if [ "${returncode}" -eq 0 ]; then
trace "[batchspend] id_inserted: ${id_inserted}"
# Mark every spent output with the tx row; whereclause is the id list
# built in the loop above.
sql "UPDATE recipient SET tx_id=${id_inserted} WHERE id IN (${whereclause})"
trace_rc $?
fi
# Use the selected row above (before the insert)
local count=$(echo "${row}" | cut -d '|' -f1)
trace "[batchspend] count=${count}"
local oldest=$(echo "${row}" | cut -d '|' -f2)
trace "[batchspend] oldest=${oldest}"
local total=$(echo "${row}" | cut -d '|' -f3)
trace "[batchspend] total=${total}"
response='{"result":{"batcherId":'${batcher_id}',"confTarget":'${conf_target}',"nbOutputs":'${count}',"oldest":"'${oldest}'","total":'${total}
response="${response},\"status\":\"accepted\",\"txid\":\"${txid}\",\"hash\":\"${tx_hash}\",\"details\":{\"firstseen\":${tx_ts_firstseen},\"size\":${tx_size},\"vsize\":${tx_vsize},\"replaceable\":${tx_replaceable},\"fee\":${fees}},\"outputs\":[${webhooks_data}]}"
response="${response},\"error\":null}"
batch_webhooks "[${webhooks_data}]" '"batcherId":'${batcher_id}',"confTarget":'${conf_target}',"nbOutputs":'${count}',"oldest":"'${oldest}'","total":'${total}',"status":"accepted","txid":"'${txid}'","hash":"'${tx_hash}'","details":{"firstseen":'${tx_ts_firstseen}',"size":'${tx_size}',"vsize":'${tx_vsize}',"replaceable":'${tx_replaceable}',"fee":'${fees}'}'
else
local message=$(echo "${data}" | jq -e ".error.message")
response='{"result":null,"error":{"code":-32700,"message":'${message}',"data":'${request}'}}'
fi
fi
trace "[batchspend] responding=${response}"
echo "${response}"
}
# Retry webhooks for already-spent outputs whose callback never succeeded
# (calledback is false but a tx exists and a webhook_url is set).
# Re-sends one webhook per output via batch_webhooks().
#
# Fix: the trace tags in the per-batch stats section wrongly said
# [batchspend] (copy-paste from batchspend()) — now [batch_check_webhooks].
batch_check_webhooks() {
  trace "Entering batch_check_webhooks()..."
  local webhooks_data
  local address
  local amount
  local recipient_id
  local webhook_url
  local batcher_id
  local txid
  local tx_hash
  local tx_ts_firstseen
  local tx_size
  local tx_vsize
  local tx_replaceable
  local fees
  local conf_target
  local row
  local count
  local oldest
  local total
  local tx_id
  # Every spent, not-yet-calledback output that has a webhook URL.
  local batching=$(sql "SELECT address, amount, r.id, webhook_url, b.id, t.txid, t.hash, t.timereceived, t.fee, t.size, t.vsize, t.is_replaceable::text, t.conf_target, t.id FROM recipient r, batcher b, tx t WHERE r.batcher_id=b.id AND r.tx_id=t.id AND NOT calledback AND tx_id IS NOT NULL AND webhook_url IS NOT NULL")
  trace "[batch_check_webhooks] batching=${batching}"
  # psql-style rows: one per line, '|'-separated fields.
  local IFS=$'\n'
  for row in ${batching}
  do
    trace "[batch_check_webhooks] row=${row}"
    address=$(echo "${row}" | cut -d '|' -f1)
    trace "[batch_check_webhooks] address=${address}"
    amount=$(echo "${row}" | cut -d '|' -f2)
    trace "[batch_check_webhooks] amount=${amount}"
    recipient_id=$(echo "${row}" | cut -d '|' -f3)
    trace "[batch_check_webhooks] recipient_id=${recipient_id}"
    webhook_url=$(echo "${row}" | cut -d '|' -f4)
    trace "[batch_check_webhooks] webhook_url=${webhook_url}"
    batcher_id=$(echo "${row}" | cut -d '|' -f5)
    trace "[batch_check_webhooks] batcher_id=${batcher_id}"
    txid=$(echo "${row}" | cut -d '|' -f6)
    trace "[batch_check_webhooks] txid=${txid}"
    tx_hash=$(echo "${row}" | cut -d '|' -f7)
    trace "[batch_check_webhooks] tx_hash=${tx_hash}"
    tx_ts_firstseen=$(echo "${row}" | cut -d '|' -f8)
    trace "[batch_check_webhooks] tx_ts_firstseen=${tx_ts_firstseen}"
    fees=$(echo "${row}" | cut -d '|' -f9)
    trace "[batch_check_webhooks] fees=${fees}"
    tx_size=$(echo "${row}" | cut -d '|' -f10)
    trace "[batch_check_webhooks] tx_size=${tx_size}"
    tx_vsize=$(echo "${row}" | cut -d '|' -f11)
    trace "[batch_check_webhooks] tx_vsize=${tx_vsize}"
    tx_replaceable=$(echo "${row}" | cut -d '|' -f12)
    trace "[batch_check_webhooks] tx_replaceable=${tx_replaceable}"
    conf_target=$(echo "${row}" | cut -d '|' -f13)
    trace "[batch_check_webhooks] conf_target=${conf_target}"
    tx_id=$(echo "${row}" | cut -d '|' -f14)
    trace "[batch_check_webhooks] tx_id=${tx_id}"
    webhooks_data="{\"outputId\":${recipient_id},\"address\":\"${address}\",\"amount\":${amount},\"webhookUrl\":\"${webhook_url}\"}"
    # I know this query for each output is not very efficient, but this function should not execute often, only in case of
    # failed callbacks on batches...
    # Get the info on the batch
    row=$(sql "SELECT COUNT(id), COALESCE(MIN(inserted_ts), DATE '0001-01-01'), COALESCE(SUM(amount), 0.00000000) FROM recipient r WHERE tx_id='${tx_id}'")
    # Use the selected row above
    count=$(echo "${row}" | cut -d '|' -f1)
    trace "[batch_check_webhooks] count=${count}"
    oldest=$(echo "${row}" | cut -d '|' -f2)
    trace "[batch_check_webhooks] oldest=${oldest}"
    total=$(echo "${row}" | cut -d '|' -f3)
    trace "[batch_check_webhooks] total=${total}"
    batch_webhooks "[${webhooks_data}]" '"batcherId":'${batcher_id}',"confTarget":'${conf_target}',"nbOutputs":'${count}',"oldest":"'${oldest}'","total":'${total}',"status":"accepted","txid":"'${txid}'","hash":"'${tx_hash}'","details":{"firstseen":'${tx_ts_firstseen}',"size":'${tx_size}',"vsize":'${tx_vsize}',"replaceable":'${tx_replaceable}',"fee":'${fees}'}'
  done
}
# POST one webhook per output (merging per-output data with the shared tx
# JSON fragment) and mark successfully notified outputs as calledback.
batch_webhooks() {
trace "Entering batch_webhooks()..."
# webhooks_data:
# {"outputId":1,"address":"1abc","amount":0.12,"webhookUrl":"https://bleah.com/batchwebhook"}"
local webhooks_data=${1}
trace "[batch_webhooks] webhooks_data=${webhooks_data}"
# tx:
# {"batcherId":1,"txid":"abc123","hash":"abc123","details":{"firstseen":123123,"size":200,"vsize":141,"replaceable":true,"fee":0.00001}}'
local tx=${2}
trace "[batch_webhooks] tx=${tx}"
local outputs
local output_id
local address
local amount
local webhook_url
local body
local successful_recipient_ids
local returncode
local response
# One compact JSON object per line, iterated via IFS=\n below.
outputs=$(echo "${webhooks_data}" | jq -Mc ".[]")
local output
local IFS=$'\n'
for output in ${outputs}
do
webhook_url=$(echo "${output}" | jq -r ".webhookUrl")
trace "[batch_webhooks] webhook_url=${webhook_url}"
if [ -z "${webhook_url}" ] || [ "${webhook_url}" = "null" ]; then
trace "[batch_webhooks] Empty webhook_url, skipping"
continue
fi
output_id=$(echo "${output}" | jq ".outputId")
trace "[batch_webhooks] output_id=${output_id}"
address=$(echo "${output}" | jq ".address")
trace "[batch_webhooks] address=${address}"
amount=$(echo "${output}" | jq ".amount")
trace "[batch_webhooks] amount=${amount}"
body='{"outputId":'${output_id}',"address":'${address}',"amount":'${amount}','${tx}'}'
trace "[batch_webhooks] body=${body}"
response=$(notify_web "${webhook_url}" "${body}" ${TOR_ADDR_WATCH_WEBHOOKS})
returncode=$?
trace_rc ${returncode}
if [ "${returncode}" -eq 0 ]; then
# Collect ids whose callback succeeded; failures stay calledback=false
# so batch_check_webhooks() can retry them later.
if [ -n "${successful_recipient_ids}" ]; then
successful_recipient_ids="${successful_recipient_ids},${output_id}"
else
successful_recipient_ids="${output_id}"
fi
else
trace "[batch_webhooks] callback failed, won't set to true in DB"
fi
done
if [ -n "${successful_recipient_ids}" ]; then
trace "[batch_webhooks] We have successful callbacks, let's update the db..."
sql "UPDATE recipient SET calledback=true, calledback_ts=CURRENT_TIMESTAMP WHERE id IN (${successful_recipient_ids})"
trace_rc $?
else
trace "[batch_webhooks] We don't have successful callbacks, no need to update the db!"
fi
}
# List all batchers with their pending-batch stats as a JSON response.
#
# Fix: 'response' used to be initialized only inside the loop, so an empty
# result set produced the malformed JSON '],"error":null}'. The prefix is
# now built before the loop, yielding '{"result":[],"error":null}' when
# there are no batchers.
listbatchers() {
  trace "Entering listbatchers()..."
  # curl (GET) http://192.168.111.152:8080/listbatchers
  #
  # {"result":[
  #  {"batcherId":1,"batcherLabel":"default","confTarget":6,"nbOutputs":12,"oldest":123123,"total":0.86990143},
  #  {"batcherId":2,"batcherLabel":"lowfee","confTarget":32,"nbOutputs":44,"oldest":123123,"total":0.49827387},
  #  {"batcherId":3,"batcherLabel":"highfee","confTarget":2,"nbOutputs":7,"oldest":123123,"total":4.16843782}
  # ],
  # "error":null}
  #
  # The SQL builds one JSON object per batcher (field 2 of each row).
  local batchers=$(sql "SELECT b.id, '{\"batcherId\":' || b.id || ',\"batcherLabel\":\"' || b.label || '\",\"confTarget\":' || conf_target || ',\"nbOutputs\":' || COUNT(r.id) || ',\"oldest\":\"' ||COALESCE(MIN(r.inserted_ts), DATE '0001-01-01') || '\",\"total\":' ||COALESCE(SUM(amount), 0.00000000) || '}' FROM batcher b LEFT JOIN recipient r ON r.batcher_id=b.id AND r.tx_id IS NULL GROUP BY b.id ORDER BY b.id")
  trace "[listbatchers] batchers=${batchers}"
  local returncode
  local response='{"result":['
  local batcher
  local jsonstring
  local separator=""
  local IFS=$'\n'
  for batcher in ${batchers}
  do
    jsonstring=$(echo ${batcher} | cut -d '|' -f2)
    response="${response}${separator}${jsonstring}"
    separator=","
  done
  response=${response}'],"error":null}'
  trace "[listbatchers] responding=${response}"
  echo "${response}"
}
# Return a single batcher's summary.  Lookup is by batcherId (takes
# precedence) or batcherLabel from the JSON request body; the default
# batcher (id 1) is used when neither is supplied.
getbatcher() {
trace "Entering getbatcher()..."
# POST (GET) http://192.168.111.152:8080/getbatcher
#
# args:
# - batcherId, optional, id of the batcher, overrides batcerhLabel, default batcher will be used if not supplied
# - batcherLabel, optional, label of the batcher, default batcher will be used if not supplied
#
# response:
# {"result":{"batcherId":1,"batcherLabel":"default","confTarget":6,"nbOutputs":12,"oldest":123123,"total":0.86990143},"error":null}
#
# BODY {}
# BODY {"batcherId":34}
local request=${1}
local response
local returncode=0
local batcher
local whereclause
local batcher_id=$(echo "${request}" | jq ".batcherId")
trace "[getbatcher] batcher_id=${batcher_id}"
local batcher_label=$(echo "${request}" | jq -r ".batcherLabel")
trace "[getbatcher] batcher_label=${batcher_label}"
if [ "${batcher_id}" = "null" ] && [ "${batcher_label}" = "null" ]; then
# If batcher_id and batcher_label are null, use default batcher
trace "[getbatcher] Using default batcher 1"
batcher_id=1
fi
if [ "${batcher_id}" = "null" ]; then
# Using batcher_label
whereclause="b.label='${batcher_label}'"
else
# Using batcher_id
whereclause="b.id=${batcher_id}"
fi
# Aggregate only pending outputs (r.tx_id IS NULL); row is "<id>|<json>".
batcher=$(sql "SELECT b.id, '{\"batcherId\":' || b.id || ',\"batcherLabel\":\"' || b.label || '\",\"confTarget\":' || conf_target || ',\"nbOutputs\":' || COUNT(r.id) || ',\"oldest\":\"' ||COALESCE(MIN(r.inserted_ts), DATE '0001-01-01') || '\",\"total\":' ||COALESCE(SUM(amount), 0.00000000) || '}' FROM batcher b LEFT JOIN recipient r ON r.batcher_id=b.id AND r.tx_id IS NULL WHERE ${whereclause} GROUP BY b.id")
trace "[getbatcher] batcher=${batcher}"
if [ -n "${batcher}" ]; then
# Keep only the JSON column.
batcher=$(echo "${batcher}" | cut -d '|' -f2)
response='{"result":'${batcher}',"error":null}'
else
response='{"result":null,"error":{"code":-32700,"message":"batcher not found","data":'${request}'}}'
fi
echo "${response}"
}
# Return the full details of one batch: summary (same fields as getbatcher),
# the executed tx info when a txid is supplied (or the current pending batch
# when not), and the list of outputs in that batch.
getbatchdetails() {
trace "Entering getbatchdetails()..."
# POST (GET) http://192.168.111.152:8080/getbatchdetails
#
# args:
# - batcherId, optional, id of the batcher, overrides batcherLabel, default batcher will be used if not supplied
# - batcherLabel, optional, label of the batcher, default batcher will be used if not supplied
# - txid, optional, if you want the details of an executed batch, supply the batch txid, will return current pending batch
# if not supplied
#
# response:
# {"result":{
# "batcherId":34,
# "batcherLabel":"Special batcher for a special client",
# "confTarget":6,
# "nbOutputs":83,
# "oldest":123123,
# "total":10.86990143,
# "txid":"af867c86000da76df7ddb1054b273ca9e034e8c89d049b5b2795f9f590f67648",
# "hash":"af867c86000da76df7ddb1054b273ca9e034e8c89d049b5b2795f9f590f67648",
# "details":{
# "firstseen":123123,
# "size":424,
# "vsize":371,
# "replaceable":yes,
# "fee":0.00004112
# },
# "outputs":[
# "1abc":0.12,
# "3abc":0.66,
# "bc1abc":2.848,
# ...
# ]
# }
# },"error":null}
#
# BODY {}
# BODY {"batcherId":34}
local request=${1}
local response
local returncode=0
local batch
local tx
local outputsjson
local whereclause
local batcher_id=$(echo "${request}" | jq ".batcherId")
trace "[getbatchdetails] batcher_id=${batcher_id}"
local batcher_label=$(echo "${request}" | jq -r ".batcherLabel")
trace "[getbatchdetails] batcher_label=${batcher_label}"
local txid=$(echo "${request}" | jq -r ".txid")
trace "[getbatchdetails] txid=${txid}"
if [ "${batcher_id}" = "null" ] && [ "${batcher_label}" = "null" ]; then
# If batcher_id and batcher_label are null, use default batcher
trace "[getbatchdetails] Using default batcher 1"
batcher_id=1
fi
if [ "${batcher_id}" = "null" ]; then
# Using batcher_label
whereclause="b.label='${batcher_label}'"
else
# Using batcher_id
whereclause="b.id=${batcher_id}"
fi
# NOTE(review): `outerclause` is never declared `local`, and when a txid is
# supplied it is left unset for the summary query below — confirm both are
# intended (the WHERE t.txid filter makes the summary still come out right).
if [ "${txid}" != "null" ]; then
# Using txid
whereclause="${whereclause} AND t.txid='${txid}'"
else
# null txid
whereclause="${whereclause} AND t.txid IS NULL"
outerclause="AND r.tx_id IS NULL"
fi
# First get the batch summary
batch=$(sql "SELECT b.id, COALESCE(t.id, NULL), '{\"batcherId\":' || b.id || ',\"batcherLabel\":\"' || b.label || '\",\"confTarget\":' || b.conf_target || ',\"nbOutputs\":' || COUNT(r.id) || ',\"oldest\":\"' || COALESCE(MIN(r.inserted_ts), DATE '0001-01-01') || '\",\"total\":' || COALESCE(SUM(amount), 0.00000000) FROM batcher b LEFT JOIN recipient r ON r.batcher_id=b.id ${outerclause} LEFT JOIN tx t ON t.id=r.tx_id WHERE ${whereclause} GROUP BY b.id, t.id")
trace "[getbatchdetails] batch=${batch}"
if [ -n "${batch}" ]; then
local tx_id
local outputs
# Row layout: "<batcher_id>|<tx_id>|<summary json>".
tx_id=$(echo "${batch}" | cut -d '|' -f2)
trace "[getbatchdetails] tx_id=${tx_id}"
if [ -n "${tx_id}" ]; then
# Using txid
outerclause="AND r.tx_id=${tx_id}"
tx=$(sql "SELECT '\"txid\":\"' || txid || '\",\"hash\":\"' || hash || '\",\"details\":{\"firstseen\":' || timereceived || ',\"size\":' || size || ',\"vsize\":' || vsize || ',\"replaceable\":' || is_replaceable || ',\"fee\":' || fee || '}' FROM tx WHERE id=${tx_id}")
else
# null txid
outerclause="AND r.tx_id IS NULL"
fi
batcher_id=$(echo "${batch}" | cut -d '|' -f1)
outputs=$(sql "SELECT '{\"outputId\":' || id || ',\"outputLabel\":\"' || COALESCE(label, '') || '\",\"address\":\"' || address || '\",\"amount\":' || amount || ',\"addedTimestamp\":\"' || inserted_ts || '\"}' FROM recipient r WHERE batcher_id=${batcher_id} ${outerclause}")
local output
local IFS=$'\n'
# Join the per-output JSON rows into a comma-separated array body.
for output in ${outputs}
do
if [ -n "${outputsjson}" ]; then
outputsjson="${outputsjson},${output}"
else
outputsjson="${output}"
fi
done
batch=$(echo "${batch}" | cut -d '|' -f3)
response='{"result":'${batch}
if [ -n "${tx}" ]; then
response=${response}','${tx}
else
response=${response}',"txid":null,"hash":null'
fi
response=${response}',"outputs":['${outputsjson}']},"error":null}'
else
response='{"result":null,"error":{"code":-32700,"message":"batch not found or no corresponding txid","data":'${request}'}}'
fi
echo "${response}"
}
# curl localhost:8888/listbatchers | jq
# curl -d '{}' localhost:8888/getbatcher | jq
# curl -d '{}' localhost:8888/getbatchdetails | jq
# curl -d '{"outputLabel":"test002","address":"1abd","amount":0.0002}' localhost:8888/addtobatch | jq
# curl -d '{}' localhost:8888/batchspend | jq
# curl -d '{"outputId":1}' localhost:8888/removefrombatch | jq
# curl -d '{"batcherLabel":"lowfees","confTarget":32}' localhost:8888/createbatcher | jq
# curl localhost:8888/listbatchers | jq
# curl -d '{"batcherLabel":"lowfees"}' localhost:8888/getbatcher | jq
# curl -d '{"batcherLabel":"lowfees"}' localhost:8888/getbatchdetails | jq
# curl -d '{"batcherLabel":"lowfees","outputLabel":"test002","address":"1abd","amount":0.0002}' localhost:8888/addtobatch | jq
# curl -d '{"batcherLabel":"lowfees"}' localhost:8888/batchspend | jq
# curl -d '{"batcherLabel":"lowfees","outputId":9}' localhost:8888/removefrombatch | jq
|
#!/bin/bash
# CI helper: derive a Docker tag from BRANCH, add semver alias tags for
# release tags (vX.Y.Z), push the image(s) and a per-commit snapshot image.
set -e
BRANCH=${BRANCH:?'missing BRANCH env var'}
IMAGE="${REPO:?'missing REPO env var'}:latest"
unset major minor patch
if [[ "$BRANCH" == "master" ]]; then
TAG="latest"
elif [[ $BRANCH =~ ^v([0-9]+)\.([0-9]+)\.([0-9]+)$ ]]; then
major="${BASH_REMATCH[1]}"
minor="${BASH_REMATCH[2]}"
patch="${BASH_REMATCH[3]}"
TAG=${major}.${minor}.${patch}
echo "BRANCH is a release tag: major=$major, minor=$minor, patch=$patch"
else
# Branch names may contain '/' (e.g. "feature/x"), which Docker tags do not
# allow; "${BRANCH///}" is intended to strip slashes.
# NOTE(review): this pattern-substitution spelling is unusual — confirm it
# actually removes '/' (the explicit form would be "${BRANCH//\//}").
TAG="${BRANCH///}"
fi
echo "TRAVIS_BRANCH=$TRAVIS_BRANCH, REPO=$REPO, BRANCH=$BRANCH, TAG=$TAG, IMAGE=$IMAGE"
# add major, major.minor and major.minor.patch tags
if [[ -n $major ]]; then
docker tag $IMAGE $REPO:${major}
if [[ -n $minor ]]; then
docker tag $IMAGE $REPO:${major}.${minor}
if [[ -n $patch ]]; then
docker tag $IMAGE $REPO:${major}.${minor}.${patch}
fi
fi
fi
# Do not enable echo before the `docker login` command to avoid revealing the password.
set -x
docker login -u $DOCKER_USER -p $DOCKER_PASS
if [[ "${REPO}" == "jaegertracing/jaeger-opentelemetry-collector" ]]; then
# TODO remove once Jaeger OTEL collector is stable
docker push $REPO:latest
else
# push all tags, therefore push to repo
docker push $REPO
fi
# Also publish an immutable snapshot image keyed by the commit SHA.
SNAPSHOT_IMAGE="$REPO-snapshot:$TRAVIS_COMMIT"
echo "Pushing snapshot image $SNAPSHOT_IMAGE"
docker tag $IMAGE $SNAPSHOT_IMAGE
docker push $SNAPSHOT_IMAGE
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.cloud = void 0;
var cloud = {
"viewBox": "0 0 512 512",
"children": [{
"name": "g",
"attribs": {},
"children": [{
"name": "path",
"attribs": {
"d": "M358.719,272.734C364.484,263.125,368,252.016,368,240c0-35.344-28.656-64-64-64c-29.375,0-53.859,19.906-61.391,46.875\r\n\t\tC233.875,213.75,221.625,208,208,208c-26.5,0-48,21.5-48,48c0,5.75,1.188,11.203,3.031,16.312c-1-0.094-2-0.312-3.031-0.312\r\n\t\tc-17.672,0-32,22.328-32,40s14.328,40,32,40h192c17.672,0,32-22.328,32-40C384,296.641,373.156,275.828,358.719,272.734z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M358.719,272.734C364.484,263.125,368,252.016,368,240c0-35.344-28.656-64-64-64c-29.375,0-53.859,19.906-61.391,46.875\r\n\t\tC233.875,213.75,221.625,208,208,208c-26.5,0-48,21.5-48,48c0,5.75,1.188,11.203,3.031,16.312c-1-0.094-2-0.312-3.031-0.312\r\n\t\tc-17.672,0-32,22.328-32,40s14.328,40,32,40h192c17.672,0,32-22.328,32-40C384,296.641,373.156,275.828,358.719,272.734z"
},
"children": []
}]
}, {
"name": "path",
"attribs": {
"d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\t\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
},
"children": [{
"name": "path",
"attribs": {
"d": "M256,0C114.609,0,0,114.609,0,256s114.609,256,256,256s256-114.609,256-256S397.391,0,256,0z M256,472\r\n\t\tc-119.297,0-216-96.703-216-216S136.703,40,256,40s216,96.703,216,216S375.297,472,256,472z"
},
"children": []
}]
}]
}]
};
exports.cloud = cloud;
|
#!/bin/bash
# Print usage for this script.
# NOTE(review): the usage text mentions a -d flag, but the getopts loop
# below only implements -s, -r and -h — confirm whether -d was dropped.
help () {
printf "Script: 02-db-bootstrap.sh\n"
printf "Usage: 02-db-bootstrap.sh [ -s ] [ -r ] [ -d ]\n"
printf " -- \nWhere: \n"
printf " -s The inital AWS Stack name which is to create the vpc, subnet, and Aurora cluster. \n"
printf " Whatever value is set when running script 01-install_prereq.sh is what should be retained\n"
printf " for the rest of the demo scripts. This flag can be avoided if AURORA_DB_CFSTACK_NAME is \n"
printf " set as an environment variable. \n"
printf " -r The AWS Region we're running this demo in. This setting needs to stay the same across all scripts run.\n"
printf " Using this flag can be avoided if AWS_DEFAULT_REGION is set as an environment variable where running this script\n"
printf " -h show help page.\n"
}
# Parse CLI flags.
# NOTE(review): `error` (in the catch-all case) is not defined anywhere in
# this script; an unexpected flag would fail with "command not found".
while getopts 's:r:h' flag; do
case "${flag}" in
s) AURORA_DB_CFSTACK_NAME="${OPTARG}" ;;
r) AWS_DEFAULT_REGION="${OPTARG}" ;;
h) show_help='true' ;;
*) error "Unexpected option ${flag}" ;;
esac
done
if [[ $show_help == "true" ]]
then
help
exit 0
fi
# Both required settings may come from flags or pre-set environment variables.
if [ -z $AURORA_DB_CFSTACK_NAME ]
then
printf "The AWS stack var isn't set. Please use -s to set this stack, and ensure it's already completed sucessfully.\n"
help
exit 1
else
printf "The AWS Stack is set, continuing.\n"
fi
if [ -z $AWS_DEFAULT_REGION ]
then
printf "The AWS default region var isn't set. Please use -r to set this stack.\n"
help
exit 1
else
printf "The AWS default region is set, continuing.\n"
fi
## Mainline
echo "DB stack name is:" $AURORA_DB_CFSTACK_NAME
# Pull endpoints and cluster name out of the CloudFormation stack outputs.
# NOTE(review): Src and Tgt both read the same "RDSEndPoint" output, so they
# are always identical — presumably the target should use a different
# OutputKey; verify against the stack template.
SrcRDSEndPoint=$(aws cloudformation describe-stacks --stack-name $AURORA_DB_CFSTACK_NAME | jq -r '.Stacks[].Outputs[] | select(.OutputKey=="RDSEndPoint") | .OutputValue')
TgtRDSEndPoint=$(aws cloudformation describe-stacks --stack-name $AURORA_DB_CFSTACK_NAME | jq -r '.Stacks[].Outputs[] | select(.OutputKey=="RDSEndPoint") | .OutputValue')
ClusterName=$(aws cloudformation describe-stacks --stack-name $AURORA_DB_CFSTACK_NAME | jq -r '.Stacks[].Outputs[] | select(.OutputKey=="ClusterName") | .OutputValue')
echo "RDS Source endpoint:" $SrcRDSEndPoint
echo "RDS Destination endpoint" $TgtRDSEndPoint
echo "RDS Cluster name" $ClusterName
# Demo-only credentials, intentionally hard-coded for this walkthrough.
export SrcDBUsername="pgadmin"
export SrcDBPassword="auradmin"
export TgtDBUsername="pgadmin"
export TgtDBPassword="auradmin"
# Poll until the cluster's parameter-group status reports in-sync
# (i.e. the post-restart parameter application has finished).
SYNC_STATUS=""
while [ "$SYNC_STATUS" != "in-sync" ];
do
echo "waiting for database to restart "
SYNC_STATUS=$(aws rds describe-db-clusters --db-cluster-identifier $ClusterName | jq -r '.DBClusters[].DBClusterMembers[].DBClusterParameterGroupStatus')
echo "Database Sync status is:" $SYNC_STATUS
sleep 10;
done
sleep 2
echo "Creating database schema"
psql postgres://$SrcDBUsername:$SrcDBPassword@$SrcRDSEndPoint -f schema.sql
echo "Loading sample data"
psql postgres://$SrcDBUsername:$SrcDBPassword@$SrcRDSEndPoint -c"\copy data_mart.organization(org_name) from 'org.csv'"
psql postgres://$SrcDBUsername:$SrcDBPassword@$SrcRDSEndPoint -c"\copy data_mart.events (operation,value,event_type,org_id,created_at) from 'events.csv' delimiter ',';"
echo "Running pg_dump to extract pre and post data schema dedination"
pg_dump postgres://$SrcDBUsername:$SrcDBPassword@$SrcRDSEndPoint -t data_mart.events -s -U postgres --section=pre-data > data_mart.events-pre-schema.sql
pg_dump postgres://$SrcDBUsername:$SrcDBPassword@$SrcRDSEndPoint -t data_mart.events -s -U postgres --section=post-data > data_mart.events-post-schema.sql
echo "Editing dump file to convert table defination from non-partitioned to partitioned"
# Rename the schema in the dump, then rewrite the line right after the
# ck_valid_operation constraint so the CREATE TABLE becomes range-partitioned
# on created_at.
sed -i 's/data_mart/data_mart_new/g' data_mart.events-pre-schema.sql
var1="CONSTRAINT ck_valid_operation CHECK (((operation = 'C'::bpchar) OR (operation = 'D'::bpchar)))"
line=(`grep -n "$var1" data_mart.events-pre-schema.sql | awk -F ':' '{print $1}'`)
line=$((line+1))
substitute=" )PARTITION BY RANGE (created_at);"
sed -i "${line}s/.*/$substitute/" data_mart.events-pre-schema.sql
|
#!/bin/bash
# Download a shared Google Drive file (resources.tar.gz) and unpack it.
# The first request only collects cookies so the "large file" confirmation
# code can be extracted; the second request performs the real download.
curl -sc /tmp/cookie "https://drive.google.com/uc?export=download&id=1RvQZhjsn5EH6O8vXKy_8KIwr5i-s0ecE" > /dev/null
# Pull the confirmation token out of the download-warning cookie.
CODE="$(awk '/_warning_/ {print $NF}' /tmp/cookie)"
curl -Lb /tmp/cookie "https://drive.google.com/uc?export=download&confirm=${CODE}&id=1RvQZhjsn5EH6O8vXKy_8KIwr5i-s0ecE" -o resources.tar.gz
tar -zxvf resources.tar.gz
rm resources.tar.gz
echo Download finished.
|
#! /usr/bin/env bash
# Install or remove the Ubuntu mainline 5.14-rc5 kernel.
# Flags: -amd/--amd64 installs for amd64, -arm/--arm64 for arm64,
# -r/--remove uninstalls.  Exactly one action is processed (each case breaks).
KERNEL_VER="5.14-rc5"
VER_STAND="5.14.0"
VER_STR="051400rc5"
while [[ $# -gt 0 ]]; do
PROG_ARGS+=("${1}")
case "${1}" in
-amd|--amd64)
# Download headers/image/modules to a temp dir, install, then clean up.
mkdir /tmp/ubuntukernel$KERNEL_VER
cd /tmp/ubuntukernel$KERNEL_VER
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.14-rc5/amd64/linux-headers-5.14.0-051400rc5-generic_5.14.0-051400rc5.202108081730_amd64.deb
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.14-rc5/amd64/linux-headers-5.14.0-051400rc5_5.14.0-051400rc5.202108081730_all.deb
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.14-rc5/amd64/linux-image-unsigned-5.14.0-051400rc5-generic_5.14.0-051400rc5.202108081730_amd64.deb
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.14-rc5/amd64/linux-modules-5.14.0-051400rc5-generic_5.14.0-051400rc5.202108081730_amd64.deb
sudo dpkg -i *.deb
cd
rm -r /tmp/ubuntukernel$KERNEL_VER
# The initrd only exists if the image package installed cleanly.
if [ -f "/boot/initrd.img-$VER_STAND-$VER_STR-generic" ]
then
echo linux $KERNEL_VER is successfully installed!
else
echo an error occurred while installing
fi
break
;;
-arm|--arm64)
mkdir /tmp/ubuntukernel$KERNEL_VER
cd /tmp/ubuntukernel$KERNEL_VER
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.14-rc5/arm64/linux-headers-5.14.0-051400rc5-generic_5.14.0-051400rc5.202108081730_arm64.deb
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.14-rc5/arm64/linux-image-unsigned-5.14.0-051400rc5-generic_5.14.0-051400rc5.202108081730_arm64.deb
wget https://kernel.ubuntu.com/~kernel-ppa/mainline/v5.14-rc5/arm64/linux-modules-5.14.0-051400rc5-generic_5.14.0-051400rc5.202108081730_arm64.deb
sudo dpkg -i *.deb
cd
rm -r /tmp/ubuntukernel$KERNEL_VER
break
;;
-r|--remove)
echo only remove kernel if you have a newer one!
sleep 2
sudo apt remove linux-headers-$VER_STAND-$VER_STR
sudo apt remove linux-image-unsigned-$VER_STAND-$VER_STR-generic
sudo apt remove linux-modules-$VER_STAND-$VER_STR-generic
# After removal the initrd should be gone; its presence signals failure.
if [ -f "/boot/initrd.img-$VER_STAND-$VER_STR-generic" ]
then
echo linux $KERNEL_VER is successfully removed!
else
echo an error occurred while removing linux $KERNEL_VER
fi
break
;;
esac
done
|
import { push } from 'react-router-redux'
// Thunk action creator: dispatches a react-router-redux `push` so the app
// navigates to `path`.
export const navigateTo = (path) => {
  return (dispatch) => {
    dispatch(push(path));
  };
};
|
echo 'call kill.sh'
# Print the PIDs (one per line) of the current user's running apis-main
# java processes.
get_pids() {
ps -f -U `whoami` | grep apis-main | grep java | while read _USER_ _PID_ _OTHERS_; do
echo $_PID_
done
}
# Keep SIGKILLing matching processes until none remain; re-check every second
# because a process may take a moment to disappear from ps output.
while true; do
_PIDS_=`get_pids`
if [ -z "$_PIDS_" ] ; then
break
fi
echo kill -KILL $_PIDS_
kill -KILL $_PIDS_
sleep 1
done
echo '... done'
|
<reponame>bsnote/node-statvfs
// Copyright (c) 2012, <NAME>. All rights reserved.
// Smoke test: the module must export a function, and statvfs('/tmp') must
// return populated filesystem statistics.
var assert = require('assert');
var statvfs = require('../lib');
assert.ok(statvfs);
assert.equal(typeof (statvfs), 'function');
statvfs('/tmp', function (err, stats) {
assert.ifError(err);
// These fields are expected to be non-zero on a real mounted filesystem.
assert.ok(stats.bsize);
assert.ok(stats.frsize);
assert.ok(stats.blocks);
assert.ok(stats.bfree);
assert.ok(stats.bavail);
assert.ok(stats.files);
assert.ok(stats.ffree);
assert.ok(stats.favail);
assert.ok(stats.fsid);
// flag may legitimately be 0, so only assert that it is present.
assert.ok(stats.flag !== undefined);
assert.ok(stats.namemax);
});
|
require(['jquery', 'layer', 'WebUploader', 'registLayer', 'forgetpw', 'validate', 'placeholders', 'cookie'], function($, layer, WebUploader, registLayer, forgetpw) {
//忘记密码弹窗
$("#forgetPw").on("click", function() {
forgetpw();
})
$(".main>.payChoice>.balance>.b2").hide();
/* 错误信息弹窗 */
$("#tanchang").hide();
var args = {
goodsId: getQueryString("goodsId"),
leaseType: getQueryString("leaseType"),
count: getQueryString("count"),
orderPayType: null,
mobile: null,
orderNo: getQueryString("orderNo"),
isRelet: getQueryString("isRelet")
}
getList();
function getQueryString(name) {
var reg = new RegExp('(^|&)' + name + '=([^&]*)(&|$)', 'i');
var r = window.location.search.substr(1).match(reg);
if (r != null) {
return unescape(r[2]);
}
return null;
}
var qq = false;
var phone = false;
function sell() {
var phonenumber = $(".phonenumber").val();
var qqnumber = $(".qqnumber").val();
$.ajax({
url: BASE_URL.sell + "newOrderSale/updateSaleOrderPhoneAndQq.htm",
type: "get",
dataType: "jsonp",
data: {
phone: phonenumber,
qq: qqnumber,
orderNo: args.orderNo,
},
success: function(data) {
console.dir(data);
}
})
}
function payEvent(qq, phone) {
if (qq == true && phone == true) {
$(".payBtn").removeClass("events");
sell();
} else {
$(".payBtn").addClass("events");
}
}
//QQ号码验证
$(".qqnumber").blur(function() {
var qqnumber = $(".qqnumber").val();
var qqreg = /^[1-9][0-9]{4,10}$/;
if (qqnumber == "") {
$(".qqerror").text("请输入QQ号码!");
qq = false;
payEvent(qq, phone);
} else if (!qqreg.test(qqnumber)) {
$(".qqerror").text("QQ号字符最大长度为11!");
qq = false;
payEvent(qq, phone);
} else {
$(".qqerror").text("");
qq = true;
payEvent(qq, phone);
}
})
//手机号码验证
$(".phonenumber").blur(function() {
var phonenumber = $(".phonenumber").val();
var reg = /^(13[0-9]|14[5-9]|15[012356789]|166|17[0-8]|18[0-9]|19[8-9])[0-9]{8}$/;
if (phonenumber == "") {
$(".phoneerror").text("请输入手机号码!");
phone = false;
payEvent(qq, phone);
} else if (!reg.test(phonenumber)) {
$(".phoneerror").text("请输入正确的手机号码格式!");
phone = false;
payEvent(qq, phone);
} else {
$(".phoneerror").text("");
phone = true;
payEvent(qq, phone);
}
})
/* 是否选中余额支付 */
$(".main>.payChoice>.pay>.choicePay>.passWord").hide();
$(".main>.payChoice>.balance>.b2 input").change(function() {
if ($(this).is(':checked')) {
$('input:radio').removeAttr("checked");
$("input[name=pay]").prop("disabled", "disabled");
$(".main>.payChoice>.pay>.choicePay>.passWord").show();
} else {
// console.log(123);
$("input[name=pay]").removeAttr("disabled");
$("input[name=pay]:eq(0)").prop('checked', true);
$(".main>.payChoice>.pay>.choicePay>.passWord").hide();
}
});
/* 点击确认支付 */
$(".main .payBtn").click(function() {
if (!$(".xieyi input").get(0).checked) {
layer.open({
title: '提示',
content: "请阅读并同意《虚贝资产租赁协议》"
})
return;
}
payment();
function payment() {
/* 判断用户信息有没有拿到 */
if (args.mobile == null) {
$.ajax({
url: BASE_URL.user + "businessUser/findUserDetail",
type: "get",
dataType: "jsonp",
data: {
loginToken: $.cookie("loginToken")
},
success: function(data) {
if (data.code == 1) {
args.mobile = data.result.user.mobile;
}
}
})
}
args.orderPayType = $('input:radio:checked').val() || $('.main>.payChoice>.balance>.b2 input').val();
/* 判断支付类型 */
/* zfb wx 直接创建订单 */
if (args.orderPayType == "zfb" || args.orderPayType == "wx") {
window.location.href = "./goods_pay.html?orderPayType=" + args.orderPayType + "&orderNo=" + args.orderNo;
// window.location.href = BASE_URL.order + "master/order/createOrder?orderPayType=" + args.orderPayType + "&goodsId=" + args.goodsId + "&userId=" + $.cookie("loginToken") + "&count=" + args.count + "&leaseType=" + args.leaseType + "&businessNo=xubei" + "&mobile=" + args.mobile + "&orderNo=" + args.orderNo + "&isRelet=" + args.isRelet;
} else {
/* ye 调用接口 创建订单 */
/* 获取密码 */
var pw = $(".main>.payChoice>.pay>.choicePay>.passWord>input").val();
$.ajax({
// url: BASE_URL.order + 'master/order/createOrderBalancePay',
url: BASE_URL.pay + 'pay/blancePay',
type: "get",
data: {
orderNo: args.orderNo,
type: "sell",
payPwd: pw,
token: $.cookie('loginToken'),
},
dataType: "jsonp",
success: function(data) {
if (data.code == 1) {
//window.location.href = data.result.return_url + "?orderNo=" + data.result.order_no
window.location.href = "../lease_success_xabuy.html?orderNo=" + args.orderNo;
} else {
$(".error").text("*" + data.message);
}
}
})
}
}
});
function getList() {
$.ajax({
//url: BASE_URL.order + "master/order/findOrderPayParams",
url: BASE_URL.sell + "newOrderSale/findOrderDetail.htm",
type: "get",
dataType: "jsonp",
data: {
orderNo: args.orderNo
},
success: function(data) {
if (data.retCode == 200) {
var data1 = data.result;
$(".prompt .t2").text(data1.goodsTitle);
$(".main>.payChoice>.balance>.b2 b").text(data1.orderItemPrice)
var sum = data1.orderItemPrice;
$(".prompt .sum-money span").text(data1.orderItemPrice);
/* 获取账户余额 */
$.ajax({
//url: BASE_URL.org.user + "businessUser/findUserDetail",
url: BASE_URL.pay + "pay/findBalance",
type: "get",
dataType: "jsonp",
data: {
token: $.cookie("loginToken")
},
success: function(data) {
if (data.code == 1) {
var ye = data.result.AVAILABLE_AMOUNT;
$(".main>.payChoice>.balance>.b1 span").text(ye);
/* 判断合计金额是否大于余额 */
if (parseFloat(ye) >= parseFloat(sum)) {
$(".main>.payChoice>.balance>.b2").show();
}
}
}
})
} else {
if (data.message == "请登录") {
window.location.href = "http://www.xubei.com/login.htm";
} else {
$("#tanchang p").text(data.message);
// $("#tanchang").css("z-index", "100000").show();
}
}
}
})
}
});
|
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright (c) 2010 <NAME>. All rights reserved.
#
"""Capture the output of a command and test its exit code at the same time.
"""
#end_pymotw_header
# NOTE: Python 2 syntax (print statements) — do not run under Python 3.
import subprocess

try:
    # stderr is folded into stdout via stderr=STDOUT; the trailing `exit 1`
    # forces a non-zero status so the CalledProcessError path is exercised.
    output = subprocess.check_output(
        'echo to stdout; echo to stderr 1>&2; exit 1',
        shell=True,
        stderr=subprocess.STDOUT,
    )
except subprocess.CalledProcessError as err:
    print 'ERROR:', err
else:
    print 'Have %d bytes in output' % len(output)
    print output
|
const { readFile } = require('fs').promises;
// Read the fixed input file and log its contents (or 'No Data' when the
// read failed).  Read errors are logged and never propagate to the caller.
// Fix: declare with `const` — the original assigned to an undeclared
// identifier, creating an implicit global (and a ReferenceError in strict
// mode / ES modules).
const readLocalFile = async () => {
  try {
    let data;
    try {
      data = await readFile('microservices/products/src/call-backs/input.txt', { encoding: 'utf8' });
    } catch (err) {
      console.error("Error reading file", err);
    }
    // With an encoding supplied, readFile already resolves to a string;
    // toString() is kept as a harmless no-op for parity with the old code.
    console.log((data !== undefined) ? data.toString() : 'No Data');
    console.log("Program Ended 1");
  } catch (err) {
    console.error("Error in program: ", err);
  }
};
// Read the fixed output file and log its contents (or 'No Data' when the
// read failed).  Read errors are logged and never propagate to the caller.
// Fix: declare with `const` — the original assigned to an undeclared
// identifier, creating an implicit global (and a ReferenceError in strict
// mode / ES modules).
const readAnotherFile = async () => {
  try {
    let data;
    try {
      data = await readFile('microservices/products/src/call-backs/output.txt', { encoding: 'utf8' });
    } catch (err) {
      console.error("Error reading file", err);
    }
    // `data` is already a string when an encoding is supplied; toString()
    // kept as a no-op for parity with the old code.
    console.log((data !== undefined) ? data.toString() : 'No Data');
    console.log("Program Ended 2");
  } catch (err) {
    console.error("Error in program: ", err);
  }
};
// Fire-and-forget: both reads start concurrently, so the relative order of
// their log output is not guaranteed.
readLocalFile();
readAnotherFile();
|
<filename>fbcnms-packages/fbcnms-alarms/hooks/useForm.js
/**
* Copyright 2020 The Magma Authors.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @flow
* @format
*/
import * as React from 'react';
type InputChangeFunc<TFormState, TVal> = (
formUpdate: FormUpdate<TFormState, TVal>,
) => (event: $Shape<SyntheticInputEvent<HTMLElement>>) => void;
type FormUpdate<TFormState, TVal = string> = (
val: TVal,
event: SyntheticInputEvent<HTMLElement>,
) => $Shape<TFormState>;
/**
 * Generic controlled-form state hook (Flow-typed).
 *
 * Holds the form state object, exposes helpers to merge partial updates,
 * edit array-valued fields (update/add/remove by index), and build input
 * change handlers.  `onFormUpdated`, when given, is invoked with the next
 * state after every merge.
 */
export default function useForm<TFormState: {}>({
initialState,
onFormUpdated,
}: {
initialState: $Shape<TFormState>,
onFormUpdated?: (state: TFormState) => void,
}): {|
formState: TFormState,
updateFormState: (update: $Shape<TFormState>) => TFormState,
handleInputChange: InputChangeFunc<TFormState, *>,
updateListItem: (
listName: $Keys<TFormState>,
idx: number,
update: $ElementType<TFormState, $Keys<TFormState>>,
) => void,
addListItem: (listName: $Keys<TFormState>, item: {}) => void,
removeListItem: (listName: $Keys<TFormState>, idx: number) => void,
setFormState: (f: TFormState) => void,
|} {
const [formState, setFormState] = React.useState<TFormState>(initialState);
// Keep the latest onFormUpdated in a ref so the callbacks below don't have
// to be re-created (and don't capture a stale listener) when it changes.
const formUpdatedRef = React.useRef(onFormUpdated);
React.useEffect(() => {
formUpdatedRef.current = onFormUpdated;
}, [onFormUpdated]);
// Shallow-merge `update` into the current state, store it, notify the
// listener, and return the next state.
const updateFormState = React.useCallback(
update => {
const nextState = {
...formState,
...update,
};
setFormState(nextState);
if (typeof formUpdatedRef.current === 'function') {
formUpdatedRef.current(nextState);
}
return nextState;
},
[formState, formUpdatedRef, setFormState],
);
/**
 * Immutably updates an item in an array on T.
 * usage:
 * //formState: {list: [{x:1},{x:2}]};
 * updateListItem('list', 0, {x:0})
 * //formState: {{list: [{x:0},{x:2}]}}
 */
const updateListItem = React.useCallback(
(
listName: $Keys<TFormState>,
idx: number,
update: $ElementType<TFormState, $Keys<TFormState>>,
) => {
updateFormState({
[listName]: immutablyUpdateArray(
formState[listName] || [],
idx,
update,
),
});
},
[formState, updateFormState],
);
// Remove the element at `idx` from the named list field (no-op when the
// field is missing/empty).
const removeListItem = React.useCallback(
(listName: $Keys<TFormState>, idx: number) => {
if (!formState[listName]) {
return;
}
updateFormState({
[listName]: formState[listName].filter((_, i) => i !== idx),
});
},
[formState, updateFormState],
);
// Append `item` to the named list field, creating the list if absent.
const addListItem = React.useCallback(
<TItem>(listName: $Keys<TFormState>, item: TItem) => {
updateFormState({
[listName]: [...(formState[listName] || []), item],
});
},
[formState, updateFormState],
);
/**
 * Passes the event value to an updater function which returns an update
 * object to be merged into the form.
 */
const handleInputChange = React.useCallback(
(formUpdate: FormUpdate<TFormState>) => (
event: SyntheticInputEvent<HTMLElement>,
) => {
const value = event.target.value;
updateFormState(formUpdate(value, event));
},
[updateFormState],
);
return {
formState,
updateFormState,
handleInputChange,
updateListItem,
addListItem,
removeListItem,
setFormState,
};
}
/**
* Copies array with the element at idx immutably merged with update
*/
function immutablyUpdateArray<T>(
array: Array<T>,
idx: number,
update: $Shape<T>,
) {
return array.map((item, i) => {
if (i !== idx) {
return item;
}
return {...item, ...update};
});
}
|
// Build a { cityName -> landArea } lookup from a list of city objects.
// When two cities share a name, the later entry wins (same as the original
// forEach-assignment behavior).
const calculateTotalLandArea = (cities) => {
  return Object.fromEntries(cities.map((city) => [city.name, city.landArea]));
};
|
#!/bin/bash
# backup apt packages
# To restore:
# - 'dpkg --set-selections < selections.txt && apt-get dselect-upgrade'
# Preconditions: dpkg and apt-key must exist on PATH.
hash dpkg 2>/dev/null || { echo >&2 "You need to install dpkg. Aborting."; exit 1; }
hash apt-key 2>/dev/null || { echo >&2 "You need to install apt. Aborting."; exit 1; }
# Export the install/hold selections of every known package.
if ! dpkg --get-selections > installed_packages.log; then
echo >&2 "ERROR: Failed to export package selections."
exit 1
fi
# Export all trusted repository signing keys.
if ! apt-key exportall > repositories.keys 2>/dev/null; then
echo >&2 "ERROR: Failed to export package repositories."
exit 1
fi
exit 0
|
<filename>app/src/main/java/com/wyp/materialqqlite/FileUtils.java
package com.wyp.materialqqlite;
import java.io.BufferedReader;
import java.io.File;
import java.io.InputStream;
import java.io.InputStreamReader;
import android.content.Context;
import android.os.Environment;
/**
 * File-system helpers: SD-card state/paths, app-internal directories,
 * recursive deletion, and reading bundled asset files.
 */
public class FileUtils {

    /** @return true when external storage (SD card) is mounted. */
    public static boolean hasSDCard() {
        return Environment.getExternalStorageState()
                .equals(Environment.MEDIA_MOUNTED);
    }

    /** @return absolute external-storage root with trailing slash, e.g. "/storage/sdcard/". */
    public static String getSDCardDir() {
        return Environment.getExternalStorageDirectory().getAbsolutePath() + "/";
    }

    /** @return this app's internal files dir with trailing slash, "/data/data/&lt;pkg&gt;/files/". */
    public static String getAppFilesDir(Context context) {
        return context.getFilesDir().getAbsolutePath() + "/";
    }

    /** @return this app's internal cache dir with trailing slash, "/data/data/&lt;pkg&gt;/cache/". */
    public static String getAppCacheDir(Context context) {
        return context.getCacheDir().getAbsoluteFile() + "/";
    }

    /** Deletes the file or directory tree at {@code strFileName}, if it exists. */
    public static void deleteFile(String strFileName) {
        File file = new File(strFileName);
        deleteFile(file);
    }

    /**
     * Recursively deletes {@code file}: plain files are removed directly;
     * directories are emptied depth-first and then removed themselves.
     * Missing paths are silently ignored.
     */
    public static void deleteFile(File file) {
        if (!file.exists()) {
            return;
        }
        if (file.isFile()) {
            file.delete();
            return;
        }
        if (file.isDirectory()) {
            File[] childFile = file.listFiles();
            if (childFile == null || childFile.length == 0) {
                file.delete();
                return;
            }
            for (File f : childFile) {
                deleteFile(f);
            }
            file.delete();
        }
    }

    /**
     * Reads a text asset and returns its contents with line separators
     * dropped (lines concatenated, matching the previous behavior).
     * Returns "" on any error.
     *
     * Fixes over the previous version: per-line String concatenation was
     * O(n^2) (now a StringBuilder), and the reader was never closed (now
     * closed in a finally block).
     */
    public static String readFromAssets(Context context, String fileName) {
        BufferedReader bufReader = null;
        try {
            InputStream is = context.getResources().getAssets().open(fileName);
            bufReader = new BufferedReader(new InputStreamReader(is));
            StringBuilder result = new StringBuilder();
            String strLine;
            while ((strLine = bufReader.readLine()) != null) {
                result.append(strLine);
            }
            return result.toString();
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            if (bufReader != null) {
                try {
                    bufReader.close();
                } catch (Exception ignored) {
                    // best-effort close
                }
            }
        }
        return "";
    }
}
|
package io.bhex.api.client.impl;
import io.bhex.api.client.BHexApiRestClient;
import io.bhex.api.client.constant.BHexConstants;
import io.bhex.api.client.domain.account.*;
import io.bhex.api.client.domain.account.request.*;
import io.bhex.api.client.domain.general.BrokerInfo;
import io.bhex.api.client.domain.market.*;
import io.bhex.api.client.service.BHexApiService;
import java.util.List;
import static io.bhex.api.client.impl.BHexApiServiceGenerator.createService;
import static io.bhex.api.client.impl.BHexApiServiceGenerator.executeSync;
/**
* Implementation of BHex's REST API using Retrofit with synchronous/blocking method calls.
*/
public class BHexApiRestClientImpl implements BHexApiRestClient {
private final BHexApiService bHexApiService;
/** Builds the underlying Retrofit service; requests are signed with the given API key/secret. */
public BHexApiRestClientImpl(String baseUrl, String apiKey, String secret) {
bHexApiService = createService(baseUrl, BHexApiService.class, apiKey, secret);
}
// General endpoints
/** Tests REST API connectivity; returns normally when the endpoint is reachable. */
@Override
public void ping() {
executeSync(bHexApiService.ping());
}
/** @return the exchange server time (epoch millis, per the API response). */
@Override
public Long getServerTime() {
return executeSync(bHexApiService.getServerTime()).getServerTime();
}
/** @return general broker/exchange metadata. */
@Override
public BrokerInfo getBrokerInfo() {
return executeSync(bHexApiService.getBrokerInfo());
}
/** @return the order book for {@code symbol}, depth capped at {@code limit}. */
@Override
public OrderBook getOrderBook(String symbol, Integer limit) {
return executeSync(bHexApiService.getOrderBook(symbol, limit));
}
/** @return recent public trades for {@code symbol}, at most {@code limit} entries. */
@Override
public List<TradeHistoryItem> getTrades(String symbol, Integer limit) {
return executeSync(bHexApiService.getTrades(symbol, limit));
}
/** @return kline/candlestick bars for {@code symbol} in the given interval and time window. */
@Override
public List<Candlestick> getCandlestickBars(String symbol, CandlestickInterval interval, Long startTime, Long endTime, Integer limit) {
return executeSync(bHexApiService.getCandlestickBars(symbol, interval.getIntervalId(), startTime, endTime, limit));
}
@Override
public TickerStatistics get24HrPriceStatistics(String symbol) {
return executeSync(bHexApiService.get24HrPriceStatistics(symbol));
}
@Override
public TickerPrice getPrice(String symbol) {
return executeSync(bHexApiService.getLatestPrice(symbol));
}
@Override
public BookTicker getBookTicker(String symbol) {
return executeSync(bHexApiService.getBookTicker(symbol));
}
@Override
public Index getIndex(String symbol) {
return executeSync(bHexApiService.getIndex(symbol));
}
@Override
public NewOrderResponse newOrder(NewOrder order) {
return executeSync(bHexApiService.newOrder(order.getSymbol(), order.getSide(), order.getType(),
order.getTimeInForce(), order.getQuantity(), order.getPrice(), order.getNewClientOrderId(), order.getStopPrice(),
order.getIcebergQty(), order.getRecvWindow(), order.getTimestamp()));
}
@Override
public Order getOrderStatus(OrderStatusRequest orderStatusRequest) {
return executeSync(bHexApiService.getOrderStatus(orderStatusRequest.getOrderId(), orderStatusRequest.getOrigClientOrderId(),
orderStatusRequest.getRecvWindow(), orderStatusRequest.getTimestamp()));
}
@Override
public CancelOrderResponse cancelOrder(CancelOrderRequest cancelOrderRequest) {
return executeSync(bHexApiService.cancelOrder(cancelOrderRequest.getOrderId(), cancelOrderRequest.getClientOrderId(),
cancelOrderRequest.getRecvWindow(), cancelOrderRequest.getTimestamp()));
}
@Override
public List<Order> getOpenOrders(OpenOrderRequest orderRequest) {
return executeSync(bHexApiService.getOpenOrders(orderRequest.getSymbol(), orderRequest.getLimit(),
orderRequest.getRecvWindow(), orderRequest.getTimestamp()));
}
@Override
public List<Order> getHistoryOrders(HistoryOrderRequest orderRequest) {
return executeSync(bHexApiService.getHistroyOrders(orderRequest.getOrderId(), orderRequest.getStartTime(), orderRequest.getEndTime(),
orderRequest.getLimit(), orderRequest.getRecvWindow(), orderRequest.getTimestamp()));
}
@Override
public Account getAccount(Long recvWindow, Long timestamp) {
return executeSync(bHexApiService.getAccount(recvWindow, timestamp));
}
@Override
public List<Trade> getMyTrades(MyTradeRequest request) {
return executeSync(bHexApiService.getMyTrades(request.getFromId(), request.getToId(), request.getStartTime(), request.getEndTime(),
request.getLimit(), request.getRecvWindow(), request.getTimestamp()));
}
@Override
public List<DepositOrder> getDepositOrders(DepositOrderRequest request) {
return executeSync(bHexApiService.getDepositOrders(request.getToken(), request.getStartTime(), request.getEndTime(), request.getFromId(),
request.getLimit(), request.getRecvWindow(), request.getTimestamp()));
}
@Override
public String startUserDataStream(Long recvWindow, Long timestamp) {
return executeSync(bHexApiService.startUserDataStream(recvWindow, timestamp)).toString();
}
@Override
public void keepAliveUserDataStream(String listenKey, Long recvWindow, Long timestamp) {
executeSync(bHexApiService.keepAliveUserDataStream(listenKey, recvWindow, timestamp));
}
@Override
public void closeUserDataStream(String listenKey, Long recvWindow, Long timestamp) {
executeSync(bHexApiService.closeAliveUserDataStream(listenKey, recvWindow, timestamp));
}
}
|
def map_response_to_value(response):
    """Normalize a free-form user answer to one of the codes "1".."4".

    Accepted synonyms (case-insensitive, surrounding whitespace ignored):
      "1": yes / y
      "2": i / idk / i dont know / i don't know
      "3": probably / p
      "4": probably not / pn
    Any other input yields the sentinel string "Invalid response".
    """
    # strip() added so inputs like " yes " (e.g. pasted text) are accepted;
    # the original lower()-only normalization rejected them.
    normalized = response.strip().lower()
    mapping = {
        "yes": "1", "y": "1", "1": "1",
        "i": "2", "idk": "2", "i dont know": "2", "i don't know": "2", "2": "2",
        "probably": "3", "p": "3", "3": "3",
        "probably not": "4", "pn": "4", "4": "4",
    }
    return mapping.get(normalized, "Invalid response")
|
<filename>docs/demo.js<gh_stars>1-10
const { watch, lastRun, series, parallel, src, dest } = require('gulp');
var clean = require('gulp-clean');
var FtpDeploy = require("ftp-deploy");
var ftpDeploy = new FtpDeploy();
// FTP deployment target. NOTE(review): no user/password/localRoot/remoteRoot
// fields are visible here — presumably supplied elsewhere or this config is
// incomplete; confirm before deploying.
var configDist = {
    host: "o-pen.com.cn",
    port: 21,
    // this would upload everything except dot files
    include: ['*', '**/*'],
};
|
# Regenerates mogenerator machine/human files and PONSO plain objects.
# BUG FIX: the three `mv` commands read `mv $file $(unknown)...` — `$(unknown)`
# is a corrupted placeholder that would try to run a command named "unknown".
# Reconstructed as `${filename}` (the prefix computed on the line above each mv).
echo "Удаление старых MogeneratorPONSO"
cd "${SRCROOT}/conferences/Classes/Models/GeneratedPONSO/"
rm *.*
echo " "
echo "Переименование ponso c нашей логикой для того что бы их не перегенерировал mogenerator"
cd "${SRCROOT}/conferences/Classes/Models/PlainObjects/"
# Rename hand-edited *PlainObject.* files to *ModelObject.* so mogenerator
# does not regenerate (and clobber) them below.
for file in *.*
do
    filename="${file/PlainObject.*/}"  # strip the "PlainObject.<ext>" suffix
    ext="${file##*.}"
    mv "$file" "${filename}ModelObject.${ext}"
done
echo " "
echo "Запуск скрипта mogenerator"
/usr/local/bin/mogenerator \
    --model "${PROJECT_DIR}/conferences/Classes/Models/CoreDataModel/Conference.xcdatamodeld" \
    --machine-dir "${PROJECT_DIR}/conferences/Classes/Models/GeneratedMO/" \
    --human-dir "${PROJECT_DIR}/conferences/Classes/Models/ManagedObjects/" \
    --template-var arc=true
echo "Generating PONSO"
/usr/local/bin/mogenerator \
    --model "${PROJECT_DIR}/conferences/Classes/Models/CoreDataModel/Conference.xcdatamodeld" \
    --machine-dir "${PROJECT_DIR}/conferences/Classes/Models/GeneratedPONSO/" \
    --human-dir "${PROJECT_DIR}/conferences/Classes/Models/PlainObjects/" \
    --template-path "${PROJECT_DIR}/Scripts/PONSOTemplates" \
    --base-class "NSObject" \
    --template-var arc=true
echo " "
echo "Переименование ponso c нашей логикой обратно"
# Rename freshly generated *ModelObject.* files back to *PlainObject.*
# in both the generated directory and the hand-edited one.
cd "${SRCROOT}/conferences/Classes/Models/GeneratedPONSO/"
for file in *.*
do
    filename="${file/ModelObject.*/}"
    ext="${file##*.}"
    mv "$file" "${filename}PlainObject.${ext}"
done
echo " "
cd "${SRCROOT}/conferences/Classes/Models/PlainObjects/"
for file in *.*
do
    filename="${file/ModelObject.*/}"
    ext="${file##*.}"
    mv "$file" "${filename}PlainObject.${ext}"
done
|
class Song:
    """A single track: a title plus a duration string (e.g. "3:45")."""

    def __init__(self, title, duration):
        self.title = title
        self.duration = duration


class Artist:
    """A performer identified by name."""

    def __init__(self, name):
        self.name = name


class MusicLibrary:
    """In-memory catalog of songs and artists with a simple association API."""

    def __init__(self):
        # Flat lists; the lookup methods below scan them linearly.
        self.songs = []
        self.artists = []

    def add_song(self, title, duration):
        """Create a Song and register it in the library."""
        self.songs.append(Song(title, duration))

    def add_artist(self, name):
        """Create an Artist and register it in the library."""
        self.artists.append(Artist(name))

    def associate_song_with_artist(self, song_title, artist_name):
        """Attach the named artist to the first song titled song_title.

        Silently does nothing when either the song or the artist is absent.
        """
        song = next((s for s in self.songs if s.title == song_title), None)
        artist = next((a for a in self.artists if a.name == artist_name), None)
        if song is not None and artist is not None:
            song.artist = artist

    def get_songs_by_artist(self, artist_name):
        """Return the titles of all songs associated with artist_name."""
        return [
            s.title
            for s in self.songs
            if hasattr(s, 'artist') and s.artist.name == artist_name
        ]
# Example usage
library = MusicLibrary()
library.add_song("Song 1", "3:45")
library.add_song("Song 2", "4:20")
library.add_artist("Artist 1")
library.associate_song_with_artist("Song 1", "Artist 1")
# Return value is not captured here; shown for illustration only.
library.get_songs_by_artist("Artist 1")  # Output: ['Song 1']
|
<filename>src/app/components/styles/procure/tender-item-info.ts
import { Component } from '@angular/core';
import { DescriptorBase } from '../../../model';
import * as moment from 'moment';
import { ProcureItemInfoComponent } from './procure-item-info';
const tooltips = require('./tooltips.json');
@Component({
selector: 'tender-item-info',
templateUrl: './tender-item-info.html',
})
export class TenderItemInfoComponent extends ProcureItemInfoComponent {
  // Whether the full awardee list is expanded (see sliced_awardees()).
  awardees_expanded = false;

  /** Publishing body of the tender, straight from the item record. */
  publisher() {
    return this.item['publisher'];
  }

  /** Hebrew display label for the tender type; undefined for unknown types. */
  tenderType() {
    const labels: {[kind: string]: string} = {
      office: 'מכרז משרדי',
      central: 'מכרז מרכזי',
      exemption: 'בקשת פטור ממכרז',
    };
    return labels[this.item['tender_type']];
  }

  /** "New!"/"Updated!" badge when started or updated within the last week. */
  alertText() {
    const cutoff = moment().subtract(7, 'days');
    const startDate = this.item['start_date'];
    if (startDate && moment(startDate).isAfter(cutoff)) {
      return 'חדש!';
    }
    const updateDate = this.item['last_update_date'];
    if (updateDate && moment(updateDate).isAfter(cutoff)) {
      return 'עודכן!';
    }
    return null;
  }

  /** Explicit update date, falling back to the record's modification stamp. */
  lastUpdateDate() {
    const explicit = this.item['last_update_date'];
    if (explicit) {
      return this.format_date(explicit);
    }
    const modified = this.item['__last_modified_at'];
    if (modified) {
      return this.format_date(modified);
    }
  }

  /** Central tenders use the page title; all others use the description. */
  itemTitle() {
    return this.item['tender_kind'] === 'central'
      ? this.item['page_title']
      : this.item['description'];
  }

  totalAmount() {
    return this.item['contract_volume'] || this.item['volume'];
  }

  totalPaid() {
    return this.item['contract_executed'];
  }

  actionables_aux() {
    return this.item['actionable_tips'];
  }

  /** First three awardees unless the list has been expanded by the user. */
  sliced_awardees() {
    const awardees = this.item['awardees'];
    const collapsed = awardees && awardees.length > 3 && !this.awardees_expanded;
    return collapsed ? awardees.slice(0, 3) : awardees;
  }

  /** Opens an attachment's link in a new browser tab. */
  open_document(attachment: any) {
    window.open(attachment.link, '_blank');
  }
}
|
<gh_stars>1-10
const Chillastic = require('./index'); // Replace with 'require('chillastic')' if you're outside of this repo
// Connection settings: Redis backing store plus the port the Chillastic
// HTTP API listens on.
const REDIS_HOST = 'localhost';
const REDIS_PORT = 6379;
const CHILL_PORT = 8080;
const chillastic = Chillastic(REDIS_HOST, REDIS_PORT, CHILL_PORT);
// Start it up!
chillastic.run();
|
def fibonacci(n):
    """Return the n-th Fibonacci number (fibonacci(0) == 0, fibonacci(1) == 1).

    Iterative implementation: O(n) time, O(1) space, unlike the naive
    double-recursive original which was exponential in n.

    Raises:
        ValueError: if n is negative (the recursive original never terminated).
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    current, following = 0, 1
    for _ in range(n):
        current, following = following, current + following
    return current

# BUG FIX: the original line was `print(fibonacci(6)) // 8` — a C-style
# comment in Python, which floor-divides print()'s None return value by 8
# and raises TypeError at runtime.
print(fibonacci(6))  # 8
|
#!/bin/sh
# CocoaPods-generated "Embed Frameworks" build phase script (prologue).
# NOTE(review): `set -o pipefail`, the `function` keyword and the ERR trap are
# bashisms despite the /bin/sh shebang — works where sh is bash, confirm on
# stricter shells.
set -e
set -u
set -o pipefail
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR
if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0
# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: framework path, either absolute or relative to BUILT_PRODUCTS_DIR.
# Resolves the source, rsyncs it into the app's Frameworks folder, strips
# architectures the current build does not target, re-signs, and (pre-Xcode 7)
# embeds the Swift runtime dylibs the binary links against.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # The copied item may be a bare dylib rather than a .framework bundle.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  elif [ -L "${binary}" ]; then
    echo "Destination binary is symlinked..."
    dirname="$(dirname "${binary}")"
    binary="${dirname}/$(readlink "${binary}")"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies and strips a vendored dSYM
# $1: path to a .framework.dSYM bundle. Copies it to the derived-files dir,
# strips non-target architectures, then moves it to DWARF_DSYM_FOLDER_PATH
# (or touches a placeholder so Xcode's input/output tracking stays satisfied).
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    # Copy the dSYM into a the targets temp dir.
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
    local basename
    basename="$(basename -s .framework.dSYM "$source")"
    binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
    # Strip invalid architectures so "fat" simulator / device frameworks work on device
    if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
      strip_invalid_archs "$binary"
    fi
    # STRIP_BINARY_RETVAL is set by strip_invalid_archs: 1 = binary usable.
    if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
      # Move the stripped file into its final destination.
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
    else
      # The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
      touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
    fi
  fi
}
# Copies the bcsymbolmap files of a vendored framework
# $1: path to a .bcsymbolmap file; copied into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
    local bcsymbolmap_path="$1"
    local destination="${BUILT_PRODUCTS_DIR}"
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# $1: path to the bundle/binary to sign. No-op unless code signing is both
# required and allowed by the current build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
    # Background the codesign invocation when parallel signing is enabled;
    # the script tail `wait`s for all of them.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# $1: path to a (possibly fat) Mach-O binary. Removes slices not present in
# the current $ARCHS. Communicates via the global STRIP_BINARY_RETVAL:
# 1 = binary still contains at least one wanted arch, 0 = no overlap at all.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current target binary
  binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
  # Intersect them with the architectures we are building for
  intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
  # If there are no archs supported by this binary then warn the user
  if [[ -z "$intersected_archs" ]]; then
    echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
    STRIP_BINARY_RETVAL=0
    return
  fi
  stripped=""
  for arch in $binary_archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary"
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
  STRIP_BINARY_RETVAL=1
}
# Install the pod frameworks for the active configuration, then wait for any
# backgrounded codesign jobs started by code_sign_if_enabled.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/KVKCalendar/KVKCalendar.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/KVKCalendar/KVKCalendar.framework"
fi
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
#!/bin/bash
# Builds liblua5.1.a for the arch/platform/build-type that dub passes in via
# the DUB_* environment variables, caching the result under c/build/.
OLD_PWD=$PWD
PACKAGE_DIR=$1
ARCH=$DUB_ARCH
PLATFORM=$DUB_PLATFORM
BUILD_TYPE=$DUB_BUILD_TYPE
# BUG FIX: the original read `[ "$PLATFORM"="posix" ]`, which is a single
# non-empty word and therefore always true — PLATFORM was unconditionally
# forced to "linux". The comparison needs spaces around `=`.
if [ "$PLATFORM" = "posix" ]; then
    PLATFORM=linux
fi
DEST_DIR=$PACKAGE_DIR/c/build/$ARCH-$BUILD_TYPE
if [ ! -d "$DEST_DIR" ]; then
    mkdir -p "$DEST_DIR"
fi
# Skip the build when a cached archive exists and no rebuild was forced.
if [ -f "$DEST_DIR/liblua5.1.a" ] && [ -z "$DUB_FORCE" ]; then
    exit
fi
cd "$PACKAGE_DIR/c/lua5.1.5"
# The Lua makefile expects the word size ("32"/"64") rather than the arch name.
if [ "$ARCH" = "x86_64" ]; then
    ARCH_LUA="64"
elif [ "$ARCH" = "x86" ]; then
    ARCH_LUA="32"
else
    ARCH_LUA=$ARCH
fi
MYCFLAGS=-fPIC
# BUG FIX: MYCFLAGS was assigned but never passed to make, so -fPIC was
# silently dropped (the static lib could then fail to link into shared objects).
make ARCH=$ARCH_LUA PLATFORM=$PLATFORM MYCFLAGS=$MYCFLAGS
mv "$PACKAGE_DIR/c/lua5.1.5/src/liblua5.1.a" "$DEST_DIR"
make clean
cd "$OLD_PWD"
|
<filename>analytics-api/app/controllers/ExperimentController.scala
package controllers
import akka.actor.{ActorRef, ActorSystem}
import akka.pattern.ask
import javax.inject.{Inject, Named}
import org.ekstep.analytics.api._
import org.ekstep.analytics.api.service.ExperimentAPIService.{CreateExperimentRequest, _}
import org.ekstep.analytics.api.util.JSONUtils
import play.api.Configuration
import play.api.libs.json.Json
import play.api.mvc.{Request, _}
import scala.concurrent.ExecutionContext
/**
 * Play controller exposing the experiment CRUD endpoints. Requests are
 * forwarded to the experiment actor and its replies are serialized to JSON.
 */
class ExperimentController @Inject() (
    @Named("experiment-actor") experimentActor: ActorRef,
    system: ActorSystem,
    configuration: Configuration,
    cc: ControllerComponents
) (implicit ec: ExecutionContext)
    extends BaseController(cc, configuration) {

    /** POST: create an experiment from the raw JSON request body. */
    def createExperiment() = Action.async { request: Request[AnyContent] =>
        val payload: String = Json.stringify(request.body.asJson.get)
        val reply = ask(experimentActor, CreateExperimentRequest(payload, config)).mapTo[ExperimentBodyResponse]
        reply.map(response => result(response.responseCode, JSONUtils.serialize(response)))
    }

    /** GET: fetch a single experiment by its identifier. */
    def getExperiment(experimentId: String) = Action.async { request: Request[AnyContent] =>
        val reply = ask(experimentActor, GetExperimentRequest(experimentId, config)).mapTo[Response]
        reply.map(response => result(response.responseCode, JSONUtils.serialize(response)))
    }
}
|
# Copyright 2019 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# Evaluation driver for a quantized YOLOv4 Caffe model on COCO val2014.
# NOTE(review): this shebang is not on line 1 of the file (a license header
# precedes it), so it is inert — the script must be invoked with bash explicitly.
caffe_xilinx_dir='../../../caffe-xilinx/'
model_type='yolov4'
threshold='0.001'                # detection confidence threshold for eval
model_file='../../quantized/quantize_test.prototxt'
model_weights='../../quantized/quantize_train_test.caffemodel'
image_root='../../data/coco2014/val2014'
list_file='../../data/coco/5k.txt'
result_file='../../data/result.json'
# Candidate locations of the detect binary: local build tree vs. docker image.
yolov4_detect_path="/build/examples/yolo/yolov4_detect.bin"
yolov4_detect_path_docker="/bin/yolov4_detect"
caffe_xilinx_dir_docker="/opt/vitis_ai/conda/envs/vitis-ai-caffe/"
# Resolve the path of a caffe tool.
# $1: tool name; looks up the globals "<name>_path" / "<name>_path_docker"
# via indirect expansion and prefers the local build, falling back to the
# path baked into the pre-built docker image. Echoes the chosen path.
caffe_path() {
    exec_name=$1
    exec_path=$caffe_xilinx_dir$(eval echo '$'"${exec_name}_path")
    if [ ! -f "$exec_path" ]; then
        echo >&2 "$exec_path does not exist, try use path in pre-build docker"
        exec_path=$caffe_xilinx_dir_docker$(eval echo '$'"${exec_name}_path_docker")
    fi
    echo "$exec_path"
}
# Run a caffe tool by name.
# $1: tool name (resolved via caffe_path); remaining args are passed through.
caffe_exec() {
    exec_path=$(caffe_path "$1")
    shift
    $exec_path "$@"
}
# Test images
# Runs detection over the image list in eval mode, writes COCO-format JSON
# results, then scores them with the COCO evaluation script.
caffe_exec yolov4_detect $model_file $model_weights $image_root $list_file \
    -mode eval \
    -confidence_threshold $threshold \
    -out_file $result_file \
    -model_type $model_type
python eval_coco.py
|
#!/usr/bin/env bash
#
# SLURM batch script: runs reddit_cluster.py once per array task (1-50),
# 8 CPUs / 5 GB each, 2 h wall time.
#SBATCH --job-name=reddit_umap
#SBATCH --array=1-50
#SBATCH --time=02:00:00
#SBATCH -N 1
#SBATCH -c 8
#SBATCH --mem=5GB
#SBATCH -o ./../../datum/reddit/output/slurm/slurm-%A_%a.out
echo "now processing task id:: " ${SLURM_ARRAY_TASK_ID}
python3 reddit_cluster.py --job_array_task_id=${SLURM_ARRAY_TASK_ID} --plot=True --toy=False --pre_or_post='pre'
echo 'Finished.'
# Scratchpad: interactive/debug invocation and GPU resource notes.
# python3 -i reddit_cluster.py --job_array_task_id=1 --toy=True --pre_or_post='pre'
# tesla-k20:2
# GEFORCEGTX1080TI:4
# --gres=gpu:1
# https://hpc-uit.readthedocs.io/en/latest/jobs/examples.html
|
<reponame>iyang0/DMI-frontend
/**
* Test sagas
*/
/* eslint-disable redux-saga/yield-effects */
import { put, takeLatest } from 'redux-saga/effects';
// global store actions and constants
import { LOAD_LIST } from 'containers/App/constants';
import { loadListSuccess, loadListError } from 'containers/App/actions';
import listData, { getList } from '../saga';
/* eslint-disable redux-saga/yield-effects */
describe('getList Saga', () => {
  let getListGenerator;
  // We have to test twice, once for a successful load and once for an unsuccessful one
  // so we do all the stuff that happens beforehand automatically in the beforeEach
  beforeEach(() => {
    // Step the generator past its initial effect and snapshot it.
    getListGenerator = getList();
    const selectDescriptor = getListGenerator.next().value;
    expect(selectDescriptor).toMatchSnapshot();
  });
  it('should dispatch the reposLoaded action if it requests the data successfully', () => {
    // Feeding the response into next() simulates a resolved API call.
    const response = ['abc', 'xyz'];
    const putDescriptor = getListGenerator.next(response).value;
    expect(putDescriptor).toEqual(put(loadListSuccess(response)));
  });
  it('should call the repoLoadingError action if the response errors', () => {
    // throw() simulates the API call rejecting inside the saga.
    const response = new Error('Some error');
    const putDescriptor = getListGenerator.throw(response).value;
    expect(putDescriptor).toEqual(put(loadListError(response)));
  });
});
describe('listDataSaga Saga', () => {
  const listDataSaga = listData();
  it('should start task to watch for LOAD_LIST action', () => {
    // The root saga's first yield must register the LOAD_LIST watcher.
    const takeLatestDescriptor = listDataSaga.next().value;
    expect(takeLatestDescriptor).toEqual(takeLatest(LOAD_LIST, getList));
  });
});
|
<filename>Vue/vue-source-code/webpack.config.js
var HtmlWebpackPlugin = require('html-webpack-plugin');
var path = require('path');
var FlowWebpackPlugin = require('flow-webpack-plugin');
module.exports = {
mode: 'development',
entry: './src/index.js',
output: {
path: path.resolve(__dirname, './dist'),
filename: 'index_bundle.js'
},
plugins: [
new HtmlWebpackPlugin({
title:'vue-copy',
favicon:'./public/favicon.ico',
template: './public/index.html'
}),
//new FlowWebpackPlugin({
// flowArgs: ['check']
//})
]
}
|
<gh_stars>0
package models.SkillContainer;
import models.Skill.Skill;
import java.util.ArrayList;
/**
* Created by Matthew on 4/13/2016.
*/
// NOTE(review): the class name carries a typo ("Contianer"); kept as-is because
// renaming would break external references.
public class ActiveSkillContianer extends SkillContainer {
    /**
     * Seeds the active-skill list with a default loadout: the basic skills at
     * indices 1 and 2 plus the first two occupation skills.
     * NOTE(review): basic skills start at index 1 (index 0 is skipped) while
     * occupation skills start at index 0 — confirm this asymmetry is intentional.
     */
    public ActiveSkillContianer(SkillContainer basicSkillContianer, SkillContainer occupationSkillContainer){
        ArrayList<Skill> basicSkillContainerListOfSkills = basicSkillContianer.getListOfSkills();
        ArrayList<Skill> occupationSkillContainerListOfSkills = occupationSkillContainer.getListOfSkills();
        setSkillActive(basicSkillContainerListOfSkills.get(1));
        setSkillActive(basicSkillContainerListOfSkills.get(2));
        setSkillActive(occupationSkillContainerListOfSkills.get(0));
        setSkillActive(occupationSkillContainerListOfSkills.get(1));
    }
    /** Adds the skill to the active list (inherited {@code listOfSkills}). */
    public void setSkillActive(Skill skill) {
        listOfSkills.add(skill);
    }
    /** Removes the skill from the active list. */
    public void setSkillDeactivated(Skill skill) {
        listOfSkills.remove(skill);
    }
}
|
import { useEffect, useState } from 'react';
import {getTreesManifest} from '@utils/getTreesManifest'
import { ALL, SN, WAITING, WORKING } from '@common/constants';
/**
 * Hook that fetches and validates the Study Notes (SN) repository tree for
 * the given owner/language. Refetches whenever `refresh` targets SN (or ALL)
 * and all connection parameters are present.
 */
export default function useSnRepoValidation({authentication, owner, server, languageId, refresh}) {
  const [state, setValues] = useState({
    snRepoTree: null,
    snRepoTreeManifest: null,
    snManifestSha: null,
    snRepoTreeStatus: WAITING,
  })
  const {snRepoTree, snRepoTreeManifest, snManifestSha, snRepoTreeStatus} = state

  // Translation Notes Hook
  // Example: https://qa.door43.org/api/v1/repos/vi_gl/vi_sn/git/trees/master?recursive=true&per_page=99999
  useEffect(() => {
    const fetchRepoTree = async () => {
      // Mark the fetch as in-flight before hitting the API.
      setValues({snRepoTree: null, snRepoTreeManifest: null, snRepoTreeStatus: WORKING})
      const url = `${server}/api/v1/repos/${owner}/${languageId}_sn/git/trees/master?recursive=false&per_page=999999`
      const {RepoTree, Manifest, ManifestSha, RepoTreeStatus} = await getTreesManifest(authentication, url)
      setValues({snRepoTree: RepoTree, snRepoTreeManifest: Manifest, snManifestSha: ManifestSha, snRepoTreeStatus: RepoTreeStatus})
    }
    const ready = authentication && owner && server && languageId
    if (ready && (refresh === SN || refresh === ALL)) {
      fetchRepoTree()
    }
  }, [authentication, owner, server, languageId, refresh])

  return {
    state: {
      snRepoTree,
      snRepoTreeManifest,
      snManifestSha,
      snRepoTreeStatus,
    },
  }
}
|
/**
 * Converts a flat array into a nested (tree) structure.
 * NOTE(review): parameter/return types are untyped (`any`) in this
 * declaration — presumably flatArr is an array of records with parent/id
 * links and `options` configures the key names; confirm against the
 * implementation before tightening.
 */
export function nested(flatArr: any, options?: {}): any[];
|
package com.metaring.springbootappexample.service.auth;
import java.util.concurrent.CompletableFuture;
/**
 * Concrete implementation of the identification-verification functionality.
 * NOTE(review): `end` / `end(...)` are presumably completion helpers inherited
 * from the MetaRing functionality base class — confirm against the framework.
 */
public class VerifyIdentificationFunctionalityImpl extends VerifyIdentificationFunctionality {
    /** Pre-condition hook: no checks are performed for this functionality. */
    @Override
    protected CompletableFuture<Void> preConditionCheck(IdentificationDataModel input) throws Exception {
        return end;
    }
    /** Core call: always reports the identification as verified (returns true). */
    @Override
    protected CompletableFuture<Boolean> call(IdentificationDataModel input) throws Exception {
        return end(true);
    }
    /** Post-condition hook: no checks are performed for this functionality. */
    @Override
    protected CompletableFuture<Void> postConditionCheck(IdentificationDataModel input, Boolean output)
            throws Exception {
        return end;
    }
}
|
#!/bin/sh
# CYBERWATCH SAS - 2017
#
# Security fix for DSA-2740-1
#
# Security announcement date: 2013-08-23 00:00:00 UTC
# Script generation date: 2017-01-01 21:06:38 UTC
#
# Operating System: Debian 7 (Wheezy)
# Architecture: x86_64
#
# Vulnerable packages fix on version:
# - python-django:1.4.5-1+deb7u1
#
# Last versions recommanded by security team:
# - python-django:1.4.22-1+deb7u2
#
# CVE List:
# - CVE-2013-6044
#
# More details:
# - https://www.cyberwatch.fr/vulnerabilites
#
# Licence: Released under The MIT License (MIT), See LICENSE FILE
# Upgrade (never downgrade) python-django to the patched version.
sudo apt-get install --only-upgrade python-django=1.4.22-1+deb7u2 -y
|
#!/bin/bash
# compress with maximum settings
# see https://superuser.com/questions/281573/what-are-the-best-options-to-use-when-compressing-files-using-7-zip
# for the all dataset, include also the metadata of the filtered ones
# Flags: -m0=lzma codec, -mx=9 ultra, -mfb=64 fast bytes, -md=32m dict, -ms=on solid.
7z a -t7z -m0=lzma -mx=9 -mfb=64 -md=32m -ms=on "all-binaries-metadata.7z" "all/" "all.pretty.json" "filtered.pretty.json" "filtered.list.txt"
7z a -t7z -m0=lzma -mx=9 -mfb=64 -md=32m -ms=on "filtered-binaries-metadata.7z" "filtered/" "filtered.pretty.json" "filtered.list.txt"
|
#!/bin/bash
# Copyright 2019 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
set -ex
# This script makes use of the following environment variables defined
# in the Dockerfile:
# - ARCHES
# - DISTRIBUTION
main() {
  # NOTE(review): ARCHES comes from the environment, so it is a whitespace-
  # separated string, not a bash array; the unquoted ${ARCHES[@]} expansion
  # deliberately word-splits it into individual arch names.
  for arch in ${ARCHES[@]}; do
    # Only create the chroot once.
    if [ ! -d "/var/cache/pbuilder/base-${DISTRIBUTION}-${arch}.cow" ]; then
      # Create the pbuilder chroot.
      DIST="${DISTRIBUTION}" ARCH="${arch}" git-pbuilder create \
        --mirror http://deb.debian.org/debian \
        --debootstrapopts \
        --keyring="/usr/share/keyrings/debian-archive-keyring.gpg" \
        --keyring="${HOME}/llvm-keyring.gpg"
    fi
  done
}
main "$@"
|
import React, { useEffect } from 'react';
import {
View,
Text,
StyleSheet
} from 'react-native';
import Geolocation from '@react-native-community/geolocation';
import axios from 'axios';
const Restaurants = () => {
const [location, setLocation] = useState(null);
const [restaurants, setRestaurants] = useState([]);
useEffect(() => {
Geolocation.getCurrentPosition(info => {
setLocation(info.coords);
});
}, []);
useEffect(() => {
if (location) {
axios
.get(
`https://restaurants-api.com/location?lat=${
location.latitude
}&lon=${location.longitude}`
)
.then(response => {
setRestaurants(response.data.restaurants);
});
}
}, [location]);
return (
<View style={styles.container}>
<Text>Nearby Restaurants:</Text>
{restaurants.map(restaurant => (
<Text key={restaurant.id}>{restaurant.name}</Text>
))}
</View>
);
};
const styles = StyleSheet.create({
container: {
flex: 1,
alignItems: 'center',
justifyContent: 'center'
}
});
export default Restaurants;
|
import React from 'react';
import ReactDOM from 'react-dom'
import Dlzc from './module/dlzc/Dlzc'
// Entry point: mount the Dlzc (login/registration) component into its wrapper node.
ReactDOM.render(
  <Dlzc/>,
  document.getElementById('dlzc_wrapper')
);
|
/*
* (C) Copyright 2016-2018, by <NAME> and Contributors.
*
* JGraphT : a free Java graph-theory library
*
* This program and the accompanying materials are dual-licensed under
* either
*
* (a) the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation, or (at your option) any
* later version.
*
* or (per the licensee's choosing)
*
* (b) the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation.
*/
package org.jgrapht.alg.flow;
import java.util.*;
import java.util.stream.*;
import org.jgrapht.*;
import org.jgrapht.alg.connectivity.ConnectivityInspector;
import org.jgrapht.alg.util.IntegerVertexFactory;
import org.jgrapht.generate.*;
import org.jgrapht.graph.*;
import junit.framework.*;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
* Test class for the PadbergRaoOddMinimumCutset implementation
*
* @author <NAME>
*/
public class PadbergRaoOddMinimumCutsetTest
{
    /**
     * Runs the Padberg-Rao algorithm on the given network and verifies the
     * returned odd minimum cut-set three ways: (1) the source/sink partitions
     * form a valid partition of the vertex set and are both odd w.r.t.
     * {@code oddVertices}; (2) the reported cut edges and cut value match the
     * partition; (3) the cut value is minimal, checked by brute force against
     * every cut induced by a Gomory-Hu tree edge.
     */
    private void runTest(
        SimpleWeightedGraph<Integer, DefaultWeightedEdge> network, Set<Integer> oddVertices,
        boolean useTreeCompression)
    {
        PadbergRaoOddMinimumCutset<Integer, DefaultWeightedEdge> padbergRaoOddMinimumCutset =
            new PadbergRaoOddMinimumCutset<>(network);
        double cutValue =
            padbergRaoOddMinimumCutset.calculateMinCut(oddVertices, useTreeCompression);
        Set<Integer> sourcePartition = padbergRaoOddMinimumCutset.getSourcePartition();
        Set<Integer> sinkPartition = padbergRaoOddMinimumCutset.getSinkPartition();
        Set<DefaultWeightedEdge> cutEdges = padbergRaoOddMinimumCutset.getCutEdges();
        // Partitions must be disjoint and together cover every vertex.
        Set<Integer> intersection = new HashSet<>(sourcePartition);
        intersection.retainAll(sinkPartition);
        assertTrue(intersection.isEmpty());
        Set<Integer> union = new HashSet<>(sourcePartition);
        union.addAll(sinkPartition);
        assertEquals(network.vertexSet(), union);
        assertTrue(PadbergRaoOddMinimumCutset.isOddVertexSet(sourcePartition, oddVertices));
        assertTrue(PadbergRaoOddMinimumCutset.isOddVertexSet(sinkPartition, oddVertices));
        // The reported cut edges must be exactly those crossing the partition
        // (XOR: precisely one endpoint inside the source partition).
        Set<DefaultWeightedEdge> expectedCutEdges = network
            .edgeSet().stream()
            .filter(
                e -> sourcePartition.contains(network.getEdgeSource(e))
                    ^ sourcePartition.contains(network.getEdgeTarget(e)))
            .collect(Collectors.toSet());
        assertEquals(expectedCutEdges, cutEdges);
        double expectedWeight = cutEdges.stream().mapToDouble(network::getEdgeWeight).sum();
        assertEquals(expectedWeight, cutValue,0);
        // Verify whether the returned odd cut-set is indeed of minimum weight. To verify this, we
        // exhaustively iterate over all possible cutsets.
        GusfieldGomoryHuCutTree<Integer, DefaultWeightedEdge> gusfieldGomoryHuCutTreeAlgorithm =
            new GusfieldGomoryHuCutTree<>(network);
        SimpleWeightedGraph<Integer, DefaultWeightedEdge> gomoryHuCutTree =
            gusfieldGomoryHuCutTreeAlgorithm.getGomoryHuTree();
        Set<DefaultWeightedEdge> edges = new LinkedHashSet<>(gomoryHuCutTree.edgeSet());
        boolean foundBest = false; // Just to make sure that our brute-force approach is exhaustive
        for (DefaultWeightedEdge edge : edges) {
            Integer source = gomoryHuCutTree.getEdgeSource(edge);
            Integer target = gomoryHuCutTree.getEdgeTarget(edge);
            double edgeWeight = gomoryHuCutTree.getEdgeWeight(edge);
            gomoryHuCutTree.removeEdge(edge); // Temporarily remove edge
            Set<Integer> partition =
                new ConnectivityInspector<>(gomoryHuCutTree).connectedSetOf(source);
            // If removing this tree edge induces an odd cut-set, the algorithm's
            // cut value must be at least as good as this candidate's weight.
            if (PadbergRaoOddMinimumCutset.isOddVertexSet(partition, oddVertices)) {
                assertTrue(cutValue <= edgeWeight);
                foundBest |= cutValue == edgeWeight;
            }
            gomoryHuCutTree.addEdge(source, target, edge); // Place edge back
        }
        assertTrue(foundBest);
    }
@Test
public void testIsOddSetMethod()
{
Set<Integer> vertices = new HashSet<>(Arrays.asList(1, 2, 3, 4, 5, 6));
Set<Integer> oddVertices1 = new HashSet<>(Arrays.asList(1, 2, 3, 7));
Set<Integer> oddVertices2 = new HashSet<>(Arrays.asList(1, 2, 3, 4));
assertTrue(PadbergRaoOddMinimumCutset.isOddVertexSet(vertices, oddVertices1));
assertFalse(PadbergRaoOddMinimumCutset.isOddVertexSet(vertices, oddVertices2));
}
/**
* Test the example graph from the paper Odd Minimum Cut-Sets and b-Matchings by Padberg and Rao
*/
@Test
public void testExampleGraph()
{
SimpleWeightedGraph<Integer, DefaultWeightedEdge> network =
new SimpleWeightedGraph<>(DefaultWeightedEdge.class);
Graphs.addAllVertices(network, Arrays.asList(1, 2, 3, 4, 5, 6));
Graphs.addEdge(network, 1, 2, 10);
Graphs.addEdge(network, 1, 6, 8);
Graphs.addEdge(network, 2, 6, 3);
Graphs.addEdge(network, 2, 3, 4);
Graphs.addEdge(network, 2, 5, 2);
Graphs.addEdge(network, 6, 3, 2);
Graphs.addEdge(network, 6, 4, 2);
Graphs.addEdge(network, 6, 5, 3);
Graphs.addEdge(network, 5, 3, 4);
Graphs.addEdge(network, 5, 4, 7);
Graphs.addEdge(network, 3, 4, 5);
Set<Integer> oddVertices = new HashSet<>(Arrays.asList(2, 3, 5, 6));
this.runTest(network, oddVertices, true);
this.runTest(network, oddVertices, false);
}
/**
* Test disconnected graph
*/
@Test
public void testDisconnectedGraph()
{
SimpleWeightedGraph<Integer, DefaultWeightedEdge> network =
new SimpleWeightedGraph<>(DefaultWeightedEdge.class);
Graphs.addAllVertices(network, Arrays.asList(0, 1, 2, 3, 4));
Graphs.addEdge(network, 0, 1, 3);
Graphs.addEdge(network, 1, 2, 4);
Graphs.addEdge(network, 0, 2, 7);
Graphs.addEdge(network, 3, 4, 9);
Set<Integer> oddVertices = new HashSet<>(Arrays.asList(0, 1, 2, 4));
this.runTest(network, oddVertices, true);
this.runTest(network, oddVertices, false);
}
/**
* Another graph to test
*/
@Test
public void testGraph()
{
SimpleWeightedGraph<Integer, DefaultWeightedEdge> network =
new SimpleWeightedGraph<Integer, DefaultWeightedEdge>(DefaultWeightedEdge.class);
network.addVertex(7);
network.addVertex(10);
network.addVertex(12);
network.addVertex(3);
network.addVertex(1);
network.addVertex(5);
network.addVertex(6);
Graphs.addEdge(network, 1, 12, 1.0);
Graphs.addEdge(network, 3, 5, 1.0);
Graphs.addEdge(network, 5, 6, 1.0);
Graphs.addEdge(network, 6, 12, 4.0);
Set<Integer> oddVertices = new LinkedHashSet<Integer>(Arrays.asList(7, 10, 12, 3));
this.runTest(network, oddVertices, true);
this.runTest(network, oddVertices, false);
}
    /**
     * Test random graphs.
     * <p>
     * NOTE: the RNG is deterministically seeded, and every {@code rand} call below is part of
     * that stream — the exact order and number of {@code rand} invocations must not change,
     * or the generated instances (and thus test coverage) silently shift.
     */
    @Test
    public void testRandomGraphs()
    {
        Random rand = new Random(0);
        for (int i = 0; i < 8; i++) {
            SimpleWeightedGraph<Integer, DefaultWeightedEdge> randomGraph =
                new SimpleWeightedGraph<>(DefaultWeightedEdge.class);
            int vertices = rand.nextInt((30 - 10) + 1) + 10; // 10-30 vertices
            double p = 0.01 * (rand.nextInt((85 - 50) + 1) + 50); // p=[0.5;0.85]
            GnpRandomGraphGenerator<Integer, DefaultWeightedEdge> graphGen =
                new GnpRandomGraphGenerator<>(vertices, p);
            graphGen.generateGraph(randomGraph, new IntegerVertexFactory(0), null);
            // Assign random integer weights in [0,150) to every generated edge.
            for (DefaultWeightedEdge edge : randomGraph.edgeSet())
                randomGraph.setEdgeWeight(edge, rand.nextInt(150));
            for (int j = 0; j < 8; j++) {
                // Select a random subset of vertices of even cardinality. These will be the 'odd'
                // vertices.
                int max = vertices - 1;
                int min = 2;
                if (max % 2 == 1)
                    --max;
                // Even number between 2 and |V|-1.
                int nrOfOddVertices = min + 2 * (int) (rand.nextDouble() * ((max - min) / 2 + 1));
                Set<Integer> oddVertices = new LinkedHashSet<>(nrOfOddVertices);
                List<Integer> allVertices = new ArrayList<>(randomGraph.vertexSet());
                // Draw without replacement until the odd set has the chosen size.
                for (int k = 0; k < nrOfOddVertices; k++) {
                    oddVertices.add(allVertices.remove(rand.nextInt(allVertices.size())));
                }
                this.runTest(randomGraph, oddVertices, true);
                this.runTest(randomGraph, oddVertices, false);
            }
        }
    }
}
|
<filename>src/test/java/io/reactivex/observers/ObserverFusion.java
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.observers;
import io.reactivex.Observable;
import io.reactivex.functions.*;
import io.reactivex.internal.fuseable.*;
/**
 * Utility methods that return functional interfaces to support assertions regarding fusion
 * in a TestObserver.
 * <p>Don't move this class as it needs package-private access to TestObserver's internals.
 */
public enum ObserverFusion {
    ;
    /**
     * Returns a function that takes an Observable and returns a TestObserver that
     * is set up according to the parameters and is subscribed to the Observable.
     * <p>
     * Use this as follows:
     * <pre>
     * source
     * .to(ObserverFusion.test(QueueFuseable.ANY, false))
     * .assertResult(0);
     * </pre>
     * @param <T> the value type
     * @param mode the fusion mode to request, see {@link QueueDisposable} constants.
     * @param cancelled should the TestObserver be cancelled before the subscription even happens?
     * @return the new Function instance
     */
    public static <T> Function<Observable<T>, TestObserver<T>> test(
            final int mode, final boolean cancelled) {
        return new TestFunctionCallback<T>(mode, cancelled);
    }
    /**
     * Returns a Consumer that asserts on its TestObserver parameter that
     * the upstream is Fuseable (sent a QueueDisposable subclass in onSubscribe).
     * <p>
     * Use this as follows:
     * <pre>
     * source
     * .to(ObserverFusion.test(QueueFuseable.ANY, false))
     * .assertOf(ObserverFusion.assertFuseable());
     * </pre>
     * @param <T> the value type
     * @return the new Consumer instance
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public static <T> Consumer<TestObserver<T>> assertFuseable() {
        return (Consumer)AssertFuseable.INSTANCE;
    }
    /** Consumer that delegates to {@link TestObserver#assertFusionMode(int)}. */
    static final class AssertFusionConsumer<T> implements Consumer<TestObserver<T>> {
        private final int mode;
        AssertFusionConsumer(int mode) {
            this.mode = mode;
        }
        @Override
        public void accept(TestObserver<T> to) throws Exception {
            to.assertFusionMode(mode);
        }
    }
    /** Function that builds, optionally cancels, and subscribes a fusion-requesting TestObserver. */
    static final class TestFunctionCallback<T> implements Function<Observable<T>, TestObserver<T>> {
        private final int mode;
        private final boolean cancelled;
        TestFunctionCallback(int mode, boolean cancelled) {
            this.mode = mode;
            this.cancelled = cancelled;
        }
        @Override
        public TestObserver<T> apply(Observable<T> t) throws Exception {
            TestObserver<T> to = new TestObserver<T>();
            to.setInitialFusionMode(mode);
            // Cancelling before subscribing exercises the "already disposed" path.
            if (cancelled) {
                to.cancel();
            }
            t.subscribe(to);
            return to;
        }
    }
    /** Singleton Consumer asserting that the upstream was fuseable. */
    enum AssertFuseable implements Consumer<TestObserver<Object>> {
        INSTANCE;
        @Override
        public void accept(TestObserver<Object> to) throws Exception {
            to.assertFuseable();
        }
    }
    /**
     * Returns a Consumer that asserts on its TestObserver parameter that
     * the upstream is not Fuseable (didn't send a QueueDisposable subclass in onSubscribe).
     * <p>
     * Use this as follows:
     * <pre>
     * source
     * .to(ObserverFusion.test(QueueFuseable.ANY, false))
     * .assertOf(ObserverFusion.assertNotFuseable());
     * </pre>
     * @param <T> the value type
     * @return the new Consumer instance
     */
    @SuppressWarnings({ "unchecked", "rawtypes" })
    public static <T> Consumer<TestObserver<T>> assertNotFuseable() {
        return (Consumer)AssertNotFuseable.INSTANCE;
    }
    /** Singleton Consumer asserting that the upstream was not fuseable. */
    enum AssertNotFuseable implements Consumer<TestObserver<Object>> {
        INSTANCE;
        @Override
        public void accept(TestObserver<Object> to) throws Exception {
            to.assertNotFuseable();
        }
    }
    /**
     * Returns a Consumer that asserts on its TestObserver parameter that
     * the upstream is Fuseable (sent a QueueDisposable subclass in onSubscribe)
     * and established the given fusion mode.
     * <p>
     * Use this as follows:
     * <pre>
     * source
     * .to(ObserverFusion.test(QueueFuseable.ANY, false))
     * .assertOf(ObserverFusion.assertFusionMode(QueueFuseable.SYNC));
     * </pre>
     * @param <T> the value type
     * @param mode the expected established fusion mode, see {@link QueueFuseable} constants.
     * @return the new Consumer instance
     */
    public static <T> Consumer<TestObserver<T>> assertFusionMode(final int mode) {
        return new AssertFusionConsumer<T>(mode);
    }
    /**
     * Constructs a TestObserver with the given required fusion mode.
     * @param <T> the value type
     * @param mode the requested fusion mode, see {@link QueueFuseable} constants
     * @return the new TestObserver
     */
    public static <T> TestObserver<T> newTest(int mode) {
        TestObserver<T> to = new TestObserver<T>();
        to.setInitialFusionMode(mode);
        return to;
    }
    /**
     * Asserts that the TestObserver received a fuseable QueueDisposable and
     * is in the given fusion mode.
     * @param <T> the value type
     * @param to the TestObserver instance
     * @param mode the expected mode
     * @return the TestObserver
     */
    public static <T> TestObserver<T> assertFusion(TestObserver<T> to, int mode) {
        return to.assertOf(ObserverFusion.<T>assertFuseable())
        .assertOf(ObserverFusion.<T>assertFusionMode(mode));
    }
}
|
<filename>src/foam/lib/query/FooEnum.js
// Two-value enum (FOO, BAR). Given its placeholder name and package, this is
// presumably a fixture for the foam.lib.query parser tests — confirm usage.
foam.ENUM({
  package: 'foam.lib.query',
  name: 'FooEnum',
  values: [
    'FOO',
    'BAR'
  ]
});
|
package io.github.batizhao.service;
import com.baomidou.mybatisplus.extension.service.IService;
import io.github.batizhao.domain.RoleMenu;
import java.util.List;
/**
 * Service for role-menu relations (角色菜单), built on MyBatis-Plus {@link IService}.
 *
 * @author batizhao
 * @since 2020-09-14
 **/
public interface RoleMenuService extends IService<RoleMenu> {

    /**
     * 更新角色菜单 — replace the menus assigned to a role.
     *
     * @param roleMenuList the role-menu relations to apply
     * @return whether the update succeeded (NOTE(review): exact semantics depend
     *         on the implementation, which is not visible here — confirm)
     */
    Boolean updateRoleMenus(List<RoleMenu> roleMenuList);
}
|
import tempfile
import logging

# Configure root logging once at import time.
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)


class TemporaryFile:
    """Bundles caller metadata with a freshly created scratch directory.

    Attributes:
        params: arbitrary parameters supplied by the caller, stored untouched.
        name: caller-chosen identifier.
        directory: path of a unique directory created at construction time
            (prefix 'dragons.'); nothing in this class ever removes it.
    """

    def __init__(self, params, name):
        self.params, self.name = params, name
        # Each instance gets its own brand-new, uniquely named directory.
        self.directory = tempfile.mkdtemp(prefix='dragons.')

    def prepare(self):
        """Pre-processing hook; currently only emits a debug trace."""
        log.debug("ETIFile prepare()")

    def recover(self):
        """Recovery hook; currently only emits a debug trace."""
        log.debug("ETIFile recover()")
|
#!/bin/bash
# Rebuild the BLIS checkout in the user's home directory from scratch.
cd ~/blis
# Drop previous build artifacts, then compile with the default target.
make clean
make
echo "MAKE DONE"
|
import { ThemingProps, HTMLChakraProps } from "@chakra-ui/system";
/**
 * Props for the Heading component: all chakra-enhanced `<h2>` props combined
 * with the theming props keyed by the "Heading" theme entry.
 */
export interface HeadingProps extends HTMLChakraProps<"h2">, ThemingProps<"Heading"> {
}
/**
 * Heading — a chakra component whose default rendered element is `<h2>`.
 */
export declare const Heading: import("@chakra-ui/system").ComponentWithAs<"h2", HeadingProps>;
//# sourceMappingURL=heading.d.ts.map
|
#!/bin/bash
# Load the shared container entry point (presumably sets LI3DS_ROOT_PATH used
# below — confirm against /root/entry-point.sh).
source /root/entry-point.sh
# From the li3ds tree, create the catkin overlay workspace and build it.
pushd ${LI3DS_ROOT_PATH}
./scripts/create_overlay_ws.sh
./scripts/get_and_build_with_catkin.sh
popd
|
#include <iostream>

// Prints the first 10 Fibonacci numbers: 0 1 1 2 3 5 8 13 21 34.
//
// Bug fixed: the original printed only prev_num (0) before the loop and then
// nine successive sums, yielding "0 1 2 3 5 8 13 21 34 55" — the second '1'
// of the sequence was skipped. We now print both seed terms and generate the
// remaining eight.
int main()
{
    int prev_num = 0; // F(0)
    int cur_num = 1;  // F(1)

    std::cout << prev_num << " " << cur_num << " ";
    for (int i = 0; i < 8; i++)
    {
        const int next_num = prev_num + cur_num; // F(k+2) = F(k) + F(k+1)
        std::cout << next_num << " ";
        prev_num = cur_num;
        cur_num = next_num;
    }
    std::cout << std::endl;
    return 0;
}
|
import nltk
from nltk.stem.wordnet import WordNetLemmatizer
from sklearn.feature_extraction.text import TfidfVectorizer

#List of questions and answers
qa_pairs = {
    "What is a function in Python?": "In Python, a function is a block of code that can be reused multiple times with different input parameters to perform a certain task.",
    "What is a variable?": "A variable is a name for a location in memory where a value can be stored",
    "What is a loop?": "A loop is a programming construct that allows a set of instructions to be repeated or iterated until a certain condition is met"
}

#Create a lemmatizer
lemmatizer = WordNetLemmatizer()

def _tokenize(doc):
    """Tokenize a raw string and lemmatize each token with the verb POS."""
    return [lemmatizer.lemmatize(token, pos='v') for token in nltk.word_tokenize(doc)]

#Create and train a tf-idf vectorizer. The same tokenizer is used at fit and
#query time (the original fit on raw strings but queried with token lists,
#making the two vocabularies inconsistent).
vectorizer = TfidfVectorizer(tokenizer=_tokenize, lowercase=False)

#Indexable list of the questions (dict views are not subscriptable in Python 3).
questions = list(qa_pairs.keys())

#Create a matrix of tokens
X = vectorizer.fit_transform(questions)

#Create a function to answer questions
def answer_question(question):
    """Return the stored answer whose question best matches `question`.

    Similarity is the dot product of tf-idf vectors; TfidfVectorizer
    L2-normalizes rows by default, so this is cosine similarity.
    """
    #Transform the raw question with the shared vectorizer/tokenizer
    query_vector = vectorizer.transform([question])
    #Similarity of the query against every stored question
    similarities = X.dot(query_vector.T).toarray().ravel()
    #Return the answer with the highest similarity
    return qa_pairs[questions[int(similarities.argmax())]]

#Test the model
print(answer_question("What is a loop in programming?"))
# Output:
# A loop is a programming construct that allows a set of instructions to be repeated or iterated until a certain condition is met
|
<filename>test/src/components/molecules/Article/index.js
import React from 'react';
function Article(props) {
return <article>{props.children}</article>;
}
export default Article;
|
import unittest
from si import amp
class TestAmp(unittest.TestCase):
    """Unit tests for amp.Amp construction and agent-URL parsing."""

    def test_no_key(self):
        # An empty key must be rejected with the documented message.
        with self.assertRaises(amp.AmpError) as ctx:
            amp.Amp("", "amp_agents")
        self.assertEqual("'key' can't not be empty", str(ctx.exception))

    def test_no_amp_agents(self):
        # An empty agent list must be rejected with the documented message.
        with self.assertRaises(amp.AmpError) as ctx:
            amp.Amp("key", "")
        self.assertEqual("'amp_agents' can't be empty", str(ctx.exception))

    def test_bad_amp_agent(self):
        # An agent string without a scheme is rejected outright.
        with self.assertRaises(amp.AmpError) as ctx:
            amp.Amp.parse_agent("bad_amp_agent")
        self.assertEqual('bad amp agent bad_amp_agent', str(ctx.exception))

    def test_bad_protocol(self):
        # Only http/https schemes are accepted.
        with self.assertRaises(amp.AmpError) as ctx:
            amp.Amp.parse_agent("ftp://ftp_server")
        self.assertEqual("method in ftp://ftp_server must be 'http' or 'https'", str(ctx.exception))

    def test_good_agents(self):
        # (agent URL, expected https flag, expected host, expected port);
        # the port defaults to 8100 when the URL does not carry one.
        cases = [
            ("http://localhost:8080", False, "localhost", 8080),
            ("http://localhost", False, "localhost", 8100),
            ("https://localhost:8080", True, "localhost", 8080),
            ("https://localhost", True, "localhost", 8100),
        ]
        for agent, want_https, want_host, want_port in cases:
            https, host, port = amp.Amp.parse_agent(agent)
            self.assertEqual(want_https, https)
            self.assertEqual(want_host, host)
            self.assertEqual(want_port, port)


if __name__ == '__main__':
    unittest.main()
|
package testdemo.junit5demo;
import io.qameta.allure.Feature;
import io.qameta.allure.Owner;
import io.qameta.allure.Story;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.RepeatedTest;
import org.junit.jupiter.api.parallel.Execution;
import org.junit.jupiter.api.parallel.ExecutionMode;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Date;
import static org.junit.jupiter.api.Assertions.assertEquals;
//CorpStaffDataAnalyseServiceImpl
//Another possible solution: manual locking (synchronized / explicit lock)
@Slf4j
@Feature("SimpleDateFormat并发不安全示例")
@Owner("zhzh.yin")
public class ConcurrentTest {
    // Shared instance — SimpleDateFormat is NOT thread-safe; the concurrent
    // tests below demonstrate and then fix this.
    SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    DateTimeFormatter dtf = DateTimeFormatter.ofPattern("yyyy/MM/dd HH:mm:ss");
    //StringBuffer is thread-safe: concurrent mutation of one StringBuffer is synchronized,
    //which of course lowers throughput — extra safety always costs some efficiency.
    //synchronized
    //explicit locking
    private static final ThreadLocal<SimpleDateFormat> THREAD_LOCAL = new ThreadLocal<SimpleDateFormat>() {
        @Override
        protected SimpleDateFormat initialValue() {
            // One SimpleDateFormat per thread, created lazily on first get().
            return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        }
    };
    // @RepeatedTest(500)
    @Story("并发报错:simpleDateFormat线程不安全")
    @Execution(ExecutionMode.CONCURRENT)
    //Fix for this failure: use DateTimeFormatter (see testSuc1)
    public void testFailure() throws ParseException, InterruptedException {
        String dateString = simpleDateFormat.format(new Date());
        log.info(dateString);
        Date time = simpleDateFormat.parse(dateString);
        String dateString2 = simpleDateFormat.format(time);
        // Under concurrent execution the shared formatter corrupts state, so
        // the round-trip format -> parse -> format can diverge.
        assertEquals(dateString, dateString2);
    }
    // @RepeatedTest(500)
    @Story("解决方案:局部变量")
    @Execution(ExecutionMode.CONCURRENT)
    //Fix: a method-local SimpleDateFormat — no sharing, hence no race.
    public void testSuc2() throws ParseException, InterruptedException {
        SimpleDateFormat simpleDateFormat1 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        String dateString = simpleDateFormat1.format(new Date());
        log.info(dateString);
        Date time = simpleDateFormat1.parse(dateString);
        String dateString2 = simpleDateFormat1.format(time);
        assertEquals(dateString, dateString2);
    }
    // @RepeatedTest(500)
    @Story("解决方案:ThreadLocal")
    @Execution(ExecutionMode.CONCURRENT)
    //Fix: ThreadLocal — every thread owns its private SimpleDateFormat copy.
    public void testSuc3() throws ParseException, InterruptedException {
        SimpleDateFormat simpleDateFormat2 = THREAD_LOCAL.get();
        if (simpleDateFormat2 == null) {
            simpleDateFormat2 = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        }String dateString = simpleDateFormat2.format(new Date());
        log.info(dateString);
        Date time = simpleDateFormat2.parse(dateString);
        String dateString2 = simpleDateFormat2.format(time);
        assertEquals(dateString, dateString2);
    }
    // @RepeatedTest(500)
    @Story("解决方案:使用DateTimeFormatter")
    @Execution(ExecutionMode.CONCURRENT)
    //Fix: DateTimeFormatter is immutable and therefore safe to share.
    public void testSuc1() throws ParseException, InterruptedException {
        String dateNow = LocalDateTime.now().format(dtf);
        String dateNow2=LocalDateTime.parse(dateNow,dtf).format(dtf);
        assertEquals(dateNow, dateNow2);
    }
}
|
package com.yin.springboot.user.center.server.service;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.List;
import com.yin.springboot.user.center.mapper.OauthAccessTokenMapper;
import com.yin.springboot.user.center.domain.OauthAccessToken;
import com.yin.springboot.user.center.server.OauthAccessTokenService;
@Service
public class OauthAccessTokenServiceImpl implements OauthAccessTokenService{
    /** Mapper that carries out the actual persistence; this class only delegates. */
    @Resource
    private OauthAccessTokenMapper oauthAccessTokenMapper;

    /** Batch-update the given records; returns the mapper's affected-row count. */
    @Override
    public int updateBatch(List<OauthAccessToken> list) {
        return oauthAccessTokenMapper.updateBatch(list);
    }

    /** Batch-insert the given records; returns the mapper's affected-row count. */
    @Override
    public int batchInsert(List<OauthAccessToken> list) {
        return oauthAccessTokenMapper.batchInsert(list);
    }

    /** Insert the record, or update it if it already exists (full column set). */
    @Override
    public int insertOrUpdate(OauthAccessToken record) {
        return oauthAccessTokenMapper.insertOrUpdate(record);
    }

    /** Insert or update, writing only the non-null fields of the record. */
    @Override
    public int insertOrUpdateSelective(OauthAccessToken record) {
        return oauthAccessTokenMapper.insertOrUpdateSelective(record);
    }
}
|
/// Look up the object identified by `id`, consulting `pack_cache` first.
///
/// Returns `Ok(Some((data, location)))` when a cached entry exists,
/// `Ok(None)` when it does not, and propagates any lookup/decoding error.
///
/// NOTE(review): this body assumes `pack_cache.find_entry` yields a
/// `Result<Option<Entry>>` and that entries expose `decode_data(&mut Vec<u8>)`
/// and a `location` field — confirm against the `DecodeEntry` trait, which is
/// not visible in this file.
fn try_find_cached<'a>(
    &self,
    id: impl AsRef<oid>,
    buffer: &'a mut Vec<u8>,
    pack_cache: &mut impl crate::cache::DecodeEntry,
) -> Result<Option<(git_object::Data<'a>, Option<crate::data::entry::Location>)>, Self::Error> {
    // Search for the cached entry based on the provided ID
    match pack_cache.find_entry(id.as_ref()) {
        Ok(Some(entry)) => {
            // If the entry is found, decode the data and return it along with the location
            let data = entry.decode_data(buffer)?;
            Ok(Some((data, Some(entry.location))))
        }
        Ok(None) => Ok(None), // If the entry is not found, return None
        Err(err) => Err(err), // Handle and propagate any potential errors
    }
}
|
package com.imooc.scala.batch.transformation
import org.apache.flink.api.scala.ExecutionEnvironment
/**
 * cross: computes the Cartesian product of two data sets.
 * Created by xuwei
 */
object BatchCrossScala {
  def main(args: Array[String]): Unit = {
    val env = ExecutionEnvironment.getExecutionEnvironment
    import org.apache.flink.api.scala._
    //Initialize the first data set
    val text1 = env.fromCollection(Array(1, 2))
    //Initialize the second data set
    val text2 = env.fromCollection(Array("a", "b"))
    //Perform the cross operation: yields (1,"a"), (1,"b"), (2,"a"), (2,"b")
    text1.cross(text2).print()
  }
}
|
<gh_stars>1-10
#ifndef CORE_GRAPHICS_RAY_H_
#define CORE_GRAPHICS_RAY_H_
#include <core-math/matrix4x4.h>
#include <core-math/matrix3x3.h>
namespace ml {
template<class FloatType>
// A parametric ray: origin plus normalized direction, with the per-axis
// reciprocal direction and sign bits precomputed (these are commonly used to
// accelerate slab/AABB intersection tests; the intersection code itself is
// not in this header, so that use is presumed).
class Ray
{
public:
    // Default-constructed ray is uninitialized; call the (origin, direction)
    // constructor before use.
    Ray()
    {
    }
    // Construct from origin o and a (not necessarily unit) direction d.
    // The direction is normalized on entry.
    inline Ray(const vec3<FloatType> &o, const vec3<FloatType> &d) {
        m_origin = o;
        m_direction = d.getNormalized();
        // Component-wise reciprocal; a zero component yields +/-inf.
        m_inverseDirection = vec3<FloatType>((FloatType)1.0 / m_direction.x, (FloatType)1.0 / m_direction.y, (FloatType)1.0 / m_direction.z);
        // One sign bit per axis: 1 when the direction component is negative.
        m_sign.x = (m_inverseDirection.x < (FloatType)0);
        m_sign.y = (m_inverseDirection.y < (FloatType)0);
        m_sign.z = (m_inverseDirection.z < (FloatType)0);
    }
    // Point on the ray at parameter t: origin + t * direction.
    inline vec3<FloatType> getHitPoint(FloatType t) const {
        return m_origin + t * m_direction;
    }
    inline const vec3<FloatType>& getOrigin() const {
        return m_origin;
    }
    inline const vec3<FloatType>& getDirection() const {
        return m_direction;
    }
    inline const vec3<FloatType>& getInverseDirection() const {
        return m_inverseDirection;
    }
    inline const vec3i& getSign() const {
        return m_sign;
    }
    // Apply an affine transform: origin via the full matrix, direction via
    // transformNormalAffine (no translation); derived fields are recomputed
    // by the constructor.
    inline void transform(const Matrix4x4<FloatType>& m) {
        *this = Ray(m * m_origin, m.transformNormalAffine(m_direction));
    }
    // Rotate only the direction; the origin stays fixed.
    inline void rotate(const Matrix3x3<FloatType>& m) {
        *this = Ray(m_origin, m * m_direction);
    }
    // Translate only the origin; the direction stays fixed.
    inline void translate(const vec3<FloatType>& p) {
        *this = Ray(m_origin + p, m_direction);
    }
private:
    vec3<FloatType> m_direction;
    vec3<FloatType> m_inverseDirection;
    vec3<FloatType> m_origin;
    vec3i m_sign;
};
template<class FloatType>
// Returns a copy of r transformed by the affine matrix m (r is unchanged).
Ray<FloatType> operator*(const Matrix4x4<FloatType>& m, const Ray<FloatType>& r) {
    Ray<FloatType> transformed(r);
    transformed.transform(m);
    return transformed;
}
template<class FloatType>
// Stream a ray as "origin | direction".
std::ostream& operator<<(std::ostream& os, const Ray<FloatType>& r) {
    return os << r.getOrigin() << " | " << r.getDirection();
}
// Convenience aliases for the common precisions.
typedef Ray<float> Rayf;
typedef Ray<double> Rayd;
} // namespace ml
#endif // CORE_GRAPHICS_RAY_H_
|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/feather/zapOff.js<gh_stars>1-10
"use strict";

// Auto-generated icon definition (Feather "zap-off") for react-icons-kit.
// The object mirrors the SVG: viewBox, child shape nodes under "children",
// and the stroke attributes applied at the root under "attribs".
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.zapOff = void 0;
var zapOff = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "polyline",
    "attribs": {
      "points": "12.41 6.75 13 2 10.57 4.92"
    },
    "children": []
  }, {
    "name": "polyline",
    "attribs": {
      "points": "18.57 12.91 21 10 15.66 10"
    },
    "children": []
  }, {
    "name": "polyline",
    "attribs": {
      "points": "8 8 3 14 12 14 11 22 16 16"
    },
    "children": []
  }, {
    "name": "line",
    "attribs": {
      "x1": "1",
      "y1": "1",
      "x2": "23",
      "y2": "23"
    },
    "children": []
  }],
  "attribs": {
    "fill": "none",
    "stroke": "currentColor",
    "stroke-width": "2",
    "stroke-linecap": "round",
    "stroke-linejoin": "round"
  }
};
exports.zapOff = zapOff;
|
#!/bin/sh
# Launches the bundled 'contrast' binary in a new macOS Terminal window,
# working from the directory that contains the .app bundle, then styles the
# window (colors and title) via AppleScript.
# Directory we run from: blah/contrast.app/Contents/MacOS
mydir=`dirname "$0"`
# Extra quoting required because $dirname may have spaces and double-quotes get
# eaten below.
scriptcmd="cd \\\"${mydir}/../../..\\\" ; \\\"${mydir}/contrast\\\" ; exit"
# Drive Terminal.app: open a window, run the command, then recolor/retitle it.
osascript \
  -e 'tell application "Terminal"' \
  -e 'activate' \
  -e "do script \"$scriptcmd\"" \
  -e 'set background color of window 1 to {52224, 65535, 65535}' \
  -e 'set normal text color of window 1 to "black"' \
  -e 'set cursor color of window 1 to "black"' \
  -e 'set custom title of window 1 to "contrast"' \
  -e 'end tell'
|
#!/bin/bash
#
# Copyright (c) 2010 Yahoo! Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License. See accompanying LICENSE file.
#
# Print the command-line help for this setup script and the supported options.
function printUsage() {
  echo
  echo " Usage : oozie-setup.sh <OPTIONS>"
  echo " [-extjs EXTJS_PATH] (expanded or ZIP, to enable the Oozie webconsole)"
  echo " [-hadoop HADOOP_VERSION HADOOP_PATH] (Hadoop version [0.20.1|0.20.2|0.20.104|0.20.200]"
  echo " and Hadoop install dir)"
  echo " [-jars JARS_PATH] (multiple JAR path separated by ':')"
  echo " (without options does default setup, without the Oozie webconsole)"
  echo
  echo " EXTJS can be downloaded from http://www.extjs.com/learn/Ext_Version_Archives"
  echo
}
# resolve links - $0 may be a softlink; walk the whole symlink chain so that
# BASEDIR ends up pointing at the real installation directory.
PRG="${0}"
while [ -h "${PRG}" ]; do
  ls=`ls -ld "${PRG}"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    PRG="$link"
  else
    PRG=`dirname "${PRG}"`/"$link"
  fi
done
BASEDIR=`dirname ${PRG}`
BASEDIR=`cd ${BASEDIR}/..;pwd`
# Pull in environment defaults quietly (presumably sets OOZIE_HOME and
# CATALINA_BASE used below — confirm against oozie-sys.sh).
source ${BASEDIR}/bin/oozie-sys.sh -silent
# Option flags/values accumulated by the parsing loop below.
addExtjs=""
addJars=""
addHadoopJars=""
extjsHome=""
jarsPath=""
hadoopVersion=""
hadoopPath=""
inputWar="${OOZIE_HOME}/oozie.war"
outputWar="${CATALINA_BASE}/webapps/oozie.war"
outputWarExpanded="${CATALINA_BASE}/webapps/oozie"
# Parse the command line. Every option that takes a value verifies the value
# is actually present before consuming it; anything unrecognized prints usage
# and aborts.
while [ $# -gt 0 ]
do
  if [ "$1" = "-extjs" ]; then
    shift
    if [ $# -eq 0 ]; then
      echo
      echo "Missing option value, ExtJS path"
      echo
      printUsage
      exit -1
    fi
    extjsHome=$1
    addExtjs=true
  elif [ "$1" = "-jars" ]; then
    shift
    if [ $# -eq 0 ]; then
      echo
      echo "Missing option value, JARs path"
      echo
      printUsage
      exit -1
    fi
    jarsPath=$1
    addJars=true
  elif [ "$1" = "-hadoop" ]; then
    # -hadoop consumes two values: the version and the install path.
    shift
    if [ $# -eq 0 ]; then
      echo
      echo "Missing option values, HADOOP_VERSION & HADOOP_HOME_PATH"
      echo
      printUsage
      exit -1
    elif [ $# -eq 1 ]; then
      echo
      echo "Missing option value, HADOOP_HOME_PATH"
      echo
      printUsage
      exit -1
    fi
    hadoopVersion=$1
    shift
    hadoopPath=$1
    addHadoopJars=true
  else
    printUsage
    exit -1
  fi
  shift
done
# Refuse to run while Oozie is up (Tomcat PID file still present).
if [ -e "${CATALINA_PID}" ]; then
  echo
  echo "ERROR: Stop Oozie first"
  echo
  exit -1
fi
# Remove any previously deployed WAR (and its expanded directory) so the
# freshly built one takes effect.
if [ -e "${outputWar}" ]; then
  chmod -f u+w ${outputWar}
  rm -rf ${outputWar}
fi
rm -rf ${outputWarExpanded}
echo
if [ "${addExtjs}${addJars}${addHadoopJars}" == "" ]; then
  echo "INFO: Doing default installation, Oozie webconsole disabled"
  cp ${inputWar} ${outputWar}
else
  # At least one option was given: rebuild the WAR through addtowar.sh with
  # the accumulated options.
  OPTIONS=""
  if [ "${addExtjs}" != "" ]; then
    OPTIONS="-extjs ${extjsHome}"
  fi
  if [ "${addJars}" != "" ]; then
    OPTIONS="${OPTIONS} -jars ${jarsPath}"
  fi
  if [ "${addHadoopJars}" != "" ]; then
    OPTIONS="${OPTIONS} -hadoop ${hadoopVersion} ${hadoopPath}"
  fi
  ${OOZIE_HOME}/bin/addtowar.sh -inputwar ${inputWar} -outputwar ${outputWar} ${OPTIONS}
  if [ "$?" != "0" ]; then
    exit -1
  fi
fi
echo "INFO: Oozie is ready to be started"
echo
|
<reponame>PetukhovVictor/compiler2<filename>src/Compiler/VM/Deep/strings.py<gh_stars>1-10
# -*- coding: utf-8 -*-
from ..Helpers.types import Types
from ..Helpers.base import dbstore, dbload
from ..Helpers.loop import Loop
from ..Helpers.commands import Dup, Store, Push, Mul, DMalloc, Load, Compare, DBStore, Add, DBLoad, Jnz, Label, Jump, Jz, Sub
class StringCompiler:
    # Emits VM instruction sequences for string operations.
    # Representation: a string lives in heap memory as consecutive character
    # cells terminated by a zero cell (end-of-string marker); on the stack a
    # string is represented by a pointer to its first cell.
    @staticmethod
    def store(commands, data):
        """ Generate instructions that write a string from the stack into heap memory. """
        str_start_pointer = data.var(Types.INT)
        end_str_pointer = data.var(Types.INT)
        # Add 1 to the required memory size - room for the escape zero (end-of-string marker)
        commands.add(Push, 1)
        commands.add(Add)
        commands.add(Dup)
        # Allocate memory equal to the number on the stack (the string length we stored there earlier)
        commands.add(DMalloc, 0)
        commands.add(Dup)
        commands.add(Store, str_start_pointer)
        # Hoist the loop invariant - the pointer to the end of the string - into a variable
        commands.add(Add)
        commands.add(Store, end_str_pointer)
        def cycle_body(_counter, b, c):
            # Store the characters one by one into the allocated memory in reverse
            # order (they are popped off the stack)
            dbstore(end_str_pointer, _counter, commands, invert=True, value=-2)
        counter = Loop.stack(commands, data, cycle_body, load_counter=False, return_counter=True)
        # Write 0 into the last memory cell - the end-of-string marker
        commands.add(Push, 0)
        dbstore(str_start_pointer, counter, commands)
        # Leave the pointer to the start of the string on the stack for later use
        commands.add(Load, str_start_pointer)
    @staticmethod
    def strlen(commands, data, type):
        """ Generate instructions that compute the length of the string on the stack. """
        str_start_pointer = data.var(Types.INT)
        # Take the pointer lying on the stack and store it into a variable
        commands.add(Store, str_start_pointer)
        # Read the string from memory to the end (until a 0 is met), counting the
        # characters (the count is left on the stack)
        Loop.data(commands, data, str_start_pointer, memory_type='heap')
    @staticmethod
    def strget(commands, data, type):
        """ Generate instructions that fetch a specific character of a string """
        # Add the requested character index (offset) to the cell number of the string start
        commands.add(Add)
        # Load the character onto the stack by its heap-memory cell number
        commands.add(DBLoad, 0)
    @staticmethod
    def strset(commands, data, type):
        """ Generate instructions that replace a specific character of a string """
        # Compute the heap-memory cell that holds the character being replaced
        commands.add(Add)
        # Replace the character
        commands.add(DBStore, 0)
    @staticmethod
    def strsub(commands, data, type):
        """ Generate instructions that extract a substring of a string """
        substr_length = data.var(Types.INT)
        substr_start_pointer = data.var(Types.INT)
        finish_label = data.label()
        # Save the substring length
        commands.add(Store, substr_length)
        commands.add(Add)
        commands.add(Store, substr_start_pointer)
        # Push 0 onto the stack - the end-of-string marker
        commands.add(Push, 0)
        def cycle_body(_counter, a, b):
            commands.add(Load, _counter)
            commands.add(Load, substr_length)
            commands.add(Compare, 5)
            # Once a substring of the required length has been read and written, leave the loop
            commands.add(Jnz, finish_label)
            # Load the next substring character from heap memory
            dbload(substr_start_pointer, _counter, commands)
        Loop.data(commands, data, substr_start_pointer, cycle_body, load_counter=False, memory_type='heap')
        commands.add(Label, finish_label)
        # Push the substring length onto the stack (store() below adds 1 for the end marker)
        commands.add(Load, substr_length)
        StringCompiler.store(commands, data)
    @staticmethod
    def strdup(commands, data, type):
        """ Generate instructions that duplicate a string """
        str_start_pointer = data.var(Types.INT)
        # Take the pointer lying on the stack and store it into a variable
        commands.add(Store, str_start_pointer)
        # Push 0 onto the stack - the end-of-string marker
        commands.add(Push, 0)
        def cycle_body(_counter, a, b):
            dbload(str_start_pointer, _counter, commands)
        # Read the string and push it onto the stack
        Loop.data(commands, data, str_start_pointer, cycle_body, memory_type='heap')
        StringCompiler.store(commands, data)
    @staticmethod
    def strcat_first(commands, data, type):
        """ Generate instructions that copy the first of the two strings being concatenated """
        str_start_pointer = data.var(Types.INT)
        commands.add(Store, str_start_pointer)
        commands.add(Push, 0)
        def cycle_body(_counter, a, b):
            dbload(str_start_pointer, _counter, commands)
        # Read the string and push it onto the stack
        Loop.data(commands, data, str_start_pointer, cycle_body, memory_type='heap')
    @staticmethod
    def strcat_second(commands, data, type):
        """ Generate instructions that copy the second concatenated string and lay it out in memory right after the first """
        str_start_pointer = data.var(Types.INT)
        str_length = data.var(Types.INT)
        commands.add(Store, str_start_pointer)
        commands.add(Store, str_length)
        def cycle_body(_counter, a, b):
            dbload(str_start_pointer, _counter, commands)
        # Read the string and push it onto the stack
        Loop.data(commands, data, str_start_pointer, cycle_body, memory_type='heap')
        commands.add(Load, str_length)
        commands.add(Add)
        StringCompiler.store(commands, data)
    @staticmethod
    def strmake(commands, data):
        """ Generate instructions that build a string of a given length filled with one repeated character """
        str_start_pointer = data.var(Types.INT)
        str_length = data.var(Types.INT)
        basis_symbol = data.var(Types.CHAR)
        finish_label = data.label()
        commands.add(Dup)
        # Save the string length into a variable
        commands.add(Store, str_length)
        # Allocate memory equal to the given string length + 1 (for the end-of-string marker 0)
        commands.add(DMalloc, 1)
        commands.add(Store, str_start_pointer)
        commands.add(Store, basis_symbol)
        def cycle_body(_counter, b, c):
            commands.add(Load, _counter)
            commands.add(Load, str_length)
            commands.add(Compare, 5)
            commands.add(Jnz, finish_label)
            commands.add(Load, basis_symbol)
            dbstore(str_start_pointer, _counter, commands)
        counter = Loop.simple(commands, data, cycle_body, return_counter=True)
        # We jump here after writing the required number of characters into the new string
        commands.add(Label, finish_label)
        # Write 0 into the last memory cell - the end-of-string marker
        commands.add(Push, 0)
        dbstore(str_start_pointer, counter, commands)
        # Leave the pointer to the start of the created string on the stack for later use
        commands.add(Load, str_start_pointer)
    @staticmethod
    def strcmp(commands, data, type1, type2):
        """ Generate instructions that compare two strings character by character """
        str1_start_pointer = data.var(Types.INT)
        str2_start_pointer = data.var(Types.INT)
        eq_label = data.label()
        not_eq_label = data.label()
        finish_label = data.label()
        commands.add(Store, str1_start_pointer)
        commands.add(Store, str2_start_pointer)
        def cycle_body(_counter, a, continue_label):
            # Load the n-th character of string 1
            dbload(str1_start_pointer, _counter, commands)
            # Duplicate it on the stack for the later check (so we don't load it again)
            commands.add(Dup)
            # Load the n-th character of string 2
            dbload(str2_start_pointer, _counter, commands)
            commands.add(Compare, 1)
            # If the characters differ, jump straight to not_eq_label and decide there which is greater.
            # This also works when the end of one of the strings is reached (some character vs 0)
            commands.add(Jnz, not_eq_label)
            commands.add(Push, 0)
            # Compare the previously duplicated character (of string 1) with 0 - if it is zero,
            # so is the other, since we only get here when both characters are equal
            commands.add(Compare, 0)
            # 0 marks the end of the string - if it is not 0, continue the loop
            commands.add(Jz, continue_label)
            # We get here when both strings ended simultaneously - i.e. they are fully equal
            commands.add(Jump, eq_label)
        counter = Loop.simple(commands, data, cycle_body, return_counter=True)
        # Fully-equal section: push 0 onto the stack
        commands.add(Label, eq_label)
        commands.add(Push, 0)
        commands.add(Jump, finish_label)
        # Inequality section
        commands.add(Label, not_eq_label)
        # Load only the second character - the first one is already on the stack (see the loop body)
        dbload(str2_start_pointer, counter, commands)
        # Compare the characters with the < operator
        commands.add(Compare, 2)
        # Normalize the comparison result: 0|1 -> -1|1
        commands.add(Push, 2)
        commands.add(Mul)
        commands.add(Push, 1)
        commands.add(Sub)
        commands.add(Label, finish_label)
|
#!/bin/sh
# Regenerate the Groovy petstore sample client with openapi-generator.
SCRIPT="$0"
echo "# START SCRIPT: $SCRIPT"

# Resolve symlinks so SCRIPT points at the real script location.
while [ -h "$SCRIPT" ] ; do
  ls=`ls -ld "$SCRIPT"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=`dirname "$SCRIPT"`/"$link"
  fi
done

# Default APP_DIR to the repository root (one level above this script).
if [ ! -d "${APP_DIR}" ]; then
  APP_DIR=`dirname "$SCRIPT"`/..
  APP_DIR=`cd "${APP_DIR}"; pwd`
fi

executable="./modules/openapi-generator-cli/target/openapi-generator-cli.jar"

# Build the CLI jar once if it is missing.
if [ ! -f "$executable" ]; then
  mvn -B clean package
fi

# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -Xmx1024M -DloggerPath=conf/log4j.properties"
ags="generate -t modules/openapi-generator/src/main/resources/Groovy/ -i modules/openapi-generator/src/test/resources/2_0/petstore.yaml -g groovy -o samples/client/petstore/groovy --additional-properties hideGenerationTimestamp=true $@"

# Fix: quote "$executable" so a jar path containing spaces survives;
# $ags stays unquoted on purpose - it must word-split into separate
# generator arguments (inherited limitation: extra args with spaces
# will not survive this expansion).
java $JAVA_OPTS -jar "$executable" $ags
|
#!/bin/bash
# Build the documentation site and publish multi-arch container images.

# Fetch the docsy theme pinned as a git submodule.
git submodule update --init --recursive
# To pull the most recent docsy changes instead, use:
# git submodule update --remote --merge

# Install npm dependencies.
yarn install

# Build the site; HUGO_ENV="production" enables Google Analytics.
env HUGO_ENV="production" hugo -D

# Container registries and image coordinates.
REGISTRY='quay.io/mycontroller'
ALT_REGISTRY='docker.io/mycontroller'
IMAGE_WEBSITE="documentation"
IMAGE_TAG=$(git rev-parse --abbrev-ref HEAD)

# Debug output.
echo $PWD
ls -alh
git branch

# Build the multi-arch image and push it to quay.io first, docker.io second.
for registry in "${REGISTRY}" "${ALT_REGISTRY}"; do
  docker buildx build \
    --push \
    --progress=plain \
    --platform linux/arm/v6,linux/arm/v7,linux/arm64,linux/amd64 \
    --file docker/Dockerfile \
    --tag ${registry}/${IMAGE_WEBSITE}:${IMAGE_TAG} .
done
|
#!/bin/sh
# Trigger an automated Docker Hub build of classpip/classpip-services.
# Requires $DOCKER_HUB_TOKEN to be set in the environment (e.g. a CI secret);
# if it is unset the URL ends at .../trigger// and the request will fail.
curl -H "Content-Type: application/json" --data '{"build": true}' -X POST https://registry.hub.docker.com/u/classpip/classpip-services/trigger/$DOCKER_HUB_TOKEN/
|
#!/usr/bin/env bash
##
# @license Copyright 2017 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
##

# usage
#   yarn compile-devtools
# Compiles the Lighthouse renderer files inside a fresh DevTools frontend
# checkout so closure-compiler errors surface before landing changes.

# The text here will override the renderer/ files in the scripts[] array:
#   https://github.com/ChromeDevTools/devtools-frontend/blob/master/front_end/audits2/module.json#L20
# (Currently this doesnt include logger or report-features)
files_to_include="\"lighthouse\/renderer\/util.js\", \"lighthouse\/renderer\/dom.js\", \"lighthouse\/renderer\/category-renderer.js\", \"lighthouse\/renderer\/performance-category-renderer.js\", \"lighthouse\/renderer\/crc-details-renderer.js\", \"lighthouse\/renderer\/details-renderer.js\", \"lighthouse\/renderer\/report-renderer.js\","

# -----------------------------
# paths
local_script_path="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
lhroot_path="$local_script_path/../../"
frontend_path="$lhroot_path/node_modules/temp-devtoolsfrontend"
protocol_path="$lhroot_path/node_modules/temp-devtoolsprotocol"

# clone if they're not there
# NOTE(review): the git:// protocol is unauthenticated and has been disabled
# on GitHub - these clones may need https:// URLs to keep working.
if [ ! -d "$frontend_path" ]; then
  git clone --depth=1 git://github.com/ChromeDevTools/devtools-frontend.git "$frontend_path"
fi
if [ ! -d "$protocol_path" ]; then
  git clone --depth=1 git://github.com/ChromeDevTools/devtools-protocol.git "$protocol_path"
fi

# update to latest origin/master, discarding any local modification
cd "$frontend_path" && git reset --hard && git fetch origin master && git checkout --quiet --force origin/master
cd "$protocol_path" && git reset --hard && git fetch origin master && git checkout --quiet --force origin/master
cd "$lhroot_path" || exit 1

# copy renderer and lh backgrond into this devtools checkout
yarn devtools "$frontend_path/front_end/"

#
# monkeypatch the audits2 module.json to include any new files we're added that aren't present
#
audit2_modulejson_path="$frontend_path/front_end/audits2/module.json"
# remove existing renderer file mentions
# NOTE(review): "-i=''" is an odd spelling - on GNU sed it means in-place
# editing with a backup suffix of "=", leaving "module.json=" backup files;
# confirm this cross-platform (GNU/BSD) hack is intended.
sed -i='' 's/.*\/renderer\/.*//' $audit2_modulejson_path
# add in our hardcoded renderer file mentions (prepended before Audits2Panel.js)
sed -i='' "s/\"Audits2Panel\.js\"/ $files_to_include \"Audits2Panel.js\"/" $audit2_modulejson_path

# compile, finally
python "$frontend_path/scripts/compile_frontend.py" --protocol-externs-file "$protocol_path/externs/protocol_externs.js"
# FYI the compile_frontend script deletes externs/protocol_externs.js when it's done.
|
# frozen_string_literal: true

RSpec.configure do |config|
  # Forbid the legacy should/stub monkey-patched syntax.
  config.disable_monkey_patching!

  # Report the ten slowest examples after each run.
  config.profile_examples = 10

  # Randomise example ordering to flush out order-dependent specs.
  config.order = :random

  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  #
  # Use the documentation formatter for detailed output, unless a formatter
  # has already been configured (e.g. via a command-line flag).
  if config.files_to_run.one?
    config.default_formatter = "doc"
  end

  # Seed global randomness from RSpec's seed so runs are reproducible.
  Kernel.srand config.seed
end
|
<reponame>alekitto/atlante-js
import Decorator, { DecoratorInterface } from './DecoratorInterface';
import Request from '../Request';
export default
class UrlDecorator extends implementationOf(Decorator) implements DecoratorInterface {
    private readonly _baseUrl: string;

    /**
     * Constructor.
     *
     * @param baseUrl Base URL that relative request paths are resolved against.
     */
    constructor(baseUrl: string) {
        super();

        this._baseUrl = baseUrl;
    }

    /**
     * @inheritdoc
     *
     * Resolves a relative request URL against the configured base URL.
     * URLs that already contain a scheme separator ("://") pass through
     * unchanged. Returns a new request object; the input is not mutated.
     */
    decorate(request: Request<any>): Request<any> {
        const { body = undefined, method, headers } = request;
        let { url } = request;

        // Idiomatic membership test; equivalent to -1 === url.indexOf('://').
        if (! url.includes('://')) {
            url = new URL(url, this._baseUrl).href;
        }

        return { body, method, url, headers };
    }
}
|
<filename>src/styles/buttons.js
import { css } from "@emotion/css";
/**
 * Base button style.
 *
 * Colours come from CSS custom properties that a `.color-scheme-light` or
 * `.color-scheme-dark` ancestor class swaps between themes. Covers
 * hover/active, disabled (dimmed, not-allowed cursor) and keyboard-focus
 * (`.focus-visible` class, presumably set by the focus-visible polyfill —
 * confirm) states.
 */
const defaultBtn = css`
  .color-scheme-light & {
    --btn-background-color: #cfcfcf;
    --btn-text-color: #000;
    --btn-hover-background-color: #bcbcbc;
    --btn-hover-text-color: #000;
    --btn-focus-box-shadow-color: #90caf9;
    --btn-selected-background-color: #bcbcbc;
  }
  .color-scheme-dark & {
    --btn-background-color: #616161;
    --btn-text-color: #f5f5f5;
    --btn-hover-background-color: #373737;
    --btn-hover-text-color: #f5f5f5;
    --btn-focus-box-shadow-color: #90caf9;
    --btn-selected-background-color: #373737;
  }
  padding: 10px 10px 8px;
  border-radius: 4px;
  border: none;
  cursor: pointer;
  background-color: var(--btn-background-color);
  color: var(--btn-text-color);
  :hover,
  :active {
    outline: none;
    background-color: var(--btn-hover-background-color);
    color: var(--btn-hover-text-color);
  }
  :disabled {
    opacity: 0.5;
    cursor: not-allowed;
  }
  &.focus-visible {
    outline: none;
    box-shadow: 0 0 0 4px var(--btn-focus-box-shadow-color);
  }
`;
/**
 * Borderless "dismiss" (close) button style: transparent until hovered,
 * theme-aware via `.color-scheme-*` custom properties, with a
 * keyboard-focus ring on `.focus-visible`. The `.close` rule vertically
 * centres an inner close icon.
 */
const dismissBtn = css`
  .color-scheme-light & {
    --dismiss-btn-hover-background-color: #aeaeae;
    --dismiss-btn-focus-box-shadow-color: #90caf9;
  }
  .color-scheme-dark & {
    --dismiss-btn-hover-background-color: #424242;
    --dismiss-btn-focus-box-shadow-color: #90caf9;
  }
  background: transparent;
  border: none;
  padding: 3px;
  border-radius: 4px;
  color: inherit;
  cursor: pointer;
  :hover,
  :active {
    outline: none;
    background-color: var(--dismiss-btn-hover-background-color);
  }
  &.focus-visible {
    outline: none;
    box-shadow: 0 0 0 4px var(--dismiss-btn-focus-box-shadow-color);
  }
  .close {
    vertical-align: middle;
  }
`;

export { defaultBtn, dismissBtn };
|
# Namespace for errors raised while scraping.
module RomLoader::ScrapingError
  # Raised when a lookup finds no matching element in the scraped page.
  NoElementFound = Class.new(StandardError)
end
|
/**
 * Count how many entries of `arr` are strictly equal (`===`) to `find`.
 *
 * @param arr  Array of values to scan.
 * @param find Value to match.
 * @returns Number of strictly-equal occurrences (0 for an empty array).
 */
const countOccurrences = (arr, find) =>
  arr.reduce((total, item) => (item === find ? total + 1 : total), 0);
|
#!/usr/bin/env bash
# CI build: compile the tools tree, then the modules, with fold markers
# so Travis collapses the log section. Abort on any failure.
set -ex -o pipefail

echo 'travis_fold:start:BUILD'

# Setup environment relative to this script, then move to the repo root.
cd $(dirname $0)
source ./env.sh
cd ../..

# Print the compiler version, then build the tools tree.
$(npm bin)/tsc -v
$(npm bin)/tsc -p tools

cp tools/@angular/tsc-wrapped/package.json dist/tools/@angular/tsc-wrapped

# Compile the modules with the freshly built tsc wrapper.
node dist/tools/@angular/tsc-wrapped/src/main -p modules
node dist/tools/@angular/tsc-wrapped/src/main -p modules/@angular/router

echo 'travis_fold:end:BUILD'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.