text stringlengths 1 1.05M |
|---|
import sys
import signal
import time
import random
import speech_recognition as sr


def signal_handler(signal, frame):
    """Exit cleanly on Ctrl+C (SIGINT)."""
    sys.exit(0)


signal.signal(signal.SIGINT, signal_handler)

# Set up the recognizer and the default system microphone.
rec = sr.Recognizer()
mic = sr.Microphone()

# Map each recognized keyword to the action it triggers.
commands = {
    'time': lambda: print(time.strftime('%H:%M:%S')),
    'date': lambda: print(time.strftime('%d/%m/%Y')),
    'greeting': lambda: print("Hello, how can I help you?"),
    'random': lambda: print(random.randint(1, 100)),
}


def listen():
    """Continuously capture audio and dispatch recognized commands.

    Runs until interrupted (SIGINT).  The original bare ``except``
    swallowed every failure alike; recognition errors, service errors
    and unknown commands are now reported separately, and none of them
    kills the loop.
    """
    while True:
        with mic as source:
            audio = rec.listen(source)
        try:
            text = rec.recognize_google(audio)
        except sr.UnknownValueError:
            # Speech was captured but could not be transcribed.
            print("Sorry, I didn't understand that")
            continue
        except sr.RequestError as exc:
            # Network/API failure talking to the recognition service.
            print("Recognition service error: {}".format(exc))
            continue
        command = commands.get(text.lower())
        if command is None:
            print("Unknown command: {}".format(text))
        else:
            command()


listen()
# Chef Provisioning recipe: build one machine image and provision two
# machines from it.
require 'chef/provisioning'
# Base image shared by both machines below.
machine_image 'foo'
machine 'quiddle' do
  from_image 'foo'
end
machine 'baz' do
  from_image 'foo'
end
|
// Auth configuration.
// SECURITY NOTE(review): the hard-coded UUID below is a fallback JWT
// secret committed to source control — anyone with repo access can forge
// tokens whenever JWT_SECRET is unset.  Ensure JWT_SECRET is always set
// in production; consider failing fast instead of falling back.
module.exports = {
  jwtSecret: process.env.JWT_SECRET || '19064d11-4d18-427e-b08e-d9ebfa40c638'
};
<filename>src/routers/PublicRoute.tsx<gh_stars>0
import React from "react";
import { connect } from "react-redux";
import { Redirect, Route } from "react-router-dom";
import { ADMIN_DASHBOARD, SIGNIN, SIGNUP } from "../constants";
import { AppState } from "../redux";
/**
 * Route wrapper for pages that do not require authentication.
 *
 * Redirect rules (evaluated on every render):
 *  - authenticated ADMIN users are always sent to the admin dashboard;
 *  - authenticated regular users visiting sign-in/sign-up are bounced
 *    back to where they came from (router state `from`, default "/");
 *  - everyone else sees the wrapped component inside <main>.
 */
const _PublicRoute: React.FC<PublicRouteProps> = ({ isAuth, role, component: Component, path, ...rest }) => (
  <Route
    {...rest}
    render={(props) => {
      // react-router's Location type does not declare `state`; suppressed
      // rather than typed here.
      // @ts-ignore
      const { from } = props.location.state || { from: { pathname: "/" } };
      if (isAuth && role === "ADMIN") {
        return <Redirect to={ADMIN_DASHBOARD} />;
      }
      if (isAuth && role === "USER" && (path === SIGNIN || path === SIGNUP)) {
        return <Redirect to={from} />;
      }
      return (
        <main className="content">
          <Component {...props} />
        </main>
      );
    }}
  />
);
// Fallbacks for props that the store/router may not provide.
// NOTE(review): defaultProps on function components is deprecated in
// newer React — consider default parameter values instead.
_PublicRoute.defaultProps = {
  isAuth: false,
  role: "USER",
  path: "/",
  exact: false,
};
/** Props accepted by PublicRoute; all but `component` come from Redux/router. */
type PublicRouteProps = {
  isAuth?: boolean;
  role?: string;
  /** Page component rendered inside the route (was `any`; typed so misuse is caught). */
  component: React.ComponentType<any>;
  path?: string;
  exact?: boolean;
};
/** Derive the route's auth flags from the Redux store. */
const mapStateToProps = ({ auth }: AppState) => {
  return {
    isAuth: Boolean(auth),
    role: auth?.role || "",
  };
};

/** Redux-connected public route, ready to drop into a router. */
export const PublicRoute = connect(mapStateToProps)(_PublicRoute);
|
<reponame>Martin-BG/Softuni-Java-MVC-Spring-Feb-2019<gh_stars>1-10
package com.minkov.springintroapp.testutils;
import com.minkov.springintroapp.entities.Car;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Scope;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * Test-data helpers for {@link Car} entities.
 */
public class CarsUtils {

    /**
     * Builds {@code count} dummy cars with ids 1..count and models
     * "Model 0".."Model count-1".
     *
     * @param count number of cars to create; zero or negative yields an empty list
     * @return a mutable list of freshly created cars
     */
    public static List<Car> getCars(int count) {
        return IntStream.range(0, count)
                .mapToObj(CarsUtils::newCar)
                .collect(Collectors.toList());
    }

    /**
     * Creates one car via plain setters.  The original used double-brace
     * initialization ({@code new Car(){{...}}}), which creates an anonymous
     * subclass per call site — breaking equals()/serialization and pinning
     * the enclosing class in memory.
     */
    private static Car newCar(int index) {
        Car car = new Car();
        car.setId(index + 1);
        car.setModel("Model " + index);
        return car;
    }
}
|
-- Count how many people share each age.
SELECT age, COUNT(*) AS num_people
FROM Person
GROUP BY age;
/* **** Notes
Convert to time out of characters.
//*/
# define CALEND
# define CAR
# include "../../../incl/config.h"
/*
 * cv_time - parse a time string into the event's time fields.
 *
 * b    : input character buffer holding the time text.
 * argp : target calendar event; its time array is zeroed before parsing
 *        and re-zeroed if parsing fails.
 *
 * Returns cv_hr's result: non-zero on success, 0 on failure or when
 * either argument is NULL.
 *
 * NOTE(review): R() and the CALS_* constants come from config.h, not
 * visible here; comments assume R(time, ev) yields a pointer to the
 * event's time array — confirm against config.h.  `flag` is unused.
 */
signed(__cdecl cv_time(signed char(*b),cals_event_t(*argp))) {
    auto signed i,r;
    auto signed short flag;
    if(!b) return(0x00);
    if(!argp) return(0x00);
    /* init: zero the CALS_TIME-sized time array before parsing. */
    i = (CALS_TIME);
    while(i) *(--i+(R(time,*argp))) = (0x00);
    /* Parse hour/minute fields out of b into the event. */
    r = cv_hr(CALS_HR+(R(time,*argp)),CALS_MN+(R(time,*argp)),b);
    if(!r) {
        /* Parsing failed: wipe any partial result. */
        i = (CALS_TIME);
        while(i) *(--i+(R(time,*argp))) = (0x00);
    }
    return(r);
}
|
package info.u250.c2d.graphic.surfaces;
import com.badlogic.gdx.graphics.Mesh;
import com.badlogic.gdx.graphics.Texture;
import com.badlogic.gdx.graphics.Texture.TextureWrap;
import info.u250.c2d.engine.Engine;
/**
 * Curve surface: a mesh group drawn with the OpenGL ES FAN or STRIP
 * modes from an ordered set of base points.
 * @author xjjdog
 */
public abstract class CurveSurfaces implements com.badlogic.gdx.utils.Disposable {
    /**
     * Hooks around {@link CurveSurfaces#render(float)} for extra control
     * immediately before/after the surface is drawn.
     * @author xjjdog
     */
    public interface CurveSurfacesRender{
        /** before render */
        public void preRender(float delta);
        /** after render */
        public void postRender(float delta);
    }
    /** Optional render hooks; null means render without callbacks. */
    private CurveSurfacesRender bRender;
    protected CurveSurfaces(){}
    public void setbRender(CurveSurfacesRender bRender) {
        this.bRender = bRender;
    }
    /** Geometry/texture description the mesh is built from. */
    public SurfaceData data;
    public Texture texture;
    /**the main mesh used to draw */
    public Mesh mesh;
    // NOTE(review): this constructor calls build(), which invokes the
    // subclass's doBuild() before the subclass constructor body has run —
    // subclasses must not rely on their own fields inside doBuild().
    public CurveSurfaces(SurfaceData data){
        this.data = data;
        this.build();
    }
    /** Loads and configures the texture, then builds the mesh via doBuild().
     * Skipped when the texture is missing or fewer than 3 points are defined. */
    public final void build(){
        texture = Engine.resource(data.texture,Texture.class);
        if(null!=this.texture){
            this.texture.setWrap(TextureWrap.Repeat, TextureWrap.Repeat);
            this.texture.setFilter(Texture.TextureFilter.Linear, Texture.TextureFilter.Linear);
        }
        if(data.points.size>2 && this.texture !=null)this.doBuild();
    }
    /** Draws the surface, invoking the pre/post hooks when set. */
    public void render(float delta){
        if(null!=bRender){
            bRender.preRender(delta);
        }
        this.doRender(delta);
        if(null!=bRender){
            bRender.postRender(delta);
        }
    }
    /** the draw method is implemented by subclasses */
    protected abstract void doRender(float delta);
    /** the build method is implemented by subclasses */
    protected abstract void doBuild();
    /** Releases the mesh; safe to call repeatedly. */
    public void dispose(){
        if(null!=mesh){
            mesh.dispose();
            mesh = null;
        }
    }
}
<?php
// Render the five most recent posts from blog.json.
$raw = file_get_contents('blog.json');
if ($raw === false) {
    die('Unable to read blog.json');
}

$blog_content = json_decode($raw);
// json_decode returns null on malformed JSON; the posts must be a list,
// otherwise array_slice below would fail.
if (!is_array($blog_content)) {
    die('blog.json does not contain a JSON array of posts');
}

// Get the last 5 posts (most recent entries are at the end of the file).
$last_posts = array_slice($blog_content, -5);

// Print the posts.
// NOTE(review): titles/bodies are echoed without htmlspecialchars();
// this is only safe if blog.json is trusted, authored content — escape
// here if it can ever contain user input.
foreach ($last_posts as $post) {
    echo $post->title . '<br />';
    echo $post->body . '<br />';
    echo '<hr />';
}
?>
/*
 * Return the largest value in arr[0..n-1].
 * Precondition: n >= 1 (arr[0] is read unconditionally).
 */
int findMax(int arr[], int n)
{
    int best = arr[0];
    int i;

    for (i = 1; i < n; i++) {
        if (arr[i] > best) {
            best = arr[i];
        }
    }
    return best;
}
<reponame>ooooo-youwillsee/leetcode<filename>0164-Maximum Gap/cpp_0164/Solution2.h
/**
* @author ooooo
* @date 2020/11/26 19:19
*/
#ifndef CPP_0164__SOLUTION2_H_
#define CPP_0164__SOLUTION2_H_

#include <iostream>
#include <vector>

using namespace std;

// LSD radix sort (base 10) followed by a linear scan: returns the maximum
// gap between successive elements of the sorted order in O(n*d) time.
// NOTE: sorts `nums` in place as a side effect (as the original did).
// The original called max_element/copy/max from <algorithm> without
// including it; this version needs no <algorithm> at all.
class Solution {
 public:
    int maximumGap(vector<int> &nums) {
        if (nums.size() < 2) return 0;
        int n = nums.size();

        // Largest value determines how many digit passes are needed.
        int max_v = nums[0];
        for (int i = 1; i < n; ++i) {
            if (nums[i] > max_v) max_v = nums[i];
        }

        vector<int> buf(n);
        // Counting sort per decimal digit, least significant first.
        for (int a = 1; max_v >= a; a *= 10) {
            vector<int> cnt(10), t(10);
            for (int i = 0; i < n; ++i) {
                cnt[(nums[i] / a) % 10]++;
            }
            // t[d] = first output slot for digit d (exclusive prefix sums).
            for (int i = 1; i <= 9; ++i) {
                t[i] = t[i - 1] + cnt[i - 1];
            }
            for (int i = 0; i < n; ++i) {
                int d = (nums[i] / a) % 10;
                buf[t[d]++] = nums[i];
            }
            nums = buf;
        }

        // nums is now sorted ascending; take the widest adjacent gap.
        int diff = 0;
        for (int i = 1; i < n; ++i) {
            if (nums[i] - nums[i - 1] > diff) diff = nums[i] - nums[i - 1];
        }
        return diff;
    }
};

#endif //CPP_0164__SOLUTION2_H_
|
<reponame>ebowman/calc
package calc
/*
*/
// Truth-table exercise: prints every (a, b, c) boolean triple in which
// at least one value is true — i.e. all 8 combinations except
// (false, false, false).
object Calc39 extends App {
  for {
    a <- Seq(true, false)
    b <- Seq(true, false)
    c <- Seq(true, false) if a || b || c
  } println((a,b,c))
}
|
Program Recipe:
1. Take in the number to be tested for whether it is prime.
2. Check if the number is less than or equal to 1, if so the number is not prime and return false.
3. Iterate from 2 up to (but not including) the number, checking at each step whether the number is divisible by the current candidate. (Checking only up to the square root of the number is sufficient and faster.)
4. If the number is divisible by any of the numbers, then it is not prime and return false.
5. Otherwise, the number is prime and return true. |
import java.math.BigDecimal;
import java.time.LocalDate;
public class TaxCalculator {
public Money calculateTaxRefundAmount(TaxReturnBean bean) {
if (bean.getReasonForTaxReturnPartYear() == 1) {
return bean.getTaxPayment2018();
} else if (bean.getReasonForTaxReturnPartYear() == 2) {
BigDecimal refundAmount = bean.getTaxPayment2018().getAmount().multiply(new BigDecimal("0.8"));
return Money.of(refundAmount, bean.getTaxPayment2018().getCurrency());
} else if (bean.getReasonForTaxReturnPartYear() == 3) {
BigDecimal refundAmount = bean.getTaxPayment2018().getAmount().multiply(new BigDecimal("0.5"));
return Money.of(refundAmount, bean.getTaxPayment2018().getCurrency());
} else if (bean.getReasonForTaxReturnPartYear() == 4) {
BigDecimal refundAmount = bean.getTaxPayment2018().getAmount().multiply(new BigDecimal("0.3"));
return Money.of(refundAmount, bean.getTaxPayment2018().getCurrency());
} else {
return Money.of(BigDecimal.ZERO, bean.getTaxPayment2018().getCurrency());
}
}
} |
#! /bin/bash
# Bootstrap puppet on this host and apply the floridsword manifests.
cd /opt/floridsword/setup || exit 1

# Install the Puppet Labs apt repository if it is missing.
if ! dpkg -l puppetlabs-release-pc1 > /dev/null 2>&1; then
    wget -O /tmp/puppetlabs.deb https://apt.puppetlabs.com/puppetlabs-release-pc1-xenial.deb
    sudo dpkg -i /tmp/puppetlabs.deb
    rm /tmp/puppetlabs.deb
fi

# Install the puppet agent itself if it is missing.
# (The original ran `apt-get install` without sudo right after
# `sudo apt-get update` — made consistent here.)
if ! dpkg -l puppet-agent > /dev/null 2>&1; then
    sudo apt-get update
    sudo apt-get install puppet-agent -y
fi

# Modules required by the site manifests.
/opt/puppetlabs/bin/puppet module install saz-dnsmasq
/opt/puppetlabs/bin/puppet module install puppetlabs-apt
/opt/puppetlabs/bin/puppet module install puppet-unattended_upgrades

if [ "$1" != "--skip-puppet" ]; then
    /opt/puppetlabs/bin/puppet apply --color=no --debug --verbose \
        --environmentpath /opt/floridsword/setup/environments/ \
        --environment production \
        /opt/floridsword/setup/environments/production/manifests/site.pp
fi

# Reset rc.local so this installer does not run again on next boot.
sed -i 's_sh /opt/floridsword/setup/install\.sh.*$_exit 0_' /etc/rc.local
import React from "react"
import Typewriter from "typewriter-effect"
import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"
import { faGithub } from "@fortawesome/free-brands-svg-icons"
import { faLink } from "@fortawesome/free-solid-svg-icons"
/**
 * Terminal-style popup showing one project: typewriter title, a
 * screenshot or looping video, GitHub/live links and the tech stack.
 *
 * Props:
 *  - title:           heading, typed out with a typewriter effect
 *  - video:           "false" for an image, otherwise an mp4 URL
 *  - popupImageSrc / popupImageAlt: screenshot used when video === "false"
 *  - popupGithubLink / popupLiveLink: optional external links
 *  - techIcons:       array of simple-icons slugs rendered as a list
 *  - popupText / html: body copy (html is injected as-is via
 *    dangerouslySetInnerHTML — it must be trusted build-time content)
 */
export default function PopupTerminalWindow({
  title,
  popupImageAlt,
  popupText,
  video,
  popupImageSrc,
  popupGithubLink,
  popupLiveLink,
  techIcons,
  html,
}) {
  // Prefer the live link for the image wrapper, fall back to GitHub.
  // (The original assigned this inside the JSX with a lint suppression.)
  const link = popupLiveLink || popupGithubLink

  // Kept in a new variable instead of mutating the techIcons parameter.
  const techIconItems = techIcons?.map(icon => (
    <li className="techItem tooltip" key={icon}>
      <span className="tooltiptext">{`${
        icon.charAt(0).toUpperCase() + icon.slice(1)
      }`}</span>
      <img
        className="svgIcon"
        src={`https://cdn.jsdelivr.net/npm/simple-icons@v5/icons/${icon}.svg`}
        alt="Tech icon"
      />
    </li>
  ))

  const image = (
    <img
      src={`/${popupImageSrc}`}
      className="popupTerminaWindowImage"
      alt={popupImageAlt}
    />
  )

  return (
    <div>
      <div className="popupTerminaWindowContainer">
        <h1 className="popupTerminaWindowHeader">
          <Typewriter
            onInit={typewriter => {
              typewriter
                .typeString(`${title}`)
                .start()
                .callFunction(function (state) {
                  // Hide the blinking cursor once the title is typed.
                  state.elements.cursor.style.display = "none"
                })
            }}
          />
        </h1>
        {video === "false" ? (
          <div className="popupTerminalWindowImageContainer">
            {link ? (
              <a href={link} target="_blank" rel="noopener noreferrer">
                {image}
              </a>
            ) : (
              image
            )}
          </div>
        ) : (
          <div className="popupTerminalWindowImageContainer">
            {/* autoPlay/playsInline must be camelCased in JSX — the
                original lowercase attributes were dropped by React, so
                autoplay and inline playback never applied. */}
            <video
              width="100%"
              height="100%"
              controls
              autoPlay
              muted
              loop
              playsInline
            >
              <source src={video} type="video/mp4" />
              Your browser does not support the video tag.
            </video>
          </div>
        )}
        {popupGithubLink || popupLiveLink ? (
          <div className="popupTerminalWindowLinkIcons">
            {popupLiveLink ? (
              <a href={popupLiveLink} target="_blank" rel="noopener noreferrer">
                <FontAwesomeIcon
                  icon={faLink}
                  className="popupTerminalWindowLinkIcon"
                  size="2x"
                />
              </a>
            ) : (
              ""
            )}
            {popupGithubLink ? (
              <a
                href={popupGithubLink}
                target="_blank"
                rel="noopener noreferrer"
              >
                <FontAwesomeIcon
                  icon={faGithub}
                  className="popupTerminalWindowLinkIcon"
                  size="2x"
                />
              </a>
            ) : (
              ""
            )}
          </div>
        ) : (
          ""
        )}
        <div className="popupTerminaWindowTextContainer">
          <div
            className="popupTerminaWindowText"
            dangerouslySetInnerHTML={{ __html: html }}
          ></div>
          <p className="popupTerminaWindowText">{popupText}</p>
          {techIconItems ? (
            <>
              <div className="break"></div>
              <div className="popupTerminalWindowFooter">
                <h4>Tech used:</h4>
                <ul className="techItemsList">{techIconItems}</ul>
              </div>
            </>
          ) : (
            ""
          )}
        </div>
      </div>
    </div>
  )
}
|
<reponame>JasonLiu798/javautil
package com.atjl.biz.flow.core;
import com.atjl.biz.flow.api.Flow;
import com.atjl.common.api.resp.ResponseDataDto;
import com.atjl.biz.flow.api.FlowRequest;
import java.lang.reflect.Method;
import java.util.Map;
//import com.sf.inv.dto.flow.FlowResponse;
/**
 * Span unit: wraps one flow snippet ({@link Flow}) together with the
 * reflective method to invoke on it and the option bit-flags that tell
 * the flow engine how to react to its outcome.
 * <p>
 * 对Snippet的封装 (wrapper around a Snippet).
 */
public class SpanUnit {
    /** Snippet method executed by {@link #invoke()}. */
    private Method actionMethod;
    /** Span-unit id. */
    private String suid;
    /** Option bit-flags, interpreted via {@link FlowOptionGen}. */
    private int option;
    /** The wrapped flow snippet; most methods below delegate to it. */
    private Flow snippet;
    public SpanUnit() {
    }
    /** Forwards the request to the snippet ({@code setResquest} is the Flow API's own spelling). */
    public void setRequest(FlowRequest req) {
        snippet.setResquest(req);
    }
    public Map<String, Object> getDataContextAll() {
        return snippet.getContextDataAll();
    }
    public void setDataContextAll(Map<String, Object> data) {
        snippet.setContextDataAll(data);
    }
    public void setGlobal(Object param) {
        snippet.setGlobal(param);
    }
    public ResponseDataDto getResponse() {
        return snippet.getResponse();
    }
    /**
     * ################ option checkers #####################
     */
    public boolean isEnableErrorCodeContinue() {
        return FlowOptionGen.isEnableErrorCodeContinue(this.option);
    }
    public boolean isEnableExceptionContinue() {
        return FlowOptionGen.isEnableExceptionContinue(this.option);
    }
    public boolean isEnableChangeNextFlowId() {
        return FlowOptionGen.isEnableChangeNextFlowId(this.option);
    }
    /**
     * ################ setters & getters #####################
     * (setters return {@code this} for chained configuration)
     */
    public int getOption() {
        return option;
    }
    public SpanUnit setOption(int option) {
        this.option = option;
        return this;
    }
    public Method getActionMethod() {
        return actionMethod;
    }
    /** Invokes the snippet's action method reflectively (made accessible first). */
    public void invoke() throws Exception {
        actionMethod.setAccessible(true);
        actionMethod.invoke(snippet);
    }
    public SpanUnit setActionMethod(Method actionMethod) {
        this.actionMethod = actionMethod;
        return this;
    }
    public String getSuid() {
        return suid;
    }
    public SpanUnit setSuid(String suid) {
        this.suid = suid;
        return this;
    }
    public Flow getSnippet() {
        return snippet;
    }
    public SpanUnit setSnippet(Flow snippet) {
        this.snippet = snippet;
        return this;
    }
    @Override
    public String toString() {
        return "SpanUnit{" +
                "actionMethod=" + actionMethod +
                ", suid='" + suid + '\'' +
                ", option=" + option +
                ", snippet=" + snippet +
                '}';
    }
}
|
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Check for circular dependencies
export LC_ALL=C

# Known/accepted circular dependencies.  Any new circle fails the check;
# entries that disappear must be pruned from this list (enforced below).
EXPECTED_CIRCULAR_DEPENDENCIES=(
    "chainparamsbase -> util -> chainparamsbase"
    "checkpoints -> validation -> checkpoints"
    "index/txindex -> validation -> index/txindex"
    "policy/fees -> txmempool -> policy/fees"
    "policy/policy -> validation -> policy/policy"
    "qt/addresstablemodel -> qt/walletmodel -> qt/addresstablemodel"
    "qt/bantablemodel -> qt/clientmodel -> qt/bantablemodel"
    "qt/nestcoingui -> qt/utilitydialog -> qt/nestcoingui"
    "qt/nestcoingui -> qt/walletframe -> qt/nestcoingui"
    "qt/nestcoingui -> qt/walletview -> qt/nestcoingui"
    "qt/clientmodel -> qt/peertablemodel -> qt/clientmodel"
    "qt/paymentserver -> qt/walletmodel -> qt/paymentserver"
    "qt/recentrequeststablemodel -> qt/walletmodel -> qt/recentrequeststablemodel"
    "qt/sendcoinsdialog -> qt/walletmodel -> qt/sendcoinsdialog"
    "qt/transactiontablemodel -> qt/walletmodel -> qt/transactiontablemodel"
    "qt/walletmodel -> qt/walletmodeltransaction -> qt/walletmodel"
    "rpc/rawtransaction -> wallet/rpcwallet -> rpc/rawtransaction"
    "txmempool -> validation -> txmempool"
    "validation -> validationinterface -> validation"
    "wallet/coincontrol -> wallet/wallet -> wallet/coincontrol"
    "wallet/fees -> wallet/wallet -> wallet/fees"
    "wallet/rpcwallet -> wallet/wallet -> wallet/rpcwallet"
    "wallet/wallet -> wallet/walletdb -> wallet/wallet"
    "policy/fees -> policy/policy -> validation -> policy/fees"
    "policy/rbf -> txmempool -> validation -> policy/rbf"
    "qt/addressbookpage -> qt/nestcoingui -> qt/walletview -> qt/addressbookpage"
    "qt/guiutil -> qt/walletmodel -> qt/optionsmodel -> qt/guiutil"
    "txmempool -> validation -> validationinterface -> txmempool"
    "qt/addressbookpage -> qt/nestcoingui -> qt/walletview -> qt/receivecoinsdialog -> qt/addressbookpage"
    "qt/addressbookpage -> qt/nestcoingui -> qt/walletview -> qt/signverifymessagedialog -> qt/addressbookpage"
    "qt/guiutil -> qt/walletmodel -> qt/optionsmodel -> qt/intro -> qt/guiutil"
    "qt/addressbookpage -> qt/nestcoingui -> qt/walletview -> qt/sendcoinsdialog -> qt/sendcoinsentry -> qt/addressbookpage"
)

EXIT_CODE=0

CIRCULAR_DEPENDENCIES=()

# Split only on newlines: each dependency chain contains spaces.
IFS=$'\n'
for CIRC in $(cd src && ../contrib/devtools/circular-dependencies.py {*,*/*,*/*/*}.{h,cpp} | sed -e 's/^Circular dependency: //'); do
    # Quoted append (the original was unquoted, risking glob expansion).
    CIRCULAR_DEPENDENCIES+=("$CIRC")
    IS_EXPECTED_CIRC=0
    for EXPECTED_CIRC in "${EXPECTED_CIRCULAR_DEPENDENCIES[@]}"; do
        if [[ "${CIRC}" == "${EXPECTED_CIRC}" ]]; then
            IS_EXPECTED_CIRC=1
            break
        fi
    done
    # Anything not in the expected list is a regression.
    if [[ ${IS_EXPECTED_CIRC} == 0 ]]; then
        echo "A new circular dependency in the form of \"${CIRC}\" appears to have been introduced."
        echo
        EXIT_CODE=1
    fi
done

# Conversely, expected entries that no longer occur should be pruned.
for EXPECTED_CIRC in "${EXPECTED_CIRCULAR_DEPENDENCIES[@]}"; do
    IS_PRESENT_EXPECTED_CIRC=0
    for CIRC in "${CIRCULAR_DEPENDENCIES[@]}"; do
        if [[ "${CIRC}" == "${EXPECTED_CIRC}" ]]; then
            IS_PRESENT_EXPECTED_CIRC=1
            break
        fi
    done
    if [[ ${IS_PRESENT_EXPECTED_CIRC} == 0 ]]; then
        echo "Good job! The circular dependency \"${EXPECTED_CIRC}\" is no longer present."
        echo "Please remove it from EXPECTED_CIRCULAR_DEPENDENCIES in $0"
        echo "to make sure this circular dependency is not accidentally reintroduced."
        echo
        EXIT_CODE=1
    fi
done

exit ${EXIT_CODE}
|
(function(coinswap) {
  // Top navigation bar; re-renders whenever balance, pending amount or
  // the active page changes on its model.
  coinswap.NavbarView = Backbone.View.extend({
    template: _.template($('#template-navbar').html()),
    tagName: 'header',
    className: 'navbar navbar-default',

    initialize: function() {
      this.render();
      this.listenTo(this.model, 'change:balance change:pending change:page', this.render);
    },

    render: function() {
      // (Removed leftover debug console.log from the original.)
      this.$el.html(this.template(this.model.attributes));
      // Re-attach tooltips to the freshly rendered markup.
      this.$el.find('[data-toggle="tooltip"]').tooltip({
        animation: false,
        container: this.$el
      });
      // Backbone convention: return this so render() can be chained.
      return this;
    }
  });
})(coinswap);
|
<reponame>Emeshka/graphytica<gh_stars>1-10
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { SqPredicateComponent } from './sq-predicate.component';
describe('SqPredicateComponent', () => {
  let fixture: ComponentFixture<SqPredicateComponent>;
  let component: SqPredicateComponent;

  // Compile the declarations and create a fresh component instance for
  // every test — the original's two sequential beforeEach hooks folded
  // into a single async hook, with identical execution order.
  beforeEach(async () => {
    await TestBed.configureTestingModule({
      declarations: [SqPredicateComponent],
    }).compileComponents();

    fixture = TestBed.createComponent(SqPredicateComponent);
    component = fixture.componentInstance;
    fixture.detectChanges();
  });

  it('should create', () => {
    expect(component).toBeTruthy();
  });
});
|
//
// Copyright 2021 <NAME>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#include <git_revision.hpp>
#include "age_tester_write_log.hpp"
#include <algorithm>
#include <bitset>
#include <ctime>
#include <fstream>
#include <iomanip>
#include <sstream>
namespace
{
    // Header written at the top of each log file: rom name, device type,
    // UTC timestamp, AGE git revision and a column legend.
    // Uses std::put_time, which requires <iomanip>.
    std::string log_header(const std::filesystem::path& rom_path, age::gb_device_type device_type)
    {
        // NOTE(review): std::gmtime returns a pointer to shared static
        // storage and is not thread-safe — fine while logs are written
        // from a single thread; confirm if tests ever run concurrently.
        std::time_t t = std::time(nullptr);
        auto* tm = std::gmtime(&t);
        std::stringstream result;
        result << "--------------------------------------------------------------------------------\n"
               << " emulation logs for: " << rom_path.filename().string() << " " << age::tester::get_device_type_string(device_type) << '\n'
               << " created on: " << std::put_time(tm, "%Y-%m-%dT%H:%M:%SZ") << '\n'
               << " AGE git revision: " << GIT_REV << " (" << GIT_DATE << ")\n"
               << "--------------------------------------------------------------------------------\n"
               << '\n'
               << "T4-cycle T4-16-bit-divider category comments" << '\n'
               << "-------- ------------------- ---------- -------------------------------------\n";
        return result.str();
    }
    // Fixed-width column label for a log category; empty string for
    // values outside the enum (keeps the switch exhaustive-checkable).
    std::string category_str(age::gb_log_category category)
    {
        switch (category)
        {
            case age::gb_log_category::lc_clock: return "clock";
            case age::gb_log_category::lc_cpu: return "cpu";
            case age::gb_log_category::lc_events: return "events";
            case age::gb_log_category::lc_hdma: return "hdma";
            case age::gb_log_category::lc_interrupts: return "interrupts";
            case age::gb_log_category::lc_lcd: return "lcd";
            case age::gb_log_category::lc_lcd_oam: return "lcd-oam";
            case age::gb_log_category::lc_lcd_oam_dma: return "lcd-oam-dma";
            case age::gb_log_category::lc_lcd_registers: return "lcd-reg";
            case age::gb_log_category::lc_lcd_vram: return "lcd-vram";
            case age::gb_log_category::lc_memory: return "memory";
            case age::gb_log_category::lc_serial: return "serial";
            case age::gb_log_category::lc_sound: return "sound";
            case age::gb_log_category::lc_sound_registers: return "sound-reg";
            case age::gb_log_category::lc_timer: return "timer";
        }
        return "";
    }
    // Formats one entry as "<cycle> <div bits grouped in nibbles>
    // <category> <message>"; multi-line messages repeat the prefix on
    // every line so the log stays grep-able.
    std::string format_log_entry(const age::gb_log_entry& entry)
    {
        unsigned div = entry.m_div_clock;
        // The 16-bit divider, split into four 4-bit groups for display.
        std::bitset<4> div4{div >> 12};
        std::bitset<4> div3{div >> 8};
        std::bitset<4> div2{div >> 4};
        std::bitset<4> div1{div};
        std::stringstream prefix_str;
        prefix_str << std::setw(8) << entry.m_clock << std::setw(0)
                   << " " << div4 << "'" << div3 << "'" << div2 << "'" << div1
                   << " " << std::left << std::setw(11) << category_str(entry.m_category) << std::setw(0) << std::right
                   << " ";
        auto prefix = prefix_str.str();
        std::stringstream result;
        auto start = 0U;
        auto end = entry.m_message.find('\n');
        while (end != std::string::npos)
        {
            auto line = entry.m_message.substr(start, end - start);
            result << prefix << line << '\n';
            start = end + 1;
            end = entry.m_message.find('\n', start);
        }
        // Final (or only) line after the last newline.
        result << prefix << entry.m_message.substr(start, end) << '\n';
        return result.str();
    }
} // namespace
// Human-readable label for a device type, used in log headers.
std::string age::tester::get_device_type_string(age::gb_device_type device_type)
{
    switch (device_type)
    {
        case age::gb_device_type::auto_detect:
            return "(auto-detect)";
        case age::gb_device_type::dmg:
            return "(dmg)";
        case age::gb_device_type::cgb_abcd:
            return "(cgb-a/b/c/d)";
        case age::gb_device_type::cgb_e:
            return "(cgb-e)";
    }
    // Unreachable for valid enum values; keeps compilers satisfied.
    return "(unknown)";
}
// Writes all collected log entries for one rom/device run to log_path.
// No-op when there are no entries; when AGE_COMPILE_LOGGER is not
// defined the formatted text is built but discarded.
void age::tester::write_log(const std::filesystem::path& log_path,
                            const std::vector<gb_log_entry>& log_entries,
                            const std::filesystem::path& rom_path,
                            gb_device_type device_type)
{
    if (log_entries.empty())
    {
        return;
    }
    // Assemble the whole file in memory first: header, then one
    // formatted group of lines per entry.
    std::stringstream log;
    log << log_header(rom_path, device_type);
    std::for_each(begin(log_entries),
                  end(log_entries),
                  [&](const auto& entry) {
                      log << format_log_entry(entry);
                  });
#ifdef AGE_COMPILE_LOGGER
    std::ofstream file(log_path);
    file << log.str();
#else
    AGE_UNUSED(log_path);
#endif
}
|
<reponame>ciarand/drone<filename>server/datastore/migrate/helper.go
package migrate
import (
"strconv"
"strings"
"github.com/russross/meddler"
)
// transform is a helper function that transforms sql
// statements to work with multiple database types.
// SQLite-flavoured DDL is rewritten for MySQL (AUTO_INCREMENT) and
// PostgreSQL (SERIAL primary keys, BYTEA blobs); other drivers get
// the statement back unchanged.
func transform(stmt string) string {
	switch meddler.Default {
	case meddler.MySQL:
		stmt = strings.Replace(stmt, "AUTOINCREMENT", "AUTO_INCREMENT", -1)
	case meddler.PostgreSQL:
		stmt = strings.Replace(stmt, "INTEGER PRIMARY KEY AUTOINCREMENT", "SERIAL PRIMARY KEY", -1)
		stmt = strings.Replace(stmt, "BLOB", "BYTEA", -1)
	}
	return stmt
}
// rebind is a helper function that changes the sql bind type
// from ? to $1, $2, ... for postgres queries.  Queries for every
// other driver are returned unchanged.
func rebind(query string) string {
	if meddler.Default != meddler.PostgreSQL {
		return query
	}

	// strings.Builder replaces the original hand-rolled []byte append
	// loop; Grow reserves space for ~10 extra parameter digits up front.
	var b strings.Builder
	b.Grow(len(query) + 10)

	n := 1
	for i := 0; i < len(query); i++ {
		if query[i] == '?' {
			b.WriteByte('$')
			b.WriteString(strconv.Itoa(n))
			n++
		} else {
			b.WriteByte(query[i])
		}
	}
	return b.String()
}
|
<reponame>JoshEliYang/PriceTag<gh_stars>0
package cn.springmvc.service.impl;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import cn.springmvc.daoLvdi.PaymentDetailDAO;
import cn.springmvc.model.PaymentDetail;
import cn.springmvc.service.PaymentDetailService;
/**
 * Default {@link PaymentDetailService} implementation: thin delegation
 * to {@link PaymentDetailDAO} for payment-detail lookups by date.
 */
@Service
public class PaymentDetailServiceImpl implements PaymentDetailService {
    @Autowired
    private PaymentDetailDAO paymentDetailDAO;
    /** Payment details recorded on the given date (date format defined by the DAO mapping). */
    public List<PaymentDetail> getPaymentDetail(String date) throws Exception {
        return paymentDetailDAO.getPaymentDetail(date);
    }
    /** All details whose order time matches the given date. */
    public List<PaymentDetail> getAllDetailsByOrdtime(String date) throws Exception {
        return paymentDetailDAO.getAllDetailsByOrdtime(date);
    }
}
|
<filename>UFO-level-1/static/js/app.js<gh_stars>0
// from data.js
var tableData = data;

// Use D3 to select the table body
var tbody = d3.select("tbody");

// Render one table row per sighting, one cell per field.  Does not
// clear the table itself — callers decide when to empty it.
function renderTable(rows) {
  rows.forEach(function (sighting) {
    var row = tbody.append("tr");
    Object.entries(sighting).forEach(function ([key, value]) {
      // Append a cell to the row for each value
      row.append("td").text(value);
    });
  });
}

// Initial, unfiltered table.
renderTable(tableData);

// Date filter: re-render with only the rows matching the entered date.
var button = d3.select("#filter-btn");
button.on("click", function () {
  // Prevent the page from refreshing on form submit.
  d3.event.preventDefault();

  // Clear the current table before re-rendering.
  tbody.text("");

  var dateInput = d3.select("#datetime").property("value");
  var filteredData = tableData.filter(function (sighting) {
    return sighting.datetime === dateInput;
  });
  renderTable(filteredData);
});
<reponame>shyga362/projetoPython
def calcular_aumento(salario):
    """Return the salary after the raise.

    Salaries of at least R$ 1250.00 get a 10% raise; lower salaries get
    the bigger 15% raise.  Extracted into a function (logic unchanged)
    so it can be tested without going through input()/print().
    """
    if salario >= 1250.00:
        return salario + (0.10 * salario)
    return salario + (0.15 * salario)


if __name__ == "__main__":
    salario = float(input("Digite seu salario: "))
    print("o seu salario agora é de: {}".format(calcular_aumento(salario)))
|
#! /usr/bin/env python3

# peru "copy" plugin: mirror the fetched module into the sync
# destination, keeping symlinks as symlinks.
#
# distutils is deprecated and removed in Python 3.12; shutil.copytree
# with dirs_exist_ok=True (3.8+) is the supported equivalent of
# distutils.dir_util.copy_tree's merge-into-existing behaviour.
import os
import shutil

shutil.copytree(
    os.environ['PERU_MODULE_PATH'],
    os.environ['PERU_SYNC_DEST'],
    symlinks=True,
    dirs_exist_ok=True)
|
#!/bin/sh
# Build the mutex micro-benchmark and generate its runner script.
tar -xf BenchmarkMutex-1.tar.xz
c++ -std=c++17 BenchmarkMutex.cpp -o BenchmarkMutex -lbenchmark -pthread
echo $? > ~/install-exit-status

# The generated runner must redirect stderr into the log file: the
# original wrote "2>1", which creates a literal file named "1" instead
# of duplicating stdout ("2>&1").
echo "#!/bin/sh
./BenchmarkMutex \$@ > \$LOG_FILE 2>&1
echo \$? > ~/test-exit-status" > mutex
chmod +x mutex
|
# Scratch pass over spreadsheet rows in `x` (defined elsewhere — this
# chunk cannot run standalone): collect the cells that follow each
# 'VARIEDADE' header into `lista`, and dump any irregular run of rows
# for manual inspection.
i = 0
z = 0
k = 0
lista = []
while i < x.length do
  if x[i][0] == 'VARIEDADE'
    z = 0
    i += 2
    k += 1
    # Consume every row up to the next 'Nº DE' header.
    while x[i][0] != 'Nº DE' do
      #print "#{x[i][0]}\n"
      lista.push x[i][0]
      i += 1
      z += 1
    end
  else
    # A run that is neither empty nor the expected 18 rows long is
    # printed so the irregular section can be inspected by hand.
    if z != 0 && z != 18
      i -= 1
      print "Z = #{z}\n"
      z.downto(1) do |e|
        print "#{x[i-e][0]}\n"
      end
      print "#{k}\n=============\n"
      z = 0
    end
    i += 1
  end
end
# =====================================
# Pass 1 over entrada.csv: pull the rows that follow each 'VARIEDADE'
# header into id_list and blank them out of `arquivo` so later passes
# do not see them again.  (Trailing `;0` suppresses large REPL echoes.)
id_list = []
ref_list = []
arquivo = IO.readlines 'entrada.csv';0
i = 0
while i < arquivo.length do
  if arquivo[i] == "VARIEDADE\r\n"
    arquivo[i] = "======\r\n"
    i += 1
    while arquivo[i] != "Nº DE\r\n" do
      id_list.push arquivo[i]
      arquivo[i] = ''
      i += 1
    end
  else
    i += 1
  end
end;0
# Sorted dump for manual verification.
id_list.sort.each do |e|
  puts e
end;0
# Pass 2: rows between each 'REF' header and the next 'CLASSIFICAÇÃO'
# header belong to ref_list.
first_ref = arquivo.find_index "REF\r\n"
i = first_ref
while i < arquivo.length do
  if arquivo[i] == "REF\r\n"
    arquivo[i] = "=======\r\n"
    i += 1
    while arquivo[i] != "CLASSIFICAÇÃO\r\n" do
      ref_list.push arquivo[i]
      arquivo[i] = ''
      i += 1
    end
  else
    i += 1
  end
end;0
ref_list.sort.each do |e|
  puts e
end;0
# =====================================
# Seed (semente) record parsed out of the catalogue spreadsheet.
#
# Fixes vs. the original:
#  - `a, b, c = ''` only assigns '' to the first variable and leaves the
#    rest nil — every optional field is now initialised individually;
#  - :ciclo_dias_ver was missing from attr_accessor although the field
#    exists and is printed — added (backward compatible);
#  - the bare class-level @ivar lines were class-level instance
#    variables, invisible to instances, and have been dropped.
class Semente
  attr_accessor :nome, :sigla, :id, :foto, :nomes_cientificos, :n_sementes_g, :n_dias_germinacao
  attr_accessor :necessidade_kg_ha, :ciclo_dias_ver, :ciclo_dias_inv, :espacamento_linha_plantas, :epoca_plantio_r1
  attr_accessor :epoca_plantio_r2, :epoca_plantio_r3, :descricao, :tamanho
  def initialize(nome, sigla, id)
    @nome, @sigla, @id = nome, sigla, id
    @nomes_cientificos = []
    # One assignment per field: a chained `a = b = ''` would alias one
    # mutable String object across fields.
    @foto = ''
    @n_sementes_g = ''
    @n_dias_germinacao = ''
    @ciclo_dias_inv = ''
    @ciclo_dias_ver = ''
    @espacamento_linha_plantas = ''
    @epoca_plantio_r1 = ''
    @epoca_plantio_r2 = ''
    @epoca_plantio_r3 = ''
    @descricao = ''
    @tamanho = ''
    @necessidade_kg_ha = ''
  end
  # Array form used when dumping a record for inspection.
  def to_a
    ["nome: #{@nome}",
     "sigla: #{@sigla}",
     "id: #{@id}",
     "foto: #{@foto}",
     "nomes_cientificos: #{@nomes_cientificos.to_s}",
     "n_sementes_g: #{@n_sementes_g}",
     "n_dias_germinacao: #{@n_dias_germinacao}",
     "necessidade_kg_ha: #{@necessidade_kg_ha}",
     "ciclo_dias_inv: #{@ciclo_dias_inv}",
     "ciclo_dias_ver: #{@ciclo_dias_ver}",
     "espacamento_linha_plantas: #{@espacamento_linha_plantas}",
     "epoca_plantio_R1: #{@epoca_plantio_r1}",
     "epoca_plantio_R2: #{@epoca_plantio_r2}",
     "epoca_plantio_R3: #{@epoca_plantio_r3}",
     "descricao: #{@descricao}",
     "tamanho: #{@tamanho}"]
  end
  def to_s
    "nome: #{@nome}," +
    "sigla: #{@sigla}," +
    "id: #{@id}," +
    "foto: #{@foto}," +
    "nomes_cientificos: #{@nomes_cientificos.to_s}," +
    "n_sementes_g: #{@n_sementes_g}," +
    "n_dias_germinacao: #{@n_dias_germinacao}," +
    "necessidade_kg_ha: #{@necessidade_kg_ha}," +
    "ciclo_dias_inv: #{@ciclo_dias_inv}," +
    "ciclo_dias_ver: #{@ciclo_dias_ver}," +
    "espacamento_linha_plantas: #{@espacamento_linha_plantas}," +
    "epoca_plantio_R1: #{@epoca_plantio_r1}," +
    "epoca_plantio_R2: #{@epoca_plantio_r2}," +
    "epoca_plantio_R3: #{@epoca_plantio_r3}," +
    "descricao: #{@descricao}," +
    "tamanho: #{@tamanho}"
  end
end;0
plantas = Hash.new
k = 0
# For each harvested name, locate its detail row in `arquivo`, pad the
# surrounding rows so CSV.parse sees complete records, and build a
# Semente keyed by its numeric id.  Each id_list entry is replaced by
# the id that was found (or 0).
# NOTE(review): CSV is used below but `require 'csv'` is not visible in
# this chunk — presumably loaded earlier in the session.
id_list = id_list.map do |e|
  i = 0
  # Find a row starting with the name that contains letters.
  while i < arquivo.length && ( !arquivo[i].start_with?(e.gsub(/\r\n?/,'')) || (arquivo[i] =~ /^[^A-Za-z]+$/).nil?) do
    i += 1
  end
  id = 0
  if i != arquivo.length
    # Normalise this row and its neighbours: each line must end with an
    # empty trailing CSV field and begin with an empty leading one.
    unless arquivo[i].end_with?(",\"\"\r\n")
      arquivo[i] = arquivo[i].gsub(/\r\n?/,'') + ",\"\"\r\n"
    end
    2.times do |x|
      unless arquivo[i-(x+1)].start_with?('"",')
        arquivo[i-(x+1)] = '"",' + arquivo[i-(x+1)]
      end
      unless arquivo[i+(x+1)].end_with?(",\"\"\r\n")
        arquivo[i+(x+1)] = arquivo[i+(x+1)].gsub(/\r\n?/,'') + ",\"\"\r\n"
      end
    end
    unless arquivo[i+3].end_with?(",\"\"\r\n")
      arquivo[i+3] = arquivo[i+3].gsub(/\r\n?/,'') + ",\"\"\r\n"
    end
    # Record layout: name two rows up, sigla one row up, id on this
    # row, three scientific names below.
    id = CSV.parse(arquivo[i])[0][0].to_i
    plantas[id] = Semente.new( CSV.parse(arquivo[i - 2])[0][1], CSV.parse(arquivo[i - 1])[0][1], id)
    3.times do |x|
      plantas[id].nomes_cientificos.push(CSV.parse(arquivo[i + (x + 1)])[0][0])
    end
    # Blank the six consumed rows.
    z = i - 2
    6.times do |x|
      arquivo[z + x] = ''
    end
    puts "\n=============================="
    print plantas[id].to_s
  else
    k += 1
    puts "not found #{e}"
  end
  e = id
end;0
print "Not found ID's = #{k} \n"
# Tree (árvore) record from the catalogue.
#
# Fixes vs. the original: `@foto, @classificacao, @bioma,
# @regiao_de_origem = ''` only set @foto and left the rest nil — each
# optional field is now initialised individually.  The bare class-level
# @ivar lines (class-level instance variables, invisible to instances)
# have been dropped.
class Arvore
  attr_accessor :nome, :id, :foto, :nomes_cientificos
  attr_accessor :classificacao, :bioma, :regiao_de_origem, :caracteristicas
  def initialize(nome, id)
    @nome, @id = nome, id
    @nomes_cientificos = []
    # Separate assignments avoid aliasing one mutable String object.
    @foto = ''
    @classificacao = ''
    @bioma = ''
    @regiao_de_origem = ''
    @caracteristicas = ''
  end
  # Array form used when dumping a record for inspection.
  def to_a
    ["nome: #{@nome}",
     "id: #{@id}",
     "foto: #{@foto}",
     "nomes_cientificos: #{@nomes_cientificos.to_s}",
     "classificacao: #{@classificacao}",
     "bioma: #{@bioma}",
     "regiao_de_origem: #{@regiao_de_origem}",
     "caracteristicas: #{@caracteristicas}"]
  end
  def to_s
    "nome: #{@nome}, " +
    "id: #{@id}, " +
    "foto: #{@foto}, " +
    "nomes_cientificos: #{@nomes_cientificos.to_s}, " +
    "classificacao: #{@classificacao}, " +
    "bioma: #{@bioma}, " +
    "regiao_de_origem: #{@regiao_de_origem}, " +
    "caracteristicas: #{@caracteristicas}"
  end
end;0
first_ref -= 13
arvores = Hash.new
k = 0
# Same normalisation as the plantas pass, but for tree references: find
# each name, read id + name from the surrounding rows, then collect the
# scientific names that follow until the next record or marker.
ref_list = ref_list.map do |e|
  i = first_ref
  id = 0
  while i < arquivo.length && ( !arquivo[i].start_with?(e.gsub(/\r\n?/,'')) || (arquivo[i] =~ /^[^A-Za-z]+$/).nil?) do
    i += 1
  end
  if i != arquivo.length
    unless arquivo[i].end_with?(",\"\"\r\n")
      arquivo[i] = arquivo[i].gsub(/\r\n?/,'') + ",\"\"\r\n"
    end
    unless arquivo[i-1].start_with?('"",')
      # BUGFIX: the original prepended arquivo[i-(x+1)], but `x` is not
      # defined until the loop further down — taking this branch raised
      # NameError.  The intended row is the one just above (i-1).
      arquivo[i-1] = '"",' + arquivo[i-1]
    end
    id = CSV.parse(arquivo[i])[0][0].to_i
    nome = CSV.parse(arquivo[i - 1])[0][1]
    arvores[id] = Arvore.new(nome, id)
    x = 1
    # Scientific names run until the next record start or '=======' marker.
    while x + i < arquivo.length && !arquivo[i].empty? && !arquivo[i+x].start_with?('"",') && arquivo[i + x] != "=======\r\n" do
      unless arquivo[i+x].end_with?(",\"\"\r\n")
        arquivo[i+x] = arquivo[i+x].gsub(/\r\n?/,'') + ",\"\"\r\n"
      end
      arvores[id].nomes_cientificos.push CSV.parse(arquivo[i + x])[0][0]
      arquivo[i + x] = ''
      x += 1
    end
    arquivo[i] = ''
    arquivo[i - 1] = ''
    puts "\n=============================="
    print arvores[id].to_s
  else
    k += 1
    puts "not found #{e}"
  end
  e = id
end;0
print "Not found ID's = #{k} \n"
# =====================================
# Write the cleaned rows back out to dados2.csv.
# NOTE(review): row 18 is dropped unconditionally below — the reason is
# undocumented; confirm against the input file before changing.
arquivo[18] = ''
arquivo_pronto = open('dados2.csv', 'w')
arquivo_pronto.truncate(0)
arquivo.each do |e|
  arquivo_pronto.write e
end;0
arquivo_pronto.close
# =====================================
# Strips diacritics by mapping each accented Latin character to its plain
# ASCII counterpart via String#tr. Returns nil when str is nil.
# NOTE(review): the source and replacement tables must stay the same length
# and in the same order; verify they remain in sync if either is edited.
def remove_acentos(str)
str.tr('ÀÁÂÃÄÅàáâãäåĀāĂ㥹ÇçĆćĈĉĊċČčÐðĎďĐđÈÉÊËèéêëĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħÌÍÎÏìíîïĨĩĪīĬĭĮįİıĴĵĶķĸĹĺĻļĽľĿŀŁł' +
'ÑñŃńŅņŇňʼnŊŋÒÓÔÕÖØòóôõöøŌōŎŏŐőŔŕŖŗŘřŚśŜŝŞşŠšſŢţŤťŦŧÙÚÛÜùúûüŨũŪūŬŭŮůŰűŲųŴŵÝýÿŶŷŸŹźŻżŽž',
'AAAAAAaaaaaaAaAaAaCcCcCcCcCcDdDdDdEEEEeeeeEeEeEeEeEeGgGgGgGgHhHhIIIIiiiiIiIiIiIiIiJjKkkLlLlLlLlLl' +
'NnNnNnNnnNnOOOOOOooooooOoOoOoRrRrRrSsSsSsSssTtTtTtUUUUuuuuUuUuUuUuUuUuWwYyyYyYZzZzZz') unless str.nil?
end
# ======================================
require 'http'
require 'nokogiri'
# Collect product-page links from the catalogue listing pages.
base_link = 'http://www.plantei.com.br'
base_uri = '/loja/catalogo.php?categoria=4&marca=marca_isla&page='
id_baixar = []
# Bug fix: `1..6.times do ... end` parses as `1..(6.times { ... })`, which
# runs the block via Integer#times with x = 0..5 and then builds an unused
# Range. Iterate the intended page numbers 1..6 explicitly instead.
(1..6).each do |x|
Nokogiri::HTML(HTTP.get(base_link + base_uri + x.to_s).to_s)
.css('#Vitrine')
.css('.produto-imagem').each do |e|
id_baixar.push(e[:href])
end
end;0
# Simple value object holding the scraped description node, demo-image URL
# and title of one product page.
# (Removed the stray bare `@descr` / `@img_demo` / `@title` statements from
# the class body: they only created unused class-level instance variables,
# not instance fields — the fields are created by #initialize.)
class InfoPlantas
attr_accessor :descr, :img_demo, :title
def initialize(descr, img_demo, title)
@descr, @img_demo, @title = descr, img_demo, title
end
end;0
# For each product URL collected above, fetch the page and scrape the
# description block, product title and demo image into InfoPlantas records.
infop = []
id_baixar.each do |e|
x = Nokogiri::HTML(HTTP.get(base_link + e).to_s)
descr = x.css('#descricao')[0]
# Title text is re-encoded to UTF-8 before the string matching done below.
title = x.css('.NomeProduto')[0].children.to_s.encode("UTF-8")
img_demo = x.css('#imgView')[0][:src]
infop.push(InfoPlantas.new(descr, img_demo, title))
end;0
require 'active_support/core_ext/string'
# Match each scraped product title against the `plantas` table (defined
# earlier in the session): a hit requires the title to contain both the
# plant's nome and sigla (case-insensitive, with '/' treated as a space).
# NOTE(review): the color constants (verde, vermelho, azul, end_cor) used
# inside this loop are only defined below under "MISC" — in a plain script
# this would raise NameError; this reads like an interactive-session log
# where they were already defined.
encontrados = []
infop.each_with_index do |e, j|
index_f = nil
plantas.each do |val|
if e.title.mb_chars.downcase.to_s.include?(val[1].nome.nil? ? '' : val[1].nome.mb_chars.downcase.to_s.gsub('/', ' ')) &&
e.title.mb_chars.downcase.to_s.include?(val[1].sigla.nil? ? '' : val[1].sigla.mb_chars.downcase.to_s.gsub('/', ' '))
index_f = val[0]
puts( verde + e.title + ' ==== ' + (val[1].nome.nil? ? '' : val[1].nome) + ' ' +
(val[1].sigla.nil? ? '' : val[1].sigla) + end_cor)
break
end
end
puts azul + j.to_s + ' ' + vermelho + e.title + end_cor if index_f.nil?
encontrados.push([index_f, j]) unless index_f.nil?
end;0
# NOTE(review): `j` is block-local to the loop above, so this line would
# raise NameError when run as a script — probably a leftover debug line.
puts azul + j.to_s + end_cor
# MISC
# ANSI escape sequences used for colored terminal output above.
normal = "\e[0m"
bold = "\e[1m"
verde = "\e[32m"
vermelho = "\e[31m"
azul = "\e[34m"
roxo = "\e[35m"
end_cor = "\e[0m"
# ======================================
# Align two CSV exports of the catalogue: walk `dados` (entrada.csv) and the
# companion description file in lock-step, splitting each record into the
# text present in both files (x) and the text only in `dados` (y), between
# the 'CARACTERÍSTICAS/DIFERENCIAIS' and 'ÉPOCA DE' section markers.
dados = CSV.read 'entrada.csv';0
description_even = CSV.read 'tabula-catalogo-plantas-descricao.csv';0
i = 0
j = 0
k = 0
while i < dados.length
# noinspection RubyScope
if dados[i][0] == 'CARACTERÍSTICAS/DIFERENCIAIS'
i += 1
j += 1
x = ''
y = ''
while i < dados.length && dados[i][0] != 'ÉPOCA DE'
# Lines present in both files accumulate into x.
while i < dados.length && j < description_even.length && dados[i][0] == description_even[j][0]
x += dados[i][0]
i += 1
j += 1
end
#puts "=== CARACTERÍSTICAS IMPAR === \n" + x
# An empty x means the two files went out of sync; dump context in red.
if x == ''
puts "\e[31m LADO 1/#{i+1} => #{dados[i][0]} | LADO 2/#{j+1} => #{description_even[j][0]} \e[0m"
end
# Collect the remaining dados-only lines until resync or the next marker.
if description_even[j].nil?
while i < dados.length && dados[i][0] != 'ÉPOCA DE'
y += dados[i][0]
i += 1
end
else
while i < dados.length && dados[i][0] != description_even[j][0] && dados[i][0] != 'ÉPOCA DE'
y += dados[i][0]
i += 1
end
end
#puts "=== CARACTERÍSTICAS PAR === \n" + y
#gets
x = ''
y = ''
k += 2
end
else
i += 1
end
end
# ======================================
|
def getMinCoins(total):
    """Return the minimum number of coins needed to pay ``total``.

    Uses a greedy pass over the fixed denominations, which is optimal for
    this canonical coin system (1, 5, 10, 20, 50, 100, 500).

    Raises:
        ValueError: if ``total`` is negative (the original ``while total``
            loop would never terminate for negative input).
    """
    if total < 0:
        raise ValueError("total must be non-negative")
    coins = [1, 5, 10, 20, 50, 100, 500]
    num_coins = 0
    # Walk denominations from largest to smallest, taking as many of each
    # as fit; divmod replaces the separate int(/) and int(%) steps.
    for coin in reversed(coins):
        used, total = divmod(total, coin)
        num_coins += used
    return num_coins
# Re-submit jobs every 10 seconds, forever.
while :; do
    bash send_jobs.sh
    sleep 10
done
|
#!/bin/sh
# CocoaPods-style embed script: copies each pod framework into the app
# bundle, strips unsupported architectures and re-signs the result.
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift runtime dylibs for the current platform.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies the framework at $1 into the app's Frameworks folder, resolving
# symlinks, stripping invalid architectures and code-signing the copy.
install_framework()
{
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  # NOTE(review): if none of the branches above matched, $source stays unset
  # and the rsync below copies nothing useful — confirm this is intended.
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  # The binary lives inside the .framework bundle, or bare for dylib-style pods.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# No-op when no identity is expanded or signing is disabled in build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    # OTHER_CODE_SIGN_FLAGS is intentionally unquoted so multiple flags
    # word-split into separate codesign arguments.
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# Removes, in place, every architecture slice of $1 not listed in
# VALID_ARCHS, so a fat binary can be embedded in the current build.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Both configurations embed exactly the same framework set, so handle them
# in a single branch instead of two duplicated blocks.
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/FBAllocationTracker/FBAllocationTracker.framework"
  install_framework "$BUILT_PRODUCTS_DIR/FBMemoryProfiler/FBMemoryProfiler.framework"
  install_framework "$BUILT_PRODUCTS_DIR/FBRetainCycleDetector/FBRetainCycleDetector.framework"
  install_framework "$BUILT_PRODUCTS_DIR/MJExtension/MJExtension.framework"
  install_framework "$BUILT_PRODUCTS_DIR/ReactiveCocoa/ReactiveCocoa.framework"
fi
|
<reponame>gregorgrisza/eir
#include <sstream>
#include <algorithm>
#include "Preprocessor.hpp"
// Lower-cases the sentence in place.
void Preprocessor::normalize(std::string& sentence) {
    for (char& ch : sentence) {
        // std::tolower is given the character as unsigned char, matching the
        // lambda-parameter conversion of the previous implementation.
        ch = static_cast<char>(std::tolower(static_cast<unsigned char>(ch)));
    }
}
// NOTE(review): despite the name, this currently only turns commas into
// spaces; no actual stop-word list is applied — confirm whether that is
// the intended behaviour or a placeholder.
void Preprocessor::removeStopWords(std::string& sentence) {
    std::replace(sentence.begin(), sentence.end(), ',', ' ');
}
// Splits a sentence on whitespace into a set of unique tokens.
std::set<std::string> Preprocessor::tokenize( std::string sentence ) {
    std::set<std::string> tokens;
    std::istringstream stream(sentence);
    std::string word;
    while (stream >> word) {
        tokens.insert(word);
    }
    return tokens;
}
// Strips every digit and punctuation character from the token, in place.
void Preprocessor::sanitize(std::string & token) {
    token.erase(
        std::remove_if(
            token.begin(),
            token.end(),
            // Bug fix: calling isdigit/ispunct with a plain char whose value
            // is negative is undefined behaviour; take the character as
            // unsigned char (consistent with normalize()'s lambda).
            [](unsigned char ch) {
                return std::isdigit(ch) || std::ispunct(ch);
            }
        ), token.end());
}
// Returns a copy of the token set with each token sanitized; tokens that
// become empty after sanitization are dropped.
std::set<std::string> Preprocessor::sanitize( std::set<std::string> const& sentence ) {
    std::set<std::string> cleaned;
    for (auto token : sentence) {          // copy: sanitize mutates in place
        Preprocessor::sanitize(token);
        if (!token.empty()) {
            cleaned.insert(token);
        }
    }
    return cleaned;
}
// Wraps the first word of every '.first-word' element in a
// <span class="first"> so it can be styled separately.
// NOTE(review): the IIFE returns nothing, so `firstWord` is always
// undefined; the name is kept in case other code references it.
var firstWord = (function($) {
  var selector = '.first-word';
  $(selector).each(addWrap);
  function addWrap() {
    var content = $(this).html();
    var index = content.indexOf(' ');
    // Single-word elements have no space: wrap the whole content.
    if (index === -1) {
      index = content.length;
    }
    $(this).html('<span class="first">' + content.substring(0, index) + '</span>' + content.substring(index, content.length));
  }
})(jQuery)
|
def find_pair_with_sum(arr, target):
    """Return ``(pairs, found)`` where ``pairs`` lists every index pair
    ``(i, j)`` with ``i < j`` and ``arr[i] + arr[j] == target``, and
    ``found`` is True iff at least one such pair exists."""
    matches = []
    found = False
    # Enumerate every i < j combination exactly once.
    for i, left in enumerate(arr[:-1]):
        for j in range(i + 1, len(arr)):
            if left + arr[j] == target:
                matches.append((i, j))
                found = True
    return matches, found
if __name__ == '__main__':
    # Demo run: no pair in [1, 2, 5, 9, 10] sums to 13, so this prints
    # ([], False).
    arr = [1, 2, 5, 9, 10]
    target = 13
    print(find_pair_with_sum(arr, target))
# Build API docs (without dependency docs) and publish them into ./docs for
# GitHub Pages: a root redirect into the `jadis` crate plus a CNAME record.
cargo doc --no-deps
rm -rf ./docs
echo "<meta http-equiv=\"refresh\" content=\"0; url=jadis\">" > target/doc/index.html
echo "jadis.tahar.dev" > target/doc/CNAME
cp -r target/doc ./docs
|
#!/usr/bin/env bash
# Fail fast if any required environment variable is missing, then render the
# alertmanager config from its template and (re)start the stack.
# The six copy-pasted checks are collapsed into one loop; messages and the
# exit-on-first-missing behaviour are unchanged.
required_vars=(
  ADMIN_USER
  ADMIN_PASSWORD
  PUSHBULLET_API_KEY
  ALERTMANAGER_SMTP_HOST
  ALERTMANAGER_SMTP_USERNAME
  ALERTMANAGER_SMTP_PASSWORD
)
for var in "${required_vars[@]}"; do
  # ${!var} is bash indirect expansion: the value of the variable named $var.
  if [[ -z "${!var}" ]]; then echo "${var} is not set" && exit 1; fi;
done
# Update alertmanager YAML with environment values
envsubst < ./alertmanager/config-template.yml > ./alertmanager/config.yml
# Docker compose
docker-compose up -d --force-recreate
|
# Transpile every source tree into dist/app and copy non-JS assets alongside.
babel apollo -d dist/app/apollo --presets env,stage-2
babel lib -d dist/app/lib --presets env,stage-2
babel plugins -d dist/app/plugins --presets env,stage-2
babel store -d dist/app/store --presets env,stage-2
babel server.js -d dist/app --presets env,stage-2
babel nuxt.config.js -d dist/app --presets env,stage-2
cp -r fixtures dist/app
# Bug fix: the destination was "dist/appnpm" (a typo); .babelrc belongs in
# dist/app next to the transpiled sources.
cp .babelrc dist/app
cp --parents apollo/**/*.gql dist/app
|
#!/usr/bin/env bash
# Point Nexmo's inbound-SMS (MO) callback at our endpoint.
source "../config.sh"
# Quote every expansion so keys/URLs containing '&', '?' or spaces survive
# word splitting and are sent intact.
curl -X POST "https://rest.nexmo.com/account/settings?api_key=${NEXMO_API_KEY}&api_secret=${NEXMO_API_SECRET}" \
    -d "moCallBackUrl=${SMS_CALLBACK_URL}"
|
// modules are defined as an array
// [ module function, map of requires ]
//
// map of requires is short require name -> numeric require
//
// anything defined in a previous bundle is accessed via the
// orig method which is the require for previous bundles
parcelRequire = (function (modules, cache, entry, globalName) {
// Save the require from previous bundle to this closure if any
var previousRequire = typeof parcelRequire === 'function' && parcelRequire;
var nodeRequire = typeof require === 'function' && require;
function newRequire(name, jumped) {
if (!cache[name]) {
if (!modules[name]) {
// if we cannot find the module within our internal map or
// cache jump to the current global require ie. the last bundle
// that was added to the page.
var currentRequire = typeof parcelRequire === 'function' && parcelRequire;
if (!jumped && currentRequire) {
return currentRequire(name, true);
}
// If there are other bundles on this page the require from the
// previous one is saved to 'previousRequire'. Repeat this as
// many times as there are bundles until the module is found or
// we exhaust the require chain.
if (previousRequire) {
return previousRequire(name, true);
}
// Try the node require function if it exists.
if (nodeRequire && typeof name === 'string') {
return nodeRequire(name);
}
var err = new Error('Cannot find module \'' + name + '\'');
err.code = 'MODULE_NOT_FOUND';
throw err;
}
localRequire.resolve = resolve;
localRequire.cache = {};
var module = cache[name] = new newRequire.Module(name);
modules[name][0].call(module.exports, localRequire, module, module.exports, this);
}
return cache[name].exports;
function localRequire(x){
return newRequire(localRequire.resolve(x));
}
function resolve(x){
return modules[name][1][x] || x;
}
}
// Minimal CommonJS-style module record handed to each module factory.
function Module(moduleName) {
  this.id = moduleName;      // module identifier within this bundle
  this.bundle = newRequire;  // back-reference to the bundle's require
  this.exports = {};         // populated by the module factory
}
newRequire.isParcelRequire = true;
newRequire.Module = Module;
newRequire.modules = modules;
newRequire.cache = cache;
newRequire.parent = previousRequire;
newRequire.register = function (id, exports) {
modules[id] = [function (require, module) {
module.exports = exports;
}, {}];
};
var error;
for (var i = 0; i < entry.length; i++) {
try {
newRequire(entry[i]);
} catch (e) {
// Save first error but execute all entries
if (!error) {
error = e;
}
}
}
if (entry.length) {
// Expose entry point to Node, AMD or browser globals
// Based on https://github.com/ForbesLindesay/umd/blob/master/template.js
var mainExports = newRequire(entry[entry.length - 1]);
// CommonJS
if (typeof exports === "object" && typeof module !== "undefined") {
module.exports = mainExports;
// RequireJS
} else if (typeof define === "function" && define.amd) {
define(function () {
return mainExports;
});
// <script>
} else if (globalName) {
this[globalName] = mainExports;
}
}
// Override the current require with this new one
parcelRequire = newRequire;
if (error) {
// throw error from earlier, _after updating parcelRequire_
throw error;
}
return newRequire;
})({"IfYP":[function(require,module,exports) {
/**
* Helpers.
*/
var s = 1000;
var m = s * 60;
var h = m * 60;
var d = h * 24;
var w = d * 7;
var y = d * 365.25;
/**
* Parse or format the given `val`.
*
* Options:
*
* - `long` verbose formatting [false]
*
* @param {String|Number} val
* @param {Object} [options]
* @throws {Error} throw an error if val is not a non-empty string or a number
* @return {String|Number}
* @api public
*/
// Entry point of the `ms` helper: non-empty strings are parsed into
// milliseconds; finite numbers are formatted to a human-readable duration
// (long form when options.long is truthy, short form otherwise).
module.exports = function(val, options) {
  options = options || {};
  var type = typeof val;
  if (type === 'string' && val.length > 0) {
    return parse(val);
  } else if (type === 'number' && isFinite(val)) {
    return options.long ? fmtLong(val) : fmtShort(val);
  }
  // Anything else (empty string, NaN, Infinity, objects) is a caller error.
  throw new Error(
    'val is not a non-empty string or a valid number. val=' +
      JSON.stringify(val)
  );
};
/**
* Parse the given `str` and return milliseconds.
*
* @param {String} str
* @return {Number}
* @api private
*/
function parse(str) {
str = String(str);
if (str.length > 100) {
return;
}
var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(
str
);
if (!match) {
return;
}
var n = parseFloat(match[1]);
var type = (match[2] || 'ms').toLowerCase();
switch (type) {
case 'years':
case 'year':
case 'yrs':
case 'yr':
case 'y':
return n * y;
case 'weeks':
case 'week':
case 'w':
return n * w;
case 'days':
case 'day':
case 'd':
return n * d;
case 'hours':
case 'hour':
case 'hrs':
case 'hr':
case 'h':
return n * h;
case 'minutes':
case 'minute':
case 'mins':
case 'min':
case 'm':
return n * m;
case 'seconds':
case 'second':
case 'secs':
case 'sec':
case 's':
return n * s;
case 'milliseconds':
case 'millisecond':
case 'msecs':
case 'msec':
case 'ms':
return n;
default:
return undefined;
}
}
/**
* Short format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
/**
 * Short format for `ms`, e.g. "5d", "3h", "10m", "40s", "250ms".
 * Picks the largest unit whose threshold the absolute value reaches.
 */
function fmtShort(ms) {
  var msAbs = Math.abs(ms);
  // Unit thresholds from largest to smallest, paired with their suffix.
  var units = [[d, 'd'], [h, 'h'], [m, 'm'], [s, 's']];
  for (var i = 0; i < units.length; i++) {
    if (msAbs >= units[i][0]) {
      return Math.round(ms / units[i][0]) + units[i][1];
    }
  }
  return ms + 'ms';
}
/**
* Long format for `ms`.
*
* @param {Number} ms
* @return {String}
* @api private
*/
/**
 * Long format for `ms`, e.g. "5 days", "1 second", "250 ms".
 * Delegates pluralization to plural() for the chosen unit.
 */
function fmtLong(ms) {
  var msAbs = Math.abs(ms);
  // Unit thresholds from largest to smallest, paired with their name.
  var units = [[d, 'day'], [h, 'hour'], [m, 'minute'], [s, 'second']];
  for (var i = 0; i < units.length; i++) {
    if (msAbs >= units[i][0]) {
      return plural(ms, msAbs, units[i][0], units[i][1]);
    }
  }
  return ms + ' ms';
}
/**
* Pluralization helper.
*/
/**
 * Pluralization helper: formats `ms` in units of size `n` labelled `name`,
 * adding an "s" once the absolute value reaches 1.5 units (the point where
 * the rounded count is at least 2).
 */
function plural(ms, msAbs, n, name) {
  const suffix = msAbs >= n * 1.5 ? 's' : '';
  return `${Math.round(ms / n)} ${name}${suffix}`;
}
},{}],"MTTc":[function(require,module,exports) {
/**
* This is the common logic for both the Node.js and web browser
* implementations of `debug()`.
*/
function setup(env) {
createDebug.debug = createDebug;
createDebug.default = createDebug;
createDebug.coerce = coerce;
createDebug.disable = disable;
createDebug.enable = enable;
createDebug.enabled = enabled;
createDebug.humanize = require('ms');
Object.keys(env).forEach(key => {
createDebug[key] = env[key];
});
/**
* Active `debug` instances.
*/
createDebug.instances = [];
/**
* The currently active debug mode names, and names to skip.
*/
createDebug.names = [];
createDebug.skips = [];
/**
* Map of special "%n" handling functions, for the debug "format" argument.
*
* Valid key names are a single, lower or upper-case letter, i.e. "n" and "N".
*/
createDebug.formatters = {};
/**
* Selects a color for a debug namespace
* @param {String} namespace The namespace string for the for the debug instance to be colored
* @return {Number|String} An ANSI color code for the given namespace
* @api private
*/
function selectColor(namespace) {
let hash = 0;
for (let i = 0; i < namespace.length; i++) {
hash = ((hash << 5) - hash) + namespace.charCodeAt(i);
hash |= 0; // Convert to 32bit integer
}
return createDebug.colors[Math.abs(hash) % createDebug.colors.length];
}
createDebug.selectColor = selectColor;
/**
* Create a debugger with the given `namespace`.
*
* @param {String} namespace
* @return {Function}
* @api public
*/
function createDebug(namespace) {
let prevTime;
function debug(...args) {
// Disabled?
if (!debug.enabled) {
return;
}
const self = debug;
// Set `diff` timestamp
const curr = Number(new Date());
const ms = curr - (prevTime || curr);
self.diff = ms;
self.prev = prevTime;
self.curr = curr;
prevTime = curr;
args[0] = createDebug.coerce(args[0]);
if (typeof args[0] !== 'string') {
// Anything else let's inspect with %O
args.unshift('%O');
}
// Apply any `formatters` transformations
let index = 0;
args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => {
// If we encounter an escaped % then don't increase the array index
if (match === '%%') {
return match;
}
index++;
const formatter = createDebug.formatters[format];
if (typeof formatter === 'function') {
const val = args[index];
match = formatter.call(self, val);
// Now we need to remove `args[index]` since it's inlined in the `format`
args.splice(index, 1);
index--;
}
return match;
});
// Apply env-specific formatting (colors, etc.)
createDebug.formatArgs.call(self, args);
const logFn = self.log || createDebug.log;
logFn.apply(self, args);
}
debug.namespace = namespace;
debug.enabled = createDebug.enabled(namespace);
debug.useColors = createDebug.useColors();
debug.color = selectColor(namespace);
debug.destroy = destroy;
debug.extend = extend;
// Debug.formatArgs = formatArgs;
// debug.rawLog = rawLog;
// env-specific initialization logic for debug instances
if (typeof createDebug.init === 'function') {
createDebug.init(debug);
}
createDebug.instances.push(debug);
return debug;
}
function destroy() {
const index = createDebug.instances.indexOf(this);
if (index !== -1) {
createDebug.instances.splice(index, 1);
return true;
}
return false;
}
function extend(namespace, delimiter) {
const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace);
newDebug.log = this.log;
return newDebug;
}
/**
* Enables a debug mode by namespaces. This can include modes
* separated by a colon and wildcards.
*
* @param {String} namespaces
* @api public
*/
function enable(namespaces) {
createDebug.save(namespaces);
createDebug.names = [];
createDebug.skips = [];
let i;
const split = (typeof namespaces === 'string' ? namespaces : '').split(/[\s,]+/);
const len = split.length;
for (i = 0; i < len; i++) {
if (!split[i]) {
// ignore empty strings
continue;
}
namespaces = split[i].replace(/\*/g, '.*?');
if (namespaces[0] === '-') {
createDebug.skips.push(new RegExp('^' + namespaces.substr(1) + '$'));
} else {
createDebug.names.push(new RegExp('^' + namespaces + '$'));
}
}
for (i = 0; i < createDebug.instances.length; i++) {
const instance = createDebug.instances[i];
instance.enabled = createDebug.enabled(instance.namespace);
}
}
/**
* Disable debug output.
*
* @return {String} namespaces
* @api public
*/
function disable() {
const namespaces = [
...createDebug.names.map(toNamespace),
...createDebug.skips.map(toNamespace).map(namespace => '-' + namespace)
].join(',');
createDebug.enable('');
return namespaces;
}
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
function enabled(name) {
if (name[name.length - 1] === '*') {
return true;
}
let i;
let len;
for (i = 0, len = createDebug.skips.length; i < len; i++) {
if (createDebug.skips[i].test(name)) {
return false;
}
}
for (i = 0, len = createDebug.names.length; i < len; i++) {
if (createDebug.names[i].test(name)) {
return true;
}
}
return false;
}
/**
* Convert regexp to namespace
*
* @param {RegExp} regxep
* @return {String} namespace
* @api private
*/
function toNamespace(regexp) {
return regexp.toString()
.substring(2, regexp.toString().length - 2)
.replace(/\.\*\?$/, '*');
}
/**
* Coerce `val`.
*
* @param {Mixed} val
* @return {Mixed}
* @api private
*/
function coerce(val) {
if (val instanceof Error) {
return val.stack || val.message;
}
return val;
}
createDebug.enable(createDebug.load());
return createDebug;
}
module.exports = setup;
},{"ms":"IfYP"}],"g5IB":[function(require,module,exports) {
// shim for using process in browser
var process = module.exports = {}; // cached from whatever global is present so that test runners that stub it
// don't break things. But we need to wrap it in a try catch in case it is
// wrapped in strict mode code which doesn't define any globals. It's inside a
// function because try/catches deoptimize in certain engines.
var cachedSetTimeout;
var cachedClearTimeout;
function defaultSetTimout() {
throw new Error('setTimeout has not been defined');
}
function defaultClearTimeout() {
throw new Error('clearTimeout has not been defined');
}
(function () {
try {
if (typeof setTimeout === 'function') {
cachedSetTimeout = setTimeout;
} else {
cachedSetTimeout = defaultSetTimout;
}
} catch (e) {
cachedSetTimeout = defaultSetTimout;
}
try {
if (typeof clearTimeout === 'function') {
cachedClearTimeout = clearTimeout;
} else {
cachedClearTimeout = defaultClearTimeout;
}
} catch (e) {
cachedClearTimeout = defaultClearTimeout;
}
})();
function runTimeout(fun) {
if (cachedSetTimeout === setTimeout) {
//normal enviroments in sane situations
return setTimeout(fun, 0);
} // if setTimeout wasn't available but was latter defined
if ((cachedSetTimeout === defaultSetTimout || !cachedSetTimeout) && setTimeout) {
cachedSetTimeout = setTimeout;
return setTimeout(fun, 0);
}
try {
// when when somebody has screwed with setTimeout but no I.E. maddness
return cachedSetTimeout(fun, 0);
} catch (e) {
try {
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
return cachedSetTimeout.call(null, fun, 0);
} catch (e) {
// same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error
return cachedSetTimeout.call(this, fun, 0);
}
}
}
function runClearTimeout(marker) {
if (cachedClearTimeout === clearTimeout) {
//normal enviroments in sane situations
return clearTimeout(marker);
} // if clearTimeout wasn't available but was latter defined
if ((cachedClearTimeout === defaultClearTimeout || !cachedClearTimeout) && clearTimeout) {
cachedClearTimeout = clearTimeout;
return clearTimeout(marker);
}
try {
// when when somebody has screwed with setTimeout but no I.E. maddness
return cachedClearTimeout(marker);
} catch (e) {
try {
// When we are in I.E. but the script has been evaled so I.E. doesn't trust the global object when called normally
return cachedClearTimeout.call(null, marker);
} catch (e) {
// same as above but when it's a version of I.E. that must have the global object for 'this', hopfully our context correct otherwise it will throw a global error.
// Some versions of I.E. have different rules for clearTimeout vs setTimeout
return cachedClearTimeout.call(this, marker);
}
}
}
var queue = [];
var draining = false;
var currentQueue;
var queueIndex = -1;
function cleanUpNextTick() {
if (!draining || !currentQueue) {
return;
}
draining = false;
if (currentQueue.length) {
queue = currentQueue.concat(queue);
} else {
queueIndex = -1;
}
if (queue.length) {
drainQueue();
}
}
function drainQueue() {
if (draining) {
return;
}
var timeout = runTimeout(cleanUpNextTick);
draining = true;
var len = queue.length;
while (len) {
currentQueue = queue;
queue = [];
while (++queueIndex < len) {
if (currentQueue) {
currentQueue[queueIndex].run();
}
}
queueIndex = -1;
len = queue.length;
}
currentQueue = null;
draining = false;
runClearTimeout(timeout);
}
process.nextTick = function (fun) {
var args = new Array(arguments.length - 1);
if (arguments.length > 1) {
for (var i = 1; i < arguments.length; i++) {
args[i - 1] = arguments[i];
}
}
queue.push(new Item(fun, args));
if (queue.length === 1 && !draining) {
runTimeout(drainQueue);
}
}; // v8 likes predictible objects
function Item(fun, array) {
this.fun = fun;
this.array = array;
}
Item.prototype.run = function () {
this.fun.apply(null, this.array);
};
process.title = 'browser';
process.env = {};
process.argv = [];
process.version = ''; // empty string to avoid regexp issues
process.versions = {};
function noop() {}
process.on = noop;
process.addListener = noop;
process.once = noop;
process.off = noop;
process.removeListener = noop;
process.removeAllListeners = noop;
process.emit = noop;
process.prependListener = noop;
process.prependOnceListener = noop;
process.listeners = function (name) {
return [];
};
process.binding = function (name) {
throw new Error('process.binding is not supported');
};
process.cwd = function () {
return '/';
};
process.chdir = function (dir) {
throw new Error('process.chdir is not supported');
};
process.umask = function () {
return 0;
};
},{}],"jD9Y":[function(require,module,exports) {
var process = require("process");
/* eslint-env browser */
/**
* This is the web browser implementation of `debug()`.
*/
exports.log = log;
exports.formatArgs = formatArgs;
exports.save = save;
exports.load = load;
exports.useColors = useColors;
exports.storage = localstorage();
/**
* Colors.
*/
exports.colors = ['#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33'];
/**
* Currently only WebKit-based Web Inspectors, Firefox >= v31,
* and the Firebug extension (any Firefox version) are known
* to support "%c" CSS customizations.
*
* TODO: add a `localStorage` variable to explicitly enable/disable colors
*/
// eslint-disable-next-line complexity
function useColors() {
// NB: In an Electron preload script, document will be defined but not fully
// initialized. Since we know we're in Chrome, we'll just detect this case
// explicitly
if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) {
return true;
} // Internet Explorer and Edge do not support colors.
if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) {
return false;
} // Is webkit? http://stackoverflow.com/a/16459606/376773
// document is undefined in react-native: https://github.com/facebook/react-native/pull/1632
return typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance || // Is firebug? http://stackoverflow.com/a/398120/376773
typeof window !== 'undefined' && window.console && (window.console.firebug || window.console.exception && window.console.table) || // Is firefox >= v31?
// https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages
typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/) && parseInt(RegExp.$1, 10) >= 31 || // Double check webkit in userAgent just in case we are in a worker
typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/);
}
/**
* Colorize log arguments if enabled.
*
* @api public
*/
function formatArgs(args) {
args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? '%c ' : ' ') + '+' + module.exports.humanize(this.diff);
if (!this.useColors) {
return;
}
const c = 'color: ' + this.color;
args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other
// arguments passed either before or after the %c, so we need to
// figure out the correct index to insert the CSS into
let index = 0;
let lastC = 0;
args[0].replace(/%[a-zA-Z%]/g, match => {
if (match === '%%') {
return;
}
index++;
if (match === '%c') {
// We only are interested in the *last* %c
// (the user may have provided their own)
lastC = index;
}
});
args.splice(lastC, 0, c);
}
/**
* Invokes `console.log()` when available.
* No-op when `console.log` is not a "function".
*
* @api public
*/
// Forwards to console.log when available; the guard plus rest-spread avoids
// relying on console.log.apply, which old IE lacked.
function log(...args) {
  // This hackery is required for IE8/9, where
  // the `console.log` function doesn't have 'apply'
  return typeof console === 'object' && console.log && console.log(...args);
}
/**
* Save `namespaces`.
*
* @param {String} namespaces
* @api private
*/
function save(namespaces) {
try {
if (namespaces) {
exports.storage.setItem('debug', namespaces);
} else {
exports.storage.removeItem('debug');
}
} catch (error) {// Swallow
// XXX (@Qix-) should we be logging these?
}
}
/**
* Load `namespaces`.
*
* @return {String} returns the previously persisted debug modes
* @api private
*/
function load() {
let r;
try {
r = exports.storage.getItem('debug');
} catch (error) {} // Swallow
// XXX (@Qix-) should we be logging these?
// If debug isn't set in LS, and we're in Electron, try to load $DEBUG
if (!r && typeof process !== 'undefined' && 'env' in process) {
r = undefined;
}
return r;
}
/**
* Localstorage attempts to return the localstorage.
*
* This is necessary because safari throws
* when a user disables cookies/localstorage
* and you attempt to access it.
*
* @return {LocalStorage}
* @api private
*/
function localstorage() {
try {
// TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context
// The Browser also has localStorage in the global context.
return localStorage;
} catch (error) {// Swallow
// XXX (@Qix-) should we be logging these?
}
}
// Hook this browser transport into the shared debug core: `./common`
// builds the public debug() factory around the helpers attached to
// `exports` above (log/save/load/localstorage, etc.).
module.exports = require('./common')(exports);
const {
formatters
} = module.exports;
/**
 * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default.
 */
formatters.j = function (v) {
try {
return JSON.stringify(v);
} catch (error) {
// JSON.stringify throws on circular structures (and BigInt); surface
// the failure inline rather than breaking the whole log statement.
return '[UnexpectedJSONParseError]: ' + error.message;
}
};
},{"./common":"MTTc","process":"g5IB"}],"bEsR":[function(require,module,exports) {
"use strict";
const debug = require("debug")("GdfsEvent");
/**
 * Event class.
 * Couples a fixed event name with the element used to dispatch/listen.
 * @constructor
 * @param {HTMLElement} target An element that dispatch
 * @param {string} eventName An event name
 */
function GdfsEvent(target, eventName) {
    this._eventName = eventName;
    this._target = target;
}
/**
 * Listen this event.
 * Async handlers are wrapped so the returned promise is awaited inside
 * the listener; synchronous handlers are registered as-is.
 * @param {Function} handler An event handler
 * @returns {undefined}
 */
GdfsEvent.prototype.listen = function (handler) {
    debug(`GdfsEvent.listen: ${this._eventName}=>${handler.constructor.name}`);
    const isAsync = handler.constructor.name === "AsyncFunction";
    const listener = isAsync
        ? async event => await handler(event)
        : handler;
    this._target.addEventListener(this._eventName, listener);
};
/**
 * Fire this event.
 * Copies `extraData`'s own enumerable keys onto the Event object before
 * dispatching it on the target.
 * @param {object} extraData Extra properties attached to the event.
 * @returns {undefined}
 */
GdfsEvent.prototype.fire = function (extraData = {}) {
    const event = new Event(this._eventName);
    Object.entries(extraData).forEach(([key, value]) => {
        event[key] = value;
    });
    debug(`GdfsEvent.fire: ${this._eventName}`, `extraData: ${JSON.stringify(extraData)}`);
    this._target.dispatchEvent(event);
};
module.exports = GdfsEvent;
},{"debug":"jD9Y"}],"B8ln":[function(require,module,exports) {
"use strict";
const debug = require("debug")("GdfsPath");
debug("loading");
/**
 * Gdfs Path class.
 * Without an argument the instance represents the root directory "/";
 * with one, the pathname is parsed and normalized immediately.
 * @constructor
 * @param {string|undefined} pathname initial path.
 */
function GdfsPath(pathname) {
    this._paths = [];
    this._absolute = true;
    this._lastSlash = true;
    if (pathname != undefined) {
        this.parse(pathname);
    }
}
/**
 * Get a part of path.
 * Returns a copy when this already designates a directory; otherwise the
 * trailing filename element is replaced by an empty element so the result
 * ends with "/".
 * @returns {GdfsPath} A path object including only path.
 */
GdfsPath.prototype.getPathPart = function () {
    if (this._lastSlash) {
        return new GdfsPath(this.toString());
    }
    const parts = this.elements();
    parts.splice(-1, 1, "");
    debug(`getPathPart: paths: ${JSON.stringify(parts)}`);
    return new GdfsPath(parts.join("/"));
};
/**
 * Get filename part of path (the last path element).
 * @returns {string} A filename.
 */
GdfsPath.prototype.getFilename = function () {
    const parts = this.elements();
    return parts[parts.length - 1];
};
/**
 * Get paths elements.
 * An absolute path contributes a leading "" element and a directory spec
 * a trailing "" element, so `elements().join("/")` round-trips the path.
 * @returns {Array<string>} the elements.
 */
GdfsPath.prototype.elements = function () {
    const result = [...this._paths];
    if (this._absolute) {
        result.unshift("");
    }
    if (this._lastSlash) {
        result.push("");
    }
    return result;
};
/**
 * Create a new path object with joining the two paths.
 * Reduces left to right; a later absolute path discards everything
 * accumulated before it. String operands are accepted and converted.
 *
 * @param {Array<GdfsPath>} paths The paths to join.
 * @returns {GdfsPath} The path that was joined.
 */
GdfsPath.merge = (...paths) => {
    debug(`Gdfs.merge: ${paths.map(p => p.toString()).join(" | ")}`);
    return paths.reduce((pathA, pathB, index) => {
        debug(`Gdfs.merge: Reducing #${index}`);
        debug(`Gdfs.merge: pathA: ${pathA.toString()}`);
        debug(`Gdfs.merge: pathB: ${pathB.toString()}`);
        const left = typeof pathA === "string" ? new GdfsPath(pathA) : pathA;
        const right = typeof pathB === "string" ? new GdfsPath(pathB) : pathB;
        const a = left.toString();
        const b = right.toString();
        if (right.isAbsolute()) {
            debug(`returns ${b}`);
            return new GdfsPath(b);
        }
        const joined = new GdfsPath([a, b].join("/"));
        debug(`Gdfs.merge: returns ${joined.toString()}`);
        return joined;
    });
};
// Split a pathname on "/" while honoring backslash escapes (an escaped
// slash does not separate elements). Throws on a dangling trailing
// backslash.
const split_path = pathname => {
    const parts = [];
    let element = "";
    let escaped = false;
    for (let i = 0; i < pathname.length; i++) {
        const c = pathname.charAt(i);
        if (escaped) {
            element += c;
            escaped = false;
        } else if (c === "\\") {
            escaped = true;
        } else if (c === "/") {
            parts.push(element);
            element = "";
        } else {
            element += c;
        }
    }
    parts.push(element);
    if (escaped) {
        throw new Error(`Invalid pathname ${pathname}`);
    }
    if (parts.length == 0) {
        // Unreachable in practice (the push above guarantees one element);
        // kept for parity with the original guard.
        throw new Error("Invalid pathname. It should not be empty.");
    }
    return parts;
};
/**
 * Set a path represented by a string.
 *
 * Normalization performed here:
 *  - runs of "/" collapse into a single separator,
 *  - a leading "" element marks the path absolute and a trailing ""
 *    marks a directory spec (both are recorded as flags, not elements),
 *  - "." and ".." elements are resolved by repeated passes until a pass
 *    makes no further replacement.
 *
 * @param {string} pathname A path name to parse
 * @return {undefined}
 */
GdfsPath.prototype.parse = function (pathname) {
let paths = split_path(pathname.replace(/\/+/g, "/"));
debug(`parse ${JSON.stringify(pathname)} => ${JSON.stringify(paths)}`);
const lastSlash = paths[paths.length - 1] === "";
const absolute = paths[0] === "";
if (lastSlash) {
paths.pop();
}
if (absolute) {
paths.shift();
}
this._lastSlash = !!lastSlash;
this._absolute = !!absolute;
for (;;) {
let replacement = false;
if (paths.length >= 2) {
// Reduce without an initial value: the first accumulator is the first
// element (a string) and is boxed into an array on the first call.
paths = paths.reduce((acc, next) => {
if (!Array.isArray(acc)) {
acc = [acc];
}
const last = acc[acc.length - 1];
if (last !== ".." && next === "..") {
// A ".." cancels the preceding non-".." element.
acc.pop();
replacement = true;
} else if (last !== "." && next === ".") {
// "." elements are dropped. NOTE(review): the `last !== "."` guard
// pushes a "." that directly follows another "." — looks
// unintended; confirm before changing.
replacement = true;
} else {
acc.push(next);
}
return acc;
});
}
// Repeat until a full pass applies no replacement, since one pass can
// expose new "x/.." pairs.
if (!replacement) {
this._paths = paths;
debug(`this._paths:${JSON.stringify(this._paths)}`);
break;
}
}
};
/**
 * Tell whether this path starts from the root.
 * @returns {Boolean} True if this represents an absolute path, otherwise false.
 */
GdfsPath.prototype.isAbsolute = function () {
    return this._absolute;
};
/**
 * Tell whether this path designates a directory (i.e. it was written
 * with a trailing slash).
 * @returns {Boolean} True if this represents a directory, otherwise false.
 */
GdfsPath.prototype.isDirSpec = function () {
    return this._lastSlash;
};
/**
 * Returns a path represented by string.
 * An empty element list stringifies to "/"; otherwise the elements are
 * joined with "/", prefixed for absolute paths and suffixed for
 * directory specs.
 * @returns {string} The path that this is representing.
 */
GdfsPath.prototype.toString = function () {
    if (this._paths.length === 0) {
        return "/";
    }
    const head = this._absolute ? "/" : "";
    const tail = this._lastSlash ? "/" : "";
    return head + this._paths.join("/") + tail;
};
module.exports = GdfsPath;
},{"debug":"jD9Y"}],"YTm0":[function(require,module,exports) {
/*global gapi:false*/
"use strict";
const debug = require("debug")("gdfs");
debug("loading");
const GdfsEvent = require("./gdfs-event.js");
const GdfsPath = require("./gdfs-path.js");
/**
* Gdfs class is an interface for the Google Drive API v3.
*
* The instance manages a current working directory(CWD) and offers methods
* to operate files and folders on the Google Drive by its pathname.
*
* Before creating an instance, the APIs must be loaded by the class method
* [`loadApi`](#.loadApi) with a ClientId and ApiKey.
* These had to be created in a project of Google devloper Console.
*
* And to operates files, user must sign-in with the Google account.
* See [signIn](#.signIn) and [signOut](#.signOut).
*
* Instance's CWD is initialized to the root on constructor. It can be changed
* by [chdir](#chdir) method. When it is changed, the 'oncwdupdate' callback
* is fired. To know where the CWD is, The [cwd](#cwd) method is available.
*
* @constructor
*/
function Gdfs() {
    // No cwd-change callback registered yet; see onCwdUpdate().
    this._oncwdupdate = null;
    // The CWD starts at the Drive root; each entry records a folder's
    // id and display name along the path from root to the current folder.
    const root = {
        id: "root",
        name: ""
    };
    this._currentPath = [root];
}
/**
 * Create Gdfs client.
 * @returns {Gdfs} The google drive interface that has a current directory.
 */
Gdfs.createClient = () => new Gdfs();
/**
 * signInStatusChangeEvent
 * Fired on `window` whenever the gapi signed-in state flips (the listener
 * is wired up inside Gdfs.loadApi) and once after initialization.
 * @type {GdfsEvent}
 */
Gdfs.signInStatusChangeEvent = new GdfsEvent(window, "gdfs-signin-status-change");
/**
 * Load Google Drive APIs and initialize its client object.
 *
 * The loaded all APIs are accessible with a global `gapi` object.
 * But it is wrapped by this class so the users should not use it directly.
 *
 * The first parameter may alternatively be a single gapi.client.init()
 * config object (containing clientId/clientSecret/discoveryDocs/scope);
 * in that case the second parameter must be omitted.
 *
 * @param {string} clientId A clientId from the Developer console.
 * @param {string} clientSecret An clientSecret from the Developer console.
 * @returns {Promise} A promise that will be resolved when the loading completed.
 */
Gdfs.loadApi = (clientId, clientSecret) => {
debug("Start of Gdfs.loadApi");
// Inject the Google API bootstrap script into the page.
const script = document.createElement("SCRIPT");
script.setAttribute("async", "async");
script.setAttribute("src", "https://apis.google.com/js/api.js");
const p = new Promise((resolve, reject) => {
script.addEventListener("load", () => {
// Neutralize onload so the legacy readystatechange fallback below
// cannot trigger this initialization a second time.
script.onload = () => {};
gapi.load("client:auth2", async () => {
debug("initialize gapi.client");
// Overload: a full init-config object passed as `clientId`.
if (typeof clientId === "object" && clientSecret == null && "clientId" in clientId && "clientSecret" in clientId && "discoveryDocs" in clientId && "scope" in clientId) {
await gapi.client.init(clientId);
} else {
await gapi.client.init({
clientId,
clientSecret,
discoveryDocs: ["https://www.googleapis.com/discovery/v1/apis/drive/v3/rest"],
scope: ["https://www.googleapis.com/auth/drive", "https://www.googleapis.com/auth/drive.appdata", "https://www.googleapis.com/auth/drive.file", "https://www.googleapis.com/auth/drive.metadata", "https://www.googleapis.com/auth/drive.metadata.readonly", "https://www.googleapis.com/auth/drive.photos.readonly", "https://www.googleapis.com/auth/drive.readonly"].join(" ")
});
}
// Re-fire the public sign-in event whenever gapi's state flips.
gapi.auth2.getAuthInstance().isSignedIn.listen(() => {
debug("the signed-in-status changed");
Gdfs.signInStatusChangeEvent.fire();
});
// Announce the initial sign-in state once initialization completes.
Gdfs.signInStatusChangeEvent.fire();
debug(`Gdfs.loadApi SignedIn: ${Gdfs.isSignedIn()}`);
debug("Gdfs.loadApi is resolved");
resolve();
});
});
// Legacy fallback (old IE) using readyState-based load detection.
// NOTE(review): if this fires before the "load" listener assigned
// script.onload above, script.onload is undefined here — confirm this
// ordering is intended.
script.addEventListener("readystatechange", () => {
debug(`readystatechange ${script.readyState}`);
if (script.readyState === "complete") {
script.onload();
}
});
script.onerror = event => {
debug("Gdfs.loadApi is rejected");
reject(new URIError(`The script ${event.target.src} is not accessible.`));
};
document.body.appendChild(script);
});
debug("End of Gdfs.loadApi");
return p;
};
/**
 * A mime type of the Google Drive's folder.
 * Folders in Drive are regular file resources distinguished solely by
 * this mime type (compared against throughout this module).
 * @type {string}
 */
Gdfs.mimeTypeFolder = "application/vnd.google-apps.folder";
/**
 * Check if gapi was signed in.
 * @returns {boolean} true if gapi is signed in, otherwise false.
 */
Gdfs.isSignedIn = () => gapi.auth2.getAuthInstance().isSignedIn.get();
/**
 * Sign in to Google Drive via the gapi auth2 flow.
 * @async
 * @returns {undefined}
 */
Gdfs.signIn = async () => {
    const auth = gapi.auth2.getAuthInstance();
    return await auth.signIn();
};
/**
 * Sign out from the Google Drive via the gapi auth2 flow.
 * @async
 * @returns {undefined}
 */
Gdfs.signOut = async () => {
    const auth = gapi.auth2.getAuthInstance();
    return await auth.signOut();
};
/**
 * Get file list (Drive v3 files.list endpoint).
 * @async
 * @param {object} queryParameters The parameters for the API.
 * @returns {Promise<object>} The result of the API.
 */
Gdfs.getFileList = async queryParameters => {
    const { result } = await gapi.client.drive.files.list(queryParameters);
    return result;
};
/**
 * Find a folder by name from a folder.
 * Pagination is followed to exhaustion. API errors are logged and the
 * folders collected so far are returned (best-effort).
 * @async
 * @param {string} parentFolderId A parent folder id.
 * @param {string} folderName A folder name to find
 * @returns {Array<object>} A folder list that found.
 */
Gdfs.findFolderByName = async (parentFolderId, folderName) => {
    debug("No tests pass: Gdfs.findFolderByName");
    const q = [`parents in '${parentFolderId}'`, `name = '${folderName}'`, `mimeType = '${Gdfs.mimeTypeFolder}'`, "trashed = false"].join(" and ");
    const params = {
        "pageSize": 10,
        "pageToken": null,
        "q": q,
        "fields": "nextPageToken, " + "files(id, name, mimeType, webContentLink, webViewLink)"
    };
    debug(`${JSON.stringify(params)}`);
    const folders = [];
    try {
        do {
            const result = await Gdfs.getFileList(params);
            debug(`${JSON.stringify(result)}`);
            folders.push(...result.files);
            params.pageToken = result.nextPageToken;
        } while (params.pageToken != null);
    } catch (err) {
        debug(err.stack);
    }
    return folders;
};
/**
 * Find a file by name from a folder.
 * Pagination is followed to exhaustion; unlike findFolderByName, API
 * errors propagate to the caller.
 * @async
 * @param {string} parentFolderId A parent folder id.
 * @param {string} fileName A file name to find
 * @returns {Promise<Array<object> >} A folder list that found.
 */
Gdfs.findFileByName = async (parentFolderId, fileName) => {
    debug("No tests pass: Gdfs.findFileByName");
    const q = [`parents in '${parentFolderId}'`, `name = '${fileName}'`, "trashed = false"].join(" and ");
    const params = {
        "pageSize": 10,
        "pageToken": null,
        "q": q,
        "fields": "nextPageToken, " + "files(id, name, mimeType, webContentLink, webViewLink)"
    };
    debug(`findFileByName: params: ${JSON.stringify(params, null, " ")}`);
    const files = [];
    do {
        const result = await Gdfs.getFileList(params);
        for (const file of result.files) {
            debug(`findFileByName: found file: ${JSON.stringify(file)}`);
            files.push(file);
        }
        debug(`findFileByName: result.nextPageToken: ${result.nextPageToken}`);
        params.pageToken = result.nextPageToken;
    } while (params.pageToken != null);
    return files;
};
/**
 * Get a single file resource (Drive v3 files.get endpoint).
 * @async
 * @param {object} parameters The parameters for the API.
 * @returns {Promise<object>} The result of the API.
 */
Gdfs.getFileResource = async parameters => {
    const { result } = await gapi.client.drive.files.get(parameters);
    return result;
};
/**
 * Check if the file is a folder.
 * @param {object} file The file object provided from the result
 * of `getFileList` method.
 * @returns {boolean} The file is a folder or not.
 */
Gdfs.isFolder = file => file.mimeType === Gdfs.mimeTypeFolder;
/**
 * Get a file content as text from Google Drive.
 * Even if the file is not a text actually, it could be converted
 * to ArrayBuffer, Blob or JSON to use by Web App.
 * @param {string} fileId The file id to download.
 * @param {boolean|null} acknowledgeAbuse A user acknowledgment
 *  status for the potential to abuse. Optional, defaults to false.
 * @returns {Promise<string>} A downloaded content as text.
 */
Gdfs.downloadFile = (fileId, acknowledgeAbuse) => {
    const endpoint = "https://www.googleapis.com/drive/v3/files/" + fileId;
    return requestWithAuth("GET", endpoint, {
        alt: "media",
        acknowledgeAbuse: acknowledgeAbuse
    });
};
/**
 * Create a new file's resource (metadata only, no content).
 * @param {string} folderId The folder id where the file is created.
 * @param {string} filename The file name.
 * @param {string} mimeType The mime type for the new file.
 * @returns {Promise<object>} The response of the API.
 */
Gdfs.createFile = async (folderId, filename, mimeType) => {
    const metadata = JSON.stringify({
        name: filename,
        mimeType: mimeType,
        parents: [folderId]
    });
    const response = await requestWithAuth("POST", "https://www.googleapis.com/drive/v3/files", {}, {
        "Content-Type": "application/json"
    }, metadata);
    return JSON.parse(response);
};
/**
 * Upload a file content to update a existing file (media upload).
 * @param {string} fileId The file id to update.
 * @param {string} mimeType The content type of the file.
 * @param {any} data The file content.
 * @returns {Promise<object>} The response of the API.
 */
Gdfs.updateFile = async (fileId, mimeType, data) => {
    const endpoint = "https://www.googleapis.com/upload/drive/v3/files/" + fileId;
    const response = await requestWithAuth("PATCH", endpoint, {
        uploadType: "media"
    }, {
        "Content-Type": mimeType
    }, data);
    return JSON.parse(response);
};
/**
 * Issue an authorized XHR against the Drive REST API.
 *
 * NOTE(review): resolves with the raw response text for ANY HTTP status
 * (4xx/5xx included); only network errors and timeouts reject. Callers
 * JSON.parse error bodies too — confirm before tightening.
 *
 * @param {string} method The request method.
 * @param {string} endpoint The endpoint of API.
 * @param {object} queryParams The query parameters.
 * @param {object} headers The request headers.
 * @param {any} body The request body.
 * @returns {Promise<object>} The response of the request.
 */
const requestWithAuth = (method, endpoint, queryParams, headers, body) => {
    const xhr = new XMLHttpRequest();
    xhr.open(method, createUrl(endpoint, queryParams), true);
    for (const [name, value] of Object.entries(headers || {})) {
        xhr.setRequestHeader(name, value);
    }
    xhr.setRequestHeader("Authorization", "Bearer " + getAccessToken());
    xhr.timeout = 30000;
    return new Promise((resolve, reject) => {
        xhr.onload = () => resolve(xhr.responseText);
        xhr.onerror = () => reject(new Error(xhr.statusText));
        xhr.ontimeout = () => reject(new Error("request timeout"));
        xhr.send(body);
    });
};
/**
 * Get access-token on current session from the gapi auth2 current user.
 * @returns {string} The access token.
 */
const getAccessToken = () => {
    const googleUser = gapi.auth2.getAuthInstance().currentUser.get();
    return googleUser.getAuthResponse(true).access_token;
};
/**
 * Create URI including query parameters.
 * Keys mapping to null/undefined values are omitted; values are escaped
 * with encodeURIComponent so reserved characters ("&", "=", "+", ...)
 * cannot corrupt the query string (encodeURI left them unescaped).
 * @param {string} endpoint The endpoint of API.
 * @param {object|null} params The query parameters.
 * @returns {string} The URI.
 */
const createUrl = (endpoint, params) => {
    if (params == null) {
        return endpoint;
    }
    const pairs = Object.keys(params)
        .filter(key => key !== "")
        .map(key => {
            const value = params[key];
            return value == null ? null : `${key}=${encodeURIComponent(value)}`;
        })
        // Drop null-valued entries entirely (previously they produced
        // dangling "&" separators in the joined string).
        .filter(pair => pair != null);
    if (pairs.length === 0) {
        return endpoint;
    }
    return `${endpoint}?${pairs.join("&")}`;
};
/**
 * Get actual root folder id (the concrete id behind the "root" alias).
 * @async
 * @return {Promise<string>} The root folder's id
 */
Gdfs.getActualRootFolderId = async () => {
    const res = await Gdfs.getFileResource({ fileId: "root", fields: "id" });
    debug(`getActualRootFolderId: res ${JSON.stringify(res, null, " ")}`);
    return res.id;
};
/**
 * Set oncwdupdate callback handler.
 * A null/undefined argument leaves the current handler in place.
 * @param {Function|AsyncFunction} handler a function to be invoked when
 * the current directory is changed.
 * @returns {undefined|Function} the previous handler will be returned.
 */
Gdfs.prototype.onCwdUpdate = function (handler) {
    const previous = this._oncwdupdate;
    if (handler != null) {
        this._oncwdupdate = handler;
    }
    return previous;
};
/**
 * Fire cwdUpdate: invoke the registered handler, awaiting a returned
 * Promise. Handler errors are logged and swallowed.
 * @returns {Promise} what the handler returns.
 */
Gdfs.prototype.fireCwdUpdate = async function () {
    if (!this._oncwdupdate) {
        return;
    }
    try {
        const result = this._oncwdupdate();
        if (result == null) {
            return;
        }
        return result.constructor === Promise ? await result : result;
    } catch (err) {
        debug(err.stack);
    }
};
/**
 * Get current folder id (the id of the last CWD path entry).
 * @returns {string} The folder id that the instance is.
 */
Gdfs.prototype.getCurrentFolderId = function () {
    const current = this._currentPath[this._currentPath.length - 1];
    return current.id;
};
/**
 * Get current working directory as path object, built by joining each
 * CWD entry's name with a trailing slash.
 * @returns {GdfsPath} the current working directory.
 */
Gdfs.prototype.getCurrentPath = function () {
    const pathname = this._currentPath.map(entry => `${entry.name}/`).join("");
    const cwd = new GdfsPath(pathname);
    debug(`getCurrentPath: ${cwd.toString()}`);
    return cwd;
};
/**
 * Set current working directory with path object.
 * Rejects relative paths and paths that are not directories.
 * @async
 * @param {GdfsPath} path the new current working directory.
 * @returns {Promise<boolean>} the status of the operation.
 */
Gdfs.prototype.setCurrentPath = async function (path) {
    debug("No tests pass: Gdfs#setCurrentPath");
    debug(`setCurrentPath(${path})`);
    if (!path.isAbsolute()) {
        debug(`The path must be absolute. ${path}`);
        return false;
    }
    const isDir = await this.isDirectory(path);
    if (!isDir) {
        debug(`${path} is not a directory`);
        return false;
    }
    this._currentPath = await this.getPaths(path);
    await this.fireCwdUpdate();
    return true;
};
/**
 * Get an array of path element from root directory.
 * Each element of the given absolute path is resolved against Drive by
 * name; elements that cannot be found are recorded with null fields, so
 * callers can detect a partially-resolved path.
 * @async
 * @param {GdfsPath} path path object.
 * @returns {Promise<Array<object> >} the array of the object having an id and
 * the name.
 */
Gdfs.prototype.getPaths = async function (path) {
debug("No tests pass: Gdfs#getPaths");
debug(`getPaths(${path})`);
if (!path.isAbsolute()) {
debug("getPaths: Error: the path must be absolute");
return null;
}
// The resolved chain always starts at the root alias.
const paths = [{
id: "root",
name: "",
mimeType: Gdfs.mimeTypeFolder
}];
// elements() of an absolute path starts with ""; skip it via slice(1).
for (const name of path.elements().slice(1)) {
if (name === "") {
break;
}
const parent = paths.slice(-1)[0];
debug(`name: ${name}, parent: ${JSON.stringify(parent)}`);
// NOTE(review): this `path` shadows the function parameter from here
// down — intentional but easy to misread.
const path = {
id: null,
name: null,
mimeType: null
};
// Only resolve further when the parent itself was resolved; otherwise
// the remaining entries stay as null placeholders.
if (parent.id != null) {
const children = await Gdfs.findFileByName(parent.id, name);
if (children.length > 0) {
const child = children.shift();
path.id = child.id;
path.name = child.name;
path.mimeType = child.mimeType;
}
}
paths.push(path);
}
debug(`getPaths: ${JSON.stringify(paths, null, " ")}`);
return paths;
};
/**
 * Get the file object that the path points to (the last entry of the
 * resolved path chain).
 * @param {GdfsPath} path the path.
 * @returns {file} the file object of google drive.
 */
Gdfs.prototype.getFileOfPath = async function (path) {
    const paths = await this.getPaths(path);
    return paths ? paths[paths.length - 1] : null;
};
/**
 * Get the current working directory of gdrive-fs as a string.
 * @returns {string} The current working directory.
 */
Gdfs.prototype.cwd = function () {
    const current = this.getCurrentPath();
    return current.toString();
};
/**
 * Changes the current working directory of this client session by
 * merging the given (possibly relative) pathname onto the CWD.
 * @param {string} directory A pathname to operate.
 * @async
 * @returns {Promise<boolean>} the status of the operation.
 */
Gdfs.prototype.chdir = async function (directory) {
    debug("No tests pass: Gdfs#chdir");
    const target = GdfsPath.merge(this.getCurrentPath(), new GdfsPath(directory));
    return await this.setCurrentPath(target);
};
/**
 * Move current directory to root, parent or one of children.
 * Accepts ".", "..", "/", "root", or a concrete folder id. For a
 * concrete id, the folder's parent chain is walked up to the root to
 * rebuild the full CWD entry list.
 * @async
 * @param {string} folderId A destination file id to move.
 * To move to parent, ".." is available.
 * @returns {Promise<boolean>} the status of the operation.
 */
Gdfs.prototype.chdirById = async function (folderId) {
debug(`Gdfs.chdirById( ${folderId} )`);
// "." is a no-op.
if (folderId === ".") {
return true;
}
const currentFolderId = this.getCurrentFolderId();
if (folderId === "/" || folderId === "root") {
// Jump straight to the root alias.
this._currentPath = [{
id: "root",
name: ""
}];
await this.fireCwdUpdate();
} else if (folderId === "..") {
if (currentFolderId === "root") {
debug("Could not move to upper folder from root.");
return false;
}
this._currentPath.pop();
await this.fireCwdUpdate();
} else if (folderId !== currentFolderId) {
// Rebuild the path by walking parent links from the target folder
// up to the actual root.
const paths = [];
const root = await Gdfs.getFileResource({
fileId: "root",
fields: "id"
});
let searchId = folderId;
for (;;) {
const file = await Gdfs.getFileResource({
fileId: searchId,
fields: "id, name, parents, mimeType"
});
if (file == null) {
debug(`folder ${searchId} is not found.`);
return false;
}
if (file.mimeType !== Gdfs.mimeTypeFolder) {
debug(`folder ${searchId} is not folder.`);
return false;
}
debug(JSON.stringify(file, null, " "));
// The actual root id is normalized back to the "root" alias so it
// matches the representation used everywhere else.
if (file.id == root.id) {
paths.unshift({
id: "root",
name: ""
});
break;
} else {
paths.unshift({
id: file.id,
name: file.name
});
}
// Continue the walk from the first parent.
searchId = file.parents.shift();
}
debug(JSON.stringify(paths, null, " "));
this._currentPath = paths;
await this.fireCwdUpdate();
}
return true;
};
/**
 * Check the path is a directory (resolves the path and compares the
 * resulting mime type against the Drive folder mime type).
 * @async
 * @param {GdfsPath} path A path to check
 * @returns {Promise<Boolean>} The path is a directory or not.
 */
Gdfs.prototype.isDirectory = async function (path) {
    debug("No tests pass: Gdfs#isDirectory");
    const file = await this.getFileOfPath(this.toAbsolutePath(path));
    return file ? file.mimeType === Gdfs.mimeTypeFolder : false;
};
/**
 * Convert to absolute path. Already-absolute paths are returned as-is;
 * relative paths are merged onto the current working directory.
 * @param {GdfsPath} path path to be converted
 * @returns {GdfsPath} An absolute path
 */
Gdfs.prototype.toAbsolutePath = function (path) {
    debug("No tests pass: Gdfs#toAbsolutePath");
    if (path.isAbsolute()) {
        return path;
    }
    return GdfsPath.merge(this.getCurrentPath(), path);
};
/**
 * Read the directory to get a list of filenames.
 *
 * This method may not returns all files in the directory.
 * To know all files were listed, check the `pageToken` field in the parameter
 * `options` after the invocation.
 * If the reading was completed, the field would be set `null`.
 * The rest files unread will be returned at the next invocation with same
 * parameters.
 *
 * ```javascript
 * const readDirAll = async path => {
 *     const opts = { pageSize: 10, pageToken: null };
 *     const files = [];
 *     do {
 *         for(const fn of await gdfs.readdir(path, opts)) {
 *             files.push(fn);
 *         }
 *     } while(opts.pageToken != null);
 * };
 * ```
 *
 * @async
 * @since v1.1.0
 * @param {string} path A path to the directory.
 *
 * @param {object|null} options (Optional) options for this method.
 *
 * Only two fields are available:
 *
 * * "pageSize": Set maximum array size that this method returns at one
 * time. The default value 10 will be used if this is not specified or
 * zero or negative value is specified.
 * * "pageToken": Set null to initial invocation to read from first
 * entry. This would be updated other value if the unread files are
 * remained. The value is used for reading next files. User should not
 * set the value except for null. Note that this field is mutated in
 * place by this method.
 *
 * If this parameter is ommited, all files will be read.
 * This is not recomended feature for the directory that has a number of files.
 *
 * @returns {Promise<Array<string> >} returns an array of filenames.
 */
Gdfs.prototype.readdir = async function (path, options) {
// Ensure a trailing slash so the whole path resolves as a directory.
path += path.match(/\/$/) ? "" : "/";
const absPath = this.toAbsolutePath(new GdfsPath(path));
const parentFolder = await this.getFileOfPath(absPath.getPathPart());
debug(`readdir: parentFolder: ${JSON.stringify(parentFolder)}`);
if (!parentFolder || parentFolder.id == null) {
debug(`readdir: The path not exists ${path}`);
return null;
}
if (!Gdfs.isFolder(parentFolder)) {
debug(`readdir: The path is not a folder ${path}`);
return null;
}
const files = [];
// Omitting options means "read everything", looping over all pages.
const readAll = options == null;
options = options || {};
const pageSize = options.pageSize || 10;
let pageToken = options.pageToken || null;
// Fetch one page, append its filenames, and return the next page token.
const readFiles = async params => {
debug(`readdir: params: ${JSON.stringify(params, null, " ")}`);
const result = await Gdfs.getFileList(params);
debug(`readdir: result.nextPageToken: ${result.nextPageToken}`);
for (const file of result.files) {
files.push(file.name);
}
return result.nextPageToken;
};
const params = {
"pageSize": pageSize <= 0 ? 10 : pageSize,
"pageToken": pageToken,
"q": `parents in '${parentFolder.id}' and trashed = false`,
"fields": "nextPageToken, files(name)"
};
if (!readAll) {
// Single page: hand the continuation token back via options.pageToken.
// eslint-disable-next-line require-atomic-updates
options.pageToken = await readFiles(params);
} else {
do {
// eslint-disable-next-line require-atomic-updates
options.pageToken = await readFiles(params);
} while (options.pageToken != null);
}
debug(`readdir: files: ${JSON.stringify(files)}`);
return files;
};
/**
 * Get file's properties.
 * It is a file resource of Google Drive including id, name, mimeType,
 * webContentLink and webViewLink about the file or directory.
 *
 * @async
 * @param {string} path A pathname.
 * @returns {File} The file resource of Google Drive including id, name,
 * mimeType, webContentLink and webViewLink about the file or directory.
 * Returns null when the path (or its parent) does not exist.
 * @since v1.1.0
 */
Gdfs.prototype.stat = async function (path) {
    debug(`Gdfs#stat(${path})`);
    // Normalize away trailing slashes so "/a/b/" resolves like "/a/b".
    path = path.replace(/\/+$/, "");
    const absPath = this.toAbsolutePath(new GdfsPath(path));
    debug(`stat: absPath: ${absPath.toString()}`);
    path = absPath.toString();
    if (path === "/") {
        // The root has no parent folder to search; fetch it directly.
        const file = await Gdfs.getFileResource({
            fileId: "root",
            fields: "id, name, mimeType, webContentLink, webViewLink"
        });
        debug(`stat: file ${JSON.stringify(file)}`);
        return file;
    }
    const parentFolder = await this.getFileOfPath(absPath.getPathPart());
    debug(`stat: parentFolder: ${JSON.stringify(parentFolder)}`);
    if (!parentFolder || parentFolder.id == null) {
        debug(`stat: The path not exists ${path}`);
        return null;
    }
    const filename = absPath.getFilename();
    // Fixed: this debug call contained a mangled "$(unknown)" placeholder
    // instead of interpolating the filename.
    debug(`stat: filename: ${filename}`);
    const files = await Gdfs.findFileByName(parentFolder.id, filename);
    if (files.length === 0) {
        debug(`stat: File not found ${path}`);
        return null;
    }
    const file = files.shift();
    debug(`stat: file ${JSON.stringify(file)}`);
    return file;
};
/**
 * Read a file.
 * The file must have webContentLink in its resource to read the contents,
 * To get the resource, Use [`Gdfs#stat`](#stat).
 *
 * @async
 * @param {string} path A pathname to operate.
 * @returns {Promise<string>} The file content, or null when the path
 * does not exist or the file is not downloadable.
 */
Gdfs.prototype.readFile = async function (path) {
    debug(`Gdfs#readFile(${path})`);
    const absPath = this.toAbsolutePath(new GdfsPath(path));
    const parentFolder = await this.getFileOfPath(absPath.getPathPart());
    debug(`readFile: parentFolder: ${JSON.stringify(parentFolder)}`);
    if (!parentFolder || parentFolder.id == null) {
        debug(`readFile: The path not exists ${path}`);
        return null;
    }
    const filename = absPath.getFilename();
    // Fixed: this debug call contained a mangled "$(unknown)" placeholder
    // instead of interpolating the filename.
    debug(`readFile: filename: ${filename}`);
    const files = await Gdfs.findFileByName(parentFolder.id, filename);
    debug(`readFile: files: ${JSON.stringify(files)}`);
    if (files.length === 0) {
        debug(`File not found ${path}`);
        return null;
    }
    const file = files.shift();
    if (!file.webContentLink) {
        debug(`File is not downloadable ${path}`);
        return null;
    }
    return await Gdfs.downloadFile(file.id);
};
/**
 * Make a directory. Fails (returns null) when the parent path does not
 * exist or an entry of the same name already exists.
 * @async
 * @param {string} path A pathname to operate.
 * @returns {Promise<object>} The API response.
 */
Gdfs.prototype.mkdir = async function (path) {
    debug(`mkdir(${path})`);
    path = path.replace(/\/+$/, "");
    const absPath = this.toAbsolutePath(new GdfsPath(path));
    const parentFolder = await this.getFileOfPath(absPath.getPathPart());
    debug(`mkdir: parentFolder ${JSON.stringify(parentFolder)}`);
    if (!parentFolder || parentFolder.id == null) {
        debug(`mkdir: The path not exists ${path}`);
        return null;
    }
    const dirname = absPath.getFilename();
    debug(`mkdir: pathname: ${dirname}`);
    const existing = await Gdfs.findFileByName(parentFolder.id, dirname);
    debug(`mkdir: files: ${JSON.stringify(existing)}`);
    if (existing.length > 0) {
        debug(`mkdir: The directory exists ${path}`);
        return null;
    }
    const result = await Gdfs.createFile(parentFolder.id, dirname, Gdfs.mimeTypeFolder);
    if (parentFolder.id === this.getCurrentFolderId()) {
        await this.fireCwdUpdate();
    }
    return result;
};
/**
 * Remove the directory but not a normal file.
 * The operation will fail, if it is not a directory nor empty, or if it
 * is the root or an ancestor of the current working directory.
 * @async
 * @param {string} path A pathname to operate.
 * @returns {Promise<object|null>} Returns the API response.
 * null means it was failed.
 */
Gdfs.prototype.rmdir = async function (path) {
debug(`rmdir(${path})`);
// Strip trailing slashes so the last element is the directory name.
path = path.replace(/\/+$/, "");
const absPath = this.toAbsolutePath(new GdfsPath(path));
const parentFolder = await this.getFileOfPath(absPath.getPathPart());
debug(`rmdir: parentFolder ${JSON.stringify(parentFolder)}`);
if (!parentFolder || parentFolder.id == null) {
debug(`rmdir: The path not exists ${path}`);
return null;
}
const pathname = absPath.getFilename();
debug(`rmdir: pathname: ${pathname}`);
// An empty filename after stripping means the root was given.
if (pathname === "") {
debug(`rmdir: The root directory cannot be removed ${path}`);
return null;
}
const dires = await Gdfs.findFolderByName(parentFolder.id, pathname);
debug(`rmdir: dires: ${JSON.stringify(dires)}`);
if (dires.length === 0) {
debug(`rmdir: The directory not exists ${path}`);
return null;
}
const dir = dires.shift();
debug(`rmdir: dir ${JSON.stringify(dir)}`);
debug(`rmdir: _currentPath ${JSON.stringify(this._currentPath, null, " ")}`);
// Refuse to remove any folder on the CWD chain, and the actual root
// (whose concrete id differs from the "root" alias used in _currentPath).
if (this._currentPath.filter(parent => parent.id == dir.id).length > 0 || dir.id === (await Gdfs.getActualRootFolderId())) {
debug(`rmdir: The path is a parent ${path}`);
return null;
}
if (dir.mimeType !== Gdfs.mimeTypeFolder) {
debug(`rmdir: The path is not folder ${path}`);
return null;
}
// Only empty folders may be removed; probe for any non-trashed child.
const params = {
"q": `parents in '${dir.id}' and trashed = false`,
"fields": "files(id)"
};
debug(`rmdir: params ${JSON.stringify(params)}`);
const children = await Gdfs.getFileList(params);
debug(`rmdir: children: ${JSON.stringify(children, null, " ")}`);
if (children.files.length > 0) {
debug(`rmdir: The folder is not empty ${path}`);
return null;
}
const response = await gapi.client.drive.files.delete({
fileId: dir.id
});
// Refresh listeners when the removal changed the CWD's contents.
if (parentFolder.id === this.getCurrentFolderId()) {
await this.fireCwdUpdate();
}
return response.result;
};
/**
 * Delete the file but not directory.
 * This does not move the file to the trash-box.
 *
 * @async
 * @param {string} path A pathname to operate.
 * @returns {Promise<object|null>} Returns the API response.
 * null means it was failed.
 */
Gdfs.prototype.unlink = async function (path) {
    debug(`unlink(${path})`);
    const absPath = this.toAbsolutePath(new GdfsPath(path));
    const parentFolder = await this.getFileOfPath(absPath.getPathPart());
    debug(`unlink: parentFolder ${JSON.stringify(parentFolder)}`);
    if (!parentFolder || parentFolder.id == null) {
        debug(`unlink: The path not exists ${path}`);
        return null;
    }
    const filename = absPath.getFilename();
    debug(`unlink: pathname: ${filename}`);
    const candidates = await Gdfs.findFileByName(parentFolder.id, filename);
    debug(`unlink: files: ${JSON.stringify(candidates)}`);
    if (candidates.length === 0) {
        debug(`unlink: The file not exists ${path}`);
        return null;
    }
    const file = candidates.shift();
    if (file.mimeType === Gdfs.mimeTypeFolder) {
        debug(`unlink: The file is a folder ${path}`);
        return null;
    }
    const response = await gapi.client.drive.files.delete({
        fileId: file.id
    });
    if (parentFolder.id === this.getCurrentFolderId()) {
        await this.fireCwdUpdate();
    }
    return response.result;
};
/**
 * Write a file: create it when absent, otherwise overwrite its content.
 * @async
 * @param {string} path A pathname to operate.
 * @param {string} mimeType A mimeType of the file content.
 * @param {string} data A file content.
 * @returns {Promise<object|null>} The API response; null when the parent
 *     folder does not exist or the path already names a directory.
 */
Gdfs.prototype.writeFile = async function (path, mimeType, data) {
    debug(`Gdfs#writeFile(${path},${mimeType}, ${JSON.stringify(data)})`);
    const absPath = this.toAbsolutePath(new GdfsPath(path));
    const parentFolder = await this.getFileOfPath(absPath.getPathPart());
    debug(`writeFile: parentFolder: ${JSON.stringify(parentFolder)}`);
    if (!parentFolder || parentFolder.id == null) {
        debug(`writeFile: The path not exists ${path}`);
        return null;
    }
    const filename = absPath.getFilename();
    // BUGFIX: the debug message used a broken "$(...)" placeholder instead of
    // a template interpolation, so the filename was never actually logged.
    debug(`writeFile: filename: ${filename}`);
    const files = await Gdfs.findFileByName(parentFolder.id, filename);
    debug(`writeFile: files: ${JSON.stringify(files)}`);
    if (files.length === 0) {
        // No existing entry: create an empty file, then upload the content.
        const file = await Gdfs.createFile(parentFolder.id, filename, mimeType);
        const result = await Gdfs.updateFile(file.id, mimeType, data);
        if (parentFolder.id === this.getCurrentFolderId()) {
            await this.fireCwdUpdate();
        }
        return result;
    }
    const file = files.shift();
    if (file.mimeType === Gdfs.mimeTypeFolder) {
        debug(`writeFile: The path already exists as directory ${path}`);
        return null;
    }
    const result = await Gdfs.updateFile(file.id, mimeType, data);
    if (parentFolder.id === this.getCurrentFolderId()) {
        await this.fireCwdUpdate();
    }
    return result;
};
module.exports = Gdfs;
},{"debug":"jD9Y","./gdfs-event.js":"bEsR","./gdfs-path.js":"B8ln"}],"V4rw":[function(require,module,exports) {
"use strict";
const debug = require("debug")("gdfs-ui");
debug("loading");
const Gdfs = require("./gdfs.js");
const GdfsEvent = require("./gdfs-event.js");
/**
 * class GdfsUi
 * A file-listing UI model bound to a Gdfs client: it tracks paging state
 * for the current directory and fires DOM events on the given element
 * when the file list or the current directory changes.
 * @constructor
 * @param {HTMLElement} element The root element that UI widget will be built.
 * @param {object} opt Optional callbacks: onFileListChange, onCurrentDirChange.
 */
function GdfsUi(element, opt) {
    debug("Start of GdfsUi ctor");
    this._element = element;
    this._gdfs = new Gdfs();
    this._pageSize = 10;     // files requested per Drive API page
    this._trashed = false;   // when true, list trashed files instead
    this._pageToken = null;  // Drive paging token; null = listing complete
    this._files = [];        // files accumulated so far for the cwd
    this._opt = {
        onFileListChange: () => {},
        onCurrentDirChange: () => {}
    };
    opt = opt || {};
    // Copy only the recognized callback keys from opt.
    for (const key of Object.keys(this._opt)) {
        if (key in opt) {
            this._opt[key] = opt[key];
        }
    } // events
    this._fileListChangeEvent = new GdfsEvent(this._element, "gdfsui-filelist-change");
    this._currentDirChangeEvent = new GdfsEvent(this._element, "gdfsui-current-dir-change");
    this._currentDirChangeEvent.listen(this._opt.onCurrentDirChange);
    this._fileListChangeEvent.listen(this._opt.onFileListChange);
    // Reload the listing whenever the Gdfs client changes directory.
    this._gdfs.onCwdUpdate(async () => {
        debug("Start of _gdfs.onCwdUpdate");
        await this.reload();
        this._currentDirChangeEvent.fire();
        debug("End of _gdfs.onCwdUpdate");
    });
    // Jump to the root folder once the user signs in.
    const onSignedInStatusChange = async status => {
        debug("Start of signInStatusChange");
        if (status) {
            await this._gdfs.chdir("/");
        }
        debug("End of signInStatusChange");
    }; // Listen events
    Gdfs.signInStatusChangeEvent.listen(() => onSignedInStatusChange(Gdfs.isSignedIn()));
    onSignedInStatusChange(Gdfs.isSignedIn());
    debug("End of GdfsUi ctor");
}
/**
 * Tell whether every file of the current directory has been listed.
 * @returns {boolean} true when no further page remains to be fetched.
 */
GdfsUi.prototype.isPageCompleted = function () {
    return this._pageToken == null;
};
/**
 * Get current path as full path.
 * @returns {Array<string>} The array of file ids from root to cwd.
 */
GdfsUi.prototype.getCurrentPath = function () {
    const { _currentPath } = this._gdfs;
    return _currentPath;
};
/**
 * Get the files in the index range [begin, end) of the current listing.
 * Additional pages are fetched on demand until the range is filled or
 * the directory listing is exhausted.
 * @param {number} begin a file index (inclusive)
 * @param {number} end a file index (exclusive)
 * @returns {Array<File>} the files in current page.
 */
GdfsUi.prototype.getFiles = async function (begin, end) {
    debug(`GdfsUi#getFiles param:{begin:${begin}, end:${end})}`);
    debug(`_pageToken: ${this._pageToken}`);
    // A null token means a fresh listing: discard the old cache.
    if (this._pageToken == null) {
        this._files = [];
    }
    for (;;) {
        if (this._files.length >= end) {
            break;
        }
        await this.readDir();
        this._fileListChangeEvent.fire();
        if (this._pageToken == null) {
            break;
        }
    }
    return this._files.slice(begin, end);
};
/**
 * Fetch the next page of files in the current directory and append
 * them to the internal file list, updating the paging token.
 * @async
 * @returns {Promise<undefined>}
 */
GdfsUi.prototype.readDir = async function () {
    const conditions = [
        `parents in '${this._gdfs.getCurrentFolderId()}'`,
        `trashed = ${this._trashed ? "true" : "false"}`,
    ];
    const result = await Gdfs.getFileList({
        "pageSize": this._pageSize,
        "pageToken": this._pageToken,
        "q": conditions.join(" and "),
        "fields": "nextPageToken, files(id, name, mimeType, webContentLink, webViewLink)"
    });
    this._pageToken = result.nextPageToken;
    this._files.push(...result.files);
};
/**
 * Reload the file list from the first page and notify listeners.
 * @async
 * @returns {Promise} to sync
 */
GdfsUi.prototype.reload = async function () {
    // Drop any partially-read paging state before re-reading.
    this._files = [];
    this._pageToken = null;
    await this.readDir();
    this._fileListChangeEvent.fire();
};
/**
 * Move current directory to root, parent or one of children.
 * @param {string} folderId A destination file id to move.
 *     To move to parent, ".." is available.
 * @returns {Promise<undefined>}
 */
GdfsUi.prototype.chdirById = async function (folderId) {
    const fs = this._gdfs;
    await fs.chdirById(folderId);
};
/**
 * Get file resource.
 * @async
 * @param {string} fileId The file id of the target file.
 * @returns {Promise<object>} The resource object.
 */
GdfsUi.prototype.getFileResource = async function (fileId) {
    const params = { "fileId": fileId };
    return await Gdfs.getFileResource(params);
};
/**
 * Upload a local File object into the current directory.
 * The content is read with a FileReader, then written via writeFile.
 * @param {File} file the file to be uploaded.
 * @return {Promise<File>} an uploaded File.
 */
GdfsUi.prototype.uploadFile = function (file) {
    return new Promise((resolve, reject) => {
        const reader = new FileReader();
        reader.onload = async () => {
            const written = await this.writeFile(file.name, file.type, reader.result);
            resolve(written);
        };
        reader.onerror = event => {
            const message = ["Fail to upload. Could not read the file ", `${file.name}(${event.type}).`].join("");
            reject(new Error(message));
        };
        reader.readAsArrayBuffer(file);
    });
};
/**
 * Create or overwrite a file to current directory.
 * @param {string} filename The file name.
 * @param {string} mimeType The content type.
 * @param {any} data The file content.
 * @returns {Promise<object>} The response of update.
 */
GdfsUi.prototype.writeFile = async function (filename, mimeType, data) {
    // Find same file in current directory
    const fileIds = this._files.filter(file => file.name === filename).map(file => file.id);
    if (fileIds.length == 0) {
        //Create new file
        // NOTE(review): Gdfs.prototype.writeFile uses Gdfs.createFile's return
        // value directly as an object, while this code parses it as JSON text.
        // Verify which form Gdfs.createFile actually returns.
        const response = await Gdfs.createFile(this._gdfs.getCurrentFolderId(), filename, mimeType);
        const file = JSON.parse(response);
        return await Gdfs.updateFile(file.id, mimeType, data);
    } // Overwrite the file
    return await Gdfs.updateFile(fileIds[0], mimeType, data);
};
module.exports = GdfsUi;
},{"debug":"jD9Y","./gdfs.js":"YTm0","./gdfs-event.js":"bEsR"}],"Focm":[function(require,module,exports) {
"use strict";
const debug = require("debug")("gdrive-fs");
const Gdfs = require("./lib/gdfs.js");
// Attach the sub-modules as properties of the main export.
Gdfs.Ui = require("./lib/gdfs-ui.js");
Gdfs.Path = require("./lib/gdfs-path.js");
try {
    // Obtain the global object; the Function constructor sidesteps the
    // strict-mode `this === undefined` rule in this module's scope.
    const context = Function("return this;")();
    if (context === window) {
        window.Gdfs = Gdfs;
    }
} catch (err) {
    // Not running in a browser: `window` is undefined, so only the
    // CommonJS export below applies.
    debug(err.message);
}
module.exports = Gdfs;
},{"debug":"jD9Y","./lib/gdfs.js":"YTm0","./lib/gdfs-ui.js":"V4rw","./lib/gdfs-path.js":"B8ln"}]},{},["Focm"], null) |
/***********************************************************************
* Copyright (c) 2008-2080 pepstack.com, <EMAIL>
*
* ALL RIGHTS RESERVED.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**********************************************************************/
/**
* @filename cstrbuf.h
* C String Buffer Functions.
*
* @author <NAME> <<EMAIL>>
* @version 0.0.10
* @create 2017-08-28 11:12:10
* @update 2020-01-06 18:20:46
*/
#ifndef _CSTRBUF_H_
#define _CSTRBUF_H_
#if defined(__cplusplus)
extern "C"
{
#endif
#include "unitypes.h"
/* Growth granularity (bytes) used when rounding up cstrbuf allocations. */
#ifndef cstr_allocate_blocksize
# define cstr_allocate_blocksize 16
#endif
/* Boolean result values returned by the cstr_* predicate helpers. */
#define cstr_bool_true 1
#define cstr_bool_false 0
/* Length of str: strlen when maxlen == -1, otherwise strnlen(str, maxlen).
 * A NULL str yields 0. */
#define cstr_length(str, maxlen) (str? ((maxlen)==-1? (int)strlen(str) : (int)strnlen(str, maxlen)) : 0)
static void cstr_varray_free (char ** varr, int maxnum)
{
while (varr && maxnum-- > 0) {
char *p = varr[maxnum];
if (p) {
varr[maxnum] = NULL;
free(p);
}
}
}
/**
 * Uppercase at most num leading characters of s in place,
 * stopping early at the terminating NUL. Returns s.
 */
static char * cstr_toupper (char * s, int num)
{
    int i;
    for (i = 0; i < num && s[i]; i++) {
        s[i] = toupper(s[i]);
    }
    return s;
}
/**
 * Lowercase at most num leading characters of s in place,
 * stopping early at the terminating NUL. Returns s.
 */
static char * cstr_tolower (char * s, int num)
{
    int i;
    for (i = 0; i < num && s[i]; i++) {
        s[i] = tolower(s[i]);
    }
    return s;
}
/**
 * trim specified character in given string
 *
 * Appears to remove every occurrence of c from s in place via a
 * recursive character-swapping scheme, returning a pointer to the
 * first character of the trimmed result inside s's buffer.
 * NOTE(review): recursion depth grows with strlen(s) — confirm it is
 * never called on very long strings (stack-overflow risk).
 */
static char * cstr_trim_chr (char * s, char c)
{
    return (*s==0)?s:(((*s!=c)?(((cstr_trim_chr(s+1,c)-1)==s)?s:(*(cstr_trim_chr(s+1,c)-1)=*s,*s=c,cstr_trim_chr(s+1,c))):cstr_trim_chr(s+1,c)));
}
/**
 * Remove each of the num characters listed in chrs from str in place.
 * Returns a pointer to the trimmed result inside str's buffer.
 */
static char * cstr_trim_chr_mul (char * str, const char * chrs, int num)
{
    char *result = str;
    int i;
    for (i = num - 1; i >= 0; i--) {
        result = cstr_trim_chr(result, chrs[i]);
    }
    return result;
}
/**
 * Skip the leading run of ch characters and return a pointer to the
 * first character that differs (or to the terminating NUL).
 */
static char * cstr_Ltrim_chr (char * str, char ch)
{
    while (*str != 0 && *str == ch) {
        str++;
    }
    return str;
}
/**
 * Trim trailing ch characters from str in place.
 *
 * @param str    NUL-terminated string to modify.
 * @param ch     character to strip from the right end.
 * @param outlen optional; receives the trimmed length.
 * @returns str.
 *
 * BUGFIX: when every character of str equaled ch, the original kept
 * one character and reported length 1; such strings now trim to "".
 */
static char* cstr_Rtrim_chr (char * str, char ch, int *outlen)
{
    char *last = NULL;   /* rightmost character differing from ch */
    char *p = str;
    while (*p) {
        if (*p != ch) {
            last = p;
        }
        p++;
    }
    if (last) {
        last[1] = 0;
        if (outlen) {
            *outlen = (int)(last + 1 - str);
        }
    } else {
        /* empty input, or every character equals ch */
        str[0] = 0;
        if (outlen) {
            *outlen = 0;
        }
    }
    return str;
}
/* Trim ch from both ends: Ltrim first, then Rtrim (reports length). */
#define cstr_LRtrim_chr(str, c, outlen) cstr_Rtrim_chr(cstr_Ltrim_chr((str), (c)), (c), (outlen))
/**
 * Find the leftmost occurrence of c within the first len characters
 * of str (stopping at NUL). Returns a pointer to it, or NULL.
 */
static char * cstr_Lfind_chr (char * str, int len, char c)
{
    int i;
    if (! str) {
        return NULL;
    }
    for (i = 0; i < len && str[i]; i++) {
        if (str[i] == c) {
            return &str[i];
        }
    }
    return NULL;
}
/**
 * Find the rightmost occurrence of c within the first len characters
 * of str. Returns a pointer to it, or NULL.
 */
static char * cstr_Rfind_chr (char * str, int len, char c)
{
    int i;
    if (! str) {
        return NULL;
    }
    for (i = len - 1; i >= 0; i--) {
        if (str[i] == c) {
            return &str[i];
        }
    }
    return NULL;
}
/**
 * Find the first position within the first len characters of str
 * (stopping at NUL) holding any of the nch characters in chrs.
 * Returns a pointer to it, or NULL.
 */
static char * cstr_find_chrs (char * str, int len, const char *chrs, int nch)
{
    int i;
    if (! str) {
        return NULL;
    }
    for (i = 0; i < len && str[i]; i++) {
        if (memchr(chrs, str[i], (size_t) nch)) {
            return &str[i];
        }
    }
    return NULL;
}
/**
 * Skip leading white-space and return a pointer to the first
 * non-space character. A NULL str is returned unchanged.
 *
 * int isspace(char c);
 * Standard white-space characters are:
 *  ' '  (0x20) space (SPC) ascii=32
 *  '\t' (0x09) horizontal tab (TAB)
 *  '\n' (0x0a) newline (LF)
 *  '\v' (0x0b) vertical tab (VT)
 *  '\f' (0x0c) feed (FF)
 *  '\r' (0x0d) carriage return (CR)
 */
static char * cstr_Ltrim_whitespace (char *str)
{
    char *p = str;
    if (p) {
        for (; isspace(*p); p++) {
            /* skip */
        }
    }
    return p;
}
/**
 * Overwrite trailing white-space of the len-character string with
 * NULs. Returns the trimmed length.
 */
static int cstr_Rtrim_whitespace (char *str, int len)
{
    int end = len;
    while (end > 0 && isspace(str[end - 1])) {
        str[end - 1] = 0;
        end--;
    }
    return end;
}
/**
 * Trim white-space from both ends in place; returns a pointer to the
 * first non-space character (the right end is NUL-terminated).
 */
static char * cstr_LRtrim_whitespace (char *str)
{
    char *head = cstr_Ltrim_whitespace(str);
    int headlen = cstr_length(head, -1);
    cstr_Rtrim_whitespace(head, headlen);
    return head;
}
/**
 * Narrow the inclusive index range [*start, *end] of str inward past
 * white-space on both sides. Returns the length of the shrunken range
 * (may be <= 0 when the range is entirely white-space).
 */
static int cstr_shrink_whitespace (const char *str, int *start, int *end)
{
    int lo = *start;
    int hi = *end;
    while (lo < *end && isspace(str[lo])) {
        lo++;
    }
    *start = lo;
    while (hi >= lo && isspace(str[hi])) {
        hi--;
    }
    *end = hi;
    return (hi - lo + 1);
}
/**
 * Appears to remove every white-space character from s in place using
 * the same recursive swapping scheme as cstr_trim_chr (removed
 * characters are replaced by ' ', ascii 32, then pushed rightward).
 * Returns a pointer to the first character of the trimmed result.
 * NOTE(review): recursion depth grows with strlen(s) — avoid on very
 * long strings.
 */
static char * cstr_trim_whitespace (char * s)
{
    return (*s==0)?s:((( ! isspace(*s) )?(((cstr_trim_whitespace(s+1)-1)==s)? s : (*(cstr_trim_whitespace(s+1)-1)=*s, *s=32 ,cstr_trim_whitespace(s+1))):cstr_trim_whitespace(s+1)));
}
/**
 * Replace every occurrence of ch in str with rpl, in place.
 * A NULL str is tolerated. Returns str.
 */
static char * cstr_replace_chr (char * str, char ch, char rpl)
{
    char *p;
    for (p = str; p && *p; p++) {
        if (*p == ch) {
            *p = rpl;
        }
    }
    return str;
}
/**
 * Split the first len bytes of str on delim. Each piece is trimmed of
 * surrounding ' ' characters, strdup'ed, and stored into
 * outstrs/outstrslen (at most maxoutnum pieces).
 * Pass outstrs == NULL to only count the pieces (delimiters + 1).
 *
 * @returns the number of pieces stored (or the count).
 * NOTE(review): malloc/strdup results are unchecked; the caller owns
 * the returned strings (free with cstr_varray_free). The function name
 * keeps its historical "slpit" typo for source compatibility.
 */
static int cstr_slpit_chr (const char * str, int len, char delim, char **outstrs, int outstrslen[], int maxoutnum)
{
    char *p;
    const char *s = str;
    int outlen;
    int i = 0;
    int n = 1;
    /* count delimiters inside the len-byte window */
    while (s && (p = strchr(s, delim)) && (p < str +len)) {
        s = p+1;
        n++;
    }
    if (! outstrs) {
        // only to get count
        return n;
    }
    if (n > 0) {
        char *sb;
        /* work on a NUL-terminated private copy of the window */
        char *s0 = malloc(len + 1);
        memcpy(s0, str, len);
        s0[len] = 0;
        sb = s0;
        while (sb && (p = strchr(sb, delim))) {
            *p++ = 0;
            if (i < maxoutnum) {
                // remove whitespaces
                outlen = 0;
                outstrs[i] = strdup( cstr_LRtrim_chr(sb, 32, &outlen) );
                if (outstrslen) {
                    outstrslen[i] = outlen;
                }
                i++;
            } else {
                // overflow than maxoutnum
                break;
            }
            sb = p;
        }
        /* the piece after the last delimiter */
        if (i < maxoutnum) {
            outlen = 0;
            outstrs[i] = strdup( cstr_LRtrim_chr(sb, 32, &outlen) );
            if (outstrslen) {
                outstrslen[i] = outlen;
            }
            i++;
        }
        free(s0);
    }
    return i;
}
/**
 * Replace every occurrence of pattern in original with replacement,
 * writing a newly malloc'ed string to *outresult.
 *
 * @returns the length of the new string on success; 0 when pattern
 *  does not occur, is empty, or allocation fails — in those cases
 *  *outresult is NULL and the caller owns nothing.
 *
 * BUGFIX: the original returned the would-be length even when malloc
 * failed (leaving *outresult NULL), and looped forever on an empty
 * pattern; both cases now return 0.
 */
static int cstr_replace_new (const char *original, const char *pattern, const char *replacement, char **outresult)
{
    size_t const replen = strlen(replacement);
    size_t const patlen = strlen(pattern);
    size_t const orilen = strlen(original);
    size_t patcnt = 0;
    const char * oriptr;
    const char * patloc;
    *outresult = 0;
    if (patlen == 0) {
        /* an empty pattern would match everywhere (infinite loop) — refuse */
        return 0;
    }
    /* find how many times the pattern occurs in the original string */
    for (oriptr = original; (patloc = strstr(oriptr, pattern)) != NULL; oriptr = patloc + patlen) {
        patcnt++;
    }
    if (patcnt) {
        /* allocate memory for the new string */
        size_t len = orilen + patcnt * (replen - patlen);
        char * result = (char *) malloc( sizeof(char) * (len + 1) );
        if (! result) {
            /* allocation failed: report "no result" instead of a bogus length */
            return 0;
        }
        /* copy the original string, replacing all instances of the pattern */
        char * retptr = result;
        for (oriptr = original; (patloc = strstr(oriptr, pattern)) != NULL; oriptr = patloc + patlen) {
            size_t const skplen = patloc - oriptr;
            /* copy the section until the occurence of the pattern */
            memcpy(retptr, oriptr, skplen);
            retptr += skplen;
            /* copy the replacement */
            memcpy(retptr, replacement, replen);
            retptr += replen;
        }
        /* copy the rest of the string. */
        strcpy(retptr, oriptr);
        *outresult = result;
        return (int) len;
    }
    return 0;
}
/**
 * Parse a double from str.
 * @returns 1 on success (*outval set), 0 when slen == 0 or no digits
 *  were parsed, -1 on overflow/underflow.
 */
static int cstr_to_dbl (const char *str, int slen, double *outval)
{
    double parsed;
    char *stop;
    if (! slen) {
        /* null string */
        return 0;
    }
    /* distinguish success/failure after the call */
    errno = 0;
    parsed = strtod(str, &stop);
    if (errno == ERANGE || (errno != 0 && parsed == 0)) {
        /* overflow or underflow */
        return (-1);
    }
    if (stop == str) {
        /* no digits were found */
        return 0;
    }
    *outval = parsed;
    return 1;
}
/**
 * cstr_split_substr
 * Split str in place on the separator string sepstr (seplen must be
 * strlen(sepstr)); pointers to the pieces are stored into subs.
 * @returns the number of pieces stored (at most maxsubs).
 */
static int cstr_split_substr (char *str, const char *sepstr, int seplen, char **subs, int maxsubs)
{
    int count = 0;
    char *cursor = str;
    while (cursor && count < maxsubs) {
        char *hit = strstr(cursor, sepstr);
        if (hit) {
            *hit = 0;          /* terminate the current piece */
            hit += seplen;     /* next piece starts after the separator */
        }
        subs[count++] = cursor;
        cursor = hit;
    }
    return count;
}
/**
 * Split the first slen bytes of str on any of the count separator
 * characters in sepchrs. Each non-empty piece is shrunken past its
 * surrounding white-space, copied into a fresh malloc'ed
 * NUL-terminated string, and stored into outsubs/outsubslen
 * (at most maxsubs pieces).
 *
 * @returns the number of pieces produced.
 * NOTE(review): exits the whole process on malloc failure; the caller
 * owns the returned strings (free with cstr_varray_free).
 */
static int cstr_split_multi_chrs (char *str, int slen, const char *sepchrs, int count, char **outsubs, int outsubslen[], int maxsubs)
{
    char *sub;
    int substart, subend, sublen;
    int k;
    int i = 0;
    int len = 0;
    int start = i;
    int end = start;
    for (; i < slen; i++) {
        /* does str[i] match any separator? */
        for (k = 0; k < count; k++) {
            if (str[i] == sepchrs[k]) {
                end = i;
                break;
            }
        }
        if (k < count && end > start && len < maxsubs) {
            substart = start;
            subend = end - 1;
            sublen = cstr_shrink_whitespace(str, &substart, &subend);
            if (sublen > 0) {
                sub = (char *) malloc(sublen + 1);
                if (! sub) {
                    /* no memory */
                    exit(EXIT_FAILURE);
                }
                memcpy(sub, str + substart, sublen);
                sub[sublen] = 0;
                outsubs[len] = sub;
                outsubslen[len] = sublen;
                if (++len == maxsubs) {
                    return len;
                }
            }
            start = end;
        }
    }
    /* the tail after the last separator */
    end = slen;
    if (len < maxsubs && end > start) {
        substart = start;
        subend = end - 1;
        sublen = cstr_shrink_whitespace(str, &substart, &subend);
        if (sublen > 0) {
            sub = (char *) malloc(sublen + 1);
            if (! sub) {
                /* no memory */
                exit(EXIT_FAILURE);
            }
            memcpy(sub, str + substart, sublen);
            sub[sublen] = 0;
            outsubs[len] = sub;
            outsubslen[len] = sublen;
            len++;
        }
    }
    return len;
}
/**
 * Parse a signed 64-bit integer from str in the given base.
 * @returns 1 on success (*outval set), 0 when slen == 0 or no digits
 *  were parsed, -1 on range error.
 */
static int cstr_to_sb8 (int base, const char *str, int slen, sb8 *outval)
{
    sb8 parsed;
    char *stop;
    if (! slen) {
        /* null string */
        return 0;
    }
    /* distinguish success/failure after the call */
    errno = 0;
    parsed = strtoll(str, &stop, base);
    if ((errno == ERANGE && (parsed == LLONG_MAX || parsed == LLONG_MIN)) || (errno != 0 && parsed == 0)) {
        /* out of range */
        return (-1);
    }
    if (stop == str) {
        /* no digits were found */
        return (0);
    }
    *outval = parsed;
    return 1;
}
/**
 * Parse an unsigned 64-bit integer from str in the given base.
 * @returns 1 on success (*outval set), 0 when slen == 0 or no digits
 *  were parsed, -1 on range error.
 */
static int cstr_to_ub8 (int base, const char *str, int slen, ub8 *outval)
{
    ub8 parsed;
    char *stop;
    if (! slen) {
        /* null string */
        return 0;
    }
    /* distinguish success/failure after the call */
    errno = 0;
    parsed = strtoull(str, &stop, base);
    if ((errno == ERANGE && (parsed == ULLONG_MAX || parsed == 0)) || (errno != 0 && parsed == 0)) {
        /* out of range */
        return (-1);
    }
    if (stop == str) {
        /* no digits were found */
        return (0);
    }
    *outval = parsed;
    return 1;
}
/**
 * Test two NUL-terminated strings for inequality; NULL equals only NULL.
 * @returns cstr_bool_true when different, cstr_bool_false when equal.
 */
static int cstr_notequal (const char *str1, const char *str2)
{
    if (str1 == str2) {
        /* same pointer, or both NULL */
        return cstr_bool_false;
    }
    if (! str1 || ! str2) {
        /* exactly one side is NULL */
        return cstr_bool_true;
    }
    return strcmp(str1, str2) ? cstr_bool_true : cstr_bool_false;
}
/**
 * Test two length-counted strings for inequality; strings of different
 * length never compare equal, and NULL equals only NULL.
 * @returns cstr_bool_true when different, cstr_bool_false when equal.
 */
static int cstr_notequal_len (const char *Astr, int Alen, const char *Bstr, int Blen)
{
    if (Alen != Blen) {
        return cstr_bool_true;
    }
    if (Astr == Bstr) {
        return cstr_bool_false;
    }
    if (! Astr || ! Bstr) {
        /* exactly one side is NULL */
        return cstr_bool_true;
    }
    return strncmp(Astr, Bstr, Alen) ? cstr_bool_true : cstr_bool_false;
}
/**
 * cstr_compare_len
 * Safely compare two strings as strncmp(A, B) do
 *
 * returns:
 *    1: A > B
 *    0: A = B
 *   -1: A < B
 *
 * notes:
 *   1) null string is less than any non-null or empty one.
 *   2) shorter string is less than longer one.
 *   3) two null strings is equal (0 returned).
 *   4) pass -1 for Alen/Blen to have the length measured; pass
 *      non-zero caseignore for a case-insensitive comparison.
 */
static int cstr_compare_len (const char *Astr, int Alen, const char *Bstr, int Blen, int caseignore)
{
    if (Astr == Bstr) {
        return 0;
    }
    if (! Astr) {
        // A < B (B is non-null)
        return (-1);
    }
    if (! Bstr) {
        // A > B (B is null)
        return 1;
    }
    if (! Alen && ! Blen) {
        // A and B are all empty
        return 0;
    }
    if (Alen < 0 && Blen < 0) {
        // same as strcmp
        if (caseignore) {
#ifdef _MSC_VER
            return stricmp(Astr, Bstr);
#else
            return strcasecmp(Astr, Bstr);
#endif
        } else {
            return strcmp(Astr, Bstr);
        }
    }
    if (Alen < 0) {
        // get length of Astr
        Alen = cstr_length(Astr, -1);
    }
    if (Blen < 0) {
        // get length of Bstr
        Blen = cstr_length(Bstr, -1);
    }
    if (Alen > Blen) {
        return 1;
    }
    if (Alen < Blen) {
        return -1;
    }
    // Alen == Blen
    if (caseignore) {
#ifdef _MSC_VER
        return strnicmp(Astr, Bstr, Alen);
#else
        return strncasecmp(Astr, Bstr, Alen);
#endif
    } else {
        return strncmp(Astr, Bstr, Alen);
    }
}
/**
 * Test whether the count-character string str begins with the
 * startlen-character prefix (case-sensitive, byte comparison).
 *
 * cstr_startwith("HelloWorld", 10, "Hello", 5) == cstr_bool_true
 * cstr_startwith("HelloWorld", 10, "World", 5) == cstr_bool_false
 * cstr_startwith("HelloWorld", 10, "hello", 5) == cstr_bool_false
 */
static int cstr_startwith (const char *str, int count, const char *start, int startlen)
{
    if (str == start) {
        return cstr_bool_true;
    }
    if (str && start && startlen <= count && memcmp(str, start, startlen) == 0) {
        return cstr_bool_true;
    }
    return cstr_bool_false;
}
/**
 * Test whether the count-character string str ends with the
 * endlen-character suffix (case-sensitive).
 */
static int cstr_endwith (const char *str, int count, const char *end, int endlen)
{
    if (str == end) {
        return cstr_bool_true;
    }
    if (! str || ! end || endlen > count) {
        return cstr_bool_false;
    }
    /* compare the endlen-character tail of str with end:
     *   str="aaaaBBBB", end="aBBBB" */
    return ! cstr_notequal_len(&str[count - endlen], endlen, end, endlen);
}
/**
 * Test whether str contains sub as a substring (NUL-terminated
 * search via strstr; count/sublen only gate the quick length check).
 */
static int cstr_containwith (const char *str, int count, const char *sub, int sublen)
{
    if (str == sub) {
        return cstr_bool_true;
    }
    if (str && sub && sublen <= count) {
        return strstr((char *) str, sub) != NULL ? cstr_bool_true : cstr_bool_false;
    }
    return cstr_bool_false;
}
/**
 * Find which of the startsnum candidate prefixes str begins with,
 * scanning from the last candidate to the first. startslen may be
 * NULL, in which case each prefix is measured with strlen.
 * @returns the index of the matching prefix, or -1 when none match.
 */
static int cstr_startwith_mul (const char *str, int count, const char *starts[], const int *startslen, int startsnum)
{
    int i;
    for (i = startsnum - 1; i >= 0; i--) {
        const char *prefix = starts[i];
        if (prefix) {
            int prefixlen = startslen ? startslen[i] : (int) strlen(prefix);
            if (cstr_startwith(str, count, prefix, prefixlen)) {
                return i;
            }
        }
    }
    return (-1);
}
/**
 * Find which of the endsnum candidate suffixes str ends with,
 * scanning from the last candidate to the first. endslen may be
 * NULL, in which case each suffix is measured with strlen.
 * @returns the index of the matching suffix, or -1 when none match.
 */
static int cstr_endwith_mul (const char *str, int count, const char *ends[], const int *endslen, int endsnum)
{
    int i;
    for (i = endsnum - 1; i >= 0; i--) {
        const char *suffix = ends[i];
        if (suffix) {
            int suffixlen = endslen ? endslen[i] : (int) strlen(suffix);
            if (cstr_endwith(str, count, suffix, suffixlen)) {
                return i;
            }
        }
    }
    return (-1);
}
/**
 * Search dests[0..destsnum) for an entry equal to the first count
 * characters of str, optionally case-insensitively. An entry matches
 * only when its own length is exactly count.
 * @returns the index of the first match, or -1 when none match.
 */
static int cstr_findstr_in (const char *str, int count, const char *dests[], int destsnum, int caseignore)
{
    int i = 0;
    for (; i < destsnum; i++) {
        const char *dest = dests[i];
        if (str == dest) {
            // found
            return i;
        }
        if (dest && str) {
            // measure at most count+1 chars: a longer dest cannot match
            int len = cstr_length(dest, count + 1);
            if (len == count) {
                if (caseignore) {
#ifdef _MSC_VER
                    if (! strnicmp(str, dest, count)) {
                        // found
                        return i;
                    }
#else
                    if (! strncasecmp(str, dest, count)) {
                        // found
                        return i;
                    }
#endif
                } else {
                    if (! strncmp(str, dest, count)) {
                        // found
                        return i;
                    }
                }
            }
        }
    }
    // not found
    return (-1);
}
/**
 * Test whether the first len characters of str are all decimal digits.
 * @returns cstr_bool_true / cstr_bool_false.
 */
static int cstr_isdigit (const char *str, int len)
{
    int i;
    for (i = 0; i < len; i++) {
        if (! isdigit(str[i])) {
            return cstr_bool_false;
        }
    }
    return cstr_bool_true;
}
/**
 * Copy sourcelen bytes of source into dstbuf at dstoffset and
 * NUL-terminate.
 * @returns 1 when the whole source (plus NUL) fit; 0 when truncated,
 *  in which case the tail is replaced by "..." (buffer permitting).
 */
static int cstr_safecopy (char *dstbuf, size_t dstbufsize, size_t dstoffset, const char *source, size_t sourcelen)
{
    int fits = (dstoffset + sourcelen < dstbufsize);
    if (fits) {
        memcpy(dstbuf + dstoffset, source, sourcelen);
        dstbuf[dstoffset + sourcelen] = '\0';
        /* success full copied */
        return 1;
    }
    if (dstoffset < dstbufsize) {
        /* partial copy filling all remaining space */
        memcpy(dstbuf + dstoffset, source, dstbufsize - dstoffset);
        if (dstbufsize > 3) {
            /* mark the truncation with a trailing ellipsis */
            dstbuf[dstbufsize - 4] = '.';
            dstbuf[dstbufsize - 3] = '.';
            dstbuf[dstbufsize - 2] = '.';
        }
    }
    dstbuf[dstbufsize - 1] = '\0';
    /* error with part copied */
    return 0;
}
/**
 * Read one line from fp into line[] (at most maxlen characters,
 * always NUL-terminated; excess characters on the line are dropped).
 * - A UTF-8 BOM (EF BB BF) at the very start of the file is skipped.
 * - '\r' and '\n' are never stored; reading stops after '\n'.
 * - '\\' characters are dropped too — NOTE(review): this silently
 *   removes backslashes from the data; confirm that is intended.
 * - When ignore_whitespace is non-zero all white-space is dropped.
 * @returns the stored length, or -1 at end of file.
 */
static int cstr_readline (FILE *fp, char line[], size_t maxlen, int ignore_whitespace)
{
    int ch, len = 0;
    if (ftell(fp) == 0) {
        /* probe the first three bytes for a UTF-8 BOM */
        int bomhead[3] = {fgetc(fp), fgetc(fp), fgetc(fp)};
        if (bomhead[0] == 0xEF && bomhead[1] == 0xBB && bomhead[2] == 0xBF) {
            fseek(fp, 3, SEEK_SET);
        } else {
            fseek(fp, 0, SEEK_SET);
        }
    }
    while ((ch = fgetc(fp)) != EOF) {
        if ((size_t) len < maxlen) {
            if (ch != '\r' && ch != '\n' && ch != '\\') {
                if (! ignore_whitespace || ! isspace(ch)) {
                    line[len++] = ch;
                }
            }
        }
        if (ch == '\n') {
            break;
        }
    }
    if (ch == EOF && len == 0) {
        // end of file
        return -1;
    }
    line[len] = 0;
    return len;
}
#define TM_YEAR_IS_LEAP(year) ((year) % 4? 0 : ((year) % 100? 1 : ((year) % 400? 0 : 1)))
/**
 * time_is_valid()
 *
 * Validate a calendar date and wall-clock time.
 *
 * remark:
 *  sec may be up to 60 to allow for a leap second; deciding when to
 *  introduce one is the responsibility of the international earth
 *  rotation and reference systems service.
 *
 * returns:
 *   0: error
 *   1: ok
 */
static int time_is_valid (int year, int mon, int day, int hour, int min, int sec)
{
    if (year < 1900 || year > 9999 ||
        mon < 1 || mon > 12 ||
        day < 1 || day > 31 ||
        hour < 0 || hour > 24 ||
        min < 0 || min > 59 ||
        sec < 0 || sec > 60) {
        return 0;
    }
    switch (mon) {
    case 1: case 3: case 5: case 7: case 8: case 10: case 12:
        /* months with 31 days */
        return 1;
    case 4: case 6: case 9: case 11:
        /* months with 30 days */
        return (day <= 30);
    default:
        /* February */
        if (day < 29) {
            return 1;
        }
        return (day == 29 && TM_YEAR_IS_LEAP(year)) ? 1 : 0;
    }
}
/**
 * Parse a timestamp string into milliseconds since the Unix epoch
 * (interpreted as local time via mktime). The input buffer is
 * modified in place. Accepted formats, optionally wrapped in single
 * quotes:
 *   'YYYY-MM-DD HH:MM:SS.mmm'
 *   'YYYY-MM-DD HH:MM:SS'
 *   'YYYY-MM-DD'
 * @returns milliseconds as ub8, or (ub8)(-1) on any format error.
 */
static ub8 cstr_parse_timestamp (char *timestr)
{
    /**
     * '2019-12-22 12:36:59.065'
     * '2019-12-22 12:36:59'
     * '2019-12-22'
     */
    char Year[5] = {'0', '0', '0', '0', '\0'};
    char Mon[3] = {'0', '0', '\0'};
    char Day[3] = {'0', '0', '\0'};
    char hour[3] = {'0', '0', '\0'};
    char min[3] = {'0', '0', '\0'};
    char sec[3] = {'0', '0', '\0'};
    char msec[4] = {'0', '0', '0', '\0'};
    char *str = cstr_LRtrim_whitespace(timestr);
    char *a = strchr(str, 39);   /* 39 == '\'' */
    char *b = strrchr(str, 39);
    char *hms;
    int len = 0;
    if (a && b) {
        /* strip the single-quote wrapping */
        *a++ = 0;
        *b-- = 0;
        str = a;
        len = (int)(b - a) + 1;
    } else if (a || b) {
        // error char
        return (-1);
    } else {
        len = cstr_length(str, 30);
    }
    if (len == 10) {
        /* 2019-12-22 */
        a = strchr(str, '-');
        b = strrchr(str, '-');
        if (a && b && a - str == 4 && b-a == 3) {
            *a++ = 0;
            *b++ = 0;
            snprintf_chkd_V1(Year, sizeof(Year), "%.*s", 4, str);
            snprintf_chkd_V1(Mon, sizeof(Mon), "%.*s", 2, a);
            snprintf_chkd_V1(Day, sizeof(Day), "%.*s", 2, b);
        } else {
            // error date format
            return (-1);
        }
    } else if (len == 19) {
        /* 2019-12-22 12:36:59 */
        a = strchr(str, 32);   /* 32 == ' ' separates date and time */
        if (a && a - str == 10) {
            *a++ = 0;
            hms = a;
            a = strchr(str, '-');
            b = strrchr(str, '-');
            if (a && b && a - str == 4 && b-a == 3) {
                *a++ = 0;
                *b++ = 0;
                snprintf_chkd_V1(Year, sizeof(Year), "%.*s", 4, str);
                snprintf_chkd_V1(Mon, sizeof(Mon), "%.*s", 2, a);
                snprintf_chkd_V1(Day, sizeof(Day), "%.*s", 2, b);
            } else {
                // error date format
                return (-1);
            }
            a = strchr(hms, ':');
            b = strrchr(hms, ':');
            if (a && b && a - hms == 2 && b-a == 3) {
                *a++ = 0;
                *b++ = 0;
                snprintf_chkd_V1(hour, sizeof(hour), "%.*s", 2, hms);
                snprintf_chkd_V1(min, sizeof(min), "%.*s", 2, a);
                snprintf_chkd_V1(sec, sizeof(sec), "%.*s", 2, b);
            } else {
                // error date format
                return (-1);
            }
        } else {
            // error datetime format
            return (-1);
        }
    } else if (len == 23) {
        /* 2019-12-22 12:36:59.065 */
        a = strchr(str, 32);
        if (a && a - str == 10) {
            *a++ = 0;
            hms = a;
            a = strchr(str, '-');
            b = strrchr(str, '-');
            if (a && b && a - str == 4 && b-a == 3) {
                *a++ = 0;
                *b++ = 0;
                snprintf_chkd_V1(Year, sizeof(Year), "%.*s", 4, str);
                snprintf_chkd_V1(Mon, sizeof(Mon), "%.*s", 2, a);
                snprintf_chkd_V1(Day, sizeof(Day), "%.*s", 2, b);
            } else {
                // error date format
                return (-1);
            }
            a = strchr(hms, ':');
            b = strrchr(hms, ':');
            if (a && b && a - hms == 2 && b-a == 3) {
                char *dot = strchr(b, '.');
                if (!dot || dot - b != 3) {
                    // error stamp format
                    return (-1);
                }
                *a++ = 0;
                *b++ = 0;
                *dot++ = 0;
                snprintf_chkd_V1(hour, sizeof(hour), "%.*s", 2, hms);
                snprintf_chkd_V1(min, sizeof(min), "%.*s", 2, a);
                snprintf_chkd_V1(sec, sizeof(sec), "%.*s", 2, b);
                /* copy up to three millisecond digits */
                if (*dot) {
                    msec[0] = *dot++;
                    if (*dot) {
                        msec[1] = *dot++;
                        if (*dot) {
                            msec[2] = *dot++;
                        }
                    }
                }
            } else {
                // error date format
                return (-1);
            }
        } else {
            // error datetime format
            return (-1);
        }
    } else {
        // error format
        return (-1);
    }
    if (cstr_isdigit(Year, 4) &&
        cstr_isdigit(Mon, 2) &&
        cstr_isdigit(Day, 2) &&
        cstr_isdigit(hour, 2) &&
        cstr_isdigit(min, 2) &&
        cstr_isdigit(sec, 2) &&
        cstr_isdigit(msec, 3)) {
        time_t tsec;
        struct tm t = {0};
        t.tm_year = atoi(Year);
        t.tm_mon = atoi(Mon);
        t.tm_mday = atoi(Day);
        t.tm_hour = atoi(hour);
        t.tm_min = atoi(min);
        t.tm_sec = atoi(sec);
        /* validate with the human-readable year/month before adjusting
         * to struct tm conventions below */
        if (! time_is_valid(t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec)) {
            // invalid time
            return (-1);
        }
        t.tm_year -= 1900;
        t.tm_mon -= 1;
        // since 1970-01-01 UTChh:00:00 (china: hh=8)
        tsec = mktime(&t);
        if (tsec == (time_t)(-1)) {
            fprintf(stderr, "%s\n", strerror(errno));
            return (-1);
        }
        return (ub8)(tsec * 1000 + atoi(msec));
    }
    // error no digit
    return (-1);
}
/**
 * Convert a millisecond epoch timestamp string into local datetime
 * text "YYYY-MM-DD hh:mm:ss[.mmm]" (the ".mmm" part only appears when
 * the millisecond remainder is non-zero).
 *
 * @param stampms milliseconds-since-epoch as a decimal string.
 * @param mslen   length of stampms, or -1 to measure it.
 * @param timestr output buffer of at least 24 bytes.
 * @returns timestr on success, NULL on any parse/convert error.
 *
 * BUGFIX: the Windows branch treated localtime_s()'s 0 (success)
 * return value as failure; the test is now inverted correctly.
 */
static const char * cstr_timestamp_to_datetime (char *stampms, int mslen, char timestr[24])
{
    ub8 stamp = 0;
    time_t tsec;
    int msec;
    struct tm t;
    if (mslen == -1) {
        mslen = cstr_length(stampms, 20);
    }
    if (! cstr_to_ub8(10, stampms, mslen, &stamp)) {
        // error stamp
        return NULL;
    }
    tsec = (time_t)(stamp / 1000);
    msec = (int)(stamp % 1000);
#ifdef _WIN32
    /* localtime_s returns 0 (errno_t) on success */
    if (localtime_s(&t, (const time_t*) &tsec) != 0) {
        // error localtime
        return NULL;
    }
#else
    if (!localtime_r((const time_t*)&tsec, &t)) {
        // error localtime
        return NULL;
    }
#endif
    /* convert struct tm fields to human-readable year/month */
    t.tm_year += 1900;
    t.tm_mon += 1;
    if (! time_is_valid(t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec)) {
        // invalid time
        return NULL;
    }
    if (msec) {
        /* 2012-12-22 17:45:59.875 */
        snprintf_chkd_V1(timestr, 24, "%04d-%02d-%02d %02d:%02d:%02d.%03d", t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec, msec);
    } else {
        snprintf_chkd_V1(timestr, 20, "%04d-%02d-%02d %02d:%02d:%02d", t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec);
    }
    return timestr;
}
/**********************************************************************
* cstrbuf
* C String Buffer Api
*********************************************************************/
#define cstrbuf_error_size_len ((ub4)(-1))
#define cstrbuf_len_max 134217727 /* max size is 128 MB */
#define cstrbuf_size_max (cstrbuf_len_max + 1)
#define cstrbuf_alignsize(len) ((ub4) (((len + cstr_allocate_blocksize) / cstr_allocate_blocksize) * cstr_allocate_blocksize))
#define cstrbufGetLen(s) ((s)? (int)(s)->len : 0)
#define cstrbufGetMaxsz(s) ((s)? (int)(s)->maxsz : 0)
#define cstrbufGetStr(s) ((s)? (s)->str : 0)
#define cstrbufPrint(s) printf("%.*s", cstrbufGetLen(s), cstrbufGetStr(s))
/* cstrbuf: a length-tracked heap string. The header and the character
 * data live in one allocation: str[] is a zero-length trailing array
 * (flexible-array-member idiom), and maxsz/len overlay the single ub8
 * __maxszlen through an anonymous union. */
typedef struct _cstrbuf_t
{
    union {
        ub8 __maxszlen;
        struct {
            ub4 maxsz;   /* allocated capacity of str[] in bytes */
            ub4 len;     /* current string length, excluding the NUL */
        };
    };
    char str[0];
} cstrbuf_t, *cstrbuf;
/**
 * Allocate a cstrbuf with capacity at least maxsz, optionally
 * initialized from len bytes of str.
 * Pass cstrbuf_error_size_len for len to measure str with strlen;
 * maxsz is rounded up to the allocation block size and grown when it
 * cannot hold len plus the NUL. Exits the process when out of memory.
 */
static cstrbuf cstrbufNew (ub4 maxsz, const char *str, ub4 len)
{
    cstrbuf_t *csb;
    if (len == cstrbuf_error_size_len) {
        len = (ub4) cstr_length(str, -1);
    }
    if (maxsz == cstrbuf_error_size_len || maxsz <= len) {
        maxsz = cstrbuf_alignsize(len + 1);
    } else {
        maxsz = cstrbuf_alignsize(maxsz);
    }
    csb = malloc(sizeof(*csb) + maxsz);
    if (! csb) {
        printf("(%s:%d) Cannot allocate memory.\n", __FILE__, __LINE__);
        exit(EXIT_FAILURE);
    }
    /* clear both maxsz and len via the overlaying union member */
    csb->__maxszlen = 0;
    *csb->str = 0;
    if (str) {
        memcpy(csb->str, str, len);
        /* zero the unused tail so the string is always NUL-terminated */
        memset(csb->str + len, 0, maxsz - len);
        csb->len = len;
    } else {
        memset(csb->str, 0, maxsz);
        csb->len = 0;
    }
    csb->maxsz = maxsz;
    return csb;
}
/**
 * Free a cstrbuf and clear the caller's handle.
 * Safe to call with NULL or with *csb == NULL.
 */
static void cstrbufFree (cstrbuf *csb)
{
    if (csb && *csb) {
        cstrbuf doomed = *csb;
        *csb = 0;
        free(doomed);
    }
}
/* NOTE: dstbuf must have enough space for copy to.
 * Copies src's bytes (no NUL) into dstbuf at offset offcb and returns
 * the new offset; an empty or NULL src leaves dstbuf untouched. */
static ub4 cstrbufCopyTo (const cstrbuf src, void *dstbuf, ub4 offcb)
{
    if (! src || ! src->len) {
        return (ub4) offcb;
    }
    memcpy((char*)dstbuf + offcb, src->str, src->len);
    return (ub4)(offcb + src->len);
}
/**
 * printf-style append to a cstrbuf.
 * Pass dst == NULL to allocate a fresh buffer sized from the formatted
 * length; otherwise the text is appended in place, reallocating (and
 * possibly moving) the buffer when it is too small — always use the
 * returned handle. Returns 0 only when dst was NULL and formatting
 * failed; exits the process when out of memory.
 */
static cstrbuf cstrbufCat (cstrbuf dst, const char *fmt, ...)
{
    int vlen, len;
    if (! dst) {
        /* first pass: measure the formatted length */
        do {
            va_list args;
            va_start(args, fmt);
            vlen = vsnprintf(0, 0, fmt, args);
            va_end(args);
        } while(0);
        if (vlen < 0) {
            printf("(%s:%d) vsnprintf error: %s\n", __FILE__, __LINE__, strerror(errno));
            return 0;
        }
        dst = cstrbufNew(vlen + 1, 0, 0);
        /* second pass: format into the fresh buffer */
        do {
            va_list args;
            va_start(args, fmt);
            len = vsnprintf(dst->str, dst->maxsz, fmt, args);
            va_end(args);
        } while(0);
        if (len == vlen) {
            dst->len = len;
            dst->str[dst->len] = 0;
            return dst;
        }
        printf("(%s:%d) vsnprintf error: %s\n", __FILE__, __LINE__, strerror(errno));
        cstrbufFree(&dst);
        return 0;
    } else {
        /* try to format directly into the remaining space */
        do {
            va_list args;
            va_start(args, fmt);
            vlen = vsnprintf(&dst->str[dst->len], dst->maxsz - dst->len, fmt, args);
            va_end(args);
        } while(0);
        if (vlen < 0) {
            printf("(%s:%d) vsnprintf error: %s\n", __FILE__, __LINE__, strerror(errno));
            dst->str[dst->len] = 0;
            return dst;
        }
        if (dst->len + vlen < dst->maxsz) {
            /* it fit: commit the new length */
            dst->len += vlen;
            dst->str[dst->len] = 0;
            return dst;
        } else {
            /* too small: grow the buffer, then format again */
            ub4 maxsz = cstrbuf_alignsize(dst->len + vlen + 1);
            cstrbuf_t *newdst = realloc(dst, sizeof(*newdst) + maxsz);
            if (! newdst) {
                printf("(%s:%d) Cannot allocate memory.\n", __FILE__, __LINE__);
                exit(EXIT_FAILURE);
            }
            newdst->maxsz = maxsz;
            do {
                va_list args;
                va_start(args, fmt);
                len = vsnprintf(&newdst->str[newdst->len], newdst->maxsz - newdst->len, fmt, args);
                va_end(args);
            } while(0);
            if (len == vlen) {
                newdst->len += len;
            }
            newdst->str[newdst->len] = 0;
            return newdst;
        }
    }
}
/**
 * Shorten s to len characters when it is currently longer; the buffer
 * is not reallocated. Returns s.
 */
static cstrbuf cstrbufTrunc (cstrbuf s, ub4 len)
{
    if (len < s->len) {
        s->str[len] = 0;
        s->len = len;
    }
    return s;
}
/**
 * Concatenate a NULL-terminated varargs list of cstrbufs into one new
 * cstrbuf; the inputs are not modified. The first argument is always
 * consumed, so cstrbufConcat(NULL) yields an empty buffer.
 */
static cstrbuf cstrbufConcat (const cstrbuf start, ...)
{
    cstrbuf result = 0;
    cstrbuf piece = start;
    va_list argp;
    va_start(argp, start);
    do {
        result = cstrbufCat(result, "%.*s", cstrbufGetLen(piece), cstrbufGetStr(piece));
        piece = va_arg(argp, cstrbuf);
    } while (piece);
    va_end(argp);
    return result;
}
/**
 * Copy source[0..sourcelen) into dst, reusing dst's allocation when it is
 * large enough and replacing it otherwise. Pass dst == NULL to allocate a
 * fresh buffer. sourcelen == cstrbuf_error_size_len means "measure the
 * NUL-terminated source". Callers must use the returned pointer.
 */
static cstrbuf cstrbufDup (cstrbuf dst, const char *source, ub4 sourcelen)
{
    /* No existing buffer: delegate allocation entirely to cstrbufNew. */
    if (! dst) {
        return cstrbufNew(0, source, sourcelen);
    }
    /* Sentinel length: measure the NUL-terminated source ourselves. */
    if (sourcelen == cstrbuf_error_size_len) {
        sourcelen = (ub4) cstr_length(source, -1);
    }
    if (sourcelen == 0) {
        /* Empty copy: keep the allocation, just reset the content. */
        dst->len = 0;
        dst->str[0] = 0;
        return dst;
    }
    if (sourcelen < dst->maxsz) {
        /* Fits in place (maxsz leaves room for the terminating NUL). */
        memcpy(dst->str, source, sourcelen);
        dst->len = sourcelen;
        dst->str[sourcelen] = 0;
        return dst;
    }
    /* Too small: discard dst and allocate a buffer of the right size. */
    free(dst);
    return cstrbufNew(0, source, sourcelen);
}
/**
 * Build a new string with length chars of source, starting at startindex,
 * replaced by substr[0..sublen). Returns a newly allocated cstrbuf; the
 * caller owns it and must release it with cstrbufFree.
 *
 * NOTE(review): no bounds checking -- assumes 0 <= startindex and
 * startindex + length <= strlen(source), and that sublen matches substr.
 * Confirm at the call sites.
 */
static cstrbuf cstrbufSub (const char *source, int startindex, int length, const char *substr, int sublen)
{
    int sourlen = cstr_length(source, -1);
    /* Tail of the original string after the span being replaced. */
    cstrbuf endstr = cstrbufNew(0, source + startindex + length, sourlen - startindex - length);
    /* Result buffer sized for the edit, pre-filled with the head (prefix). */
    cstrbuf result = cstrbufNew(sourlen - length + sublen + 1, source, startindex);
    memcpy(result->str + startindex, substr, sublen);
    memcpy(result->str + startindex + sublen, endstr->str, endstr->len);
    cstrbufFree(&endstr);
    result->len = sourlen - length + sublen;
    result->str[result->len] = 0;
    return result;
}
#ifdef __cplusplus
}
#endif
#endif /* _CSTRBUF_H_ */
|
#!/usr/bin/env bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# CI build script: builds SPIRV-Tools for the current platform and uploads
# the binaries as a GitHub release via github-release-retry.
# Expects CONFIG, GITHUB_SHA, GITHUB_REF and GH_TOKEN in the environment.

# Echo commands, stop on first error and on unset variables.
set -x
set -e
set -u

WORK="$(pwd)"

# Old bash versions can't expand empty arrays, so we always include at least this option.
CMAKE_OPTIONS=("-DCMAKE_OSX_ARCHITECTURES=x86_64")

help | head
uname

# Pick per-OS tool names and the platform id used in the artifact name.
case "$(uname)" in
"Linux")
  NINJA_OS="linux"
  BUILD_PLATFORM="Linux_x64"
  PYTHON="python3"
  ;;

"Darwin")
  NINJA_OS="mac"
  BUILD_PLATFORM="Mac_x64"
  PYTHON="python3"
  brew install md5sha1sum
  ;;

"MINGW"*|"MSYS_NT"*)
  NINJA_OS="win"
  BUILD_PLATFORM="Windows_x64"
  PYTHON="python"
  CMAKE_OPTIONS+=("-DCMAKE_C_COMPILER=cl.exe" "-DCMAKE_CXX_COMPILER=cl.exe")
  choco install zip
  ;;

*)
  echo "Unknown OS"
  exit 1
  ;;
esac

###### START EDIT ######
TARGET_REPO_ORG="KhronosGroup"
TARGET_REPO_NAME="SPIRV-Tools"
BUILD_REPO_ORG="google"
BUILD_REPO_NAME="gfbuild-SPIRV-Tools"
###### END EDIT ######

# Maven-style coordinates for the released artifact.
COMMIT_ID="$(cat "${WORK}/COMMIT_ID")"
ARTIFACT="${BUILD_REPO_NAME}"
ARTIFACT_VERSION="${COMMIT_ID}"
GROUP_DOTS="github.${BUILD_REPO_ORG}"
GROUP_SLASHES="github/${BUILD_REPO_ORG}"
TAG="${GROUP_SLASHES}/${ARTIFACT}/${ARTIFACT_VERSION}"

BUILD_REPO_SHA="${GITHUB_SHA}"
CLASSIFIER="${BUILD_PLATFORM}_${CONFIG}"
POM_FILE="${BUILD_REPO_NAME}-${ARTIFACT_VERSION}.pom"
INSTALL_DIR="${ARTIFACT}-${ARTIFACT_VERSION}-${CLASSIFIER}"

export PATH="${HOME}/bin:$PATH"
mkdir -p "${HOME}/bin"
pushd "${HOME}/bin"
# Install github-release-retry.
"${PYTHON}" -m pip install --user 'github-release-retry==1.*'
# Install ninja.
curl -fsSL -o ninja-build.zip "https://github.com/ninja-build/ninja/releases/download/v1.9.0/ninja-${NINJA_OS}.zip"
unzip ninja-build.zip
ls
popd

###### START EDIT ######
CMAKE_GENERATOR="Ninja"
CMAKE_BUILD_TYPE="${CONFIG}"
CMAKE_OPTIONS+=("-DSPIRV_BUILD_FUZZER=ON")

# Check out the target repo at the commit recorded in COMMIT_ID.
git clone https://github.com/${TARGET_REPO_ORG}/${TARGET_REPO_NAME}.git "${TARGET_REPO_NAME}"
cd "${TARGET_REPO_NAME}"
git checkout "${COMMIT_ID}"

# Get headers version from the DEPS file.
HEADERS_VERSION="$(${PYTHON} "${WORK}/get_headers_version.py" <DEPS)"

git clone https://github.com/KhronosGroup/SPIRV-Headers.git external/spirv-headers
pushd external/spirv-headers
git checkout "${HEADERS_VERSION}"
popd

# protobuf is needed because the fuzzer build is enabled above.
git clone --depth=1 --branch v3.13.0 https://github.com/protocolbuffers/protobuf external/protobuf
###### END EDIT ######

###### BEGIN BUILD ######
BUILD_DIR="b_${CONFIG}"

mkdir -p "${BUILD_DIR}"
pushd "${BUILD_DIR}"
cmake -G "${CMAKE_GENERATOR}" .. "-DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}" "${CMAKE_OPTIONS[@]}"
cmake --build . --config "${CMAKE_BUILD_TYPE}"
cmake "-DCMAKE_INSTALL_PREFIX=../${INSTALL_DIR}" "-DBUILD_TYPE=${CMAKE_BUILD_TYPE}" -P cmake_install.cmake
popd
###### END BUILD ######

###### START EDIT ######
# We just want the bin directory.
mv "${INSTALL_DIR}" "${INSTALL_DIR}_old"
mkdir -p "${INSTALL_DIR}"
cp -r "${INSTALL_DIR}_old/bin" "${INSTALL_DIR}"

case "$(uname)" in
"Linux")
  ;;

"Darwin")
  ;;

"MINGW"*|"MSYS_NT"*)
  # For some reason, there is a .dll in bin/ on Windows.
  rm -rf "${INSTALL_DIR:?}/bin/"*.dll
  "${PYTHON}" "${WORK}/add_pdbs.py" "${BUILD_DIR}" "${INSTALL_DIR}"
  ;;

*)
  echo "Unknown OS"
  exit 1
  ;;
esac

# Tag every shipped binary with the build-repo SHA and the target commit id.
for f in "${INSTALL_DIR}/bin/"*; do
  echo "${BUILD_REPO_SHA}">"${f}.build-version"
  cp "${WORK}/COMMIT_ID" "${f}.version"
done
###### END EDIT ######

GRAPHICSFUZZ_COMMIT_SHA="b82cf495af1dea454218a332b88d2d309657594d"
OPEN_SOURCE_LICENSES_URL="https://github.com/google/gfbuild-graphicsfuzz/releases/download/github/google/gfbuild-graphicsfuzz/${GRAPHICSFUZZ_COMMIT_SHA}/OPEN_SOURCE_LICENSES.TXT"

# Add licenses file.
curl -fsSL -o OPEN_SOURCE_LICENSES.TXT "${OPEN_SOURCE_LICENSES_URL}"
cp OPEN_SOURCE_LICENSES.TXT "${INSTALL_DIR}/"

# zip file.
pushd "${INSTALL_DIR}"
zip -r "../${INSTALL_DIR}.zip" ./*
popd

sha1sum "${INSTALL_DIR}.zip" >"${INSTALL_DIR}.zip.sha1"

# POM file.
sed -e "s/@GROUP@/${GROUP_DOTS}/g" -e "s/@ARTIFACT@/${ARTIFACT}/g" -e "s/@VERSION@/${ARTIFACT_VERSION}/g" "../fake_pom.xml" >"${POM_FILE}"

sha1sum "${POM_FILE}" >"${POM_FILE}.sha1"

# Release body: first line plus a short git log of the target repo.
DESCRIPTION="$(echo -e "Automated build for ${TARGET_REPO_NAME} version ${COMMIT_ID}.\n$(git log --graph -n 3 --abbrev-commit --pretty='format:%h - %s <%an>')")"

# Only release from master branch commits.
# shellcheck disable=SC2153
if test "${GITHUB_REF}" != "refs/heads/master"; then
  exit 0
fi

# We do not use the GITHUB_TOKEN provided by GitHub Actions.
# We cannot set environment variables or secrets that start with GITHUB_ in .yml files,
# but the github-release-retry tool requires GITHUB_TOKEN, so we set it here.
export GITHUB_TOKEN="${GH_TOKEN}"

"${PYTHON}" -m github_release_retry.github_release_retry \
  --user "${BUILD_REPO_ORG}" \
  --repo "${BUILD_REPO_NAME}" \
  --tag_name "${TAG}" \
  --target_commitish "${BUILD_REPO_SHA}" \
  --body_string "${DESCRIPTION}" \
  "${INSTALL_DIR}.zip"

"${PYTHON}" -m github_release_retry.github_release_retry \
  --user "${BUILD_REPO_ORG}" \
  --repo "${BUILD_REPO_NAME}" \
  --tag_name "${TAG}" \
  --target_commitish "${BUILD_REPO_SHA}" \
  --body_string "${DESCRIPTION}" \
  "${INSTALL_DIR}.zip.sha1"

# Don't fail if pom cannot be uploaded, as it might already be there.
"${PYTHON}" -m github_release_retry.github_release_retry \
  --user "${BUILD_REPO_ORG}" \
  --repo "${BUILD_REPO_NAME}" \
  --tag_name "${TAG}" \
  --target_commitish "${BUILD_REPO_SHA}" \
  --body_string "${DESCRIPTION}" \
  "${POM_FILE}" || true

"${PYTHON}" -m github_release_retry.github_release_retry \
  --user "${BUILD_REPO_ORG}" \
  --repo "${BUILD_REPO_NAME}" \
  --tag_name "${TAG}" \
  --target_commitish "${BUILD_REPO_SHA}" \
  --body_string "${DESCRIPTION}" \
  "${POM_FILE}.sha1" || true

# Don't fail if OPEN_SOURCE_LICENSES.TXT cannot be uploaded, as it might already be there.
"${PYTHON}" -m github_release_retry.github_release_retry \
  --user "${BUILD_REPO_ORG}" \
  --repo "${BUILD_REPO_NAME}" \
  --tag_name "${TAG}" \
  --target_commitish "${BUILD_REPO_SHA}" \
  --body_string "${DESCRIPTION}" \
  "OPEN_SOURCE_LICENSES.TXT" || true
|
#!/bin/sh

# Test driver: cleans previous run artifacts, writes a userver config file
# and starts userver_tcp on port 8080 against benchmark/docroot.

# Shared helpers (start_prg_background, wait_server_ready, compile_usp, ...).
. ../.function

#DOC_ROOT=ruby/blog
DOC_ROOT=benchmark/docroot

# Remove logs, traces and dumps left over from a previous run.
rm -f tmp/usp_compile.sh.err /tmp/*.hpack.* \
 $DOC_ROOT/web_server.log* \
 out/userver_*.out err/userver_*.err \
 trace.*userver_*.[0-9]* object.*userver_*.[0-9]* stack.*userver_*.[0-9]* mempool.*userver_*.[0-9]* \
 $DOC_ROOT/trace.*userver_*.[0-9]* $DOC_ROOT/object.*userver_*.[0-9]* $DOC_ROOT/stack.*userver_*.[0-9]* $DOC_ROOT/mempool.*userver_*.[0-9]*

# Tracing configuration picked up by userver from the environment.
UTRACE="0 20M -1"
UTRACE_SIGNAL="0 20M -1"
#UOBJDUMP="0 10M 100"
#USIMERR="error.sim"
export UTRACE UOBJDUMP USIMERR UTRACE_SIGNAL

SOCK1=tmp/fcgi.socket

# Launch the FastCGI test backend once, if it is not already running.
start_test() {
   CMD=test_fcgi

   PIDS=`ps x | grep $CMD | grep -v grep | awk '{ print $1 }'`

   if [ -z "$PIDS" ]; then
#     rm -f $SOCK1

      ../../src/ulib/net/server/plugin/fcgi/$CMD $SOCK1 2>/tmp/$CMD.err &

      chmod 777 $SOCK1
   fi
}

#start_test
#/usr/bin/spawn-fcgi -p 8080 -f /usr/bin/php-cgi -C 5 -P /var/run/spawn-fcgi.pid

# =================================================================
# HTTP2
# =================================================================
# ./h2a -c server.crt -k server.key -p 8000 -H 127.0.0.1 -P 443
#
# Once h2a starts, you can access http://localhost:8000 from the
# HTTP client such as Firefox and you will be able to check the
# HTTP/2 traffic
#
# ./web_server.sh
#
# /opt/go/bin/h2a -p 80 -H 127.0.0.1 -P 8080 -d -D >& h2a.out &
# /opt/go/bin/h2spec -p 80 >& h2spec.out
# =================================================================

# Generate the server configuration consumed by userver_tcp below.
cat <<EOF >inp/webserver.cfg
userver {
PORT 8080
RUN_AS_USER nobody
#MIN_SIZE_FOR_SENDFILE 2k
LOG_FILE web_server.log
LOG_FILE_SZ 10M
#LOG_FILE_SZ 20k
LOG_MSG_SIZE -1
PID_FILE /var/run/userver_tcp.pid
#PREFORK_CHILD 0
#REQ_TIMEOUT 300
#PLUGIN "ssi http"
#ORM_DRIVER "sqlite mysql"
DOCUMENT_ROOT benchmark/docroot
PLUGIN_DIR ../../../../src/ulib/net/server/plugin/.libs
ORM_DRIVER_DIR ../../../../src/ulib/orm/driver/.libs
#DOCUMENT_ROOT .
#PLUGIN_DIR ../../src/ulib/net/server/plugin/.libs
#ORM_DRIVER_DIR ../../src/ulib/orm/driver/.libs
#DOCUMENT_ROOT php
#PLUGIN_DIR ../../../src/ulib/net/server/plugin/.libs
#ORM_DRIVER_DIR ../../../src/ulib/orm/driver/.libs
#DOCUMENT_ROOT ruby/blog/public
#PLUGIN_DIR ../../../../../src/ulib/net/server/plugin/.libs
#ORM_DRIVER_DIR ../../../../../src/ulib/orm/driver/.libs
}
http {
ALIAS "[ / /100.html ]"
#VIRTUAL_HOST yes
ENABLE_INOTIFY yes
LIMIT_REQUEST_BODY 3M
REQUEST_READ_TIMEOUT 30
#DIGEST_AUTHENTICATION yes
#CACHE_FILE_STORE nocat/webif.gz
#CACHE_FILE_MASK inp/http/data/file1|*.flv|*.svgz
#URI_REQUEST_STRICT_TRANSPORT_SECURITY_MASK *
}
EOF

# Runtime settings read by the server process.
export ORM_DRIVER="sqlite"
export ELASTICSEARCH_HOST="localhost"
export UMEMPOOL="136,0,60,100,250,-22,-17,-23,60"
export ORM_OPTION="host=localhost dbname=../db/hello_world"

DIR_CMD="../../examples/userver"

# Build any .usp pages before starting the server.
compile_usp

#STRACE=$TRUSS
start_prg_background userver_tcp -c inp/webserver.cfg
# RA/RA.cfg
# deployment.properties

wait_server_ready localhost 8080

echo "PID = `cat /var/run/userver_tcp.pid`"

# HTTP pseudo-streaming for FLV video
#curl -I -s -D - 'http://localhost:8080/test.flv' -o /dev/null
#curl -I -s -D - 'http://localhost:8080/test.flv' -o /tmp/test.flv
#curl -s -v -r0-499 'http://localhost:8080/test.flv' -o /tmp/test.flv
#curl -s -D 'http://localhost:8080/test.flv?start=669000' -o /tmp/test.flv

#sleep 6
#kill_server userver_tcp

mv err/userver_tcp.err err/web_server.err

#check_for_netcat
#send_req localhost 8080 inp/http/get_geoip.req web_server 3
#openssl s_client -debug -cert ../ulib/CA/username.crt -key ../ulib/CA/username.key -pass pass:caciucco -CApath ../ulib/CA/CApath -verify 0 -connect localhost:8080
|
class EventSystem:
    """Minimal publish/subscribe registry.

    Handlers are stored per event name and invoked in registration order.
    """

    def __init__(self):
        # event name -> list of handler callables, in registration order
        self.events = {}

    def register_event(self, event_name, handler):
        """Register ``handler`` to be called whenever ``event_name`` fires.

        The same handler may be registered multiple times; it will then be
        invoked once per registration.
        """
        # setdefault replaces the hand-rolled "append or create" branch.
        self.events.setdefault(event_name, []).append(handler)

    def trigger_event(self, event_name, *args, **kwargs):
        """Invoke every handler registered for ``event_name``.

        Unknown event names are a silent no-op (unchanged behavior).
        Extra positional/keyword arguments are forwarded to each handler --
        a backward-compatible generalization: existing zero-argument
        handlers keep working when called with no extra arguments.
        """
        for handler in self.events.get(event_name, []):
            handler(*args, **kwargs)
# Example usage
def handler1():
    # Demo handler: just reports that it ran.
    print("Handler 1 executed")

def handler2():
    # Second demo handler, registered under the same event name.
    print("Handler 2 executed")

event_system = EventSystem()
event_system.register_event("click", handler1)
event_system.register_event("click", handler2)
# Handlers fire in registration order.
event_system.trigger_event("click")
# Output:
# Handler 1 executed
# Handler 2 executed
import senseTk
def perform_image_segmentation(input_image_path):
    """Segment the image stored at ``input_image_path`` using senseTk.

    Args:
        input_image_path: Path to the image file to load and segment.

    Returns:
        The segmented image object produced by ``senseTk.segmentImage``.

    NOTE(review): senseTk's API is not visible from this file -- presumably
    ``loadImage``/``segmentImage`` accept these arguments; confirm against
    the senseTk documentation.
    """
    # Load the input image
    input_image = senseTk.loadImage(input_image_path)
    # Perform image segmentation using senseTk library
    segmented_image = senseTk.segmentImage(input_image)
    return segmented_image
#!/usr/bin/env bash
#
# Because the ocaml/opam2 docker image uses a stale
# local copy of opam repo, we just delete everything
# and start from scratch :)
#
# Expects OCAML_VERSION (and optionally MIN_REQS_ONLY) in the environment.
rm -rf ~/.opam
opam init --compiler="${OCAML_VERSION}"
opam update
eval `opam env`
opam config report
if [ -z "${MIN_REQS_ONLY}" ]; then
  # Normal CI run: install whatever ExcelSynth.opam declares (incl. test deps).
  opam install --yes --deps-only --with-test ./ExcelSynth.opam
else
  # Minimum-requirements run: pin the oldest supported dependency versions.
  opam install --yes alcotest.0.8.0 \
                     core.v0.13.0 \
                     csv.2.3 \
                     dune.1.11.0 \
                     lwt.4.2.0
fi
opam list
pwd ; ls -lah
dune clean
dune build --verbose
dune build bin/App.exe --verbose
dune runtest --verbose
# fasd documentation: https://github.com/clvv/fasd
# Zsh plugin: initializes fasd (using a cached init script for fast shell
# startup) and defines the conventional fasd aliases.

# check if fasd is installed
if (( ! ${+commands[fasd]} )); then
  return
fi

fasd_cache="${ZSH_CACHE_DIR}/fasd-init-cache"
# Regenerate the cache when the fasd binary is newer than it, or it is empty.
if [[ "$commands[fasd]" -nt "$fasd_cache" || ! -s "$fasd_cache" ]]; then
  fasd --init posix-alias zsh-hook zsh-ccomp zsh-ccomp-install \
    zsh-wcomp zsh-wcomp-install >| "$fasd_cache"
fi
source "$fasd_cache"
unset fasd_cache

# Use both the local file at ~/.fasd and spotlight as backends
export _FASD_BACKENDS="spotlight native"

# Default fasd aliases
alias a='fasd -a'        # any
alias s='fasd -si'       # show / search / select
alias d='fasd -d'        # directory
alias f='fasd -f'        # file
alias sd='fasd -sid'     # interactive directory selection
alias sf='fasd -sif'     # interactive file selection
alias z='fasd_cd -d'     # cd, same functionality as j in autojump
alias zz='fasd_cd -d -i' # cd with interactive selection

# Advanced aliases from fasd zsh plugin
alias v='f -e "$EDITOR"' # open a file in vim
alias o='a -e open'      # open any file in the Mac-default program
alias j='zz'             # jump, with interactive selection
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sshd.common.channel;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.charset.Charset;
import java.util.EnumMap;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Support for stty command on unix
 *
 * @author <a href="mailto:<EMAIL>">Apache MINA SSHD Project</a>
 */
public final class SttySupport {
    public static final int DEFAULT_TERMINAL_WIDTH = 80;
    public static final int DEFAULT_TERMINAL_HEIGHT = 24;

    public static final String SSHD_STTY_COMMAND_PROP = "sshd.sttyCommand";
    public static final String DEFAULT_SSHD_STTY_COMMAND = "stty";

    private static final AtomicReference<String> STTY_COMMAND_HOLDER
            = new AtomicReference<>(System.getProperty(SSHD_STTY_COMMAND_PROP, DEFAULT_SSHD_STTY_COMMAND));

    // Cached output of "stty -a" and when it was fetched (refreshed at most every second).
    private static final AtomicReference<String> TTY_PROPS_HOLDER = new AtomicReference<>(null);
    private static final AtomicLong TTY_PROPS_LAST_FETCHED_HOLDER = new AtomicLong(0L);

    private SttySupport() {
        throw new UnsupportedOperationException("No instance allowed");
    }

    public static Map<PtyMode, Integer> getUnixPtyModes() throws IOException, InterruptedException {
        return parsePtyModes(getTtyProps());
    }

    /**
     * Parse "stty -a" style output into a map of {@link PtyMode} values.
     *
     * @param stty The raw stty output
     * @return Detected modes: character modes map to the control character value,
     *         flag modes map to 1 (set) or 0 (negated with a '-' prefix)
     */
    public static Map<PtyMode, Integer> parsePtyModes(String stty) {
        Map<PtyMode, Integer> modes = new EnumMap<>(PtyMode.class);
        for (PtyMode mode : PtyMode.MODES) {
            if ((mode == PtyMode.TTY_OP_ISPEED) || (mode == PtyMode.TTY_OP_OSPEED)) {
                // TODO ...
                continue;
            }

            String str = mode.name().toLowerCase();
            // Are we looking for a character? (modes named v<something>, e.g. VINTR)
            if (str.charAt(0) == 'v') {
                str = str.substring(1);
                int v = findChar(stty, str);
                // BSD stty prints "rprnt" where POSIX names the mode "reprint"
                if ((v < 0) && "reprint".equals(str)) {
                    v = findChar(stty, "rprnt");
                }
                if (v >= 0) {
                    modes.put(mode, v);
                }
            } else {
                int v = findFlag(stty, str);
                if (v >= 0) {
                    modes.put(mode, v);
                }
            }
        }
        return modes;
    }

    /**
     * Locate a boolean flag in stty output.
     *
     * @return 1 if the flag appears set, 0 if negated ('-' prefix), -1 if absent
     */
    private static int findFlag(String stty, String name) {
        int cur = 0;
        while (cur < stty.length()) {
            int idx1 = stty.indexOf(name, cur);
            int idx2 = idx1 + name.length();
            if (idx1 < 0) {
                return -1;
            }
            // Skip matches embedded inside a longer word (e.g. "echo" in "echoe").
            if ((idx1 > 0) && Character.isLetterOrDigit(stty.charAt(idx1 - 1))
                    || ((idx2 < stty.length()) && Character.isLetterOrDigit(stty.charAt(idx2)))) {
                cur = idx2;
                continue;
            }
            return idx1 == 0 ? 1 : stty.charAt(idx1 - 1) == '-' ? 0 : 1;
        }
        return -1;
    }

    /**
     * Locate a control-character assignment ("name = ^C;" or "name = 3;") in stty output.
     *
     * @return The character value, or -1 if absent/undefined/unparseable
     */
    private static int findChar(String stty, String name) {
        int cur = 0;
        while (cur < stty.length()) {
            int idx1 = stty.indexOf(name, cur);
            int idx2 = stty.indexOf('=', idx1);
            int idx3 = stty.indexOf(';', idx1);
            if (idx1 < 0 || idx2 < 0 || idx3 < idx2) {
                // Invalid syntax
                return -1;
            }
            // Skip matches embedded inside a longer word.
            // NOTE(review): the second operand checks the char at the '=' position
            // (idx2), which is never a letter/digit -- looks like it was meant to
            // check idx1 + name.length(); kept as-is to preserve behavior.
            if (idx1 > 0 && Character.isLetterOrDigit(stty.charAt(idx1 - 1))
                    || (idx2 < stty.length() && Character.isLetterOrDigit(stty.charAt(idx2)))) {
                cur = idx1 + name.length();
                continue;
            }
            String val = stty.substring(idx2 + 1, idx3 < 0 ? stty.length() : idx3).trim();
            if (val.contains("undef")) {
                return -1;
            }
            if (val.length() == 2 && val.charAt(0) == '^') {
                // Caret notation: ^A..^Z etc. map onto control-character codes.
                int v = (val.charAt(1) - 'A' + 129) % 128;
                return v;
            } else {
                try {
                    return Integer.parseInt(val);
                } catch (NumberFormatException e) {
                    // what else ?
                }
            }
            return -1;
        }
        return -1;
    }

    /**
     * <P>
     * Returns the value of "stty size" width param.
     * </P>
     *
     * <P>
     * <strong>Note</strong>: this method caches the value from the first time it is called in order to increase speed,
     * which means that changing to size of the terminal will not be reflected in the console.
     * </P>
     *
     * @return The terminal width
     */
    public static int getTerminalWidth() {
        try {
            int val = getTerminalProperty("columns");
            if (val == -1) {
                val = DEFAULT_TERMINAL_WIDTH;
            }
            return val;
        } catch (Exception e) {
            return DEFAULT_TERMINAL_WIDTH; // debug breakpoint
        }
    }

    /**
     * <P>
     * Returns the value of "stty size" height param.
     * </P>
     *
     * <P>
     * <strong>Note</strong>: this method caches the value from the first time it is called in order to increase speed,
     * which means that changing to size of the terminal will not be reflected in the console.
     * </P>
     *
     * @return The terminal height
     */
    public static int getTerminalHeight() {
        try {
            int val = getTerminalProperty("rows");
            if (val == -1) {
                val = DEFAULT_TERMINAL_HEIGHT;
            }
            return val;
        } catch (Exception e) {
            return DEFAULT_TERMINAL_HEIGHT; // debug breakpoint
        }
    }

    public static int getTerminalProperty(String prop) throws IOException, InterruptedException {
        // need to be able handle both output formats:
        // speed 9600 baud; 24 rows; 140 columns;
        // and:
        // speed 38400 baud; rows = 49; columns = 111; ypixels = 0; xpixels = 0;
        for (StringTokenizer tok = new StringTokenizer(getTtyProps(), ";\n"); tok.hasMoreTokens();) {
            String str = tok.nextToken().trim();
            if (str.startsWith(prop)) {
                int index = str.lastIndexOf(' ');
                return Integer.parseInt(str.substring(index).trim());
            } else if (str.endsWith(prop)) {
                int index = str.indexOf(' ');
                return Integer.parseInt(str.substring(0, index).trim());
            }
        }
        return -1;
    }

    public static String getTtyProps() throws IOException, InterruptedException {
        // tty properties are cached so we don't have to worry too much about getting term width/height
        long now = System.currentTimeMillis();
        long lastFetched = TTY_PROPS_LAST_FETCHED_HOLDER.get();
        if ((TTY_PROPS_HOLDER.get() == null) || ((now - lastFetched) > 1000L)) {
            TTY_PROPS_HOLDER.set(stty("-a"));
            TTY_PROPS_LAST_FETCHED_HOLDER.set(System.currentTimeMillis());
        }
        return TTY_PROPS_HOLDER.get();
    }

    /**
     * Execute the stty command with the specified arguments against the current active terminal.
     *
     * @param args The command arguments
     * @return The execution result
     * @throws IOException If failed to execute the command
     * @throws InterruptedException If interrupted while awaiting command execution
     * @see #exec(String)
     */
    public static String stty(String args) throws IOException, InterruptedException {
        // Fix: honor the configurable command (sshd.sttyCommand system property /
        // setSttyCommand) instead of a hard-coded "stty", which left
        // getSttyCommand()/setSttyCommand as dead configuration. The default
        // remains "stty", so behavior is unchanged unless configured.
        return exec(getSttyCommand() + " " + args + " < /dev/tty").trim();
    }

    /**
     * Execute the specified command and return the output (both stdout and stderr).
     *
     * @param cmd The command to execute
     * @return The execution result
     * @throws IOException If failed to execute the command
     * @throws InterruptedException If interrupted while awaiting command execution
     * @see #exec(String[])
     */
    public static String exec(final String cmd)
            throws IOException, InterruptedException {
        return exec("sh", "-c", cmd);
    }

    /**
     * Execute the specified command and return the output (both stdout and stderr).
     *
     * NOTE(review): streams are drained sequentially after launch; a process
     * producing large stderr output before stdout is closed could block --
     * acceptable here since stty output is tiny.
     *
     * @param cmd The command components
     * @return The execution result
     * @throws IOException If failed to execute the command
     * @throws InterruptedException If interrupted while awaiting command execution
     */
    private static String exec(String... cmd)
            throws IOException, InterruptedException {
        try (ByteArrayOutputStream bout = new ByteArrayOutputStream()) {
            Process p = Runtime.getRuntime().exec(cmd);
            copyStream(p.getInputStream(), bout);
            copyStream(p.getErrorStream(), bout);
            p.waitFor();

            String result = new String(bout.toByteArray(), Charset.defaultCharset());
            return result;
        }
    }

    private static int copyStream(InputStream in, OutputStream bout) throws IOException {
        int count = 0;
        while (true) {
            int c = in.read();
            if (c == -1) {
                return count;
            }
            bout.write(c);
            count++;
        }
    }

    /**
     * @return The command to use to set the terminal options.
     * @see #setSttyCommand(String)
     */
    public static String getSttyCommand() {
        return STTY_COMMAND_HOLDER.get();
    }

    /**
     * @param cmd The command to use to set the terminal options. Defaults to {@link #DEFAULT_SSHD_STTY_COMMAND}, or the
     *            value of the {@link #SSHD_STTY_COMMAND_PROP} system property if not set via this method
     */
    public static void setSttyCommand(String cmd) {
        STTY_COMMAND_HOLDER.set(cmd);
    }
}
|
/**
 * Page list panel: renders the breadcrumb, the expanded thumbnail list and
 * the compact child-page strip; wires up selection, context menus, the
 * filter box, and drag-and-drop page re-ordering.
 *
 * NOTE(review): relies on elements/widgets bound by the template via
 * BaseTemplatedWidget.call(this) -- pageListContainer, childPageContainer,
 * pageBreadcrumb, addPageButton, toggleButton, filterButton, nameTextBox,
 * pageListSrollView, childPageSrollView, filterContainer, filterValue.
 */
function PageListView() {
    BaseTemplatedWidget.call(this);
    // Per-level filter strings, keyed by parent page name ("Root" at top level).
    this.filterCache = {};
    this.showFilterBar = false;
    // Resolve the PageThumbnailView widget that owns the event target, if any.
    var findPageThumbnailView = function (event) {
        var node = Dom.findUpward(event.target, function (n) {
            return n.__widget && (n.__widget instanceof PageThumbnailView);
        });
        return node;
    }
    // --- selection via thumbnails in the expanded list ---
    this.bind("click", function (event) {
        var node = findPageThumbnailView(event);
        if (!node) return;
        this.handleSelectPage(node.__widget.page);
    }, this.pageListContainer);
    this.bind("dblclick", function (event) {
        var node = findPageThumbnailView(event);
        if (!node) return;
        this.handleDoubleClick(node.__widget.page);
    }, this.pageListContainer);
    // this.bind("mouseover", function (event) {
    //     var page = Dom.findUpwardForData(event.target, "_page");
    //     if(!page || page.children.length == 0) return;
    //     // open child pages
    //     var activePage = function (page) {
    //         thiz.activatePage(page);
    //     }
    //     var childrenList = new ChildPageListMenu(page, activePage);
    //     childrenList.showMenuAt(event.clientX,event.clientY);
    //
    // },this.childPageContainer)
    // --- selection / child-page menu in the compact strip ---
    this.bind("click", function (event) {
        var page = Dom.findUpwardForData(event.target, "_page");
        if (!page) return;
        var node = Dom.findParentWithClass(event.target, "button_Down");
        if (node && node.nodeName != "#document") {
            // Clicked the drop-down affordance: pop the child-page menu.
            var childrenListMenu = new ChildPageListMenu(page, function (selectedPage) {
                thiz.activatePage(selectedPage);
            });
            childrenListMenu.showMenu(node, "left-inside", "top", 0, 0, true);
        } else {
            this.handleSelectPage(page);
        }
    }, this.childPageContainer);
    this.bind("dblclick", function (event) {
        var page = Dom.findUpwardForData(event.target, "_page");
        if (!page) return;
        this.handleDoubleClick(page);
    }, this.childPageContainer);
    // --- breadcrumb navigation ---
    this.bind("click", function (event) {
        var node = Dom.findUpward(event.target, function (n) {
            return typeof(n._page) != "undefined";
        });
        if (!node) return;
        var page = node._page;
        // Ignore clicks on the crumb for the level already being shown.
        if ((page == null && this.currentParentPage == null) || (page && this.currentParentPage && page.id == this.currentParentPage.id)) return;
        var newActivePage = null;
        if (this.currentParentPage
            && (!page && !this.currentParentPage.parentPage
                || (page && this.currentParentPage.parentPage && page.id == this.currentParentPage.parentPage.id))) {
            // Clicked the direct parent crumb: re-activate the current parent page.
            newActivePage = this.currentParentPage;
        } else {
            if (page) {
                newActivePage = page.children[0];
            } else {
                // Document crumb: jump to the first top-level page.
                for (var i in this.controller.doc.pages) {
                    if (!this.controller.doc.pages[i].parentPage) {
                        newActivePage = this.controller.doc.pages[i];
                        break;
                    }
                }
            }
        }
        this.activatePage(newActivePage);
    }, this.pageBreadcrumb);
    var thiz = this;
    // --- context menu for pages in all three areas ---
    this.bind("contextmenu", function (event) {
        var childOfListPage = Dom.isChildOf(this.pageListContainer, event.target);
        var childOfChildPage = Dom.isChildOf(this.childPageContainer, event.target);
        var page = null;
        var pageNode = null;
        if (childOfChildPage) {
            pageNode = Dom.findUpwardForNodeWithData(event.target, "_page");
            pageNode.focus();
            page = pageNode["_page"];
        } else if (childOfListPage) {
            var pageNode = Dom.findUpwardForNodeWithData(event.target, "__widget");
            var view = pageNode["__widget"];
            if (!view) return;
            pageNode.focus();
            page = view.page;
        } else if (Dom.isChildOf(this.pageBreadcrumb, event.target)) {
            var node = Dom.findUpwardForNodeWithData(event.target, "_page");
            if (node) {
                node.focus();
                page = node._page;
            }
        }
        // page may stay null: the menu then applies to the document scope.
        var pageMenu = new PageMenu(thiz, page);
        pageMenu.showMenuAt(event.clientX, event.clientY);
    }, this.node());
    // --- "add page" button: open the detail dialog, then activate the result ---
    this.bind("click", function (event) {
        var dialog = new PageDetailDialog();
        dialog.open({
            defaultParentPage: this.currentParentPage,
            onDone: function (page) {
                if (!page) return;
                thiz.activatePage(page);
            }
        });
    }, this.addPageButton);
    // --- expanded/collapsed toggle (persisted in Config) ---
    this.bind("click", function (event) {
        this.expanded = !this.expanded;
        this.invalidateExpandedState();
        this.pageListSrollView.invalidate();
        this.childPageSrollView.invalidate();
        Config.set("pageListViewExpanded.enabled", this.expanded);
        this.validateFilterBox();
        this.filterPages();
    }, this.toggleButton);
    // --- filter box show/hide and debounced filtering ---
    this.bind("click", function(ev) {
        this.showFilterBar = !this.showFilterBar;
        this.validateFilterBox();
    },this.filterButton);
    this.bind("input", function(ev) {
        // Debounce typing; an empty box clears the cached filter for this level.
        setTimeout(function() {
            var value = thiz.nameTextBox.value == "" ? null : thiz.nameTextBox.value;
            var filterName = thiz.controller.activePage.parentPage ? thiz.controller.activePage.parentPage.name : "Root";
            if (value == null && thiz.filterCache[filterName] != null) {
                delete thiz.filterCache[filterName];
            } else {
                thiz.filterCache[filterName] = value;
            }
            thiz.filterPages();
            thiz.nameTextBox.focus();
        }, 500);
    }, this.nameTextBox)
    this.bind("blur", function(ev) {
        this.showFilterBar = false;
        this.validateFilterBox();
    }, this.nameTextBox)
    // --- drag & drop page re-ordering ---
    this.pageListContainer._isDropZone = true;
    this.childPageContainer._isDropZone = true;
    // A dragged UI node carries its page either directly (_page) or via its widget.
    function findPageIdFromUINode(n) {
        var page = n._page ? n._page : (n.__widget ? n.__widget.page : null);
        return page ? page.id : null;
    }
    this.bind("dragstart", function (event) {
        nsDragAndDrop.dragStart(event);
        var n = Dom.findUpwardForNodeWithData(Dom.getTarget(event), "_index");
        if (!n) return;
        event.dataTransfer.setDragImage(this.dndImage, 8, 8);
        event.dataTransfer.setData("dragType", "page");
        event.dataTransfer.setData("text/html", "");
        nsDragAndDrop.setData("dragType", "page");
        nsDragAndDrop.setData("text/html", "");
        if (n.__widget && n.__widget.page && n.__widget.page.thumbPath) {
            event.dataTransfer.setData("text/html", "");
            event.dataTransfer.setData("pencil/png", n.__widget.page.thumbPath);
            nsDragAndDrop.setData("text/html", "");
            nsDragAndDrop.setData("pencil/png", n.__widget.page.thumbPath);
        }
        if (this.currentDraggedObject) this.currentDraggedObject.removeAttribute("dragged");
        this.currentDraggedObject = n;
        this.currentDraggedObject.setAttribute("dragged", "true");
    }, this.node());
    this.bind("drop", function (event) {
        // if (event.dataTransfer.getData("dragType") != "page") return;
        if (nsDragAndDrop.getData("dragType") != "page") return;
        if (!this.lastDropCandidateObject || !this.currentDraggedObject) return;
        var pageId = findPageIdFromUINode(this.currentDraggedObject);
        var targetPageId = findPageIdFromUINode(this.lastDropCandidateObject);
        Pencil.controller.movePageTo(pageId, targetPageId, this.lastDropCandidateObject._dropLeft);
        this.renderPages();
    }, this.node());
    this.bind("dragover", function (event) {
        // if (event.dataTransfer.getData("dragType") != "page") return;
        if (nsDragAndDrop.getData("dragType") != "page") return;
        var container = Dom.findUpwardForNodeWithData(Dom.getTarget(event), "_isDropZone");
        if (!container) return;
        // Find the child whose horizontal center is closest to the pointer and
        // mark which side of it the drop would land on.
        var index = 0;
        var left = true;
        var distance = Number.MAX_VALUE;
        for (var i = 0; i < container.childNodes.length; i ++) {
            var node = container.childNodes[i];
            var rect = node.getBoundingClientRect();
            var center = rect.left + rect.width / 2;
            var d = Math.abs(center - event.clientX);
            if (d < distance) {
                index = i;
                distance = d;
                left = event.clientX < center;
                if (this.lastDropCandidateObject) this.lastDropCandidateObject.removeAttribute("will-drop");
                this.lastDropCandidateObject = node;
                this.lastDropCandidateObject.setAttribute("will-drop", left ? "left" : "right");
                this.lastDropCandidateObject._dropLeft = left;
            }
        }
    }, this.node());
    this.bind("dragend", function (event) {
        if (this.lastDropCandidateObject) this.lastDropCandidateObject.removeAttribute("will-drop");
        this.lastDropCandidateObject = null;
        if (this.currentDraggedObject) this.currentDraggedObject.removeAttribute("dragged");
        this.currentDraggedObject = null;
    });
    this.dndImage = new Image();
    this.dndImage.src = "css/bullet.png";
    this.pageListSrollView.getStep = function () {
        return 120;
    };
    // Restore the persisted expanded/collapsed preference.
    this.expanded = Config.get("pageListViewExpanded.enabled");
    this.invalidateExpandedState();
}
// Inherit template rendering/binding behavior from BaseTemplatedWidget.
__extend(BaseTemplatedWidget, PageListView);
/**
 * Drop every cached filter string so all navigation levels start unfiltered.
 */
PageListView.prototype.restartFilterCache = function () {
    // Replacing the map is equivalent to deleting each key individually.
    this.filterCache = {};
};
/**
 * Show or hide the floating filter box according to this.showFilterBar,
 * positioning it relative to the page list and the right side pane.
 */
PageListView.prototype.validateFilterBox = function() {
    if (this.showFilterBar == true) {
        this.filterContainer.style.display = "flex";
        // Restore the remembered filter text for the current navigation level.
        var filterName = this.controller.activePage.parentPage ? this.controller.activePage.parentPage.name : "Root";
        if (this.filterCache[filterName]) {
            this.nameTextBox.value = this.filterCache[filterName] == null ? "" : this.filterCache[filterName];
        }
        var bottom = this.controller.applicationPane.pageListView.node().clientHeight;
        var right = this.controller.applicationPane.rightSidePane.node().clientWidth;
        this.filterContainer.style.bottom = (bottom + 5) + "px";
        this.filterContainer.style.right = (right + 5) + "px";
        // Disable the toggle while the box is open; the text box's blur handler closes it.
        this.filterButton.disabled = true;
        var thiz = this;
        // Focus asynchronously so it happens after the box becomes visible.
        window.setTimeout(function() {
            thiz.nameTextBox.focus();
        }, 0)
    } else {
        this.filterButton.disabled = false;
        this.filterContainer.style.display = "none";
    }
}
/**
 * Apply the current level's filter string (if any) to the visible page
 * items: non-matching entries are hidden and the filter button label/state
 * is updated. Matching is a case-insensitive substring test on page names.
 */
PageListView.prototype.filterPages = function() {
    if (!this.controller.activePage) { return; }
    var filterName = this.controller.activePage.parentPage ? this.controller.activePage.parentPage.name : "Root";
    var value = this.filterCache[filterName];
    if (!value) {
        this.filterValue.innerHTML = "Filter";
        this.nameTextBox.value = "";
        Dom.removeClass(this.filterButton, "activeFilter");
    } else {
        this.nameTextBox.value = value;
        this.filterValue.innerHTML = Dom.htmlEncode(value);
        Dom.addClass(this.filterButton, "activeFilter");
    }
    // Filter whichever container is currently visible (expanded vs compact).
    var selectedContainer = this.expanded == true ? this.pageListContainer : this.childPageContainer;
    var hiddenItemCount = 0;
    for (var i = 0; i < selectedContainer.childNodes.length; i++) {
        var item = selectedContainer.childNodes[i];
        var activePageItem;
        var page;
        // Expanded items are thumbnail widgets; compact items carry _page directly.
        if (this.expanded) page = item.__widget.page;
        else page = item._page;
        item.style.display = "inherit";
        if (value) {
            if (page.name.toUpperCase().indexOf(value.toUpperCase()) < 0) {
                hiddenItemCount++;
                item.style.display = "none";
            }
            if (page == this.controller.activePage) {
                activePageItem = item;
            }
        }
    }
    // if (hiddenItemCount == selectedContainer.childNodes.length) {
    //     activePageItem.style.display = "inherit";
    // }
}
// Attach a controller to this view, clear any per-page state left over from
// a previous controller, and rebuild the page list from scratch.
PageListView.prototype.setController = function (controller) {
    this.currentParentPage = null;
    this.currentPage = null;
    this.controller = controller;
    this.renderPages();
};
// Make the given page the controller's active page, then re-render so the
// strips reflect the new selection.
PageListView.prototype.activatePage = function (page) {
    var ctrl = this.controller;
    ctrl.activatePage(page);
    this.renderPages();
};
// Rebuild the breadcrumb, the thumbnail strip and the tab-style child list
// from the controller's document, then (async) scroll the active page into
// view and re-apply any cached filter.
PageListView.prototype.renderPages = function() {
    // Reset all rendered state before rebuilding.
    this.pageBreadcrumb.innerHTML = "";
    this.pageListContainer.innerHTML = "";
    this.childPageContainer.innerHTML = "";
    this.currentPage = null;
    this.currentParentPage = null;
    this.views = [];
    if (!this.controller || !this.controller.doc) return;
    this.currentPage = this.controller.activePage;
    this.currentParentPage = this.currentPage && this.currentPage.parentPage || null;
    // pages: siblings to render; parentPages: ancestor chain for the breadcrumb.
    var pages = [];
    var parentPages = [];
    if (!this.currentParentPage) {
        // Top level: show every page that has no parent.
        for (var i in this.controller.doc.pages) {
            var page = this.controller.doc.pages[i];
            if (!page.parentPage) pages.push(page);
        }
    } else {
        pages = this.currentParentPage.children;
        parentPages.push(this.currentParentPage);
        // Walk up to the root so parentPages ends up ordered root-first.
        var p = this.currentParentPage;
        while (p.parentPage) {
            parentPages.unshift(p.parentPage);
            p = p.parentPage;
        }
    }
    if (!this.currentPage) this.currentPage = pages[0];
    // Breadcrumb root entry: a button labeled with the document name.
    var node = Dom.newDOMElement({
        _name: "hbox",
        _children: [
            {
                _name: "button",
                type: "button",
                _children: [
                    {
                        _name: "i",
                        _text: "description"
                    },
                    {
                        _name: "span",
                        _text: this.controller.getDocumentName()
                    }
                ]
            }
        ]
    });
    node._page = null;
    this.pageBreadcrumb.appendChild(node);
    if (parentPages.length > 0) {
        // "..." indicator shown when the ancestor chain overflows the breadcrumb.
        node = Dom.newDOMElement({
            _name: "hbox",
            "class": "OverflowIndicator",
            _children: [
                {
                    _name: "span",
                    _text: "..."
                }
            ]
        });
        this.pageBreadcrumb.appendChild(node);
    }
    // At most MAX trailing ancestors stay fully visible; deeper ones get the
    // "Overflow" class (index counts down from the chain length).
    const MAX = 2;
    var index = parentPages.length;
    for (var i in parentPages) {
        var p = parentPages[i];
        node = Dom.newDOMElement({
            _name: "hbox",
            _children: [
                {
                    _name: "button",
                    type: "button",
                    _children: [
                        {
                            _name: "span",
                            _text: p.name
                        }
                    ]
                }
            ]
        });
        node._page = p;
        this.pageBreadcrumb.appendChild(node);
        if (index > MAX) Dom.addClass(node, "Overflow");
        index --;
    }
    this.pageBreadcrumb.setAttribute("overflow", parentPages.length > MAX);
    var thiz = this;
    // Render each sibling page twice: as a thumbnail and as a tab item.
    for (var i in pages) {
        var page = pages[i];
        var selected = this.currentPage && this.currentPage.id == page.id;
        var pageThumbnailView = new PageThumbnailView();
        pageThumbnailView.node()._index = i;
        pageThumbnailView.setPage(page);
        this.pageListContainer.appendChild(pageThumbnailView.node());
        pageThumbnailView.setAttribute("draggable", "true");
        pageThumbnailView.selectPage(selected);
        this.views.push(pageThumbnailView);
        var childNode;
        if( page.children.length == 0 ) {
            // Leaf page: plain tab with just the name.
            childNode = Dom.newDOMElement({
                _name: "hbox",
                "selected": selected,
                draggable: "true",
                "tabindex": "0",
                _children: [
                    {
                        _name: "span",
                        _text: page.name
                    }
                ]
            });
        } else {
            // Container page: tab gains a drop-down button to show children.
            childNode = Dom.newDOMElement({
                _name: "hbox",
                "selected": selected,
                draggable: "true",
                class: "nodeHasChild",
                "tabindex": "0",
                _children: [
                    {
                        _name: "span",
                        _text: page.name
                    },
                    {
                        _name: "button",
                        class:"button_Down",
                        name:"showChildren",
                        _children: [
                            {
                                _name: "i",
                                _text: "keyboard_arrow_down",
                                name:"showChildren",
                            }
                        ]
                    }
                ]
            });
        }
        childNode._page = page;
        childNode._index = i;
        this.childPageContainer.appendChild(childNode);
    }
    this.invalidateExpandedState();
    this.childPageSrollView.invalidate();
    this.pageListSrollView.invalidate();
    var thiz = this; // NOTE(review): redeclares the thiz alias from above.
    // Defer until layout so offsetWidth is valid, then scroll the active page
    // into view in both strips and re-apply the cached filter.
    window.setTimeout(function () {
        var childListFrom = 0;
        var childListTo = 0;
        var thumbnailFrom = 0;
        var thumbnailTo = 0;
        // Accumulate widths up to the active item in the tab strip.
        for (var i = 0; i < thiz.childPageContainer.childNodes.length; i++) {
            var item = thiz.childPageContainer.childNodes[i];
            if (item._page.id == thiz.currentPage.id) {
                childListTo = childListFrom + item.offsetWidth;
                break;
            }
            childListFrom += item.offsetWidth;
        }
        // Same for the thumbnail strip; Util.em() accounts for item spacing.
        for (var i = 0; i < thiz.pageListContainer.childNodes.length; i++) {
            var item = thiz.pageListContainer.childNodes[i];
            if (item.__widget.page.id == thiz.currentPage.id) {
                thumbnailTo = thumbnailFrom + item.offsetWidth + Util.em();
                break;
            }
            thumbnailFrom += item.offsetWidth + Util.em();
        }
        // ensuareVisible (sic) is the scroll-view API's spelling.
        thiz.childPageSrollView.ensuareVisible(childListFrom, childListTo);
        thiz.pageListSrollView.ensuareVisible(thumbnailFrom, thumbnailTo);
        thiz.filterPages();
    }, 0);
};
// Reflect the expanded/collapsed state as a "Collapsed" CSS class on the
// view's root node.
PageListView.prototype.invalidateExpandedState = function() {
    var collapsed = !this.expanded;
    Dom.toggleClass(this.node(), "Collapsed", collapsed);
};
// Refresh the thumbnail view whose page matches the changed page, if any.
PageListView.prototype.handlePageInfoChangedEvent = function (event) {
    if (!event || !event.page) return;
    for (var idx = 0; idx < this.views.length; idx++) {
        var view = this.views[idx];
        if (view.page.id == event.page.id) {
            view.setPage(event.page);
            return;
        }
    }
};
// Select the given page in both strips and make it active in the controller.
PageListView.prototype.handleSelectPage = function (page) {
    if (!page) return;
    // Update the selection highlight on every thumbnail widget.
    Dom.doOnAllChildren(this.pageListContainer, function (n) {
        var view = n.__widget;
        if (!view) return;
        var p = view.page;
        view.selectPage(p.id == page.id);
    });
    // Mirror the selection state onto the tab-style child list.
    Dom.doOnAllChildren(this.childPageContainer, function (n) {
        var p = n._page;
        n.setAttribute("selected", p.id == page.id);
    });
    this.controller.activatePage(page);
};
// Double-click behavior: leaf pages are simply selected; container pages
// drop any filter cached under their name and drill into their first child.
PageListView.prototype.handleDoubleClick = function (page) {
    var hasChildren = page.children && page.children.length > 0;
    if (!hasChildren) {
        this.handleSelectPage(page);
        return;
    }
    if (this.filterCache[page.name]) {
        delete this.filterCache[page.name];
    }
    this.activatePage(page.children[0]);
};
|
#!/bin/sh
# Bootstrap a Node project and install the Express/GraphQL stack.
npm init -y
# NOTE(review): lowercase -s is npm's --silent flag, not --save (-S);
# since npm 5 dependencies are saved by default, so the install still works.
npm install -s express body-parser easygraphql-mock express-graphql graphql cors
<filename>src/main/java/com/mycompany/smartparkingmanagement/servlets/Booking.java
package com.mycompany.smartparkingmanagement.servlets;
import api.Razorpay;
import com.mycompany.smartparkingmanagement.dao.BookingDao;
import com.mycompany.smartparkingmanagement.entities.BookingBean;
import com.mycompany.smartparkingmanagement.entities.Message;
import com.mycompany.smartparkingmanagement.entities.OrderBean;
import com.mycompany.smartparkingmanagement.entities.TimeCheckBean;
import java.io.IOException;
import java.time.LocalTime;
import com.razorpay.Order;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
public class Booking extends HttpServlet {
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
HttpSession s = request.getSession();
String value = request.getParameter("val");
try {
String cust_name = request.getParameter("cust_firstname");
String cust_surname = request.getParameter("cust_surname");
String vehicle_type = request.getParameter("vehicle_type");
String vehicle_no = request.getParameter("vehicle_no");
String date = request.getParameter("date");
String start_hrs = request.getParameter("start_hrs");
String start_mins = request.getParameter("start_mins");
String end_hrs = request.getParameter("end_hrs");
String end_mins = request.getParameter("end_mins");
BookingDao bkDao = new BookingDao();
TimeCheckBean time = bkDao.CheckTImeFromMaster(vehicle_type);
LocalTime open_time = LocalTime.parse(time.getDb_open_time());
//for msg
String OpenTime_msg = open_time.toString();
LocalTime close_time = LocalTime.parse(time.getDb_close_time());
//for msg
String CloseTime_msg = close_time.toString();
String str_start_time = start_hrs + ":" + start_mins + ":" + "00";
String str_end_time = end_hrs + ":" + end_mins + ":" + "00";
LocalTime start_time = LocalTime.parse(start_hrs + ":" + start_mins);
LocalTime end_time = LocalTime.parse(end_hrs + ":" + end_mins);
if (start_time.isBefore(end_time)) {
if (start_time.isBefore(open_time)) {
System.out.println("you can't book.... Parking opens at : " + OpenTime_msg + " please change entry time");
Message msg = new Message("you can't book.... Parking opens at : " + OpenTime_msg + " please change entry time", "error", "alert-danger");
s.setAttribute("message", msg);
if (value.equals("online")) {
response.sendRedirect("booking.jsp");
} else {
response.sendRedirect("OfflineBooking.jsp");
}
} else if (end_time.isAfter(close_time)) {
System.out.println("you can't book.... Parking closes at : " + close_time + " please change exit time");
Message msg = new Message("you can't book.... Parking opens at : " + CloseTime_msg + " please change exit time", "error", "alert-danger");
s.setAttribute("message", msg);
if (value.equals("online")) {
response.sendRedirect("booking.jsp");
} else {
response.sendRedirect("OfflineBooking.jsp");
}
} else if (start_time.equals(open_time) || start_time.isAfter(open_time)) {
if (end_time.equals(close_time) || end_time.isBefore(close_time)) {
System.out.println("book can be done");
BookingBean book = new BookingBean(cust_name, cust_surname, vehicle_type, vehicle_no, date, str_start_time, str_end_time);
// String result = bkDao.slotProvider(book);
book = bkDao.slotProvider(book);
// if (result.equals("Proceed to pay")) {
if (book.isBool() && book.getMsg().equals("true")) {
//checking online or onspot
if (value.equals("online")) {
double cost = bkDao.Payment(book.getVehicle_type(), book.getStr_start_time(), book.getStr_end_time());
System.out.println(cost);
System.out.println(book.getDate());
Order order = Razorpay.Generate_Order(cost, 1);
int amt_paid = order.get("amount_paid");
//String created_at = order.get("created_at");
int amount_due = order.get("amount_due");
System.out.println(amount_due + "checking amount");
amount_due = amount_due / 100; // converting paise to rs
System.out.println(amount_due + "After Converting");
String currency = order.get("currency");
String receipt = order.get("receipt");
String order_id = order.get("id");//order id
// int offer_id = order.get("offer_id");
String status = order.get("status");
int attempts = order.get("attempts");
OrderBean orderBean = new OrderBean(cost, amt_paid, amount_due, currency,
receipt, order_id, 1, status, attempts); //1 is offer id
boolean answer = bkDao.InsertOrderToDB(orderBean);
if (answer != true) {
Message msg = new Message("Something went wrong try again !!", "error", "alert-danger");
System.out.println("Something went wrong try again");
s.setAttribute("message", msg);
response.sendRedirect("booking.jsp");
} else {
// s.setAttribute("RazorpayOrder", order);
// String id = order.get("id");
s.setAttribute("order_id", order_id);
System.out.println(order_id);
s.setAttribute("booking", book);
Message msg = new Message("Proceed to pay your " + cost + ""
+ " <button type=\"submit\" id=\"rzp-button1\" class=\"btn btn-success me-3 px-4\">Pay</button> ",
"success", "alert-success");
s.setAttribute("message", msg);
}
} //ONSPOT START HERE
else {
if (bkDao.onSpotBookingInsertion(cust_name, cust_surname, vehicle_type,
vehicle_no, book.getDate(), book.getStr_start_time(), book.getStr_end_time(), book.getSlot_no())) {
Message msg = new Message("Booking Done Successfully, Your Slot No is " + book.getSlot_no(), "success", "alert-success");
s.setAttribute("message", msg);
} else {
Message msg = new Message("Offline Slot Something went Wrong", "error", "alert-danger");
s.setAttribute("message", msg);
}
}
} else if (book.isBool() && book.getMsg().equals("false")) {
Message msg = new Message("Sorry we are out of slots on" + book.getDate() + " at " + book.getStr_start_time(), "error", "alert-danger");
s.setAttribute("message", msg);
} else {
Message msg = new Message("error", "error", "alert-danger");
s.setAttribute("message", msg);
}
if (value.equals("online")) {
response.sendRedirect("booking.jsp");
} else {
response.sendRedirect("OfflineBooking.jsp");
}
}
}
} else {
Message msg = new Message("Start Time cannot be Greater than End Time", "error", "alert-danger");
s.setAttribute("message", msg);
if (value.equals("online")) {
response.sendRedirect("booking.jsp");
} else {
response.sendRedirect("OfflineBooking.jsp");
}
}
} catch (Exception e) {
e.printStackTrace();
if (value.equals("online")) {
response.sendRedirect("booking.jsp");
} else {
response.sendRedirect("OfflineBooking.jsp");
}
System.out.println("OOps... Something is wrong " + e);
}
}
}
|
#!/usr/bin/env bash
# Pin the requested traefik version and delegate to the shared update script.
set -e
set -u # Exit on error when uninitialized variable

if [ $# -eq 0 ] ; then
    echo "Usage: ./updatev1.sh <traefik tag or branch>"
    # A missing argument is an error: exit non-zero (the original exited 0,
    # letting callers treat a no-op run as success).
    exit 1
fi

SCRIPT_DIRNAME_ABSOLUTEPATH="$(cd "$(dirname "$0")" && pwd -P)"

# Record the requested version, then run the shared update logic.
echo "$1" > ./version
bash "${SCRIPT_DIRNAME_ABSOLUTEPATH}/update.sh" "$1"
|
-- Template: after every row inserted into table_name, run a stored procedure.
CREATE TRIGGER trigger_name
AFTER INSERT ON table_name
FOR EACH ROW
BEGIN
-- Delegate the row-level work to a procedure so the trigger body stays minimal.
CALL stored_procedure_name();
END;
class Node:
    """A binary-search-tree node: a value plus left/right child links."""

    def __init__(self, value):
        # Children start empty; insert() wires them up later.
        self.left = None
        self.right = None
        self.value = value
def insert(stack, node):
    """Insert ``node`` into the BST rooted at ``stack`` and return the root.

    Values smaller than a node go left; equal or larger values go right.
    When ``stack`` is None, ``node`` itself is returned so callers can seed
    a tree (the original only rebound the local parameter, a silent no-op).
    The return value is new and optional: existing callers that ignore it
    keep working unchanged.
    """
    if stack is None:
        return node
    if stack.value > node.value:
        if stack.left is None:
            stack.left = node
        else:
            insert(stack.left, node)
    else:
        if stack.right is None:
            stack.right = node
        else:
            insert(stack.right, node)
    return stack
def bst_from_list(lst):
    """Build a BST from a non-empty list; the first element becomes the root."""
    root = Node(lst[0])
    # Insert the remaining values one by one under the fixed root.
    for value in lst[1:]:
        insert(root, Node(value))
    return root
#!/bin/sh
# @CMAKE_INSTALL_PREFIX@ and @install_directory@ are placeholders substituted
# by CMake's configure_file() at build time; run the project's full grunt build.
cd @CMAKE_INSTALL_PREFIX@/@install_directory@ && ./node_modules/grunt-cli/bin/grunt full
import { Request, Response, Router } from 'express';
export const scimRouter = Router();
// SCIM
// http://www.simplecloud.info/
// Stub SCIM endpoints: every supported operation, on both the collection
// (/:resourceType) and a single resource (/:resourceType/:id), acknowledges
// with a bare 200 OK.
const acknowledge = (_req: Request, res: Response) => {
  res.sendStatus(200);
};

scimRouter.get('/:resourceType', acknowledge);
scimRouter.post('/:resourceType', acknowledge);
scimRouter.get('/:resourceType/:id', acknowledge);
scimRouter.put('/:resourceType/:id', acknowledge);
scimRouter.delete('/:resourceType/:id', acknowledge);
scimRouter.patch('/:resourceType/:id', acknowledge);
|
package main
import "github.com/urfave/cli"
// run executes the CLI application and converts any error into a
// conventional non-zero exit status.
func run(args []string) int {
	err := newApp().Run(args)
	if err != nil {
		return 1
	}
	return 0
}
// newApp assembles the urfave/cli application from the package-level
// metadata (name, description, version, author, flags, action) defined
// elsewhere in this package.
func newApp() *cli.App {
	app := cli.NewApp()
	app.Name = name
	app.HelpName = name
	app.Usage = description
	app.Version = version
	app.Author = author
	app.Flags = flags
	app.HideHelp = true // suppress cli's built-in help command/flag
	app.Action = action
	return app
}
|
# frozen_string_literal: true
##
# A comet indexer because the Hyrax one hard codes the solr connection without
# authentication.
class CometIndexingAdapter < Valkyrie::Indexing::Solr::IndexingAdapter
  # Builds the Solr URL with the admin credentials embedded in the authority
  # part, since the stock adapter connects without authentication.
  def connection_url
    "http://#{ENV["SOLR_ADMIN_USER"]}:#{ENV["SOLR_ADMIN_PASSWORD"]}@#{ENV["SOLR_HOST"]}:#{ENV["SOLR_PORT"]}/solr/#{ENV["SOLR_COLLECTION_NAME"]}"
  end
end
|
#! /bin/sh
# Build a size-optimized (-Os), symbol-stripped (-s) release binary of calic.
build_path=release
mkdir -p $build_path && \
cc -Os -s src/* -o $build_path/calic
#!/bin/bash
# Provision the account described in the cloud-init userdata file: create the
# user if missing, set its password, and install its SSH authorized keys.
cloud_user_data_file=/run/config/userdata
# No userdata supplied: nothing to provision.
[ -e "${cloud_user_data_file}" ] || exit 1
user=$(yq -r ".user" "${cloud_user_data_file}")
if [ -n "${user}" ]; then
    uid=$(id -u "${user}" 2>/dev/null)
    has_user=$?   # exit status of `id`: 0 when the user already exists
    password=$(yq -r ".password" "${cloud_user_data_file}")
    # Create the account (and a matching group) only when it does not exist yet.
    [ ${has_user} -eq 0 ] || (adduser -s /bin/bash -D "${user}" && addgroup "${user}")
    existing_passwd=$(cat /etc/shadow | grep -E "^${user}:" | cut -d':' -f2)
    # Only set a password when one was provided and none is set yet
    # ("!" means locked/no password in /etc/shadow).
    if [ -n "${password}" ] && [[ "${existing_passwd}" == "!" || -z "${existing_passwd}" ]]; then
        # NOTE(review): inspects the second character for '$' — presumably
        # detecting pre-hashed values to feed chpasswd -e; confirm the format.
        [[ "${password:1:1}" = '$' || "${password:1:1}" = '' ]] && echo "${user}:${password}" | chpasswd -e || echo "${user}:${password}" | chpasswd
    fi
    [ "${user}" = "root" ] && home_dir="/root" || home_dir="/home/${user}"
    ssh_authorized_keys=$(yq -r ".ssh_authorized_keys" "${cloud_user_data_file}")
    mkdir -p "${home_dir}/.ssh"
    touch "${home_dir}/.ssh/authorized_keys"
    # Append every configured public key to authorized_keys.
    if [ -n "${ssh_authorized_keys}" ]; then
        while read line; do
            echo "$line" >> "${home_dir}/.ssh/authorized_keys"
        done <<< $(yq -r ".ssh_authorized_keys[]" "${cloud_user_data_file}")
    fi
    # Lock down key file permissions and hand ownership to the user.
    chmod 400 "${home_dir}/.ssh/authorized_keys"
    chown "${user}:${user}" -R "${home_dir}/.ssh"
fi
exit 0
|
/// Returns `arr` with duplicate strings removed, preserving the order of
/// first occurrence.
///
/// The original body moved `item` into the set and then pushed the moved
/// value (a use-after-move that does not compile), and `HashSet` was never
/// brought into scope.
fn remove_duplicates(arr: Vec<String>) -> Vec<String> {
    use std::collections::HashSet;

    let mut seen: HashSet<String> = HashSet::new();
    let mut result: Vec<String> = Vec::new();
    for item in arr {
        // insert() returns true only for values not previously present,
        // replacing the contains()+insert double lookup.
        if seen.insert(item.clone()) {
            result.push(item);
        }
    }
    result
}
"use strict";
// Transpiled output (tsc, ES5 target) of auth.module.ts — edit the
// TypeScript source rather than this file.
// Standard tsc helper: applies decorators to a class or member, preferring
// Reflect.decorate when available and falling back to manual application.
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
    var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
    if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
    else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
    return c > 3 && r && Object.defineProperty(target, key, r), r;
};
exports.__esModule = true;
exports.AuthModule = void 0;
var common_1 = require("@nestjs/common");
var auth_service_1 = require("./auth.service");
var authenticated_guard_1 = require("./authenticated.guard");
var local_strategy_1 = require("./local.strategy");
var login_guard_1 = require("./login.guard");
var session_serializer_1 = require("./session.serializer");
var users_module_1 = require("../users/users.module");
// NestJS module wiring the authentication providers together; the @Module
// decorator is applied through the __decorate helper above.
var AuthModule = /** @class */ (function () {
    function AuthModule() {
    }
    AuthModule = __decorate([
        (0, common_1.Module)({
            providers: [
                auth_service_1.AuthService,
                local_strategy_1.LocalStrategy,
                session_serializer_1.SessionSerializer,
                login_guard_1.LoginGuard,
                authenticated_guard_1.Authenticated,
            ],
            imports: [users_module_1.UsersModule]
        })
    ], AuthModule);
    return AuthModule;
}());
exports.AuthModule = AuthModule;
|
<reponame>Mihran9991/async-forms-back
import {
BelongsTo,
Column,
ForeignKey,
Model,
Table,
} from "sequelize-typescript";
import { TEXT } from "sequelize";
import User from "./user.entity";
@Table({
  modelName: "Form",
  tableName: "forms",
  timestamps: true,
})
export class Form extends Model {
  /** Surrogate auto-incrementing primary key. */
  @Column({
    autoIncrement: true,
    primaryKey: true,
  })
  id: number;
  /** Human-readable form name; unique across all forms. */
  @Column({
    allowNull: false,
    unique: true,
  })
  name: string;
  /** Unique system identifier for the form. */
  @Column({
    allowNull: false,
    unique: true,
  })
  sysName: string;
  /** Id of the owning User row. */
  @ForeignKey(() => User)
  @Column({ allowNull: false })
  ownerId: string;
  /** The owning user, resolved through the ownerId foreign key. */
  @BelongsTo(() => User)
  owner: User;
  /** Form definition serialized as a JSON string (TEXT column). */
  @Column({
    allowNull: false,
    type: TEXT,
  })
  json: string;
}
export default Form;
|
#!/bin/bash
# Translate $1 (English text, one sentence per line) to Czech with a
# character-level fairseq model, emitting hypotheses on stdout.
set -ex
MODEL=checkpoints/czeng-lee12-6
DATA=$1
OUTPUT=$2
# NOTE(review): OUTPUT is assigned but never used below — confirm output
# redirection is handled by the caller.
CHECKPOINT=$MODEL/checkpoint.avg.pt
# (Re)build the averaged checkpoint when it is missing or older than the
# most recent training checkpoint.
if [ ! -e $CHECKPOINT ] || [ ${MODEL}/checkpoint_last.pt -nt $CHECKPOINT ]; then
    python scripts/average_checkpoints.py --inputs ${MODEL}/checkpoint.best_loss_*pt --output $CHECKPOINT
fi
# Clip overly long input lines; --max-len-b caps target length at 1024.
TMPFILE=$(mktemp)
cut -c -1023 $DATA > $TMPFILE
# Keep only hypothesis lines (H-…) and strip the score column.
python -u fairseq_cli/interactive.py \
    $MODEL \
    --path $CHECKPOINT \
    --max-tokens 10000 \
    --unkpen 5.0 \
    --beam 10 --lenpen 1.4 \
    --source-lang en --target-lang cs \
    --char-tokens \
    --lee-style \
    --max-len-a 0 --max-len-b 1024 \
    --input $TMPFILE | grep '^H-' | sed -e 's/.*\t//'
rm $TMPFILE
|
#!/bin/bash
# Interactive helper: stage everything, commit with a prompted message, and
# push to the branch that is currently checked out.
#History:
#v1: first version of getting the correct branch
#v2: fix the bug when the current git language is French
#v3: quote the commit message and branch so multi-word input works
#Force English output so the "On branch <name>" parsing below is stable.
alias git='LANG=en_GB git'
# NOTE(review): aliases are not expanded in non-interactive bash scripts
# unless `shopt -s expand_aliases` is set — confirm this override takes effect.
#v1: add the automatically get the local branch name
git status
#Example first line: On branch tutorial_quiz_with_api_example
#The branch name is the third whitespace-separated field, hence N=3.
N=3
#Capture the git status output
ja_git_status=$(git status)
echo $ja_git_status
#Extract the branch name; the unquoted expansion deliberately collapses the
#multi-line status output into one whitespace-separated line for cut.
ja_branch=$(echo $ja_git_status | cut -d " " -f $N)
#Normal Information
echo "---"
echo "[JA]: in branch: \"$ja_branch\""
echo -n "[JA]: This script will push back your code to git are you sure to do it ? "
read nothing
echo "[JA]: Start git add process"
git add .
echo "[JA]: Finish git add"
echo
echo -n "[JA]: Prepare for commit, please enter the commit message: "
read ja_commit
echo "[JA]: your commit is $ja_commit"
echo "[JA]: Start commit.."
# Quote the message: unquoted, every word after the first was passed to git
# as a separate argument and treated as a pathspec, failing the commit.
git commit -m "$ja_commit"
echo
echo "[JA]: Start to push back to $ja_branch"
git push origin "$ja_branch"
echo
echo "[JA]: End the ja_git_push_back.sh"
echo
|
#! /bin/bash
# SLURM batch job: REXI finite-difference scalability run (896 ranks,
# 2 OpenMP threads each) on the mpp2 cluster.
#SBATCH -o /home/hpc/pr63so/di69fol/workspace/SWEET_2016_01_16/benchmarks_performance/rexi_tests/2016_01_06_scalability_rexi_fd_run3/run_rexi_fd_par_m2048_t002_n0128_r0896_a1.txt
###SBATCH -e /home/hpc/pr63so/di69fol/workspace/SWEET_2016_01_16/benchmarks_performance/rexi_tests/2016_01_06_scalability_rexi_fd_run3/run_rexi_fd_par_m2048_t002_n0128_r0896_a1.err
#SBATCH -J rexi_fd_par_m2048_t002_n0128_r0896_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=896
#SBATCH --cpus-per-task=2
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=00:05:00
#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Pin OpenMP threads compactly to hardware threads.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=2
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo
# Load the toolchain expected by the SWEET build (Intel 16 + Intel MPI 5.1).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5
cd /home/hpc/pr63so/di69fol/workspace/SWEET_2016_01_16/benchmarks_performance/rexi_tests/2016_01_06_scalability_rexi_fd_run3
cd ../../../
. local_software/env_vars.sh
# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"
# Launch: 14 ranks per node, 896 ranks total, 2 OpenMP threads per rank.
time -p mpiexec.hydra -genv OMP_NUM_THREADS 2 -envall -ppn 14 -n 896 ./build/rexi_fd_par_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 0 --rexi-h 0.8 --timestepping-mode 1 --staggering 0 --rexi-m=2048 -C -5.0
|
#!/bin/bash
# CI gate: run rplint on every *.rpl test touched by the current merge
# request and fail when any of them does not pass.
set -o nounset -o xtrace -o errexit
source "$(dirname "$(readlink -f "$0")")/common.sh"

function find_new_tests {
    : detect tests affected by current merge request
    : store list of modified tests in ${NEW_TESTS_FILE}
    git diff --name-only --diff-filter=AM ${MERGEBASE} ${HEAD} | fgrep .rpl > "${NEW_TESTS_FILE}" || : no new tests detected
}

NEW_TESTS_FILE="/tmp/new_tests"
find_new_tests
truncate -s0 /tmp/rplint_fails
: run rplint of all new tests
FAIL=0
cat "${NEW_TESTS_FILE}"
# Read the list line-wise (the original word-split $(cat /tmp/new_tests),
# breaking on file names with spaces and hard-coding the path instead of
# using ${NEW_TESTS_FILE}).
while read -r test; do
    ${PYTHON} -m rplint "${test}" >> /tmp/rplint_fails || FAIL=1
done < "${NEW_TESTS_FILE}"
: if even one of the test does not pass rplint, fail
if [ "$FAIL" -eq 1 ]
then
    cat /tmp/rplint_fails
    exit 1
fi
exit 0
|
# Build the vegeta load-testing image, then run it interactively with
# container port 80 published on host port 6543; --rm discards it on exit.
docker build -t vegeta -f ./vegeta-dockerfile ./ &&
docker run -it -p 6543:80 --rm vegeta
import React, { Component } from 'react';
import {
View,
Text,
Button,
StyleSheet,
} from 'react-native';
// Landing screen of the water-usage app: offers navigation into the main
// content and the tips screen.
// NOTE(review): relies on a `navigation` prop, presumably injected by
// react-navigation — confirm this screen is registered in a navigator.
export default class App extends Component {
  render() {
    return (
      <View style={styles.container}>
        <Text>Water Usage App</Text>
        {/* Navigates to the main water-usage content */}
        <Button
          title='Main Content'
          onPress={() => {
            this.props.navigation.navigate('Main');
          }}
        />
        {/* Navigates to the water-saving tips screen */}
        <Button
          title='Tips'
          onPress={() => {
            this.props.navigation.navigate('Tips');
          }}
        />
      </View>
    );
  }
}
// Center the screen's content both vertically and horizontally.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center'
  }
});
<reponame>PhilippePitzClairoux/Nova_Gestion<filename>nova-gestion-backend/src/main/java/ca/nova/gestion/services/MaintenanceService.java
package ca.nova.gestion.services;
import ca.nova.gestion.errors.exceptions.InvalidRequest;
import ca.nova.gestion.errors.exceptions.RessourceNotFound;
import ca.nova.gestion.mappers.MaintenanceMapper;
import ca.nova.gestion.model.Maintenance;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@Service
public class MaintenanceService {

    private final MaintenanceMapper maintenanceMapper;

    @Autowired
    public MaintenanceService(MaintenanceMapper maintenanceMapper) {
        this.maintenanceMapper = maintenanceMapper;
    }

    /**
     * Persists a new maintenance record.
     *
     * @param maintenance record to insert; must carry a machine id,
     *                    a description and a date
     * @return the generated maintenance id
     * @throws InvalidRequest when the record or a required field is missing
     */
    @Transactional
    @PreAuthorize("hasRole('Admin') or hasRole('Superviseur')")
    public int insertMaintenance(Maintenance maintenance) {
        if (maintenance == null)
            throw new InvalidRequest("Missing parameters");
        if (maintenance.getIdMachine() == 0 || maintenance.getDescription() == null || maintenance.getDate() == null)
            throw new InvalidRequest("Missing parameters");
        maintenanceMapper.insertMaintenance(maintenance);
        return maintenance.getIdMaintenance();
    }

    /**
     * Updates an existing maintenance record.
     *
     * @param maintenance record to update; must reference an existing id and
     *                    carry all required fields
     * @throws InvalidRequest when the record is null, the id is missing or
     *                        unknown, or a required field is absent
     */
    @Transactional
    @PreAuthorize("hasRole('Admin')")
    public void updateMaintenance(Maintenance maintenance) {
        // Guard against null before any dereference (the original fetched
        // from the mapper with maintenance.getIdMaintenance() first and
        // could raise a raw NullPointerException).
        if (maintenance == null)
            throw new InvalidRequest("Missing parameters");
        // Validate the id locally before paying for a database lookup.
        if (maintenance.getIdMaintenance() == 0
                || maintenanceMapper.getMaintenance(maintenance.getIdMaintenance()) == null)
            throw new InvalidRequest("Missing IdMaintenance");
        if (maintenance.getIdMachine() == 0 || maintenance.getDescription() == null || maintenance.getDate() == null)
            throw new InvalidRequest("Missing information for update");
        maintenanceMapper.updateMaintenance(maintenance);
    }

    /**
     * Deletes a maintenance record.
     *
     * @param idMaintenance id of the record to delete
     * @throws RessourceNotFound when no record exists for the id
     */
    @Transactional
    @PreAuthorize("hasRole('Admin') or hasRole('Superviseur')")
    public void deleteMaintenance(int idMaintenance) {
        if (maintenanceMapper.getMaintenance(idMaintenance) == null)
            throw new RessourceNotFound("Invalid idMaintenance");
        maintenanceMapper.deleteMaintenance(idMaintenance);
    }
}
|
function validateAndSerializeGraphQLRequest(
target: any,
propertyKey: string,
descriptor: PropertyDescriptor
) {
const originalMethod = descriptor.value;
descriptor.value = function (request: SerializedGraphQLRequest) {
if (!request.query || typeof request.query !== 'string') {
throw new Error('Invalid GraphQL request: query must be a non-empty string');
}
if (request.variables && typeof request.variables !== 'object') {
throw new Error('Invalid GraphQL request: variables must be an object');
}
if (request.variables) {
request.variables = JSON.stringify(request.variables);
}
return originalMethod.call(this, request);
};
return descriptor;
} |
#!/bin/sh
# Copy the interpreter artifacts into ./save, then archive the s31 tree.
# loader.l? is a glob matching e.g. loader.ll / loader.lm single-suffix files.
for I in macsol.ll lllinuxsparc.llm3 loader.l? lelisp31bin
do
echo $I
cp $I save
done
tar cvzf ./save/llas.tar.gz ./s31/*
|
// Flow type declarations for the `node-cache` in-memory key/value cache.
declare module 'node-cache' {
  // Options accepted by the NodeCache constructor.
  declare type CacheOptions = {
    stdTTL?: number, // default time-to-live (seconds) applied to new entries
    checkperiod?: number, // interval (seconds) of the automatic expiry sweep
    errorOnMissing?: boolean,
    useClones?: boolean,
    deleteOnExpire?: boolean,
  };
  // Counters reported by getStats().
  declare type Stats = {
    keys: number,
    hits: number,
    misses: number,
    ksize: number,
    vsize: number,
  };
  declare class NodeCache<Key, Value> {
    constructor(options?: CacheOptions): NodeCache<Key, Value>;
    // All methods accept an optional Node-style callback and also return
    // their result synchronously.
    set(key: Key, value: Value, ttl?: number, cb?: (err: ?any, success: ?true) => any): ?true;
    get(key: Key, cb?: (err: ?any, value: ?Value) => any): ?Value;
    mget<V: {[key: Key]: Value}>(keys: Key[], cb?: (err: ?any, value: V) => any): V;
    del(key: Key, cb?: (err: ?any, count: number) => any): number;
    mdel(keys: Key[], cb?: (err: ?any, count: number) => any): number;
    ttl(key: Key, ttl: number, cb?: (err: ?any, changed: boolean) => any): boolean;
    getTtl(key: Key, cb?: (err: ?any, ttl: ?number) => any): ?number;
    keys(cb?: (err: ?any, keys: Key[]) => any): Key[];
    getStats(): Stats;
    flushAll(): void;
    close(): void;
  }
  declare module.exports: typeof NodeCache;
}
|
package pl.jamnic.example.postgres.database.api.service;
import pl.jamnic.example.postgres.database.api.request.PersonCreateRequest;
import pl.jamnic.example.postgres.database.api.request.PersonUpdateRequest;
public interface PersonService {
    // Creates a new person from the given request payload.
    void create(PersonCreateRequest request);
    // Applies the changes held in the request to an existing person.
    void update(PersonUpdateRequest request);
}
|
def check_anagram(s1, s2):
    """Return True if s1 and s2 are anagrams of each other, ignoring case."""
    # Two strings are anagrams exactly when their lowercased characters,
    # sorted, are identical; returning the comparison directly replaces the
    # original if/else that returned the literals True/False.
    return sorted(s1.lower()) == sorted(s2.lower())
# Demo: report whether the two words are anagrams of each other.
print(check_anagram("listen", "silent"))
import pandas as pd
from sklearn.neighbors import KNeighborsClassifier
import numpy as np
# Load dataset: every column except the last is a feature, the last the label.
data = pd.read_csv('dataset.csv')
X = data.iloc[:, :-1]
y = data.iloc[:, -1]
# Create and fit a 5-nearest-neighbors classifier.
classifier = KNeighborsClassifier(n_neighbors=5)
classifier.fit(X, y)
# Make predictions and print accuracy.
# NOTE(review): accuracy is computed on the same data the model was fit on,
# so this is training accuracy, not generalization — confirm intended.
predictions = classifier.predict(X)
print(np.mean(predictions == y))
<gh_stars>1-10
/*
* Copyright 1999-2018 Alibaba Group Holding Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.csp.sentinel.adapter.pigeon;
import com.alibaba.csp.sentinel.adapter.pigeon.provider.DemoService;
import com.dianping.pigeon.remoting.common.domain.InvocationContext;
import com.dianping.pigeon.remoting.common.domain.InvocationRequest;
import org.junit.Test;
import java.lang.reflect.Method;
import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;
/**
* @author <NAME>
*/
public class PigeonUtilsTest {
    @Test
    public void testGetResourceName() {
        // Mock a Pigeon invocation targeting DemoService's first declared method.
        InvocationRequest invocationRequest = mock(InvocationRequest.class);
        InvocationContext context = mock(InvocationContext.class);
        Method method = DemoService.class.getMethods()[0];
        when(context.getRequest()).thenReturn(invocationRequest);
        when(invocationRequest.getServiceName()).thenReturn(DemoService.class.getName());
        when(invocationRequest.getMethodName()).thenReturn(method.getName());
        // Build the parameter-class-name array exactly as Pigeon would supply it.
        String[] paramClazzName = new String[2];
        int i = 0;
        for (Class<?> clazz : method.getParameterTypes()) {
            paramClazzName[i] = clazz.getName();
            i ++;
        }
        when(invocationRequest.getParamClassName()).thenReturn(paramClazzName);
        // Expected format: "<service>:<method>(<comma-separated param types>)".
        String resourceName = PigeonUtils.getResourceName(context);
        assertEquals("com.alibaba.csp.sentinel.adapter.pigeon.provider.DemoService:sayHello(java.lang.String,int)", resourceName);
    }
}
|
package com.imooc.o2o.service;
import com.imooc.o2o.entity.ShopCategory;
import java.util.List;
/**
 * Service for listing shop categories.
 */
public interface ShopCategoryService {
    // Cache key under which the shop-category list is stored — presumably
    // consumed by a caching implementation; confirm against the impl class.
    public static final String SCLISTKEY ="shopcategorylist";
    /**
     * Fetches the list of ShopCategory entries matching the given condition.
     * @param shopCategoryCondition filter template; fields set on it constrain the query
     * @return matching shop categories
     */
    List<ShopCategory> getShopCategoryList(ShopCategory shopCategoryCondition);
}
|
#!/bin/sh
# Render the ArgoCD e2e permissions manifest: substitute resource names
# (optionally prefixed with ARGOCD_E2E_NAME_PREFIX) and the namespace into
# the ##PLACEHOLDER## tokens, emitting the result on stdout.
BASEPATH=$(dirname $0)
PERMFILE=${BASEPATH}/argocd-remote-permissions.yaml
if ! test -f ${PERMFILE}; then
    echo "ERROR: $PERMFILE does not exist." >&2
    exit 1
fi
NAMESPACE=${NAMESPACE:-argocd-e2e}
# Derive ClusterRole/Binding and ServiceAccount names, honoring the prefix.
if test "${ARGOCD_E2E_NAME_PREFIX}" != ""; then
    CRNAME="${ARGOCD_E2E_NAME_PREFIX}-argocd-application-controller"
    CRBNAME="${ARGOCD_E2E_NAME_PREFIX}-argocd-application-controller"
    CONTROLLERSANAME="${ARGOCD_E2E_NAME_PREFIX}-argocd-application-controller"
    SERVERSANAME="${ARGOCD_E2E_NAME_PREFIX}-argocd-server"
else
    CRNAME="argocd-application-controller"
    CRBNAME="argocd-application-controller"
    CONTROLLERSANAME="argocd-application-controller"
    SERVERSANAME="argocd-server"
fi
# Perform the token substitution.
sed \
    -e "s/##CRNAME##/${CRNAME}/g" \
    -e "s/##CRBNAME##/${CRBNAME}/g" \
    -e "s/##CONTROLLERSANAME##/${CONTROLLERSANAME}/g" \
    -e "s/##SERVERSANAME##/${SERVERSANAME}/g" \
    -e "s/##NAMESPACE##/${NAMESPACE}/g" \
    $PERMFILE
|
"use strict";
// Generated icon-data module: a bar-chart-outline SVG described as a
// viewBox plus a single path element. Do not hand-edit the path data.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.chartBarOutline = void 0;
var chartBarOutline = {
  "viewBox": "0 0 24 24",
  "children": [{
    "name": "path",
    "attribs": {
      "d": "M17 5c-.771 0-1.468.301-2 .779v-1.779c0-1.654-1.346-3-3-3s-3 1.346-3 3v4.779c-.532-.478-1.229-.779-2-.779-1.654 0-3 1.346-3 3v6h16v-9c0-1.654-1.346-3-3-3zm-5-2c.551 0 1 .448 1 1v11h-2v-11c0-.552.449-1 1-1zm-4 12h-2v-4c0-.552.449-1 1-1s1 .448 1 1v4zm10 0h-2v-7c0-.552.449-1 1-1s1 .448 1 1v7zM19 21h-14c-.552 0-1-.447-1-1s.448-1 1-1h14c.552 0 1 .447 1 1s-.448 1-1 1z"
    },
    "children": []
  }]
};
exports.chartBarOutline = chartBarOutline;
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package create
import (
"context"
"fmt"
"regexp"
"github.com/spf13/cobra"
policyv1 "k8s.io/api/policy/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/intstr"
"k8s.io/cli-runtime/pkg/genericclioptions"
resourcecli "k8s.io/cli-runtime/pkg/resource"
policyv1client "k8s.io/client-go/kubernetes/typed/policy/v1"
cmdutil "k8s.io/kubectl/pkg/cmd/util"
"k8s.io/kubectl/pkg/scheme"
"k8s.io/kubectl/pkg/util"
"k8s.io/kubectl/pkg/util/i18n"
"k8s.io/kubectl/pkg/util/templates"
)
// Help text and examples shown by `kubectl create poddisruptionbudget -h`
// (alias `kubectl create pdb`).
var (
	pdbLong = templates.LongDesc(i18n.T(`
Create a pod disruption budget with the specified name, selector, and desired minimum available pods.`))

	pdbExample = templates.Examples(i18n.T(`
# Create a pod disruption budget named my-pdb that will select all pods with the app=rails label
# and require at least one of them being available at any point in time
kubectl create poddisruptionbudget my-pdb --selector=app=rails --min-available=1
# Create a pod disruption budget named my-pdb that will select all pods with the app=nginx label
# and require at least half of the pods selected to be available at any point in time
kubectl create pdb my-pdb --selector=app=nginx --min-available=50%`))
)
// PodDisruptionBudgetOpts holds the command-line options for poddisruptionbudget sub command
type PodDisruptionBudgetOpts struct {
	// PrintFlags holds options necessary for obtaining a printer
	PrintFlags *genericclioptions.PrintFlags
	// PrintObj prints a runtime object using the printer resolved from PrintFlags.
	PrintObj func(obj runtime.Object) error
	// Name of resource being created
	Name string
	// MinAvailable is the raw --min-available flag value: a number or percentage
	// string such as "1" or "50%". Mutually exclusive with MaxUnavailable.
	MinAvailable string
	// MaxUnavailable is the raw --max-unavailable flag value; same format as MinAvailable.
	MaxUnavailable string
	// A label selector to use for this budget
	Selector string
	// CreateAnnotation records whether the last-applied-configuration annotation
	// should be stored on the created object.
	CreateAnnotation bool
	// FieldManager is the name used for server-side field ownership tracking.
	FieldManager string
	// Namespace the budget is created in.
	Namespace string
	// EnforceNamespace is true when the kubeconfig context pins a namespace.
	EnforceNamespace bool
	// Client talks to the policy/v1 API group.
	Client *policyv1client.PolicyV1Client
	// DryRunStrategy selects none/client/server dry-run behaviour.
	DryRunStrategy cmdutil.DryRunStrategy
	// DryRunVerifier checks that the server supports server-side dry-run.
	DryRunVerifier *resourcecli.DryRunVerifier
	genericclioptions.IOStreams
}
// NewPodDisruptionBudgetOpts creates a new *PodDisruptionBudgetOpts with sane defaults
func NewPodDisruptionBudgetOpts(ioStreams genericclioptions.IOStreams) *PodDisruptionBudgetOpts {
	opts := &PodDisruptionBudgetOpts{IOStreams: ioStreams}
	// Default print flags announce the object as "created" and know the kubectl scheme.
	opts.PrintFlags = genericclioptions.NewPrintFlags("created").WithTypeSetter(scheme.Scheme)
	return opts
}
// NewCmdCreatePodDisruptionBudget is a macro command to create a new pod disruption budget.
func NewCmdCreatePodDisruptionBudget(f cmdutil.Factory, ioStreams genericclioptions.IOStreams) *cobra.Command {
	o := NewPodDisruptionBudgetOpts(ioStreams)

	cmd := &cobra.Command{
		Use:                   "poddisruptionbudget NAME --selector=SELECTOR --min-available=N [--dry-run=server|client|none]",
		DisableFlagsInUseLine: true,
		Aliases:               []string{"pdb"},
		Short:                 i18n.T("Create a pod disruption budget with the specified name"),
		Long:                  pdbLong,
		Example:               pdbExample,
		// Standard Complete/Validate/Run pipeline; any error aborts the command.
		Run: func(cmd *cobra.Command, args []string) {
			cmdutil.CheckErr(o.Complete(f, cmd, args))
			cmdutil.CheckErr(o.Validate())
			cmdutil.CheckErr(o.Run())
		},
	}

	// Printer flags plus the flags shared by the whole `kubectl create` family.
	o.PrintFlags.AddFlags(cmd)
	cmdutil.AddApplyAnnotationFlags(cmd)
	cmdutil.AddValidateFlags(cmd)
	cmdutil.AddDryRunFlag(cmd)
	// Budget-specific flags; exactly one of min-available/max-unavailable is
	// expected (enforced in Validate, not here).
	cmd.Flags().StringVar(&o.MinAvailable, "min-available", o.MinAvailable, i18n.T("The minimum number or percentage of available pods this budget requires."))
	cmd.Flags().StringVar(&o.MaxUnavailable, "max-unavailable", o.MaxUnavailable, i18n.T("The maximum number or percentage of unavailable pods this budget requires."))
	cmd.Flags().StringVar(&o.Selector, "selector", o.Selector, i18n.T("A label selector to use for this budget. Only equality-based selector requirements are supported."))
	cmdutil.AddFieldManagerFlagVar(cmd, &o.FieldManager, "kubectl-create")

	return cmd
}
// Complete completes all the required options
func (o *PodDisruptionBudgetOpts) Complete(f cmdutil.Factory, cmd *cobra.Command, args []string) error {
	var err error

	// The resource name comes from the positional arguments.
	o.Name, err = NameFromCommandArgs(cmd, args)
	if err != nil {
		return err
	}

	restConfig, err := f.ToRESTConfig()
	if err != nil {
		return err
	}

	o.Client, err = policyv1client.NewForConfig(restConfig)
	if err != nil {
		return err
	}

	o.CreateAnnotation = cmdutil.GetFlagBool(cmd, cmdutil.ApplyAnnotationsFlag)

	o.DryRunStrategy, err = cmdutil.GetDryRunStrategy(cmd)
	if err != nil {
		return err
	}

	// The dry-run verifier needs dynamic + discovery clients to decide whether
	// the API server supports server-side dry-run for this resource.
	dynamicClient, err := f.DynamicClient()
	if err != nil {
		return err
	}
	discoveryClient, err := f.ToDiscoveryClient()
	if err != nil {
		return err
	}
	o.DryRunVerifier = resourcecli.NewDryRunVerifier(dynamicClient, discoveryClient)

	o.Namespace, o.EnforceNamespace, err = f.ToRawKubeConfigLoader().Namespace()
	if err != nil {
		return err
	}

	// Label printed output as dry-run where applicable, then bind the resolved
	// printer into a simple PrintObj closure writing to stdout.
	cmdutil.PrintFlagsWithDryRunStrategy(o.PrintFlags, o.DryRunStrategy)
	printer, err := o.PrintFlags.ToPrinter()
	if err != nil {
		return err
	}
	o.PrintObj = func(obj runtime.Object) error {
		return printer.PrintObj(obj, o.Out)
	}

	return nil
}
// Validate checks to the PodDisruptionBudgetOpts to see if there is sufficient information run the command
func (o *PodDisruptionBudgetOpts) Validate() error {
	if o.Name == "" {
		return fmt.Errorf("name must be specified")
	}
	if o.Selector == "" {
		return fmt.Errorf("a selector must be specified")
	}

	// Exactly one of the two budget thresholds must be supplied.
	minSet := len(o.MinAvailable) > 0
	maxSet := len(o.MaxUnavailable) > 0
	if !minSet && !maxSet {
		return fmt.Errorf("one of min-available or max-unavailable must be specified")
	}
	if minSet && maxSet {
		return fmt.Errorf("min-available and max-unavailable cannot be both specified")
	}

	// Accept plain non-negative integers or integer percentages (10, 20, 30%, 50%),
	// rejecting anything with units such as 10Gb or 20Mb.
	validBudget := regexp.MustCompile(`^[0-9]+%?$`)
	if minSet && !validBudget.MatchString(o.MinAvailable) {
		return fmt.Errorf("invalid format specified for min-available")
	}
	if maxSet && !validBudget.MatchString(o.MaxUnavailable) {
		return fmt.Errorf("invalid format specified for max-unavailable")
	}

	return nil
}
// Run calls the CreateSubcommandOptions.Run in PodDisruptionBudgetOpts instance
func (o *PodDisruptionBudgetOpts) Run() error {
	podDisruptionBudget, err := o.createPodDisruptionBudgets()
	if err != nil {
		return err
	}

	// Optionally stamp the last-applied-configuration annotation on the object.
	if err := util.CreateOrUpdateAnnotation(o.CreateAnnotation, podDisruptionBudget, scheme.DefaultJSONEncoder()); err != nil {
		return err
	}

	// Client-side dry-run skips the API call entirely; server-side dry-run is
	// forwarded to the API server after confirming the server supports it.
	if o.DryRunStrategy != cmdutil.DryRunClient {
		createOptions := metav1.CreateOptions{}
		if o.FieldManager != "" {
			createOptions.FieldManager = o.FieldManager
		}
		if o.DryRunStrategy == cmdutil.DryRunServer {
			if err := o.DryRunVerifier.HasSupport(podDisruptionBudget.GroupVersionKind()); err != nil {
				return err
			}
			createOptions.DryRun = []string{metav1.DryRunAll}
		}
		podDisruptionBudget, err = o.Client.PodDisruptionBudgets(o.Namespace).Create(context.TODO(), podDisruptionBudget, createOptions)
		if err != nil {
			return fmt.Errorf("failed to create poddisruptionbudgets: %v", err)
		}
	}

	// Print the (possibly server-populated) object with the configured printer.
	return o.PrintObj(podDisruptionBudget)
}
// createPodDisruptionBudgets assembles the PodDisruptionBudget object described
// by the options; it does not talk to the API server.
func (o *PodDisruptionBudgetOpts) createPodDisruptionBudgets() (*policyv1.PodDisruptionBudget, error) {
	// Only stamp the namespace on the object when the kubeconfig context pins one.
	ns := ""
	if o.EnforceNamespace {
		ns = o.Namespace
	}

	pdb := &policyv1.PodDisruptionBudget{
		TypeMeta: metav1.TypeMeta{
			APIVersion: policyv1.SchemeGroupVersion.String(),
			Kind:       "PodDisruptionBudget",
		},
		ObjectMeta: metav1.ObjectMeta{
			Name:      o.Name,
			Namespace: ns,
		},
	}

	selector, err := metav1.ParseToLabelSelector(o.Selector)
	if err != nil {
		return nil, err
	}
	pdb.Spec.Selector = selector

	// Exactly one of the two thresholds is set (enforced by Validate); both are
	// parsed leniently as int-or-percent strings.
	if len(o.MinAvailable) > 0 {
		minAvailable := intstr.Parse(o.MinAvailable)
		pdb.Spec.MinAvailable = &minAvailable
	} else if len(o.MaxUnavailable) > 0 {
		maxUnavailable := intstr.Parse(o.MaxUnavailable)
		pdb.Spec.MaxUnavailable = &maxUnavailable
	}

	return pdb, nil
}
|
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#! /bin/bash
# NOTE(review): this shebang is not on the first line of the file (the license
# header precedes it), so it is just a comment — the script runs under whatever
# shell invokes it; confirm that is intended.
# Fail on first error and echo each command as it runs.
set -e
set -x
# Create and activate a fresh python3 virtualenv in the current directory.
virtualenv -p python3 .
source ./bin/activate
pip3 install tensorflow torch jax jaxlib
pip3 install git+https://github.com/google-research/flax.git@prerelease
# Smoke-run every opt_list example backend.
cd opt_list/
python3 -m opt_list.examples.tf_v1
python3 -m opt_list.examples.tf_keras
python3 -m opt_list.examples.torch
python3 -m opt_list.examples.jax_flax
python3 -m opt_list.examples.jax_optimizers
python3 -m opt_list.examples.jax_optix
|
<!DOCTYPE html>
<html>
<head>
  <!-- Minimal inline styling: plain black links and a bordered top nav bar. -->
  <style type="text/css">
    a {
      text-decoration: none;
      color: #000000;
    }
    .nav-bar {
      padding: 10px;
      border-bottom: 1px solid #000000;
    }
    .nav-bar a {
      padding: 10px;
      display: inline-block;
    }
  </style>
</head>
<body>
  <!-- Site navigation -->
  <div class="nav-bar">
    <a href="/">Home</a>
    <a href="/about.html">About</a>
    <a href="/contact.html">Contact</a>
  </div>
  <script>
    // Intercept every nav-bar link click.
    document.querySelectorAll('.nav-bar a').forEach(item => {
      item.addEventListener('click', (e) => {
        // Handle navigation
        // NOTE(review): preventDefault() with no follow-up means these links
        // currently do nothing — presumably a client-side router is meant to
        // take over here; confirm before shipping.
        e.preventDefault();
      });
    });
  </script>
</body>
</html>
#!/bin/bash
# Restart waybar: kill any process whose exact name is 'waybar', then relaunch
# it detached with the top-bar config (stdin/stdout/stderr discarded so it
# survives the launching terminal).
pkill -x waybar
/usr/bin/waybar -c ~/.config/waybar/config-top </dev/null &>/dev/null &
# Bottom bar is currently disabled.
#/usr/bin/waybar -c ~/.config/waybar/config-bottom </dev/null &>/dev/null &
|
#!/bin/bash
# Run the project's Codeception unit-test suite via the composer-installed binary.
vendor/bin/codecept run unit
set -x #echo on

curr_path=$(pwd)

# check that all the required environment vars are set
: "${VIRIE_QT_PATH:?variable not set, see also macosx_build_config.command}"
: "${VIRIE_BOOST_ROOT:?variable not set, see also macosx_build_config.command}"
: "${VIRIE_BOOST_LIBS_PATH:?variable not set, see also macosx_build_config.command}"
: "${VIRIE_BUILD_DIR:?variable not set, see also macosx_build_config.command}"

export BUILD_PREFIX_NAME="virie-macos-x64-webengine-$BUILD_SUFFIX"

echo "---------------- BUILDING PROJECT ----------------"
echo "--------------------------------------------------"
echo "Building...."

# Start from a clean release build tree.
# BUGFIX: quote $VIRIE_BUILD_DIR so a path with spaces cannot make `rm -rf`
# delete the wrong directories.
rm -rf "$VIRIE_BUILD_DIR"; mkdir -p "$VIRIE_BUILD_DIR/release"; cd "$VIRIE_BUILD_DIR/release"
cmake -D BUILD_GUI=TRUE -D CMAKE_PREFIX_PATH="$VIRIE_QT_PATH/clang_64" -D CMAKE_BUILD_TYPE=Release -D BOOST_ROOT="$VIRIE_BOOST_ROOT" -D BOOST_LIBRARYDIR="$VIRIE_BOOST_LIBS_PATH" ${ADDITIONAL_DEFINES} ../..
if [ $? -ne 0 ]; then
    echo "Failed to cmake"
    exit 1
fi

make -j Virie
if [ $? -ne 0 ]; then
    echo "Failed to make Virie"
    exit 1
fi

# for version
make -j connectivity_tool;
if [ $? -ne 0 ]; then
    echo "Failed to make connectivity_tool"
    exit 1
fi

cd src/
if [ $? -ne 0 ]; then
    echo "Failed to cd src"
    exit 1
fi

# copy all necessary libs into the bundle in order to workaround El Capitan's SIP restrictions
mkdir -p Virie.app/Contents/Frameworks/boost_libs
cp -R "$VIRIE_BOOST_LIBS_PATH/" Virie.app/Contents/Frameworks/boost_libs/
if [ $? -ne 0 ]; then
    echo "Failed to cp workaround to MacOS"
    exit 1
fi

# rename process name to big letter
mv Virie.app/Contents/MacOS/virie Virie.app/Contents/MacOS/Virie
if [ $? -ne 0 ]; then
    echo "May be already big letter"
fi

cp ../../../resources/License.pdf Virie.app/Contents/MacOS/License.pdf
cp ../../../resources/License.txt Virie.app/Contents/MacOS/License.txt

# fix boost libs paths in main executable and libs to workaround El Capitan's SIP restrictions
source ../../../utils/macosx_fix_boost_libs_path.sh
fix_boost_libs_in_binary @executable_path/../Frameworks/boost_libs Virie.app/Contents/MacOS/Virie
fix_boost_libs_in_libs @executable_path/../Frameworks/boost_libs Virie.app/Contents/Frameworks/boost_libs

"$VIRIE_QT_PATH/clang_64/bin/macdeployqt" Virie.app
if [ $? -ne 0 ]; then
    echo "Failed to macdeployqt Virie.app"
    exit 1
fi

rsync -a ../../../src/gui/qt-daemon/html Virie.app/Contents/MacOS --exclude less --exclude package.json --exclude gulpfile.js
if [ $? -ne 0 ]; then
    echo "Failed to cp html to MacOS"
    exit 1
fi

cp ../../../src/gui/qt-daemon/app.icns Virie.app/Contents/Resources
if [ $? -ne 0 ]; then
    echo "Failed to cp app.icns to resources"
    exit 1
fi

codesign -s "Virie" --deep -vv -f Virie.app
if [ $? -ne 0 ]; then
    echo "Failed to sign application"
    exit 1
fi

cd ../../..
# BUGFIX: the success message referenced the misspelled (and therefore empty)
# variable ${VIRIE_BUID_DIR}; use ${VIRIE_BUILD_DIR}.
echo "Build success (in ${VIRIE_BUILD_DIR}/release/src/Virie.app)"
|
#include<stdio.h>
#include<sys/ipc.h>
#include<sys/shm.h>
#include<sys/types.h>
#include<string.h>
#include<errno.h>
#include<stdlib.h>
#include<unistd.h>
#include<string.h>
#include <sys/time.h>
#include<semaphore.h>
#include <time.h>
#include"common.h"
//./bus -t type -n incpassengers -c capacity -p parkperiod -m mantime -s shmid
int main(int argc, char* argv[]) {
    srand(time(NULL));

    /* Bus type code: "VOR", "ASK" or "PEL" (3 chars + NUL).
     * NOTE(review): a -t argument longer than 3 characters overflows this
     * buffer — confirm callers only ever pass the three known codes. */
    char* type = malloc(4 * sizeof(char));
    struct timeval stop, start;
    struct my_shared_mem* shared_mem = NULL;

    int bus_type = 0;
    /* Parking-zone type actually used: 0=VOR, 1=ASK, 2=PEL.
     * BUGFIX: was read uninitialized when no compatible zone had free space. */
    int tp = -1;

    /* Randomized defaults, overridden by the command-line flags below. */
    int num_pas = rand() % 60;
    int capacity = rand() % 60;
    int mantime = rand() % 3;
    int parkperiod = rand() % 4;

    /* Usage: ./bus -t type -n incpassengers -c capacity -p parkperiod -m mantime -s shmid */
    for (int k = 0; k < argc; k++) {
        if (strcmp(argv[k], "-s") == 0) {
            shared_mem = shmat(atoi(argv[k + 1]), NULL, 0);
        } else if (strcmp(argv[k], "-t") == 0) {
            strcpy(type, argv[k + 1]);
        } else if (strcmp(argv[k], "-n") == 0) {
            num_pas = atoi(argv[k + 1]);
        } else if (strcmp(argv[k], "-c") == 0) {
            capacity = atoi(argv[k + 1]);
        } else if (strcmp(argv[k], "-p") == 0) {
            parkperiod = atoi(argv[k + 1]);
        } else if (strcmp(argv[k], "-m") == 0) {
            mantime = atoi(argv[k + 1]);
        }
    }

    if (strcmp(type, "VOR") == 0) {
        bus_type = 0;
    } else if (strcmp(type, "ASK") == 0) {
        bus_type = 1;
    } else if (strcmp(type, "PEL") == 0) {
        bus_type = 2;
    }

    /* The first bus truncates the ledger; every later one appends. */
    FILE* fo;
    if (shared_mem->file_checker == 0) {
        fo = fopen("public_ledger.txt", "w");
        if (fo == NULL) {
            perror("Problem to open file!\n");
            return 1;
        }
        shared_mem->file_checker = 1;
    } else {
        fo = fopen("public_ledger.txt", "a");
        if (fo == NULL) {
            perror("Problem to open file!\n");
            return 1;
        }
    }

    /* Ledger writes are serialized through the shared mutex semaphore. */
    sem_wait(&shared_mem->mutex);
    fprintf(fo, "Bus with id=%d,type=%s ask permission to enter\n", getpid(), type);
    fclose(fo);
    sem_post(&shared_mem->mutex);

    /* Block until the station controller lets this bus in. */
    sem_wait(&shared_mem->enter);

    sem_wait(&shared_mem->mutex);
    fo = fopen("public_ledger.txt", "a");
    if (fo == NULL) {
        perror("Problem to open file!\n");
        return 1;
    }
    fprintf(fo, "Bus with id=%d and type=%s is on the way to the parking area\n", getpid(), type);
    fclose(fo);
    sem_post(&shared_mem->mutex);

    /* Maneuvering time from the gate to the parking area. */
    sleep(mantime);
    sem_wait(&shared_mem->mantime_enter);
    gettimeofday(&start, NULL);

    /* Claim a parking spot: prefer the zone matching our type, fall back to PEL. */
    sem_wait(&shared_mem->mutex);
    shared_mem->sum_of_buses++;
    if (bus_type == 0) {
        shared_mem->VOR_buses++;
        if (shared_mem->park_VOR > 0) {
            shared_mem->park_VOR--;
            tp = 0;
        } else if (shared_mem->park_PEL > 0) {
            shared_mem->park_PEL--;
            tp = 2;
        }
    }
    if (bus_type == 1) {
        shared_mem->ASK_buses++;
        if (shared_mem->park_ASK > 0) {
            shared_mem->park_ASK--;
            tp = 1;
        } else if (shared_mem->park_PEL > 0) {
            shared_mem->park_PEL--;
            tp = 2;
        }
    }
    if (bus_type == 2) {
        shared_mem->PEL_buses++;
        if (shared_mem->park_PEL > 0) {
            shared_mem->park_PEL--;
            tp = 2;
        }
    }
    /* NOTE(review): tp can still be -1 here when no zone had space —
     * presumably the 'enter' semaphore guarantees a free spot; confirm in the
     * station-controller process. */
    fo = fopen("public_ledger.txt", "a");
    if (fo == NULL) {
        perror("Problem to open file!\n");
        return 1;
    }
    if (tp == 0) {
        fprintf(fo, "Bus with id=%d and type=%s finally is parked in a median VOR's type\n", getpid(), type);
    } else if (tp == 1) {
        fprintf(fo, "Bus with id=%d and type=%s finally is parked in a median ASK's type\n", getpid(), type);
    } else {
        fprintf(fo, "Bus with id=%d and type=%s finally is parked in a median PEL's type\n", getpid(), type);
    }
    fclose(fo);
    shared_mem->buses++;

    /* Passengers boarding this bus.
     * BUGFIX: guard against capacity == 0 — rand() % 0 is a division by zero
     * (undefined behaviour / SIGFPE). */
    int in_people = (capacity > 0) ? rand() % capacity : 0;
    shared_mem->people_left = shared_mem->people_left + in_people;
    fo = fopen("public_ledger.txt", "a");
    if (fo == NULL) {
        perror("Problem to open file!\n");
        return 1;
    }
    fprintf(fo, "Bus with id=%d and type=%s %d passengers get in this bus\n", getpid(), type, in_people);
    fclose(fo);
    /* Incoming passengers dropped off at the station. */
    shared_mem->people_in_station = shared_mem->people_in_station + num_pas;
    shared_mem->current_people_station = shared_mem->current_people_station + num_pas;
    sem_post(&shared_mem->mutex);

    /* Stay parked for the configured period. */
    sleep(parkperiod);

    /* NOTE(review): this ledger write was not mutex-protected in the original,
     * unlike every other one — kept unguarded to preserve behaviour, but it
     * can interleave with other buses; verify intent. */
    fo = fopen("public_ledger.txt", "a");
    if (fo == NULL) {
        perror("Problem to open file!\n");
        return 1;
    }
    fprintf(fo, "Bus with id=%d,type=%s ask permission to exit\n", getpid(), type);
    fclose(fo);

    /* Wait for exit clearance, then measure the total time spent parked. */
    sem_wait(&shared_mem->exit);
    gettimeofday(&stop, NULL);
    long int t = (stop.tv_sec - start.tv_sec) * 1000000 + stop.tv_usec - start.tv_usec;

    /* Account the elapsed time globally and per parking-zone type. */
    sem_wait(&shared_mem->mutex);
    shared_mem->sum_time = shared_mem->sum_time + t;
    if (tp == 0) {
        shared_mem->time_VOR = shared_mem->time_VOR + t;
    } else if (tp == 1) {
        shared_mem->time_ASK = shared_mem->time_ASK + t;
    } else if (tp == 2) {
        shared_mem->time_PEL = shared_mem->time_PEL + t;
    }
    sem_post(&shared_mem->mutex);

    /* BUGFIX: the original posted the mutex twice around this ledger write
     * (the matching sem_wait was missing), which raised the semaphore count
     * above 1 and silently disabled mutual exclusion from then on. */
    sem_wait(&shared_mem->mutex);
    fo = fopen("public_ledger.txt", "a");
    if (fo == NULL) {
        perror("Problem to open file!\n");
        return 1;
    }
    fprintf(fo, "Bus with id=%d and type=%s is on the way to exit\n", getpid(), type);
    fclose(fo);
    sem_post(&shared_mem->mutex);

    /* Maneuvering time from the parking area to the gate. */
    sleep(mantime);

    sem_wait(&shared_mem->mutex);
    fo = fopen("public_ledger.txt", "a");
    if (fo == NULL) {
        perror("Problem to open file!\n");
        return 1;
    }
    /* NOTE(review): blocking on mantime_exit while holding the mutex (and an
     * open FILE*) can deadlock if the posting process needs the mutex first —
     * kept as in the original design; confirm against the controller. */
    sem_wait(&shared_mem->mantime_exit);
    fprintf(fo, "Bus with id=%d,type=%s finally is out of bus station\n", getpid(), type);
    fclose(fo);
    sem_post(&shared_mem->mutex);

    //printf("EXIT BUS ID=%d \n",getpid());

    /* Release the parking spot and record the departure.
     * BUGFIX: these updates ran outside the mutex, and the current-passenger
     * counter was recomputed from the cumulative people_in_station total
     * instead of being decremented from current_people_station. */
    sem_wait(&shared_mem->mutex);
    shared_mem->complete_buses++;
    shared_mem->current_people_station = shared_mem->current_people_station - num_pas;
    if (tp == 0) {
        shared_mem->park_VOR++;
    } else if (tp == 1) {
        shared_mem->park_ASK++;
    } else if (tp == 2) {
        shared_mem->park_PEL++;
    }
    shared_mem->buses--;
    sem_post(&shared_mem->mutex);

    shmdt(shared_mem);
    free(type);
    return 0;
}
#!/bin/sh
# Copyright (c) 2017-2019 Franco Fichtner <franco@opnsense.org>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
# Clean the object directories of the selected repositories (defaults to
# "plugins,core"; override via the first script argument) and rebuild the
# package set.
make clean-obj,${1:-"plugins,core"} packages
|
<reponame>kaolalib/kaola
package org.grain.thread;
/**
 * Minimal {@code ICycle} implementation for exercising the cycle-thread
 * framework: every callback just prints this instance's name and the thread
 * it ran on.
 */
public class CycleTest implements ICycle {
	// Label printed with every callback so concurrent instances can be told apart.
	public String name;

	/** Invoked on each cycle; logs the instance name and current thread. */
	@Override
	public void cycle() throws Exception {
		System.out.println(name + "业务轮训,线程:" + Thread.currentThread().getName());
	}

	/** Invoked when this instance is added; logs the instance name and current thread. */
	@Override
	public void onAdd() throws Exception {
		System.out.println(name + "加入动作,线程:" + Thread.currentThread().getName());
	}

	/** Invoked when this instance is removed; logs the instance name and current thread. */
	@Override
	public void onRemove() throws Exception {
		System.out.println(name + "离开动作,线程:" + Thread.currentThread().getName());
	}
}
|
/*
* Copyright 2018 Akashic Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.yggdrash.contract;
import com.google.gson.JsonObject;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
public class ContractClassLoaderTest {
	private static final Logger log = LoggerFactory.getLogger(ContractClassLoaderTest.class);

	/**
	 * Loads the compiled NoneContract class from the local .yggdrash cache,
	 * checks the derived contract id, invokes its query() method via
	 * reflection, and verifies newInstance() yields distinct instances.
	 * NOTE(review): depends on the fixture file under .yggdrash/contract
	 * existing on disk.
	 */
	@Test
	public void testContract() throws IllegalAccessException, InstantiationException,
			InvocationTargetException {
		File contractNone =
				new File(".yggdrash/contract/9607aea1d4e358a594006c7926a07262b5258c31.class");
		ContractMeta noneContract = ContractClassLoader.loadContractClass(null, contractNone);
		Class<? extends Contract> none = noneContract.getContract();
		assertEquals("9607aea1d4e358a594006c7926a07262b5258c31",
				noneContract.getContractId().toString());
		// NoneContract.query() is expected to answer with an empty JSON object.
		assertEquals("{}", invokeTest(none));
		Contract a = none.newInstance();
		Contract b = none.newInstance();
		assertNotEquals("Two Contract are not same instance.", a, b);
	}

	/**
	 * Serializes a Contract class object and checks that the ContractId derived
	 * from the serialized bytes matches the id ContractMeta computes itself.
	 */
	@Test
	public void testConvertContractClassToContractMeta() throws IOException {
		Class<? extends Contract> c = NoneContract.class;
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		ObjectOutputStream oos = new ObjectOutputStream(bos);
		oos.writeObject(c);
		oos.flush();
		byte[] classData = bos.toByteArray();
		ContractId idByClassBinary = ContractId.of(classData);
		ContractMeta classMeta = new ContractMeta(classData, c);
		assertEquals(idByClassBinary, classMeta.getContractId());
	}

	/** Loads two known contracts by their hash ids and checks id + class name. */
	@Test
	public void testLoadByHash() {
		// LOAD Stem Contract
		ContractMeta classMeta = ContractClassLoader.loadContractById(
				"4fc0d50cba2f2538d6cda789aa4955e88c810ef5");
		assertNotNull(classMeta);
		assertEquals("4fc0d50cba2f2538d6cda789aa4955e88c810ef5",
				classMeta.getContractId().toString());
		assertEquals("io.yggdrash.contract.StemContract", classMeta.getContract().getName());
		// LOAD None Contract
		classMeta = ContractClassLoader.loadContractById(
				"9607aea1d4e358a594006c7926a07262b5258c31");
		assertNotNull(classMeta);
		assertEquals("9607aea1d4e358a594006c7926a07262b5258c31",
				classMeta.getContractId().toString());
		assertEquals("io.yggdrash.contract.NoneContract", classMeta.getContract().getName());
	}

	/**
	 * Instantiates the given contract class and reflectively calls its
	 * query(JsonObject) method; returns the stringified result, or null when
	 * no query method exists.
	 */
	private String invokeTest(Class a) throws InvocationTargetException, IllegalAccessException,
			InstantiationException {
		Object t = a.newInstance();
		Method[] ms = a.getDeclaredMethods();
		for (Method m : ms) {
			if ("query".equals(m.getName())) {
				m.setAccessible(true);
				return m.invoke(t, new JsonObject()).toString();
			}
		}
		return null;
	}
}
|
#!/usr/bin/env bash
# Copyright 2022 MongoDB Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Go files touched in the working tree (mock files excluded).
# NOTE(review): despite the variable name, `git diff --name-only` lists
# *unstaged* changes only — staged files would need `git diff --cached`;
# confirm which set this hook is meant to lint.
STAGED_GO_FILES=$(git diff --name-only | grep ".go$" | grep -v "mock")
echo "==> Linting changed go files..."
for FILE in ${STAGED_GO_FILES}; do
	# NOTE(review): a non-zero golangci-lint exit does not stop the loop or fail
	# the script (no `set -e`, exit code discarded) — verify that is intended.
	golangci-lint run --timeout 5m "${FILE}"
done
echo "==> Done..."
|
#!/bin/bash
# Abort on any error and on use of unset variables.
set -e -u

# Source registry and the image tag to mirror (CCP_IMAGE_TAG must be exported;
# the ${VERSION?} expansions below fail loudly if it was empty).
REGISTRY='us.gcr.io/container-suite'
VERSION=$CCP_IMAGE_TAG

# Images to pull from the private registry and retag under crunchydata/.
IMAGES=(
    crunchy-postgres-ha
    crunchy-collect
    crunchy-pgbadger
    crunchy-pgbouncer
    crunchy-pgdump
    crunchy-pgrestore
)

# Print the given message wrapped in green ANSI color codes.
function echo_green() {
    echo -e "\033[0;32m"
    echo "$1"
    echo -e "\033[0m"
}

# Authenticate and point docker at the container-suite project registry.
gcloud auth login
gcloud config set project container-suite
gcloud auth configure-docker

for image in "${IMAGES[@]}"
do
    echo_green "=> Pulling ${REGISTRY?}/${image?}:${VERSION?}.."
    docker pull ${REGISTRY?}/${image?}:${VERSION?}
    docker tag ${REGISTRY?}/${image?}:${VERSION?} crunchydata/${image?}:${VERSION?}
done

echo_green "=> Done!"

exit 0
|
# frozen_string_literal: true
require 'rails_helper'
# Model specs for Report: validations plus the aggregation helpers
# (#sums, #totals, #colors, #projects, #start_date, #end_date, #labels,
# #bar_chart_data, #activities, #activity_groups) across hourly, daily,
# monthly and yearly ranges.
RSpec.describe Report, type: :model do
  describe 'validations' do
    subject(:instance) do
      report = described_class.new(
        projects: [],
        time_zone: 'Etc/UTC',
        start_date: start_date,
        end_date: end_date
      )
      report.valid?
      report
    end

    let(:start_date) { Time.zone.now }
    let(:end_date) { Time.zone.now }

    it { is_expected.to validate_presence_of(:end_date) }
    it { is_expected.to validate_presence_of(:start_date) }
    it { is_expected.to validate_presence_of(:time_zone) }

    # start_date must be strictly before end_date.
    context 'when start_date < end_date' do
      let(:end_date) { Time.zone.now + 1.hour }

      it { is_expected.to be_valid }
    end

    context 'when start_date = end_date' do
      let(:end_date) { start_date }

      it { expect(instance.errors).to be_include :start_date }
    end

    context 'when start_date > end_date' do
      let(:end_date) { 1.hour.ago }

      it { expect(instance.errors).to be_include :start_date }
    end
  end

  # Per-project duration buckets keyed by range unit; expected values below are
  # 3 activities x 1 unit each (e.g. 3 x 3600s = 10_800 for an hourly range).
  describe '#sums' do
    subject(:sums) do
      described_class.new(
        projects: user.projects,
        time_zone: 'Etc/UTC',
        start_date: now,
        end_date: end_date
      ).sums
    end

    let(:now) { Time.zone.parse('2019-01-01T00:00:00') }
    let(:user) { create(:user) }

    context 'when range is hourly' do
      let(:end_date) { now + 6.hours }
      let(:projects) { create_list(:project, 2, user: user) }

      before do
        create_list(
          :activity,
          3,
          started_at: now + 2.hours,
          stopped_at: now + 3.hours,
          user: user,
          project: projects[0]
        )
        create_list(
          :activity,
          3,
          started_at: now + 4.hours,
          stopped_at: now + 5.hours,
          user: user,
          project: projects[1]
        )
      end

      it 'returns sums correctly' do
        expect(sums).to eq [
          [projects[0].id, [0, 0, 10_800, 0, 0, 0, 0]],
          [projects[1].id, [0, 0, 0, 0, 10_800, 0, 0]]
        ].to_h
      end
    end

    context 'when range is daily' do
      let(:end_date) { now + 6.days }
      let(:projects) { create_list(:project, 2, user: user) }

      before do
        create_list(
          :activity,
          3,
          started_at: now + 2.days,
          stopped_at: now + 3.days,
          user: user,
          project: projects[0]
        )
        create_list(
          :activity,
          3,
          started_at: now + 4.days,
          stopped_at: now + 5.days,
          user: user,
          project: projects[1]
        )
      end

      it 'returns sums correctly' do
        expect(sums).to eq [
          [projects[0].id, [0, 0, 259_200, 0, 0, 0, 0]],
          [projects[1].id, [0, 0, 0, 0, 259_200, 0, 0]]
        ].to_h
      end
    end

    context 'when range is monthly' do
      let(:end_date) { now + 6.months }
      let(:projects) { create_list(:project, 2, user: user) }

      before do
        create_list(
          :activity,
          3,
          started_at: now + 2.months,
          stopped_at: now + 3.months,
          user: user,
          project: projects[0]
        )
        create_list(
          :activity,
          3,
          started_at: now + 4.months,
          stopped_at: now + 5.months,
          user: user,
          project: projects[1]
        )
      end

      it 'returns sums correctly' do
        expect(sums).to eq [
          [projects[0].id, [0, 0, 8_035_200, 0, 0, 0, 0]],
          [projects[1].id, [0, 0, 0, 0, 8_035_200, 0, 0]]
        ].to_h
      end
    end

    context 'when range is yearly' do
      let(:end_date) { now + 6.years }
      let(:projects) { create_list(:project, 2, user: user) }

      before do
        create_list(
          :activity,
          3,
          started_at: now + 2.years,
          stopped_at: now + 3.years,
          user: user,
          project: projects[0]
        )
        create_list(
          :activity,
          3,
          started_at: now + 4.years,
          stopped_at: now + 5.years,
          user: user,
          project: projects[1]
        )
      end

      it 'returns sums correctly' do
        expect(sums).to eq [
          [projects[0].id, [0, 0, 94_608_000, 0, 0, 0, 0]],
          [projects[1].id, [0, 0, 0, 0, 94_608_000, 0, 0]]
        ].to_h
      end
    end

    context 'when user has project but activities are empty' do
      let(:end_date) { now + 6.days }

      it 'returns sums correctly' do
        project = create(:project, user: user)
        expect(sums).to eq [[project.id, [0, 0, 0, 0, 0, 0, 0]]].to_h
      end
    end

    context 'when user has project but date is out of range' do
      let(:end_date) { now + 6.days }
      let(:project) { create(:project, user: user) }

      before do
        create(
          :activity,
          started_at: now + 7.days,
          stopped_at: now + 8.days,
          user: user,
          project: project
        )
        create(
          :activity,
          started_at: now - 2.days,
          stopped_at: now - 1.day,
          user: user,
          project: project
        )
      end

      it 'returns data correctly' do
        expect(sums).to eq [[project.id, [0, 0, 0, 0, 0, 0, 0]]].to_h
      end
    end

    context 'when user does not have project' do
      let(:end_date) { now + 6.days }

      it 'returns empty' do
        expect(sums).to eq({})
      end
    end
  end

  # Total tracked seconds per project over the whole range.
  describe '#totals' do
    subject(:totals) do
      described_class.new(
        projects: user.projects,
        time_zone: 'Etc/UTC',
        start_date: now,
        end_date: now + 1.day
      ).totals
    end

    let(:now) { Time.zone.now }
    let(:user) { create(:user) }

    context 'when user has projects and activities' do
      let(:projects) { create_list(:project, 2, user: user) }

      before do
        create_list(
          :activity,
          3,
          started_at: now,
          stopped_at: now + 2.hours,
          user: user,
          project: projects[0]
        )
        create_list(
          :activity,
          3,
          started_at: now,
          stopped_at: now + 3.hours,
          user: user,
          project: projects[1]
        )
      end

      it 'returns totals correctly' do
        expect(totals).to eq [
          [projects[0].id, 21_600],
          [projects[1].id, 32_400]
        ].to_h
      end
    end

    context 'when user has project but activities are empty' do
      it 'returns totals correctly' do
        project = create(:project, user: user)
        expect(totals).to eq [[project.id, 0]].to_h
      end
    end

    context 'when user has project but date is out of range' do
      let(:project) { create(:project, user: user) }

      before do
        create(
          :activity,
          started_at: now + 2.days,
          stopped_at: now + 3.days,
          user: user,
          project: project
        )
        create(
          :activity,
          started_at: now - 2.days,
          stopped_at: now - 1.day,
          user: user,
          project: project
        )
      end

      it 'returns totals correctly' do
        expect(totals).to eq [[project.id, 0]].to_h
      end
    end

    context 'when user does not have project' do
      it 'returns empty' do
        expect(totals).to eq({})
      end
    end
  end

  # Chart color per project id.
  describe '#colors' do
    subject(:colors) do
      described_class.new(
        projects: user.projects,
        time_zone: 'Etc/UTC',
        start_date: now,
        end_date: now + 1.day
      ).colors
    end

    let(:now) { Time.zone.now }
    let(:user) { create(:user) }

    context 'when user has projects' do
      it 'returns colors correctly' do
        projects = create_list(:project, 2, user: user)
        expect(colors).to eq [
          [projects[0].id, projects[0].color],
          [projects[1].id, projects[1].color]
        ].to_h
      end
    end

    context 'when user does not have projects' do
      it 'returns empty' do
        expect(colors).to eq({})
      end
    end
  end

  describe '#projects' do
    subject(:projects) do
      described_class.new(
        projects: user.projects,
        time_zone: 'Etc/UTC',
        start_date: now,
        end_date: now + 1.day
      ).projects
    end

    let(:now) { Time.zone.now }
    let(:user) { create(:user) }

    context 'when user has projects' do
      before { create_list(:project, 2, user: user) }

      it 'returns projects correctly' do
        expect(projects).to eq user.projects
      end
    end

    context 'when user does not have projects' do
      it 'returns empty' do
        expect(projects).to eq []
      end
    end
  end

  # Boundary dates are returned converted into the report's time zone.
  describe '#start_date' do
    subject do
      described_class.new(
        projects: [],
        time_zone: time_zone,
        start_date: start_date,
        end_date: start_date + 1.day
      ).start_date
    end

    let(:start_date) { Time.zone.parse('2019-01-01T00:00:00') }

    context 'when time_zone is UTC' do
      let(:time_zone) { 'UTC' }

      it { is_expected.to eq start_date.in_time_zone('UTC') }
    end

    context 'when time_zone is Asia/Tokyo' do
      let(:time_zone) { 'Asia/Tokyo' }

      it { is_expected.to eq start_date.in_time_zone('Asia/Tokyo') }
    end
  end

  describe '#end_date' do
    subject do
      described_class.new(
        projects: [],
        time_zone: time_zone,
        start_date: end_date - 1.day,
        end_date: end_date
      ).end_date
    end

    let(:end_date) { Time.zone.parse('2019-01-01T00:00:00') }

    context 'when time_zone is UTC' do
      let(:time_zone) { 'UTC' }

      it { is_expected.to eq end_date.in_time_zone('UTC') }
    end

    context 'when time_zone is Asia/Tokyo' do
      let(:time_zone) { 'Asia/Tokyo' }

      it { is_expected.to eq end_date.in_time_zone('Asia/Tokyo') }
    end
  end

  # Axis labels follow the bucket unit chosen from the range length.
  describe '#labels' do
    subject(:labels) do
      described_class.new(
        projects: [],
        time_zone: time_zone,
        start_date: start_date,
        end_date: end_date
      ).labels
    end

    let(:start_date) { Time.zone.parse('2019-01-01T00:00:00') }
    let(:time_zone) { 'UTC' }

    context 'when range is hourly' do
      let(:end_date) { start_date + 23.hours }

      it 'returns hourly labels' do
        expect(labels).to eq %w[
          00 01 02 03 04 05 06 07 08 09
          10 11 12 13 14 15 16 17 18 19
          20 21 22 23
        ]
      end
    end

    context 'when range is daily' do
      let(:end_date) { start_date + 5.days }

      it 'returns daily labels' do
        expect(labels).to eq %w[01 02 03 04 05 06]
      end
    end

    context 'when range is monthly' do
      let(:end_date) { start_date + 11.months }

      it 'returns monthly labels' do
        expect(labels).to eq %w[
          Jan Feb Mar Apr May Jun
          Jul Aug Sep Oct Nov Dec
        ]
      end
    end

    context 'when range is yearly' do
      let(:end_date) { start_date + 2.years }

      it 'returns yearly labels' do
        expect(labels).to eq %w[2019 2020 2021]
      end
    end

    context 'when range is minutely' do
      let(:end_date) { start_date + 3.minutes }

      it 'returns hours label' do
        expect(labels).to eq ['00']
      end
    end

    context 'when time_zone is not UTC' do
      let(:start_date) { Time.zone.parse('2019-01-01T15:00:00') }
      let(:end_date) { Time.zone.parse('2019-01-02T14:59:59') }
      let(:time_zone) { 'Asia/Tokyo' }

      it 'returns labels correctly' do
        expect(labels).to eq %w[
          00 01 02 03 04 05 06 07 08 09
          10 11 12 13 14 15 16 17 18 19
          20 21 22 23
        ]
      end
    end

    context 'when range has leap day' do
      let(:start_date) { Time.zone.parse('2020-01-01T00:00:00') }
      let(:end_date) { Time.zone.parse('2020-12-31T23:59:59') }

      it 'returns labels correctly' do
        expect(labels).to eq %w[
          Jan Feb Mar Apr May Jun
          Jul Aug Sep Oct Nov Dec
        ]
      end
    end
  end

  # Rows of [project_id, bucket...] suitable for the stacked bar chart.
  describe '#bar_chart_data' do
    subject(:bar_chart_data) do
      described_class.new(
        projects: user.projects,
        time_zone: 'Etc/UTC',
        start_date: now,
        end_date: now + 6.days
      ).bar_chart_data
    end

    let(:now) { Time.zone.parse('2019-01-01T00:00:00') }
    let(:user) { create(:user) }
    let(:projects) { create_list(:project, 2, user: user) }

    before do
      create_list(
        :activity,
        3,
        started_at: now + 2.days,
        stopped_at: now + 3.days,
        user: user,
        project: projects[0]
      )
      create_list(
        :activity,
        3,
        started_at: now + 4.days,
        stopped_at: now + 5.days,
        user: user,
        project: projects[1]
      )
    end

    it 'returns data correctly' do
      expect(bar_chart_data).to eq [
        [projects[0].id, 0, 0, 259_200, 0, 0, 0, 0],
        [projects[1].id, 0, 0, 0, 0, 259_200, 0, 0]
      ]
    end
  end

  # Only finished activities inside the range are included.
  describe '#activities' do
    subject(:activities) do
      described_class.new(
        projects: user.projects,
        time_zone: 'Etc/UTC',
        start_date: now,
        end_date: now + 1.day
      ).activities
    end

    let(:now) { Time.zone.now }
    let(:user) { create(:user) }

    context 'when user has activities' do
      before do
        create(
          :activity,
          started_at: now,
          stopped_at: now,
          description: 'example1',
          user: user
        )
        create(
          :activity,
          started_at: now + 1.day,
          stopped_at: now + 1.day,
          description: 'example1',
          user: user
        )
      end

      it 'returns activities' do
        expect(activities).to eq user.activities
      end
    end

    context 'when user has activities but out of range' do
      before do
        create(
          :activity,
          started_at: now - 1.day,
          stopped_at: now - 1.day,
          description: 'example1',
          user: user
        )
        create(
          :activity,
          started_at: now + 2.days,
          stopped_at: now + 2.days,
          description: 'example1',
          user: user
        )
      end

      it 'returns empty' do
        expect(activities).to eq []
      end
    end

    # A still-running activity (stopped_at nil) is excluded.
    context 'when user has working activities' do
      before do
        create(
          :activity,
          started_at: now,
          stopped_at: nil,
          duration: nil,
          description: 'example1',
          user: user
        )
      end

      it 'returns empty' do
        expect(activities).to eq []
      end
    end

    context 'when user does not have activities' do
      it 'returns empty' do
        expect(activities).to eq []
      end
    end
  end

  # Activities grouped (presumably by description/project) with summed durations.
  describe '#activity_groups' do
    subject(:activity_groups) do
      described_class.new(
        projects: user.projects,
        time_zone: 'Etc/UTC',
        start_date: now,
        end_date: now + 1.day
      ).activity_groups
    end

    let(:now) { Time.zone.now }
    let(:user) { create(:user) }

    context 'when user has activities' do
      let(:projects) { create_list(:project, 2, user: user) }

      before do
        create_list(
          :activity,
          3,
          started_at: now,
          stopped_at: now + 1.hour,
          description: 'example1',
          user: user,
          project: projects[0]
        )
        create_list(
          :activity,
          3,
          started_at: now,
          stopped_at: now + 1.hour,
          description: 'example2',
          user: user,
          project: projects[1]
        )
      end

      it 'returns grouped activities with description' do
        expect(activity_groups.map(&:description)).to eq(%w[example1 example2])
      end

      it 'returns grouped activities with duration' do
        expect(activity_groups.map(&:duration)).to eq([10_800, 10_800])
      end
    end

    context 'when user has activities but out of range' do
      before do
        create(
          :activity,
          started_at: now - 1.day,
          stopped_at: now,
          description: 'example1',
          user: user
        )
        create(
          :activity,
          started_at: now + 2.days,
          stopped_at: now + 3.days,
          description: 'example1',
          user: user
        )
      end

      it 'returns empty' do
        expect(activity_groups).to eq []
      end
    end

    context 'when user has working activities' do
      before do
        create(
          :activity,
          started_at: now,
          stopped_at: nil,
          duration: nil,
          description: 'example1',
          user: user
        )
      end

      it 'returns empty' do
        expect(activity_groups).to eq []
      end
    end

    context 'when user does not have activities' do
      it 'returns empty' do
        expect(activity_groups).to eq []
      end
    end
  end
end
|
<reponame>anotheria/moskito-control
package org.moskito.control.plugins.slack;
import com.github.seratch.jslack.api.methods.request.chat.ChatPostMessageRequest;
import com.github.seratch.jslack.api.model.Attachment;
import com.github.seratch.jslack.api.model.Field;
import net.anotheria.util.NumberUtils;
import org.moskito.control.common.HealthColor;
import org.moskito.control.core.status.StatusChangeEvent;
import org.moskito.control.plugins.notifications.NotificationUtils;
import java.util.Collections;
import java.util.LinkedList;
/**
* Builds request object to post notification message to Slack
* from configuration data and status change event
*/
public class SlackMessageBuilder {
/**
* Channel name to post notification
*/
private String channel;
/**
* Source status change event
*/
private StatusChangeEvent event;
/**
* Template for alert links leads to component where status changes
*/
private String alertLinkTemplate;
/**
* Base to path to images that inserted in message
*/
private String thumbImageBasePath;
/**
* Is required to make asUser request to post message
* Avatar and name are shown in that case, but posting to not joined in channels is disabled
*/
private boolean asUser;
/**
* Bot auth token
*/
private String token;
/**
* Builds attachment filed for slack message
* @param title name of filed
* @param value value of filed
* @return filed object
*/
private Field buildField(String title, String value){
return Field.builder().title(title).value(value).valueShortEnough(true).build();
}
/**
* Transforms HealthColor object to
* color hex code
* @param color color to take hex code
* @return hex code of color suite to color from arguments
*/
/* test scope */ static String color2color(HealthColor color){
switch(color){
case GREEN:
return "#94cc19";
case RED:
return "#fc3e39";
case ORANGE:
return "#ff8400";
case YELLOW:
return "#f4e300";
case PURPLE:
return "#ff53d6";
case NONE:
return "#cccccc";
default:
throw new IllegalArgumentException("Color "+color+" is not yet mapped");
}
}
/**
* Builds request object filling it parameters.
* @return request object ready to make request. (In case all builder parameters fill)
*/
public ChatPostMessageRequest build(){
ChatPostMessageRequest.ChatPostMessageRequestBuilder requestBuilder = ChatPostMessageRequest.builder()
.token(token)
.asUser(asUser)
.channel(channel);
// START COMPOSING MESSAGE HEADER
// Putting component name to message header
String messageHeader = event.getComponent().getName();
if(alertLinkTemplate != null) // inserting link to component name if it set in config
messageHeader =
"<" + NotificationUtils.buildAlertLink(alertLinkTemplate, event) + "|" + messageHeader + ">";
messageHeader += " status changed to " + event.getStatus(); // Adding status change to message header
requestBuilder.text(messageHeader); // attaching message header to request
// END COMPOSING MESSAGE TEXT
// START COMPOSING ATTACHMENTS
Attachment.AttachmentBuilder attachmentBuilder = Attachment.builder();
String fallbackText = event.getComponent().getName()
+" status changed from " + event.getOldStatus()
+ " to " + event.getStatus() + " @ "
+ NumberUtils.makeISO8601TimestampString(event.getTimestamp());
attachmentBuilder.color(color2color(event.getStatus().getHealth()));
attachmentBuilder.fallback(fallbackText);
// Building fields in message
LinkedList<Field> fields = new LinkedList<>();
fields.add(buildField("NewStatus", event.getStatus().getHealth().toString()));
fields.add(buildField("OldStatus", event.getOldStatus().getHealth().toString()));
fields.add(buildField("Timestamp", NumberUtils.makeISO8601TimestampString(event.getTimestamp())));
attachmentBuilder.fields(fields);
attachmentBuilder.thumbUrl(
NotificationUtils.getThumbImageUrlByColor(thumbImageBasePath, event.getStatus().getHealth())
);
requestBuilder.attachments(
Collections.singletonList(attachmentBuilder.build())
);
// END COMPOSING ATTACHMENTS
return requestBuilder.build();
}
public SlackMessageBuilder setChannel(String channel) {
this.channel = channel;
return this;
}
public SlackMessageBuilder setEvent(StatusChangeEvent event) {
this.event = event;
return this;
}
public SlackMessageBuilder setAlertLinkTemplate(String alertLinkTemplate) {
this.alertLinkTemplate = alertLinkTemplate;
return this;
}
public SlackMessageBuilder setThumbImageBasePath(String thumbImageBasePath) {
this.thumbImageBasePath = thumbImageBasePath;
return this;
}
public SlackMessageBuilder setAsUser(boolean asUser) {
this.asUser = asUser;
return this;
}
public SlackMessageBuilder setToken(String token) {
this.token = token;
return this;
}
} |
/*
 * factorial - compute num! for a non-negative integer num.
 *
 * Fixes over the original recursive version:
 *  - a negative argument no longer recurses forever (the original never
 *    reached the num == 0 base case and overflowed the stack); negative
 *    input is treated like 0 and yields 1;
 *  - iteration replaces recursion, so no stack depth is consumed.
 *
 * NOTE: the result overflows a 32-bit int for num > 12; callers needing
 * larger factorials should switch to a wider type.
 */
int factorial(int num){
    int result = 1;
    /* empty product for num <= 1, including the negative-input guard */
    for (int i = 2; i <= num; i++)
        result *= i;
    return result;
}
<filename>app/models/validators/premium_total_validator.rb
module Validators
  # Checks that a change request's stated premium total matches the sum of
  # its enrollees' premiums, comparing both figures rounded to the cent.
  class PremiumTotalValidator
    def initialize(change_request, listener)
      @change_request = change_request
      @listener = listener
    end

    # Returns true when the totals agree. Otherwise notifies the listener
    # with both figures and returns false.
    def validate
      stated = @change_request.premium_amount_total.round(2)
      computed = @change_request.enrollee_premium_sum.round(2)
      return true if stated == computed

      @listener.group_has_incorrect_premium_total(provided: stated, expected: computed)
      false
    end
  end
end
|
/**
 * Publishes alert messages on a configured port/topic through a command
 * channel, and logs alert messages handed to it.
 */
public class AlertTrigger {
/** Topic used both when publishing alerts and when logging received ones. */
private final String displayTopic;
private final static Logger logger = LoggerFactory.getLogger(AlertTrigger.class);
/** Command channel created from the runtime with DYNAMIC_MESSAGING. */
private final MsgCommandChannel commandChannel;
/** Port alerts are published on. */
private final Port port;
/**
 * @param runtime runtime used to create the messaging command channel
 * @param alertPort port alerts will be published on
 * @param displayTopic topic alerts are published under
 */
public AlertTrigger(FogRuntime runtime, Port alertPort, String displayTopic) {
this.commandChannel = runtime.newCommandChannel(MsgCommandChannel.DYNAMIC_MESSAGING);
this.port = alertPort;
this.displayTopic = displayTopic;
}
/**
 * Publishes {@code message} on the configured port under the display topic.
 *
 * @param message alert text to publish
 */
public void sendAlert(String message) {
// Send the alert message to the specified port using the command channel
commandChannel.publishTopic(port, displayTopic, message);
}
/**
 * Logs an incoming alert message together with the display topic.
 *
 * @param message received alert text
 */
public void handleAlertMessage(String message) {
// Log the incoming alert message using the logger
logger.info("Received alert message for topic {}: {}", displayTopic, message);
}
}
import { BaseModel } from './base_model';
import { TranslationProvider as TranslationProviderInterface } from "../interfaces";
/**
 * Data model for a translation provider, matching the shape declared by
 * {@link TranslationProviderInterface}.
 */
export class TranslationProvider extends BaseModel implements TranslationProviderInterface {
/** Numeric identifier of the provider. */
public provider_id: number;
/** Display name of the provider. */
public name: string;
/** URL-friendly identifier. */
public slug: string;
/** Minimum price per language pair — units/currency not visible here; confirm against the interface. */
public price_pair_min: number;
/** Provider's website URL. */
public website_url: string;
/** Free-text description of the provider. */
public description: string;
/** Pricing tiers; shape not visible here (declared as plain object) — TODO confirm against callers. */
public tiers: object;
/** Supported language pairs; shape not visible here — TODO confirm against callers. */
public pairs: object;
}
<reponame>saadshams/puremvc-js-multicore-framework
/* @todo split into buildReleaseCJS and buildReleaseES6 */
/* @todo process.ENV to disable terser during development */
import pkg from "../../package.json"
import multiEntry from "rollup-plugin-multi-entry"
import includePaths from "rollup-plugin-includepaths"
import { terser } from "rollup-plugin-terser"
// Rollup build configuration: bundles the manifest plus every module under
// src/main into a single ES-module file.
export default {
// Multiple entry points: the manifest first, then every source module.
input: [
`tool/rollup/manifest/index.js`,
`src/main/**/*.js`
],
// Single output file at the path named by package.json's "exports" field.
output: {
file: `${pkg.exports}`,
format: "esm"
},
plugins: [
// Resolve bare import paths relative to src/main.
includePaths({
paths: [
`src/main`
]
}),
// Combine the glob inputs into one virtual entry (exports: true keeps
// their named exports visible on the bundle).
multiEntry({
exports: true
}),
// Minify. Class names are kept (public API); only properties whose
// names start or end with "_" are mangled.
terser({
mangle: {
safari10: true
, keep_classnames: true
, properties: {
regex: /^_|_$/
}
}
})
]
}
|
echo -e "[\n$(for i in `ls`;do name=${i%.png}; name=${name/-/ }; id=$((++id));echo "\t{\t\n\t\t\"id\": $id\n\t\t\"name\": $name\n\t\t\"image\": \"assets/images/foods/$i\n\t},";done)\n]" |
<reponame>TVilaboa/IO-Sim2
package model.simulation;
import java.util.ArrayList;
import java.util.List;
/**
* Created by chelen on 02/05/15.
*/
/**
 * Created by chelen on 02/05/15.
 *
 * Holds the outcome of a simulation run: the numeric results plus the
 * ordered list of events that occurred.
 */
public class Result {

    /** Numeric outcome accumulator for the run. */
    private final NumericResults results;

    /** Events recorded during the run, in insertion order. */
    private final List<Event> events;

    public Result() {
        this.results = new NumericResults();
        this.events = new ArrayList<>();
    }

    /** Appends one event to the end of the log. */
    public void addEvent(Event e) {
        this.events.add(e);
    }

    /** @return the events recorded so far, in order */
    public List<Event> getEvents() {
        return this.events;
    }

    /** @return the numeric results accumulator */
    public NumericResults getResults() {
        return this.results;
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.