text stringlengths 1 1.05M |
|---|
import re


def _search_group(pattern, content):
    """Return the first capture group of *pattern* in *content*, or None when absent."""
    match = re.search(pattern, content)
    return match.group(1) if match else None


def extract_license_info(file_path):
    """Parse a plain-text license file and extract its key fields.

    Args:
        file_path: path to the license file to read.

    Returns:
        dict with keys "License Type", "Copyright Year", "Name", "Date" and
        "Version". A value is None when the corresponding pattern is not
        found (the previous version raised AttributeError on `.group(1)`
        of a failed `re.search`).
    """
    with open(file_path, 'r') as file:
        content = file.read()
    # Each field is matched independently so one missing marker does not
    # prevent the others from being extracted.
    return {
        "License Type": _search_group(r'The (.+?) License', content),
        "Copyright Year": _search_group(r'Copyright \(c\) (\d{4})', content),
        "Name": _search_group(r'Copyright \(c\) \d{4} (.+?)\n', content),
        "Date": _search_group(r'(\d{4}-\d{2}-\d{2}),', content),
        "Version": _search_group(r'v(\d+\.\d+)', content),
    }
# Example usage: guarded so importing this module does not run the demo
# (and a stray trailing token that broke the original line is removed).
if __name__ == "__main__":
    file_path = 'path_to_license_file.txt'
    license_info = extract_license_info(file_path)
    for key, value in license_info.items():
        print(f"{key}: {value}")
package com.github.chen0040.leetcode.day09.easy;

/**
 * Created by xschen on 4/8/2017.
 *
 * summary: Move all zeroes in an int array to the end while preserving
 * the relative order of the non-zero elements. In-place, O(n) time.
 *
 * link: https://leetcode.com/problems/move-zeroes/description/
 */
public class MoveZeroes {
    public class Solution {
        /**
         * Moves every zero in {@code nums} to the tail of the array in place.
         *
         * @param nums array to rearrange; modified in place.
         */
        public void moveZeroes(int[] nums) {
            // j scans ahead of i for the next non-zero element to swap into
            // the zero slot currently at index i.
            int j = 0;
            for(int i=0; i < nums.length; i++) {
                if(nums[i] == 0) {
                    // j must stay strictly ahead of i so a swap never moves
                    // an element backwards past the write position.
                    j = Math.max(j, i+1);
                    // Skip over the run of zeroes to the next non-zero.
                    while(j < nums.length && nums[j]==0){
                        j++;
                    }
                    // No non-zero element remains to the right: finished.
                    if(j == nums.length) {
                        break;
                    }
                    exchange(nums, i, j);
                }
            }
        }

        // Swaps a[i] and a[j].
        private void exchange(int[] a, int i, int j) {
            int temp = a[i];
            a[i] = a[j];
            a[j] = temp;
        }
    }
}
|
# Builds a curl command line for downloading a CircleCI build artifact,
# authenticating via the circle-token query parameter.
class CurlCommand
  attr_reader :artifact_url, :output_file_name, :api_key, :follow_redirects

  def initialize(artifact_url, output_file_name, api_key, follow_redirects = true)
    @artifact_url = artifact_url
    @output_file_name = output_file_name
    @api_key = api_key
    @follow_redirects = follow_redirects
  end

  # True when curl should follow HTTP redirects (adds -L to the command).
  def follow_redirects?
    !!follow_redirects
  end

  # The artifact URL with the API token appended as a query parameter.
  def authenticated_artifact_url
    "#{artifact_url}?circle-token=#{api_key}"
  end

  # Renders the full curl invocation as a shell command string.
  def to_s
    redirect_flag = follow_redirects? ? '-L' : ''
    "curl #{redirect_flag} -o #{output_file_name} #{authenticated_artifact_url}"
  end
end
|
<reponame>iqbalmalik89/cargo
// Page bootstrap: wire up the image uploader, buttons, initial listing,
// and an Enter-key shortcut for submitting the popup form.
$(function () {
    // Testimonial picture uploader (helper defined elsewhere in the app).
    uploadFile('#user_image_upload', 'testimonial/image', '#file_path', '#user_image', '#response_msg')
    $( "#save_btn" ).click(function() {
        $.addUpdateTestimonial();
    });
    $( "#addbutton" ).click(function() {
        // Clear the hidden id so the popup opens in "add" mode.
        $('#id').val('');
        $.resetForm();
    });
    // List records (first page).
    $.getListing(0);
    // Submit the add/update popup when Enter is pressed anywhere on the page.
    $('body').keypress(function (e) {
        var key = e.which;
        if(key == 13) // the enter key code
        {
            if($('#add_popup').is(':visible'))
            {
                $.addUpdateTestimonial();
            }
        }
    });
});
// Reset the add/update popup: choose the title based on whether a record
// id is present, then clear the inputs and validation state.
$.resetForm = function()
{
    var id = $.trim($('#id').val());
    if(id != '' && id != '0')
        $('#popupTitle').html('Update Testimonial');
    else
        $('#popupTitle').html('Add Testimonial');
    //reset fields
    $('#name, #description, #id').val('');
    $('#statusactive').prop('checked', true);
    // NOTE(review): this clears the error class on every div on the page,
    // not just inside the popup — confirm that is intended.
    $('div').removeClass('has-error');
}
// Fetch one page of testimonials (12 per page) and render them via $.listing.
$.getListing = function (page) {
    ajaxExec('testimonial', { page: page, limit: 12 }, 'get', '#response_msg', $.listing);
};
// Open the popup in "edit" mode for the given record id and load its data.
$.showEditPopup = function(id)
{
    $.resetForm();
    $('#popupTitle').html('Update Testimonial');
    // Store the id in the hidden field; $.getRec reads it from there.
    $('#id').val(id);
    $('#add_popup').modal('show');
    $.getRec();
}
// Load a single testimonial (id taken from the hidden #id field) and fill
// the popup form with its data.
$.getRec = function() {
    var id = $('#id').val();
    var requestData = {"id": id};
    var request = ajaxExec('testimonial/' + id, requestData, 'GET', '#response_msg');
    request.done(function(data) {
        if(data.status == 'success')
        {
            $('#name').val(data.data.name);
            $('#description').val(data.data.description);
            // Check the radio button matching the record's status value.
            $('#status' + data.data.status).prop('checked', true);
            // Show the stored picture, or an initials placeholder when absent.
            if(data.data.pic_path != '')
            {
                var userImage = '<img src="'+data.data.image+'" style="margin-right:10px;" width="40" class="img-circle" /> ';
            }
            else
            {
                var userImage = '<img style="float:left; margin-right:10px;" data-name="'+data.data.name+'" class="profile"/> ';
            }
            $('#user_image').html(userImage);
            // Render the initials-based avatar for placeholder images.
            $('.profile').initial({width:30, height: 30, fontSize:10});
        }
    });
}
// Delete a testimonial by id; on success close the confirmation popup and
// refresh the listing, otherwise show the server's error message.
$.deleteTestimonial = function(id)
{
    var requestData = {};
    var request = ajaxExec('testimonial/' + id, requestData, 'delete', '#response_msg');
    request.done(function(data) {
        if(data.status == 'success' )
        {
            // $.msgShow('#response_msg', data.message, 'success');
            $('#deletePopup').modal('hide');
            $.getListing(0);
        }
        else
        {
            $.msgShow('#response_msg', data.message, 'error');
        }
    });
}
// Render one page of testimonials into the table, wire up pagination, and
// show a placeholder row when no records exist. Used as the ajaxExec
// success callback from $.getListing.
$.listing = function(data) {
    var html = '';
    if(data.status == 'success')
    {
        if(data.data.data.length > 0)
        {
            $.each( data.data.data, function( key, rec ) {
                // Green button for active records, dark for inactive.
                if(rec.status == 'active')
                {
                    var statusClass = 'btn-success';
                }
                else
                {
                    var statusClass = 'btn-inverse';
                }
                // Stored picture, or an initials placeholder when absent.
                if(rec.image == '')
                {
                    var userImage = '<img style="float:left; margin-right:10px;" data-name="'+rec.name+'" class="profile"/> ';
                }
                else
                {
                    var userImage = '<img src="'+rec.image+'" style="margin-right:10px; width:30px; height:30px;" /> ';
                }
                // NOTE(review): (key + 1) numbers rows within the page only;
                // confirm page offset should not be added for pages > 0.
                html += '<tr>\
                <td class="text-left">'+ (key + 1) +'</td>\
                <td class="text-left">\
                '+userImage + rec.name +'\
                </td>\
                <td class="text-left">'+ rec.description +'</td>\
                <td class="text-left"> <span class="label label-primary">'+rec.created_at_formatted+'</span> </td>\
                <td class="text-right">\
                <a href="javascript:void(0);"><button class="btn '+statusClass+' btn-xs">'+ucfirst(rec.status)+'</button></a>\
                <a href="javascript:void(0);" onclick="$.showEditPopup('+rec.id+');" class="btn btn-default btn-xs" data-target="#add_popup" data-modal-options="slide-down" data-content-options="modal-sm h-center" title="Edit"><i class="fa fa-pencil"></i></a>\
                <a href="javascript:void(0);" onclick="$.confirmDel('+rec.id+', this, \'deleteTestimonial\');" data-entityname="' + rec.name+'" class="btn btn-danger btn-xs" title="Delete"><i class="fa fa-times"></i></a>\
                </td>\
                </tr>';
            });
            $('#responsive-table-body').html(html);
            // Rebuild the pagination control for the current result set.
            $('#pagination').twbsPagination({
                totalPages: data.data.paginator.total_pages,
                visiblePages: 7,
                onPageClick: function (event, page) {
                    $.getListing(page);
                }
            });
            // Render initials-based avatars for placeholder images.
            $('.profile').initial({width:30, height: 30, fontSize:10});
        }
        else
        {
            $('#responsive-table-body').html('<tr><th style="text-align:center;" colspan="5">No records found</th></tr>');
        }
    }
}
// Create a new testimonial (POST) or update an existing one (PUT when the
// hidden #id field is populated) from the popup form, after client-side
// validation of the name and description fields.
$.addUpdateTestimonial = function()
{
    var check = true;
    var method = 'POST';
    var endPoint = 'testimonial';
    var name = $.trim($('#name').val());
    var description = $.trim($('#description').val());
    var status = $('input[name=status]:checked').val();
    var imagePath = $.trim($('#file_path').val());
    var id = $.trim($('#id').val());
    // `check` accumulates validation results across the fields.
    check = validateText('#name', name, check);
    check = validateText('#description', description, check);
    if(id != '')
    {
        // Existing record: switch to REST update semantics.
        method = 'PUT';
        endPoint += '/' + id;
    }
    if(check)
    {
        // `var` added: requestData previously leaked into the global scope.
        var requestData = {"id": id, 'name': name, 'description': description, 'status': status, "pic_path":imagePath};
        var request = ajaxExec(endPoint, requestData, method, '#response_msg');
        request.done(function(data) {
            if(data.status == 'success')
            {
                // Let the success message show briefly before closing the
                // popup and refreshing the listing.
                setTimeout(function(){
                    $('#add_popup').modal('hide');
                    $.getListing(0);
                }, 2000);
                $.msgShow('#response_msg', data.message, 'success');
            }
            else
            {
                $.msgShow('#response_msg', data.message, 'error');
            }
        });
    }
}
import admin from 'firebase-admin';

// Service-account credentials and project settings come from environment
// variables (provisioned by the deployment platform).
const {
    FIREBASE_DATABASE_URL,
    FIREBASE_PROJECT_ID,
    FIREBASE_PRIVATE_KEY,
    FIREBASE_CLIENT_EMAIL,
    FIREBASE_STORAGE_BUCKET,
} = process.env;

// Initialise the Admin SDK exactly once: module re-evaluation (e.g. hot
// reload) would otherwise make initializeApp throw for the default app.
if (!admin.apps.length) {
    try {
        admin.initializeApp({
            credential: admin.credential.cert({
                projectId: FIREBASE_PROJECT_ID,
                // NOTE(review): if the key is stored with escaped "\n"
                // sequences it must be unescaped before use (e.g.
                // .replace(/\\n/g, '\n')) — confirm how the env var is set.
                privateKey: FIREBASE_PRIVATE_KEY,
                clientEmail: FIREBASE_CLIENT_EMAIL,
            }),
            databaseURL: FIREBASE_DATABASE_URL,
            projectId: FIREBASE_PROJECT_ID,
            storageBucket: FIREBASE_STORAGE_BUCKET,
        });
        // Ignore undefined fields instead of rejecting writes that contain them.
        admin.firestore().settings({
            ignoreUndefinedProperties: true,
        });
    } catch (error) {
        console.error(error);
    }
}

export default admin;
|
<filename>src/components/contact/ContactForm.tsx
import * as React from 'react';
import styled from 'styled-components';
import { StyledBtn } from '../styles/Buttons';
interface Props {}

// Outer section: vertically centers the heading and form.
const StyledContact = styled.section`
padding: 4rem 1rem;
min-height: 66vh;
/* background: ${({ theme }) => theme.colors.secondary}; */
display: flex;
flex-direction: column;
justify-content: center;
h4{
text-align:center;
font-size:2rem;
}
`;

// Form layout: name/email side by side in .head, submit button centered.
const StyledForm = styled.form`
.head {
display: flex;
justify-content: center;
}
button {
margin: 0 auto;
display: block;
width: 12rem;
border-radius: ${({ theme }) => theme.borderRadius};
}
`;

// Uniform spacing wrapper around each field.
const FormGroup = styled.div`
padding: 1rem 0.5rem;
`;

// Text/email input that grows slightly on focus; the focus width is
// stepped down at narrower breakpoints.
const StyledInput = styled.input`
padding: 0.8rem 0.9rem;
width: 100%;
box-shadow: ${({ theme }) => theme.shadow.lightShadow};
border: 1px solid ${({ theme }) => theme.colors.black};
border-radius: ${({ theme }) => theme.borderRadius};
font-size: 1.1rem;
transition: ${({ theme }) => theme.transition.mainTransition};
&:focus {
transform: scale(1.01);
width: 32rem;
box-shadow: ${({ theme }) => theme.shadow.darkShadow};
}
@media (min-width: 838px) {
width: 20rem;
}
@media (max-width: 1038px) {
&:focus {
width: 28rem;
}
}
@media (max-width: 740px) {
&:focus {
width: 13rem;
}
}
@media (max-width: 440px) {
&:focus {
width: 9rem;
}
}
`;

// Message textarea, centered, highlighted with the primary color on focus.
const StyledTextArea = styled.textarea`
padding: 0.5rem 0.9rem;
width: 55%;
margin: 0 auto;
border-radius: ${({ theme }) => theme.borderRadius};
display: block;
border: 1px solid ${({ theme }) => theme.colors.black};
height: 8rem;
box-shadow: ${({ theme }) => theme.shadow.lightShadow};
font-size: 1.1rem;
&:focus {
border: 2px solid ${({ theme }) => theme.colors.primaryColor};
box-shadow: ${({ theme }) => theme.shadow.darkShadow};
}
@media (min-width: 1238px) {
width: 60%;
}
`;
// Contact section with a Formspree-backed form.
// Fix: the inputs had no `name` attributes, so the POSTed form body
// contained no field values — Formspree identifies fields by `name`.
const ContactForm: React.FC<Props> = () => {
  return (
    <StyledContact>
      <h4>Contact us</h4>
      <StyledForm method="POST" action="https://formspree.io/test123@gmail.com">
        <div className="head">
          <FormGroup>
            <StyledInput type="text" name="name" placeholder="name" />
          </FormGroup>
          <FormGroup>
            <StyledInput type="email" name="email" placeholder="email" />
          </FormGroup>
        </div>
        <FormGroup>
          <StyledTextArea name="message" placeholder="message" />
        </FormGroup>
        <FormGroup>
          <StyledBtn type="submit">Send</StyledBtn>
        </FormGroup>
      </StyledForm>
    </StyledContact>
  );
};

export default ContactForm;
|
<gh_stars>0
from matterhook.incoming import Webhook

# Re-export the incoming-webhook client as this package's public API.
__all__ = ['Webhook']
|
export { default } from "./ProfitSharingAnalytics";
|
#!/usr/bin/env bats

load ../helpers

# Runs after every test: shut down the swarm manager and the docker engines.
function teardown() {
	swarm_manage_cleanup
	stop_docker
}
@test "docker run" {
	start_docker_with_busybox 2
	swarm_manage

	# make sure no container exist
	run docker_swarm ps -qa
	[ "${#lines[@]}" -eq 0 ]

	# run
	docker_swarm run -d --name test_container busybox sleep 100

	# verify, container is running
	# Fix: the command substitution must be quoted — with no match an
	# unquoted `[ -n $(...) ]` degenerates to `[ -n ]`, a one-argument
	# test that is always true, so the assertion could never fail.
	[ -n "$(docker_swarm ps -q --filter=name=test_container --filter=status=running)" ]

	# error check
	run docker_swarm run -d 4e8aa3148a132f19ec560952231c4d39522043994df7d2dc239942c0f9424ebd
	[[ "${output}" == *"cannot specify 64-byte hexadecimal strings"* ]]
}
@test "docker run with resources" {
	start_docker_with_busybox 2
	swarm_manage

	# run a container with a representative spread of resource/config flags
	docker_swarm run -d --name test_container \
		--add-host=host-test:127.0.0.1 \
		--cap-add=NET_ADMIN \
		--cap-drop=MKNOD \
		--label=com.example.version=1.0 \
		--read-only=true \
		--ulimit=nofile=10 \
		--device=/dev/loop0:/dev/loop0 \
		--ipc=host \
		--pid=host \
		--memory-swappiness=2 \
		--group-add="root" \
		--memory-reservation=100 \
		--kernel-memory=100 \
		--dns-opt="someDnsOption" \
		--stop-signal="SIGKILL" \
		busybox sleep 1000

	# verify, container is running
	# Fix: quote the command substitution — unquoted, an empty result made
	# `[ -n ]` a one-argument test that always succeeds.
	[ -n "$(docker_swarm ps -q --filter=name=test_container --filter=status=running)" ]

	# each flag must be reflected in the inspect output
	run docker_swarm inspect test_container
	# label
	[[ "${output}" == *"com.example.version"* ]]
	# add-host
	[[ "${output}" == *"host-test:127.0.0.1"* ]]
	# cap-add
	[[ "${output}" == *"NET_ADMIN"* ]]
	# cap-drop
	[[ "${output}" == *"MKNOD"* ]]
	# read-only
	[[ "${output}" == *"\"ReadonlyRootfs\": true"* ]]
	# ulimit
	[[ "${output}" == *"nofile"* ]]
	# device
	[[ "${output}" == *"/dev/loop0"* ]]
	# ipc
	[[ "${output}" == *"\"IpcMode\": \"host\""* ]]
	# pid
	[[ "${output}" == *"\"PidMode\": \"host\""* ]]
	# memory-swappiness
	[[ "${output}" == *"\"MemorySwappiness\": 2"* ]]
	# group-add
	[[ "${output}" == *"root"* ]]
	# memory-reservation
	[[ "${output}" == *"\"MemoryReservation\": 100"* ]]
	# kernel-memory
	[[ "${output}" == *"\"KernelMemory\": 100"* ]]
	# dns-opt
	[[ "${output}" == *"someDnsOption"* ]]
	# stop-signal
	[[ "${output}" == *"\"StopSignal\": \"SIGKILL\""* ]]
}
# Scheduling falls back to the node that already has the image when the
# image cannot be pulled elsewhere (soft image affinity retry).
@test "docker run - reschedule with soft-image-affinity" {
	start_docker_with_busybox 1
	start_docker 1
	# only node-0 gets the locally tagged image
	docker -H ${HOSTS[0]} tag busybox:latest busyboxabcde:latest
	swarm_manage

	# make sure busyboxabcde exists
	run docker_swarm images
	[ "$status" -eq 0 ]
	[[ "${output}" == *"busyboxabcde"* ]]

	# try to create container on node-1, node-1 does not have busyboxabcde and will pull it
	# but can not find busyboxabcde in dockerhub
	# then will retry with soft-image-affinity
	docker_swarm run -d --name test_container -e constraint:node==~node-1 busyboxabcde sleep 1000

	# check container running on node-0
	run docker_swarm ps
	[[ "${output}" == *"node-0/test_container"* ]]
}
# With a *hard* node constraint the soft-image-affinity retry must NOT kick
# in: the run should fail with an image-not-found error instead.
@test "docker run - reschedule with soft-image-affinity and node constraint" {
	start_docker_with_busybox 1
	start_docker 1
	# only node-0 gets the locally tagged image
	docker -H ${HOSTS[0]} tag busybox:latest busyboxabcde:latest
	swarm_manage

	# make sure busyboxabcde exists
	run docker_swarm images
	[ "$status" -eq 0 ]
	[[ "${output}" == *"busyboxabcde"* ]]

	# create container on node-1, node-1 does not have busyboxabcde and will pull it
	# but can not find busyboxabcde in dockerhub
	# because run with node constraint, will not retry with soft-image-affinity
	run docker_swarm run -d --name test_container -e constraint:node==node-1 busyboxabcde sleep 1000

	# check error message
	[[ "${output}" != *"unable to find a node that satisfies"* ]]
	[[ "${output}" == *"busyboxabcde:latest not found"* ]]
}
# Running with an unknown volume driver must fail with "Plugin not found".
@test "docker run - with not exist volume driver" {
	start_docker_with_busybox 2
	swarm_manage

	# make sure no container exist
	run docker_swarm ps -qa
	[ "${#lines[@]}" -eq 0 ]

	# run
	run docker_swarm run -d --volume-driver=not_exist_volume_driver -v testvolume:/testvolume --name test_container busybox sleep 100

	# check error message
	[ "$status" -ne 0 ]
	[[ "${output}" == *"Plugin not found"* ]]
}
|
import re
import nltk
from nltk.corpus import stopwords
from nltk.tokenize import word_tokenize
def classify_review(review_words, positive_words):
    """Classify a tokenized review by the share of positive words.

    Args:
        review_words: list of word tokens (stopwords already removed).
        positive_words: collection of words considered positive.

    Returns:
        "Positive" when more than half of the tokens are positive,
        "Negative" otherwise, and "Neutral" for an empty token list.
    """
    if not review_words:
        return "Neutral"
    pos_count = sum(1 for word in review_words if word in positive_words)
    # if more than half of the words are positive then label it as positive
    return "Positive" if pos_count / len(review_words) > 0.5 else "Negative"


if __name__ == "__main__":
    stop_words = stopwords.words("english")
    # Fix: the original tested `word in pos_words` against the integer
    # counter itself (a guaranteed TypeError). A positive-word lexicon is
    # a separate collection; the empty set is a placeholder.
    positive_words = set()  # TODO: load a positive-opinion lexicon
    # remove punctuations (test_review is expected to be defined upstream)
    test_review = re.sub(r"\W", " ", test_review)
    # tokenize the sentence
    review_tokens = word_tokenize(test_review)
    # remove stopwords
    review_words = [word for word in review_tokens if word not in stop_words]
    # classify
    print(classify_review(review_words, positive_words))
<filename>lib/audit/audit_helper/json_helper.rb
# TODO: Vrushali - handle hash is not allowed in Google DataStore
module AuditHelper
  # Flattens arbitrary JSON-like structures (arrays/hashes) into a form the
  # datastore accepts: primitives pass through, nested hashes are serialized
  # to JSON strings, everything else is stringified (truncated to a marker
  # when longer than 250 characters).
  module JsonHelper
    # Normalizes +json+ (Array or Hash). +allow_hash+ controls whether a
    # nested hash inside an array may be recursed into or must be dumped
    # to a JSON string.
    # NOTE(review): Fixnum was removed in Ruby 3 — confirm the target Ruby
    # version, or this type whitelist silently stops matching integers.
    # NOTE(review): inputs that are neither Array nor Hash return nil.
    def self.format json, allow_hash=true
      if json.is_a?(Array)
        result = json.collect do |value|
          klass = value.class
          if [Float, Fixnum, TrueClass, FalseClass, Date, DateTime, Time].include?(klass)
            value
          elsif value.is_a?(Array)
            # nested containers never allow hashes
            self.format value, false
          elsif value.is_a?(Hash)
            if allow_hash
              self.format value, false
            else
              value.to_json
            end
          else
            begin
              # long values are replaced by a marker rather than stored
              if value.to_s.length > 250
                "changed"
              else
                value.to_s.encode("UTF-8")
              end
            rescue
              # TODO: log error
              ""
            end
          end
        end
        result
      elsif json.is_a?(Hash)
        result = {}
        json.each do |key, value|
          klass = value.class
          if [Float, Fixnum, TrueClass, FalseClass, Date, DateTime, Time].include?(klass)
            result[key.to_s] = value
          elsif value.is_a?(Array)
            result[key.to_s] = self.format value, false
          elsif value.is_a?(Hash)
            # hash-in-hash is always serialized (datastore restriction above)
            result[key.to_s] = value.to_json
          else
            begin
              if value.to_s.length > 250
                result[key.to_s] = "changed"
              else
                result[key.to_s] = value.to_s.encode("UTF-8")
              end
            rescue
              # TODO: log error
              result[key.to_s] = ""
            end
          end
        end
        result
      end
    end
  end
end
|
/* External dependencies */
import styled from "styled-components";

// Used to make sure the element also has a height when empty by setting min-height equal to line-height.
const height = "20px";

/**
 * Styled heading for the Facebook share preview title.
 * Clamps the text to two lines with the -webkit-line-clamp technique.
 * Fix: `overflow: hidden` was declared twice; the duplicate is removed
 * (it is kept next to the line-clamp rules, which require it).
 */
const FacebookTitle = styled.span`
	line-height: ${ height };
	min-height : ${ height };
	color: #1d2129;
	font-weight: 600;
	font-size: 17px;
	margin: 0;
	letter-spacing: normal;
	white-space: normal;
	flex-shrink: 0;
	cursor: pointer;
	display: -webkit-box;
	-webkit-line-clamp: 2;
	-webkit-box-orient: vertical;
	overflow: hidden;
`;

export default FacebookTitle;
|
const mongoose = require("mongoose");
const express = require("express");
const router = express.Router();
const questions = require("../models/questions.js");
const Question = mongoose.model("Questions");
// middleware that is specific to this router: log every request reaching
// the /categories routes, then hand off to the matching route handler.
router.use(function (req, res, next) {
  console.log("Entered /categories");
  next();
});
// routers
// GET / — respond with the list of distinct question categories.
router.get("/", function (req, res) {
  Question.distinct("category")
    .then((categories) => {
      res.send(categories);
    })
    .catch((err) => {
      // Fix: log instead of re-throwing — throwing inside .catch() after
      // the response is sent produces an unhandled promise rejection.
      console.error(err);
      res.sendStatus(500);
    });
});
// GET /:category/questions/ — list question/answer pairs for a category.
router.get("/:category/questions/", function (req, res) {
  const reqCategory = req.params.category;
  Question.find({ category: reqCategory }, { question: 1, answer: 1 })
    .then((questions) => {
      // NOTE(review): Express matches this route only when :category is
      // present, so this guard can never fire — confirm intent.
      if (reqCategory === undefined) {
        throw "params";
      } else {
        return questions;
      }
    })
    .then((questions) => {
      res.send(questions);
    })
    .catch((err) => {
      if (err == "params") {
        res.status(404).send("404: Missing parameters");
      } else {
        res.sendStatus(500);
      }
      // NOTE(review): re-throwing after the response was sent produces an
      // unhandled promise rejection — consider logging instead.
      throw err;
    });
});
// GET /:category/questions/random — respond with one random question from
// the category, or 404 when the category has no questions.
router.get("/:category/questions/random", function (req, res) {
  const reqCategory = req.params.category;
  Question.find({ category: reqCategory }, { question: 1, answer: 1 })
    .then((questions) => {
      // Fix: the old `reqCategory === undefined` guard could never fire
      // (Express only matches the route with the param present), and
      // indexing an empty result yielded `undefined`. Treat an empty
      // category as not found instead.
      if (questions.length === 0) {
        throw "params";
      }
      return questions;
    })
    .then((questions) => {
      const randomIndex = Math.floor(Math.random() * questions.length);
      // res.json serialises the document itself; the previous
      // JSON.stringify/JSON.parse round-trip was redundant.
      res.json(questions[randomIndex]);
    })
    .catch((err) => {
      if (err == "params") {
        res.status(404).send("404: Missing parameters");
      } else {
        // Fix: log instead of re-throwing after the response is sent.
        console.error(err);
        res.sendStatus(500);
      }
    });
});
// GET /:category/questions/:questionid — fetch one question by category
// and document id.
router.get("/:category/questions/:questionid", function (req, res) {
  const reqCategory = req.params.category;
  const reqQuestionid = req.params.questionid;
  Question.findOne(
    { category: reqCategory, _id: reqQuestionid },
    { question: 1, answer: 1 }
  )
    .then((question) => {
      // NOTE(review): route params cannot be undefined when this handler
      // runs, so this guard is dead code — confirm intent. A `null`
      // result (no match) passes through unhandled.
      if (reqCategory === undefined || reqQuestionid === undefined) {
        throw "params";
      } else {
        return question;
      }
    })
    .then((question) => {
      res.send(question);
    })
    .catch((err) => {
      if (err == "params") {
        res.status(404).send("404: Missing parameters");
      } else {
        res.sendStatus(500);
      }
      // NOTE(review): re-throwing after the response was sent produces an
      // unhandled promise rejection — consider logging instead.
      throw err;
    });
});
module.exports = router;
|
const Model = require('./Model').Model
const Condition = require('../../util/Condition').Condition

// Model recording that a user has voted for an activity.
class ActivityVote extends Model {
  // Both arguments are validated as strings before assignment, so an
  // invalid call never produces a partially initialised instance.
  constructor (user, activity) {
    super()
    Condition.parameter(user).isType(String)
    Condition.parameter(activity).isType(String)
    this.user = user
    this.activity = activity
  }

  // Rebuilds an ActivityVote from its JSON form; the empty-string instance
  // acts as the target template for Model.deserialise.
  static deserialise (json) {
    return Model.deserialise(json, new ActivityVote('', ''))
  }
}

module.exports.ActivityVote = ActivityVote
|
<reponame>SynthSys/BioDare2-BACK<gh_stars>0
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package ed.biodare2.backend.util.concurrent.id;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
/**
*
* @author tzielins
*/
//@Service
/**
 * Registry of named {@link IdGenerator} instances backed by a
 * {@link LongRecordManager}. Generators must be registered via
 * {@code initGenerator} before {@code getGenerator} is called.
 */
public class IdGenerators {

    final LongRecordManager recordManager;
    // ConcurrentHashMap so initGenerator/getGenerator are safe to call
    // concurrently.
    final Map<String,IdGenerator> generators = new ConcurrentHashMap<>();

    @Autowired
    public IdGenerators(LongRecordManager recordManager) {
        this.recordManager = recordManager;
    }

    /**
     * Registers a generator under the given name, or re-seeds an existing
     * one with the new start value.
     *
     * NOTE(review): putIfAbsent evaluates DBIdGenerator.newInstance(...)
     * even when the name is already registered — confirm the discarded
     * instance has no side effects (e.g. DB writes).
     *
     * @param name generator name; must be non-empty
     * @param increament step between consecutive ids; must be >= 1
     * @param START_VAL first id to hand out
     * @param MAX_VAL upper bound for generated ids
     */
    public void initGenerator(String name,int increament,long START_VAL,long MAX_VAL) {
        if (name == null || name.trim().isEmpty()) throw new IllegalArgumentException("Generator name cannot be empty");
        if (increament < 1) throw new IllegalArgumentException("Increament must be >= 1");
        // putIfAbsent returns the previous mapping (null when absent); a
        // pre-existing generator is re-seeded rather than replaced.
        DBIdGenerator gen = (DBIdGenerator)generators.putIfAbsent(name, DBIdGenerator.newInstance(name,increament,START_VAL,MAX_VAL,recordManager));
        if (gen != null) gen.updateStart(START_VAL);
    }

    /** Registers a generator starting at 1 with no practical upper bound. */
    public void initGenerator(String name,int increament) {
        initGenerator(name, increament,1,Long.MAX_VALUE);
    }

    /** Bulk registration: name -> increment, using default bounds. */
    public void initGenerators(Map<String,Integer> generators) {
        for (Map.Entry<String,Integer> ent : generators.entrySet())
            initGenerator(ent.getKey(), ent.getValue());
    }

    /**
     * Returns the generator registered under {@code name}.
     * @throws IllegalArgumentException when no such generator was initialised
     */
    public IdGenerator getGenerator(String name) {
        IdGenerator gen = generators.get(name);
        if (gen == null) throw new IllegalArgumentException("Unknown generator: "+name+"; please configure it first");
        return gen;
    }
}
|
#!/bin/bash
# Fix: the shebang was #!/bin/sh, but the script uses bash arrays and
# "${array[@]}" expansion, which POSIX sh does not provide.

# Vim bundles symlinked into ~/.vim/bundle by default.
default_bundles=(
  ack.vim
  browser-refresh.vim
  bufkill.vim
  delimitMate
  gist-vim
  gundo
  indexed-search.vim
  jade.vim
  json.vim
  lustyjuggler
  markdown-preview.vim
  nerdcommenter
  nerdtree
  snipmate.vim
  statusline
  supertab
  syntastic
  tabular
  taglist.vim
  vcscommand.vim
  vim-coffee-script
  vim-cucumber
  vim-fugitive
  vim-haml
  vim-javascript
  vim-markdown
  vim-rails
  vim-repeat
  vim-ruby
  vim-rvm
  vim-stylus
  vim-surround
  vim-unimpaired
  YankRing.vim
)

full_path=$(pwd)

echo "Creating directories..."
mkdir -p "$full_path/home/.vim/bundle"
mkdir -p "$full_path/home/.vim/snippets"
mkdir -p "$full_path/home/.vim/tmp/swap"
mkdir -p "$full_path/home/.vim/tmp/yankring"
mkdir -p "$full_path/home/.vim/spell"

echo "Initializing submodules..."
git submodule init
git submodule update
git submodule foreach git checkout master
git submodule foreach git clean -f

echo "Symlinking default bundles..."
for i in "${default_bundles[@]}"; do
  ln -sv "$full_path/home/.vim/bundle_storage/$i" "$full_path/home/.vim/bundle/$i"
done

echo "Symlinking default snippets..."
# Glob instead of parsing `ls`; quoted paths survive spaces.
for f in "$full_path"/home/.vim/snippets_storage/*; do
  ln -sv "$f" "$full_path/home/.vim/snippets/$(basename "$f")"
done

# Make an additional symlink of css for scss
ln -sv "$full_path/home/.vim/snippets_storage/css.snippets" "$full_path/home/.vim/snippets/scss.snippets"

echo "Symlinking to home directory, dot files -A"
# dotglob so hidden files (dotfiles) are included, as `ls -A` was.
shopt -s dotglob
for f in "$full_path"/home/*; do
  ln -sv "$f" "$HOME/$(basename "$f")"
done
shopt -u dotglob
|
<gh_stars>0
package epizza.order.catalog;
import com.google.common.collect.Sets;
import org.springframework.hateoas.Identifiable;
import java.util.Set;
import javax.money.MonetaryAmount;
import javax.persistence.Access;
import javax.persistence.AccessType;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import lombok.AccessLevel;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Singular;
import lombok.ToString;
/**
 * JPA entity for a pizza offered in the catalog.
 * Equality and hashing are based solely on the database id; toString is
 * limited to id and name.
 */
@Entity
@Access(AccessType.FIELD)
@Getter
@Builder
@NoArgsConstructor
@AllArgsConstructor(access = AccessLevel.PRIVATE)
@EqualsAndHashCode(of = {"id"})
@ToString(of = {"id", "name"})
public class Pizza implements Identifiable<Long> {

    // Surrogate primary key, generated by the database.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    // Unique display name.
    @Column(nullable = false, unique = true)
    private String name;

    @Basic
    private String description;

    @Basic(optional = false)
    private String imageUrl;

    // NOTE(review): persisting MonetaryAmount needs an attribute converter
    // registered elsewhere in the project — confirm one exists.
    @Basic(optional = false)
    private MonetaryAmount price;

    // Toppings stored as strings in a collection table, fetched eagerly.
    @Singular
    @ElementCollection(fetch = FetchType.EAGER)
    @Enumerated(EnumType.STRING)
    @Column(nullable = false)
    private Set<Topping> toppings = Sets.newHashSet();
}
|
#!/usr/bin/env bash
# MIT License
#
# Copyright (c) 2019 Pablo Gómez-Caldito Gómez
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Abort with an error listing every required external command that is
# missing from PATH.
check_dependencies(){
    missing=""
    for required in curl echo whoami sed; do
        command -v "$required" >/dev/null 2>&1 || missing="${missing} ${required}"
    done
    [ -z "$missing" ] || { echo >&2 "Missing dependencies:${missing}. Exiting"; exit 1; }
}
# Append the initial configuration template to /etc/ipwarn.conf and tell
# the user where to fill in the service credentials.
write_config(){
    cat >> /etc/ipwarn.conf <<'EOF'
IP=""

# The purpose of this file is to store the API keys and other information in order to be able to use the services you need.
# Fill in the data of the services you are going to use.
# Do not edit the IP variable.

#---- Edit below this line ----#

#Telegram configuration
TEL_API_TOKEN=""
TEL_API_ID=""

#GoDaddy configuration
GD_DOMAIN=""
GD_RECORD_NAME="@" # Default value
GD_RECORD_TYPE="A" # Default value
GD_API_KEY=""
GD_API_SECRET=""
EOF
    echo "Done. Edit /etc/ipwarn.conf to configure warning channels"
}
# Entry point: must run as root (writes /etc and /usr/local/bin).
main(){
    if [ "$(whoami)" != "root" ]; then
        echo "Setup should be executed as root. Exiting"
        exit 1
    fi
    check_dependencies
    write_config
    # Restrict the config file: it will hold API secrets.
    chmod 600 /etc/ipwarn.conf
    # NOTE(review): ${PWD} is unquoted — a path with spaces breaks the copy.
    cp ${PWD}/ipwarn /usr/local/bin/ipwarn
    chmod 755 /usr/local/bin/ipwarn
}

main
|
#!/bin/sh
# common helpers
# Derive KERNEL_BASE from the linux-headers .deb file name in $1.
# Fixes: the error message went to stdout (where $(...) callers would
# capture it as the result) — now stderr; `echo -n` is not portable under
# /bin/sh — use printf; the glob is expanded via `set --` so the first
# match is handled safely even when several files match.
get_kernel_base() {
    local deb_dir="$1"

    # discover KERNEL_BASE from the linux-headers .deb file name
    set -- "${deb_dir}"/linux-headers-*_*_all.deb
    if [ ! -r "$1" ] ; then
        echo "ERROR: unable to find common header .deb: ${deb_dir}/linux-headers-*_*_all.deb" >&2
        exit 1
    fi

    local file_name
    file_name=$(basename "$1")
    local kernel_base="${file_name#linux-headers-}"
    kernel_base="${kernel_base%%_*_all.deb}"
    printf '%s' "$kernel_base"
}
# Derive DEB_VERSION from the linux-headers .deb file name in $1.
# Same fixes as get_kernel_base: error message to stderr, printf instead
# of non-portable `echo -n`, and safe glob handling via `set --`.
get_deb_version() {
    local deb_dir="$1"

    # discover DEB_VERSION from the linux-headers .deb file name
    set -- "${deb_dir}"/linux-headers-*_*_all.deb
    if [ ! -r "$1" ] ; then
        echo "ERROR: unable to find common header .deb: ${deb_dir}/linux-headers-*_*_all.deb" >&2
        exit 1
    fi

    local file_name
    file_name=$(basename "$1")
    local deb_version="${file_name%_all.deb}"
    deb_version="${deb_version##*_}"
    printf '%s' "$deb_version"
}
# Upload an archive to astore and print its sha256 checksum.
#   $1 archive path, $2 astore destination path, $3 architecture, $4 tag
# (currently unused, see TODO).
upload_artifact() {
    local archive="$1"
    local astore_path="$2"
    local arch="$3"
    local tag="$4"

    if [ ! -r "$archive" ] ; then
        echo "ERROR: unable to find archive: $archive"
        exit 1
    fi

    # upload archive to astore
    # TODO: add '-t "$tag"' after INFRA-1047 is fixed.
    enkit astore upload "${archive}@${astore_path}" -a $arch
    echo "Upload sha256sum:"
    sha256sum "$archive"
}
|
#!/bin/bash
# Build GCC 4.9.2 from source and install it into ../gcc_install
# (relative to the gcc_build directory).
# -e: stop on first error, -u: error on unset variables, -x: trace commands.
set -eux

# Start clean: remove any previous source/build trees matching gcc*.
rm -rf gcc*
# NOTE(review): plain-HTTP download from a hard-coded mirror — verify the
# tarball (checksum/signature) or switch to the official GNU https mirror.
curl -o gcc-4.9.2.tar.bz2 http://mirrors-ru.go-parts.com/gcc/releases/gcc-4.9.2/gcc-4.9.2.tar.bz2
tar -xf gcc-4.9.2.tar.bz2
mkdir -p gcc_build
cd gcc_build
# Out-of-tree build; C/C++ only; bootstrap disabled to cut build time.
../gcc-4.9.2/configure --disable-multilib --disable-bootstrap --enable-languages=c,c++ --prefix=${PWD}/../gcc_install
# NOTE(review): -j64 assumes a 64-core build host — consider $(nproc).
make -j64
make install
|
<filename>toglacier_test.go<gh_stars>1-10
package toglacier_test
import (
"context"
"encoding/hex"
"fmt"
"io/ioutil"
"net/smtp"
"os"
"path"
"reflect"
"regexp"
"sort"
"strings"
"testing"
"time"
"github.com/aryann/difflib"
"github.com/davecgh/go-spew/spew"
"github.com/pkg/errors"
"github.com/rafaeljusto/toglacier"
"github.com/rafaeljusto/toglacier/internal/archive"
"github.com/rafaeljusto/toglacier/internal/cloud"
"github.com/rafaeljusto/toglacier/internal/log"
"github.com/rafaeljusto/toglacier/internal/report"
"github.com/rafaeljusto/toglacier/internal/storage"
)
// TestToGlacier_Backup runs ToGlacier.Backup through a table of scenarios.
// Every scenario wires mock Archive/Envelop/Cloud/Storage/Logger
// implementations and checks only the error returned by Backup, so the test
// pins the orchestration (list previous backups -> build archive ->
// optional encrypt -> send to cloud -> save metadata) without any real I/O
// beyond temporary files used as backup sources.
func TestToGlacier_Backup(t *testing.T) {
now := time.Now()
// scenario is one table entry: the inputs handed to Backup plus the mock
// collaborators and the error expected back (nil for the happy paths).
type scenario struct {
description string
backupPaths []string
backupSecret string
modifyTolerance float64
ignorePatterns []*regexp.Regexp
archive archive.Archive
envelop archive.Envelop
cloud cloud.Cloud
storage storage.Storage
logger log.Logger
expectedError error
}
scenarios := []scenario{
{
description: "it should backup correctly an archive",
// backupPaths is built eagerly: a temp dir with two small files acts
// as the directory tree being backed up.
backupPaths: func() []string {
d, err := ioutil.TempDir("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary directory. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file1"), []byte("file1 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file2"), []byte("file2 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
return []string{d}
}(),
modifyTolerance: 50.0,
ignorePatterns: []*regexp.Regexp{
regexp.MustCompile(`^.*\~\$.*$`),
},
archive: mockArchive{
mockBuild: func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
if len(backupPaths) == 0 {
t.Fatalf("no backup path informed")
}
if len(ignorePatterns) != 1 {
t.Fatalf("unexpected ignore patterns: %v", ignorePatterns)
}
f, err := ioutil.TempFile("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary file. details: %s", err)
}
defer f.Close()
return f.Name(), archive.Info{
path.Join(backupPaths[0], "file1"): archive.ItemInfo{
ID: "",
Status: archive.ItemInfoStatusModified,
Checksum: "11e87f16676135f6b4bc8da00883e4e02e51595d07841dbc8c16c5d2047a304d",
},
path.Join(backupPaths[0], "file2"): archive.ItemInfo{
ID: "",
Status: archive.ItemInfoStatusNew,
Checksum: "643e692567bfeedc34f914ce740fa353c624ed6a9662ad158266549dd8fd8b70",
},
}, nil
},
},
cloud: mockCloud{
mockSend: func(filename string) (cloud.Backup, error) {
return cloud.Backup{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
}, nil
},
},
storage: mockStorage{
mockSave: func(b storage.Backup) error {
return nil
},
mockList: func() (storage.Backups, error) {
return storage.Backups{
{
Backup: cloud.Backup{
ID: "123455",
CreatedAt: now.Add(-time.Hour),
Checksum: "03c7c9c26fbb71dbc1546fd2fd5f2fbc3f4a410360e8fc016c41593b2456cf59",
VaultName: "test",
},
Info: archive.Info{
"file1": archive.ItemInfo{
ID: "123455",
Status: archive.ItemInfoStatusNew,
Checksum: "49ddf1762657fa04e29aa8ca6b22a848ce8a9b590748d6d708dd208309bcfee6",
},
},
},
}, nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
},
{
description: "it should detect when there's a problem listing the current backups",
backupPaths: func() []string {
d, err := ioutil.TempDir("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary directory. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file1"), []byte("file1 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
return []string{d}
}(),
storage: mockStorage{
mockList: func() (storage.Backups, error) {
return nil, errors.New("problem loading backups from storage")
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("problem loading backups from storage"),
},
{
description: "it should backup correctly an archive with encryption",
backupPaths: func() []string {
d, err := ioutil.TempDir("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary directory. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file1"), []byte("file1 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
return []string{d}
}(),
backupSecret: "12345678901234567890123456789012",
archive: mockArchive{
mockBuild: func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
f, err := ioutil.TempFile("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary file. details: %s", err)
}
defer f.Close()
return f.Name(), nil, nil
},
},
envelop: mockEnvelop{
mockEncrypt: func(filename, secret string) (string, error) {
f, err := ioutil.TempFile("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary file. details: %s", err)
}
defer f.Close()
return f.Name(), nil
},
},
cloud: mockCloud{
mockSend: func(filename string) (cloud.Backup, error) {
return cloud.Backup{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
}, nil
},
},
storage: mockStorage{
mockSave: func(b storage.Backup) error {
return nil
},
mockList: func() (storage.Backups, error) {
return nil, nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
},
{
description: "it should detect an error while building the package",
backupPaths: func() []string {
return []string{"idontexist12345"}
}(),
archive: mockArchive{
mockBuild: func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
return "", nil, errors.New("path doesn't exist")
},
},
storage: mockStorage{
mockList: func() (storage.Backups, error) {
return nil, nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("path doesn't exist"),
},
{
description: "it should detect when there is nothing in the tarball",
backupPaths: func() []string {
d, err := ioutil.TempDir("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary directory. details %s", err)
}
return []string{d}
}(),
archive: mockArchive{
mockBuild: func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
if len(backupPaths) == 0 {
t.Fatalf("no backup path informed")
}
// Empty filename signals that nothing was archived.
return "", nil, nil
},
},
storage: mockStorage{
mockList: func() (storage.Backups, error) {
return storage.Backups{
{
Backup: cloud.Backup{
ID: "123455",
CreatedAt: now.Add(-time.Hour),
Checksum: "03c7c9c26fbb71dbc1546fd2fd5f2fbc3f4a410360e8fc016c41593b2456cf59",
VaultName: "test",
},
Info: archive.Info{
"file1": archive.ItemInfo{
ID: "123455",
Status: archive.ItemInfoStatusNew,
Checksum: "49ddf1762657fa04e29aa8ca6b22a848ce8a9b590748d6d708dd208309bcfee6",
},
},
},
}, nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
},
// This scenario is built by a closure because the expected error embeds
// the temp dir path `d`, which must be created before the struct literal.
func() scenario {
d, err := ioutil.TempDir("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary directory. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file1"), []byte("file1 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file2"), []byte("file2 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file3"), []byte("file3 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
var s scenario
s.description = "it should detect when the modified tolerance is reached"
s.backupPaths = []string{d}
s.modifyTolerance = 50.0
s.archive = mockArchive{
mockBuild: func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
if len(backupPaths) == 0 {
t.Fatalf("no backup path informed")
}
f, err := ioutil.TempFile("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary file. details: %s", err)
}
defer f.Close()
// Two of three items report as modified/new, exceeding the 50%
// tolerance configured above.
return f.Name(), archive.Info{
path.Join(backupPaths[0], "file1"): archive.ItemInfo{
ID: "",
Status: archive.ItemInfoStatusModified,
Checksum: "11e87f16676135f6b4bc8da00883e4e02e51595d07841dbc8c16c5d2047a304d",
},
path.Join(backupPaths[0], "file2"): archive.ItemInfo{
ID: "",
Status: archive.ItemInfoStatusNew,
Checksum: "11e87f16676135f6b4bc8da00883e4e02e51595d07841dbc8c16c5d2047a304d",
},
path.Join(backupPaths[0], "file3"): archive.ItemInfo{
ID: "",
Status: archive.ItemInfoStatusModified,
Checksum: "11e87f16676135f6b4bc8da00883e4e02e51595d07841dbc8c16c5d2047a304d",
},
}, nil
},
}
s.storage = mockStorage{
mockList: func() (storage.Backups, error) {
return storage.Backups{
{
Backup: cloud.Backup{
ID: "123455",
CreatedAt: now.Add(-time.Hour),
Checksum: "03c7c9c26fbb71dbc1546fd2fd5f2fbc3f4a410360e8fc016c41593b2456cf59",
VaultName: "test",
},
Info: archive.Info{
"file1": archive.ItemInfo{
ID: "123455",
Status: archive.ItemInfoStatusNew,
Checksum: "49ddf1762657fa04e29aa8ca6b22a848ce8a9b590748d6d708dd208309bcfee6",
},
},
},
}, nil
},
}
s.logger = mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
}
s.expectedError = toglacier.Error{
Paths: []string{d},
Code: toglacier.ErrorCodeModifyTolerance,
}
return s
}(),
{
description: "it should detect an error while encrypting the package",
backupPaths: func() []string {
d, err := ioutil.TempDir("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary directory. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file1"), []byte("file1 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
return []string{d}
}(),
backupSecret: "123456",
archive: mockArchive{
mockBuild: func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
f, err := ioutil.TempFile("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary file. details: %s", err)
}
defer f.Close()
return f.Name(), nil, nil
},
},
envelop: mockEnvelop{
mockEncrypt: func(filename, secret string) (string, error) {
return "", errors.New("failed to encrypt the archive")
},
},
cloud: mockCloud{
mockSend: func(filename string) (cloud.Backup, error) {
return cloud.Backup{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
}, nil
},
},
storage: mockStorage{
mockSave: func(b storage.Backup) error {
return nil
},
mockList: func() (storage.Backups, error) {
return nil, nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("failed to encrypt the archive"),
},
{
description: "it should detect an error while sending the backup",
backupPaths: func() []string {
d, err := ioutil.TempDir("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary directory. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file1"), []byte("file1 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
return []string{d}
}(),
archive: mockArchive{
mockBuild: func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
f, err := ioutil.TempFile("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary file. details: %s", err)
}
defer f.Close()
return f.Name(), nil, nil
},
},
cloud: mockCloud{
mockSend: func(filename string) (cloud.Backup, error) {
return cloud.Backup{}, errors.New("error sending backup")
},
},
storage: mockStorage{
mockList: func() (storage.Backups, error) {
return nil, nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("error sending backup"),
},
{
description: "it should detect an error while saving the backup information",
backupPaths: func() []string {
d, err := ioutil.TempDir("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary directory. details %s", err)
}
if err := ioutil.WriteFile(path.Join(d, "file1"), []byte("file1 test"), os.ModePerm); err != nil {
t.Fatalf("error creating temporary file. details %s", err)
}
return []string{d}
}(),
archive: mockArchive{
mockBuild: func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
f, err := ioutil.TempFile("", "toglacier-test")
if err != nil {
t.Fatalf("error creating temporary file. details: %s", err)
}
defer f.Close()
return f.Name(), nil, nil
},
},
cloud: mockCloud{
mockSend: func(filename string) (cloud.Backup, error) {
return cloud.Backup{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
}, nil
},
},
storage: mockStorage{
mockSave: func(b storage.Backup) error {
return errors.New("error saving the backup information")
},
mockList: func() (storage.Backups, error) {
return nil, nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("error saving the backup information"),
},
}
// Run every scenario as a subtest. Errors are compared with the helper
// matchers because some expected errors are typed toglacier/archive
// errors that wrap temp-dir paths.
for _, scenario := range scenarios {
t.Run(scenario.description, func(t *testing.T) {
toGlacier := toglacier.ToGlacier{
Context: context.Background(),
Archive: scenario.archive,
Envelop: scenario.envelop,
Cloud: scenario.cloud,
Storage: scenario.storage,
Logger: scenario.logger,
}
err := toGlacier.Backup(scenario.backupPaths, scenario.backupSecret, scenario.modifyTolerance, scenario.ignorePatterns)
if !archive.ErrorEqual(scenario.expectedError, err) && !archive.PathErrorEqual(scenario.expectedError, err) && !ErrorEqual(scenario.expectedError, err) {
t.Errorf("errors don't match. expected “%v” and got “%v”", scenario.expectedError, err)
}
})
}
}
// TestToGlacier_ListBackups covers both listing modes of ToGlacier:
// local only (remote=false, reads storage) and remote (remote=true, reads
// the cloud and synchronizes the result into local storage — stale local
// entries are removed and new remote ones saved). Each scenario checks the
// returned backup list and/or the returned error.
func TestToGlacier_ListBackups(t *testing.T) {
now := time.Now()
scenarios := []struct {
description string
remote bool
cloud cloud.Cloud
storage storage.Storage
logger log.Logger
expected storage.Backups
expectedError error
}{
{
description: "it should list the remote backups correctly",
remote: true,
cloud: mockCloud{
mockList: func() ([]cloud.Backup, error) {
return []cloud.Backup{
{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
},
}, nil
},
},
storage: mockStorage{
// During synchronization only the remote backup id may be saved.
mockSave: func(b storage.Backup) error {
if b.Backup.ID != "123456" {
return fmt.Errorf("adding unexpected id %s", b.Backup.ID)
}
return nil
},
mockList: func() (storage.Backups, error) {
return storage.Backups{
{
Backup: cloud.Backup{
ID: "123454",
CreatedAt: now.Add(-24 * time.Hour),
Checksum: "03c7c9c26fbb71dbc1546fd2fd5f2fbc3f4a410360e8fc016c41593b2456cf59",
VaultName: "test",
},
},
{
Backup: cloud.Backup{
ID: "123455",
CreatedAt: now.Add(-30 * time.Hour),
Checksum: "49ddf1762657fa04e29aa8ca6b22a848ce8a9b590748d6d708dd208309bcfee6",
VaultName: "test",
},
},
{
Backup: cloud.Backup{
ID: "123456",
CreatedAt: now.Add(-time.Hour),
Checksum: "75fcc5623af832086719316b41dcf744893514d8a5fefb376c6426d7911f215f",
VaultName: "test",
},
Info: archive.Info{
"file1": archive.ItemInfo{
ID: "123454",
Status: archive.ItemInfoStatusModified,
Checksum: "915bd6a5873681a273f405c62993b6a96237eab9150fc525c9d57af0becb7ec1",
},
},
},
{
Backup: cloud.Backup{
ID: "123457",
CreatedAt: now.Add(-23 * time.Hour),
Checksum: "e1f6e5d1d7c964e46503bcf1812910c005634236ea087d9cadb1abdef3ae9a61",
VaultName: "test",
},
},
}, nil
},
// Only local entries superseded by the remote listing may be removed.
mockRemove: func(id string) error {
if id != "123454" && id != "123455" && id != "123456" {
return fmt.Errorf("removing unexpected id %s", id)
}
return nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expected: storage.Backups{
{
Backup: cloud.Backup{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
},
Info: archive.Info{
"file1": archive.ItemInfo{
ID: "123454",
Status: archive.ItemInfoStatusModified,
Checksum: "915bd6a5873681a273f405c62993b6a96237eab9150fc525c9d57af0becb7ec1",
},
},
},
{
Backup: cloud.Backup{
ID: "123457",
CreatedAt: now.Add(-23 * time.Hour),
Checksum: "e1f6e5d1d7c964e46503bcf1812910c005634236ea087d9cadb1abdef3ae9a61",
VaultName: "test",
},
},
},
},
{
description: "it should list the local backups correctly",
storage: mockStorage{
mockList: func() (storage.Backups, error) {
return storage.Backups{
{
Backup: cloud.Backup{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
},
},
}, nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expected: storage.Backups{
{
Backup: cloud.Backup{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
},
},
},
},
{
description: "it should detect an error while listing the remote backups",
remote: true,
cloud: mockCloud{
mockList: func() ([]cloud.Backup, error) {
return nil, errors.New("error listing backups")
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("error listing backups"),
},
{
description: "it should detect an error while listing the local backups",
storage: mockStorage{
mockList: func() (storage.Backups, error) {
return nil, errors.New("error listing backups")
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("error listing backups"),
},
{
description: "it should detect an error while retrieving local backups for synch",
remote: true,
cloud: mockCloud{
mockList: func() ([]cloud.Backup, error) {
return []cloud.Backup{
{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
},
}, nil
},
},
storage: mockStorage{
mockSave: func(b storage.Backup) error {
if b.Backup.ID != "123456" {
return fmt.Errorf("adding unexpected id %s", b.Backup.ID)
}
return nil
},
mockList: func() (storage.Backups, error) {
return nil, errors.New("error retrieving backups")
},
mockRemove: func(id string) error {
if id != "123454" && id != "123455" {
return fmt.Errorf("removing unexpected id %s", id)
}
return nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("error retrieving backups"),
},
{
description: "it should detect an error while removing local backups due to synch",
remote: true,
cloud: mockCloud{
mockList: func() ([]cloud.Backup, error) {
return []cloud.Backup{
{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
},
}, nil
},
},
storage: mockStorage{
mockSave: func(b storage.Backup) error {
if b.Backup.ID != "123456" {
return fmt.Errorf("adding unexpected id %s", b.Backup.ID)
}
return nil
},
mockList: func() (storage.Backups, error) {
return storage.Backups{
{
Backup: cloud.Backup{
ID: "123454",
CreatedAt: now.Add(-30 * time.Hour),
Checksum: "03c7c9c26fbb71dbc1546fd2fd5f2fbc3f4a410360e8fc016c41593b2456cf59",
VaultName: "test",
},
},
{
Backup: cloud.Backup{
ID: "123455",
CreatedAt: now.Add(-40 * time.Hour),
Checksum: "49ddf1762657fa04e29aa8ca6b22a848ce8a9b590748d6d708dd208309bcfee6",
VaultName: "test",
},
},
}, nil
},
mockRemove: func(id string) error {
return errors.New("error removing backup")
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("error removing backup"),
},
{
description: "it should detect an error while removing local recent backups due to synch",
remote: true,
cloud: mockCloud{
mockList: func() ([]cloud.Backup, error) {
return []cloud.Backup{
{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
},
}, nil
},
},
storage: mockStorage{
mockSave: func(b storage.Backup) error {
if b.Backup.ID != "123456" {
return fmt.Errorf("adding unexpected id %s", b.Backup.ID)
}
return nil
},
mockList: func() (storage.Backups, error) {
return storage.Backups{
{
Backup: cloud.Backup{
ID: "123456",
CreatedAt: now.Add(-time.Hour),
Checksum: "03c7c9c26fbb71dbc1546fd2fd5f2fbc3f4a410360e8fc016c41593b2456cf59",
VaultName: "test",
},
},
}, nil
},
mockRemove: func(id string) error {
return errors.New("error removing backup")
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("error removing backup"),
},
{
description: "it should detect an error while adding new backups due to synch",
remote: true,
cloud: mockCloud{
mockList: func() ([]cloud.Backup, error) {
return []cloud.Backup{
{
ID: "123456",
CreatedAt: now,
Checksum: "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
VaultName: "test",
},
}, nil
},
},
storage: mockStorage{
mockSave: func(b storage.Backup) error {
return errors.New("error adding backup")
},
mockList: func() (storage.Backups, error) {
return storage.Backups{
{
Backup: cloud.Backup{
ID: "123454",
CreatedAt: now.Add(-time.Second),
Checksum: "03c7c9c26fbb71dbc1546fd2fd5f2fbc3f4a410360e8fc016c41593b2456cf59",
VaultName: "test",
},
},
{
Backup: cloud.Backup{
ID: "123455",
CreatedAt: now.Add(-time.Minute),
Checksum: "49ddf1762657fa04e29aa8ca6b22a848ce8a9b590748d6d708dd208309bcfee6",
VaultName: "test",
},
},
}, nil
},
mockRemove: func(id string) error {
if id != "123454" && id != "123455" {
return fmt.Errorf("removing unexpected id %s", id)
}
return nil
},
},
logger: mockLogger{
mockDebug: func(args ...interface{}) {},
mockDebugf: func(format string, args ...interface{}) {},
mockInfo: func(args ...interface{}) {},
mockInfof: func(format string, args ...interface{}) {},
mockWarning: func(args ...interface{}) {},
mockWarningf: func(format string, args ...interface{}) {},
},
expectedError: errors.New("error adding backup"),
},
}
// Run each scenario as a subtest: compare both the returned list (deep
// equality, diffed on mismatch) and the returned error.
for _, scenario := range scenarios {
t.Run(scenario.description, func(t *testing.T) {
toGlacier := toglacier.ToGlacier{
Context: context.Background(),
Cloud: scenario.cloud,
Storage: scenario.storage,
Logger: scenario.logger,
}
backups, err := toGlacier.ListBackups(scenario.remote)
if !reflect.DeepEqual(scenario.expected, backups) {
t.Errorf("backups don't match.\n%s", Diff(scenario.expected, backups))
}
if !ErrorEqual(scenario.expectedError, err) {
t.Errorf("errors don't match. expected “%v” and got “%v”", scenario.expectedError, err)
}
})
}
}
// TestToGlacier_RetrieveBackup exercises ToGlacier.RetrieveBackup through
// table-driven scenarios: plain retrievals, an encrypted backup, backups whose
// files are spread across multiple archive parts, skipping files that are
// unmodified on disk, and the error paths (listing, downloading, decrypting,
// extracting, checksumming and saving).
func TestToGlacier_RetrieveBackup(t *testing.T) {
	scenarios := []struct {
		description    string          // human-readable subtest name
		id             string          // backup ID passed to RetrieveBackup
		backupSecret   string          // when non-empty the downloaded archive is decrypted first
		skipUnmodified bool            // when true, files matching the local checksum are filtered out of extraction
		storage        storage.Storage // local backup-metadata storage mock
		envelop        archive.Envelop // encryption envelope mock (only relevant with backupSecret)
		cloud          cloud.Cloud     // remote cloud mock
		archive        archive.Archive // archive extraction/checksum mock
		logger         log.Logger
		expectedError  error // error RetrieveBackup is expected to return; nil for success
	}{
		{
			description: "it should retrieve a backup correctly",
			id: "AWSID123",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" && b.Backup.ID != "AWSID122" && b.Backup.ID != "AWSID124" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID122",
								CreatedAt: time.Date(2015, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "8d9ccbb4e474dbd211a7b1f115c7bddaa950842e51a60418c4e943dee29e9113",
								VaultName: "vault",
								Size: 41,
							},
						},
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
							// The requested backup's item info references two other
							// backup IDs, so the retrieval must also download the
							// parts AWSID122 and AWSID124.
							Info: archive.Info{
								"file1": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
								},
								"file2": archive.ItemInfo{
									ID: "AWSID122",
									Status: archive.ItemInfoStatusUnmodified,
									Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
								},
								"file3": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "429713c8e82ae8d02bff0cd368581903ac6d368cfdacc5bb5ec6fc14d13f3fd0",
								},
								"file4": archive.ItemInfo{
									ID: "AWSID124",
									Status: archive.ItemInfoStatusUnmodified,
									Checksum: "352c30aa6751b62c658473a90d0a3ffcf98e66f00968c5320a2f1c2969db7024",
								},
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					// All three backup parts must be requested in one call.
					if len(ids) != 3 {
						return nil, fmt.Errorf("unexpected number of ids: %v", ids)
					}
					return map[string]string{
						"AWSID123": "toglacier-archive-1.tar.gz",
						"AWSID122": "toglacier-archive-2.tar.gz",
						"AWSID124": "toglacier-archive-3.tar.gz",
					}, nil
				},
			},
			archive: mockArchive{
				// Each archive part must be extracted with a filter listing only
				// the files that belong to that part.
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					sort.Strings(filter)
					switch filename {
					case "toglacier-archive-1.tar.gz":
						if len(filter) != 2 || filter[0] != "file1" || filter[1] != "file3" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file1": archive.ItemInfo{
								ID: "AWSID123",
								Status: archive.ItemInfoStatusNew,
								Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
							},
							"file2": archive.ItemInfo{
								ID: "AWSID122",
								Status: archive.ItemInfoStatusUnmodified,
								Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
							},
							"file3": archive.ItemInfo{
								ID: "AWSID123",
								Status: archive.ItemInfoStatusNew,
								Checksum: "429713c8e82ae8d02bff0cd368581903ac6d368cfdacc5bb5ec6fc14d13f3fd0",
							},
							"file4": archive.ItemInfo{
								ID: "AWSID124",
								Status: archive.ItemInfoStatusUnmodified,
								Checksum: "352c30aa6751b62c658473a90d0a3ffcf98e66f00968c5320a2f1c2969db7024",
							},
						}, nil
					case "toglacier-archive-2.tar.gz":
						if len(filter) != 1 || filter[0] != "file2" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file2": archive.ItemInfo{
								ID: "AWSID122",
								Status: archive.ItemInfoStatusNew,
								Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
							},
							"file4": archive.ItemInfo{
								ID: "AWSID124",
								Status: archive.ItemInfoStatusUnmodified,
								Checksum: "352c30aa6751b62c658473a90d0a3ffcf98e66f00968c5320a2f1c2969db7024",
							},
						}, nil
					case "toglacier-archive-3.tar.gz":
						if len(filter) != 1 || filter[0] != "file4" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file4": archive.ItemInfo{
								ID: "AWSID124",
								Status: archive.ItemInfoStatusNew,
								Checksum: "352c30aa6751b62c658473a90d0a3ffcf98e66f00968c5320a2f1c2969db7024",
							},
						}, nil
					}
					return nil, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
		},
		{
			description: "it should retrieve an encrypted backup correctly",
			id: "AWSID123",
			backupSecret: "1234567890123456",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
						},
					}, nil
				},
			},
			envelop: mockEnvelop{
				// Decryption succeeds and hands back a fresh temporary file.
				mockDecrypt: func(encryptedFilename, secret string) (string, error) {
					f, err := ioutil.TempFile("", "toglacier-test")
					if err != nil {
						t.Fatalf("error creating temporary file. details: %s", err)
					}
					defer f.Close()
					return f.Name(), nil
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) == 0 {
						return nil, nil
					}
					// Lazily create a shared temp file (fixed name, reused across
					// runs) holding a pre-built encrypted payload; the hex below
					// decodes to an "encrypted:" label followed by binary content.
					n := path.Join(os.TempDir(), "toglacier-test-getenc")
					if _, err := os.Stat(n); os.IsNotExist(err) {
						f, err := os.Create(n)
						if err != nil {
							t.Fatalf("error creating a temporary file. details: %s", err)
						}
						defer f.Close()
						content, err := hex.DecodeString("656e637279707465643a8fbd41664a1d72b4ea1fcecd618a6ed5c05c95bf65bfda2d4d176e8feff96f710000000000000000000000000000000091d8e827b5136dfac6bb3dbc51f15c17d34947880f91e62799910ea05053969abc28033550b3781111")
						if err != nil {
							t.Fatalf("error decoding encrypted archive. details: %s", err)
						}
						f.Write(content)
					}
					return map[string]string{ids[0]: n}, nil
				},
			},
			archive: mockArchive{
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					return nil, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
		},
		{
			description: "it should retrieve a backup correctly with no archive information and all other backup parts",
			id: "AWSID123",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" && b.Backup.ID != "AWSID122" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID122",
								CreatedAt: time.Date(2015, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "325152353325adc8854e185ab59daf44c51e78404e1512eea9dca116f3a8c16d",
								VaultName: "vault",
								Size: 38,
							},
						},
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				// Parts are downloaded in separate calls, one ID each.
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) == 0 {
						return nil, nil
					}
					switch ids[0] {
					case "AWSID123":
						return map[string]string{
							"AWSID123": "toglacier-archive-1.tar.gz",
						}, nil
					case "AWSID122":
						return map[string]string{
							"AWSID122": "toglacier-archive-2.tar.gz",
						}, nil
					}
					return nil, fmt.Errorf("unexpected id “%s”", ids[0])
				},
			},
			archive: mockArchive{
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					switch filename {
					case "toglacier-archive-1.tar.gz":
						// No stored archive info, so the first extraction runs
						// without a filter; the returned info then drives the
						// retrieval of the referenced part AWSID122.
						if len(filter) != 0 {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file1": archive.ItemInfo{
								Status: archive.ItemInfoStatusNew,
								ID: "AWSID123",
								Checksum: "a5b2df3d72bd28d2382b0b4cca4c25fa260e018b58a915f1e5af14485a746ca8",
							},
							"file2": archive.ItemInfo{
								Status: archive.ItemInfoStatusUnmodified,
								ID: "AWSID122",
								Checksum: "a8c23a9b1441de7f048471994f9500664acb0f6551e418e5b9da5af559606a63",
							},
						}, nil
					case "toglacier-archive-2.tar.gz":
						if len(filter) != 1 || filter[0] != "file2" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file2": archive.ItemInfo{
								Status: archive.ItemInfoStatusNew,
								ID: "AWSID122",
								Checksum: "a8c23a9b1441de7f048471994f9500664acb0f6551e418e5b9da5af559606a63",
							},
						}, nil
					}
					return nil, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
		},
		{
			description: "it should retrieve a backup correctly that does not exist locally",
			id: "AWSID123",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" && b.Backup.ID != "AWSID122" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				// The requested ID is absent from local storage; retrieval must
				// still proceed against the cloud.
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID122",
								CreatedAt: time.Date(2015, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "325152353325adc8854e185ab59daf44c51e78404e1512eea9dca116f3a8c16d",
								VaultName: "vault",
								Size: 38,
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) == 0 {
						return nil, nil
					}
					switch ids[0] {
					case "AWSID123":
						return map[string]string{
							"AWSID123": "toglacier-archive-1.tar.gz",
						}, nil
					case "AWSID122":
						return map[string]string{
							"AWSID122": "toglacier-archive-2.tar.gz",
						}, nil
					}
					return nil, fmt.Errorf("unexpected id “%s”", ids[0])
				},
			},
			archive: mockArchive{
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					switch filename {
					case "toglacier-archive-1.tar.gz":
						if len(filter) != 0 {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file1": archive.ItemInfo{
								Status: archive.ItemInfoStatusNew,
								ID: "AWSID123",
								Checksum: "a5b2df3d72bd28d2382b0b4cca4c25fa260e018b58a915f1e5af14485a746ca8",
							},
							"file2": archive.ItemInfo{
								Status: archive.ItemInfoStatusUnmodified,
								ID: "AWSID122",
								Checksum: "a8c23a9b1441de7f048471994f9500664acb0f6551e418e5b9da5af559606a63",
							},
						}, nil
					case "toglacier-archive-2.tar.gz":
						if len(filter) != 1 || filter[0] != "file2" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file2": archive.ItemInfo{
								Status: archive.ItemInfoStatusNew,
								ID: "AWSID122",
								Checksum: "a8c23a9b1441de7f048471994f9500664acb0f6551e418e5b9da5af559606a63",
							},
						}, nil
					}
					return nil, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
		},
		{
			description: "it should retrieve a backup correctly skipping unmodified files in disk",
			id: "AWSID123",
			skipUnmodified: true,
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
							Info: archive.Info{
								"file1": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
								},
								"file2": archive.ItemInfo{
									ID: "AWSID122",
									Status: archive.ItemInfoStatusUnmodified,
									Checksum: "46813af30d24fb7ad0a019b0da4fcde88368133fcfe39c5a8b25a328e6be4ab2",
								},
								"file3": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "429713c8e82ae8d02bff0cd368581903ac6d368cfdacc5bb5ec6fc14d13f3fd0",
								},
								"file4": archive.ItemInfo{
									ID: "AWSID124",
									Status: archive.ItemInfoStatusUnmodified,
									Checksum: "79edf074b55cdb3088721e88814523124c7da05001175e14b0dcf78336730fcd",
								},
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				// Only the archive holding modified files should be downloaded:
				// file2 and file4 match their on-disk checksums (see
				// mockFileChecksum below) and are skipped.
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) != 1 {
						return nil, fmt.Errorf("unexpected number of ids: %v", ids)
					}
					return map[string]string{
						"AWSID123": "toglacier-archive-1.tar.gz",
					}, nil
				},
			},
			archive: mockArchive{
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					sort.Strings(filter)
					switch filename {
					case "toglacier-archive-1.tar.gz":
						if len(filter) != 2 || filter[0] != "file1" || filter[1] != "file3" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
					case "toglacier-archive-2.tar.gz":
						if len(filter) != 1 || filter[0] != "file2" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
					}
					return nil, nil
				},
				// Local checksums: file2/file4 match the stored info (unchanged
				// on disk), file1/file3 differ (must be restored).
				mockFileChecksum: func(filename string) (string, error) {
					switch filename {
					case "file1":
						return "a9300479a7d2c663b4806af1bce4483f93175cae287979ee0364d057445482c8", nil
					case "file2":
						return "46813af30d24fb7ad0a019b0da4fcde88368133fcfe39c5a8b25a328e6be4ab2", nil
					case "file3":
						return "64bd312e9c81172627d898d7ad146d2e9ea47f47dd67ea79477ab224ab8fb01b", nil
					case "file4":
						return "79edf074b55cdb3088721e88814523124c7da05001175e14b0dcf78336730fcd", nil
					}
					return "", fmt.Errorf("unexpected filename “%s”", filename)
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
		},
		{
			description: "it should detect when there is a problem calculating the file checksum",
			id: "AWSID123",
			skipUnmodified: true,
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
							Info: archive.Info{
								"file1": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
								},
								"file2": archive.ItemInfo{
									ID: "AWSID122",
									Status: archive.ItemInfoStatusUnmodified,
									Checksum: "46813af30d24fb7ad0a019b0da4fcde88368133fcfe39c5a8b25a328e6be4ab2",
								},
								"file3": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "429713c8e82ae8d02bff0cd368581903ac6d368cfdacc5bb5ec6fc14d13f3fd0",
								},
								"file4": archive.ItemInfo{
									ID: "AWSID124",
									Status: archive.ItemInfoStatusUnmodified,
									Checksum: "79edf074b55cdb3088721e88814523124c7da05001175e14b0dcf78336730fcd",
								},
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) != 1 {
						return nil, fmt.Errorf("unexpected number of ids: %v", ids)
					}
					return map[string]string{
						"AWSID123": "toglacier-archive-1.tar.gz",
					}, nil
				},
			},
			archive: mockArchive{
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					sort.Strings(filter)
					switch filename {
					case "toglacier-archive-1.tar.gz":
						if len(filter) != 2 || filter[0] != "file1" || filter[1] != "file3" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
					case "toglacier-archive-2.tar.gz":
						if len(filter) != 1 || filter[0] != "file2" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
					}
					return nil, nil
				},
				// Checksum calculation always fails, which must abort the
				// skip-unmodified logic with this error.
				mockFileChecksum: func(filename string) (string, error) {
					return "", errors.New("checksum failed")
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
			expectedError: errors.New("checksum failed"),
		},
		{
			description: "it should detect an error while retrieving a backup part",
			id: "AWSID123",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				// The main backup downloads fine; downloading the referenced
				// part AWSID122 fails.
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) == 0 {
						return nil, nil
					}
					switch ids[0] {
					case "AWSID123":
						return map[string]string{
							"AWSID123": "toglacier-archive-1.tar.gz",
						}, nil
					case "AWSID122":
						return nil, errors.New("failed to download backup")
					}
					return nil, fmt.Errorf("unexpected id “%s”", ids[0])
				},
			},
			archive: mockArchive{
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					switch filename {
					case "toglacier-archive-1.tar.gz":
						if len(filter) != 0 {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file1": archive.ItemInfo{
								Status: archive.ItemInfoStatusNew,
								ID: "AWSID123",
								Checksum: "a5b2df3d72bd28d2382b0b4cca4c25fa260e018b58a915f1e5af14485a746ca8",
							},
							"file2": archive.ItemInfo{
								Status: archive.ItemInfoStatusUnmodified,
								ID: "AWSID122",
								Checksum: "a8c23a9b1441de7f048471994f9500664acb0f6551e418e5b9da5af559606a63",
							},
						}, nil
					}
					return nil, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
			expectedError: errors.New("failed to download backup"),
		},
		{
			description: "it should detect an error listing backups from local storage",
			id: "AWSID123",
			storage: mockStorage{
				mockList: func() (storage.Backups, error) {
					return nil, errors.New("error listing the backups")
				},
			},
			expectedError: errors.New("error listing the backups"),
		},
		{
			description: "it should detect when there's an error retrieving a backup",
			id: "AWSID123",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					return nil, errors.New("error retrieving the backup")
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
			expectedError: errors.New("error retrieving the backup"),
		},
		{
			description: "it should detect an error decrypting the backup",
			id: "AWSID123",
			backupSecret: "123456",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
						},
					}, nil
				},
			},
			envelop: mockEnvelop{
				mockDecrypt: func(encryptedFilename, secret string) (string, error) {
					return "", errors.New("invalid encrypted content")
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) == 0 {
						return nil, errors.New("no ids given")
					}
					// Same lazily created encrypted fixture file as in the
					// successful encrypted scenario above.
					n := path.Join(os.TempDir(), "toglacier-test-getenc")
					if _, err := os.Stat(n); os.IsNotExist(err) {
						f, err := os.Create(n)
						if err != nil {
							t.Fatalf("error creating a temporary file. details: %s", err)
						}
						defer f.Close()
						content, err := hex.DecodeString("656e637279707465643a8fbd41664a1d72b4ea1fcecd618a6ed5c05c95bf65bfda2d4d176e8feff96f710000000000000000000000000000000091d8e827b5136dfac6bb3dbc51f15c17d34947880f91e62799910ea05053969abc28033550b3781111")
						if err != nil {
							t.Fatalf("error decoding encrypted archive. details: %s", err)
						}
						f.Write(content)
					}
					return map[string]string{ids[0]: n}, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
			expectedError: errors.New("invalid encrypted content"),
		},
		{
			description: "it should detect an error while extracting the backup",
			id: "AWSID123",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "AWSID123" {
						return fmt.Errorf("unexpected id %s", b.Backup.ID)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID122",
								CreatedAt: time.Date(2015, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "350c8ae1300b38a6cc74793e28712b5473c5f663bf8085b5c9bb0f191ed68f6d",
								VaultName: "vault",
								Size: 89,
							},
						},
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
							Info: archive.Info{
								"file1": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
								},
								"file2": archive.ItemInfo{
									ID: "AWSID122",
									Status: archive.ItemInfoStatusUnmodified,
									Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
								},
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					return map[string]string{
						"AWSID123": "toglacier-archive-1.tar.gz",
						"AWSID122": "toglacier-archive-2.tar.gz",
					}, nil
				},
			},
			archive: mockArchive{
				// Extracting the second part fails, which must surface as the
				// scenario's expected error.
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					switch filename {
					case "toglacier-archive-2.tar.gz":
						return nil, errors.New("error extracting backup")
					}
					return nil, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
			expectedError: errors.New("error extracting backup"),
		},
		{
			description: "it should detect an error while saving a backup locally",
			id: "AWSID123",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					return errors.New("something went wrong")
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID122",
								CreatedAt: time.Date(2015, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "325152353325adc8854e185ab59daf44c51e78404e1512eea9dca116f3a8c16d",
								VaultName: "vault",
								Size: 38,
							},
						},
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) == 0 {
						return nil, nil
					}
					switch ids[0] {
					case "AWSID123":
						return map[string]string{
							"AWSID123": "toglacier-archive-1.tar.gz",
						}, nil
					case "AWSID122":
						return map[string]string{
							"AWSID122": "toglacier-archive-2.tar.gz",
						}, nil
					}
					return nil, fmt.Errorf("unexpected id “%s”", ids[0])
				},
			},
			archive: mockArchive{
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					switch filename {
					case "toglacier-archive-1.tar.gz":
						if len(filter) != 0 {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file1": archive.ItemInfo{
								Status: archive.ItemInfoStatusNew,
								ID: "AWSID123",
								Checksum: "a5b2df3d72bd28d2382b0b4cca4c25fa260e018b58a915f1e5af14485a746ca8",
							},
							"file2": archive.ItemInfo{
								Status: archive.ItemInfoStatusUnmodified,
								ID: "AWSID122",
								Checksum: "a8c23a9b1441de7f048471994f9500664acb0f6551e418e5b9da5af559606a63",
							},
						}, nil
					case "toglacier-archive-2.tar.gz":
						if len(filter) != 1 || filter[0] != "file2" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
					}
					return nil, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
			expectedError: errors.New("something went wrong"),
		},
		{
			description: "it should detect an error while saving a backup part locally",
			id: "AWSID123",
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					return errors.New("something went wrong")
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "AWSID122",
								CreatedAt: time.Date(2015, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "8d9ccbb4e474dbd211a7b1f115c7bddaa950842e51a60418c4e943dee29e9113",
								VaultName: "vault",
								Size: 41,
							},
						},
						{
							Backup: cloud.Backup{
								ID: "AWSID123",
								CreatedAt: time.Date(2016, 12, 27, 8, 14, 53, 0, time.UTC),
								Checksum: "cb63324d2c35cdfcb4521e15ca4518bd0ed9dc2364a9f47de75151b3f9b4b705",
								VaultName: "vault",
								Size: 41,
							},
							Info: archive.Info{
								"file1": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
								},
								"file2": archive.ItemInfo{
									ID: "AWSID122",
									Status: archive.ItemInfoStatusUnmodified,
									Checksum: "a6d392677577af12fb1f4ceb510940374c3378455a1485b0226a35ef5ad65242",
								},
								"file3": archive.ItemInfo{
									ID: "AWSID123",
									Status: archive.ItemInfoStatusNew,
									Checksum: "429713c8e82ae8d02bff0cd368581903ac6d368cfdacc5bb5ec6fc14d13f3fd0",
								},
							},
						},
					}, nil
				},
			},
			cloud: mockCloud{
				mockGet: func(ids ...string) (filenames map[string]string, err error) {
					if len(ids) != 2 {
						return nil, fmt.Errorf("unexpected number of ids: %v", ids)
					}
					return map[string]string{
						"AWSID123": "toglacier-archive-1.tar.gz",
						"AWSID122": "toglacier-archive-2.tar.gz",
					}, nil
				},
			},
			archive: mockArchive{
				mockExtract: func(filename string, filter []string) (archive.Info, error) {
					sort.Strings(filter)
					switch filename {
					case "toglacier-archive-1.tar.gz":
						if len(filter) != 2 || filter[0] != "file1" || filter[1] != "file3" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file1": archive.ItemInfo{
								Status: archive.ItemInfoStatusNew,
								ID: "AWSID123",
								Checksum: "a5b2df3d72bd28d2382b0b4cca4c25fa260e018b58a915f1e5af14485a746ca8",
							},
							"file2": archive.ItemInfo{
								Status: archive.ItemInfoStatusUnmodified,
								ID: "AWSID122",
								Checksum: "a8c23a9b1441de7f048471994f9500664acb0f6551e418e5b9da5af559606a63",
							},
						}, nil
					case "toglacier-archive-2.tar.gz":
						if len(filter) != 1 || filter[0] != "file2" {
							return nil, fmt.Errorf("unexpected filter “%v”", filter)
						}
						return archive.Info{
							"file2": archive.ItemInfo{
								Status: archive.ItemInfoStatusNew,
								ID: "AWSID122",
								Checksum: "a8c23a9b1441de7f048471994f9500664acb0f6551e418e5b9da5af559606a63",
							},
						}, nil
					}
					return nil, nil
				},
			},
			logger: mockLogger{
				mockDebug: func(args ...interface{}) {},
				mockDebugf: func(format string, args ...interface{}) {},
				mockInfo: func(args ...interface{}) {},
				mockInfof: func(format string, args ...interface{}) {},
				mockWarning: func(args ...interface{}) {},
				mockWarningf: func(format string, args ...interface{}) {},
			},
			expectedError: errors.New("something went wrong"),
		},
	}
	// Run every scenario as an isolated subtest.
	for _, scenario := range scenarios {
		t.Run(scenario.description, func(t *testing.T) {
			toGlacier := toglacier.ToGlacier{
				Context: context.Background(),
				Storage: scenario.storage,
				Envelop: scenario.envelop,
				Cloud: scenario.cloud,
				Archive: scenario.archive,
				Logger: scenario.logger,
			}
			err := toGlacier.RetrieveBackup(scenario.id, scenario.backupSecret, scenario.skipUnmodified)
			// Errors may originate from the archive package or locally, so both
			// comparison helpers are consulted.
			if !archive.ErrorEqual(scenario.expectedError, err) && !ErrorEqual(scenario.expectedError, err) {
				t.Errorf("errors don't match. expected “%v” and got “%v”", scenario.expectedError, err)
			}
		})
	}
}
// TestToGlacier_RemoveBackups exercises ToGlacier.RemoveBackups: removing a
// backup from the cloud and the local storage, and re-pointing or dropping
// archive-info references that other local backups hold to the removed ID.
// Error paths cover the cloud removal, listing, saving and local removal.
func TestToGlacier_RemoveBackups(t *testing.T) {
	scenarios := []struct {
		description   string          // human-readable subtest name
		ids           []string        // backup IDs passed to RemoveBackups
		cloud         cloud.Cloud     // remote cloud mock
		storage       storage.Storage // local backup-metadata storage mock
		expectedError error           // error RemoveBackups is expected to return; nil for success
	}{
		{
			description: "it should remove a backup correctly (removing references)",
			ids: []string{"123456"},
			cloud: mockCloud{
				mockRemove: func(id string) error {
					return nil
				},
			},
			storage: mockStorage{
				// Backup 123457 referenced the removed backup via its item
				// info; it must be re-saved with that reference dropped.
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "123457" {
						return fmt.Errorf("saving unexpected backup id “%s”", b.Backup.ID)
					}
					if len(b.Info) > 0 {
						return fmt.Errorf("unexpected number (%d) of items info", len(b.Info))
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "123457",
								CreatedAt: time.Now(),
							},
							Info: archive.Info{
								"filename1": archive.ItemInfo{
									ID: "123456",
									Status: archive.ItemInfoStatusUnmodified,
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID: "123456",
								CreatedAt: time.Now().Add(-10 * time.Minute),
							},
							Info: archive.Info{
								"filename2": archive.ItemInfo{
									ID: "123454",
									Status: archive.ItemInfoStatusUnmodified,
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID: "123455",
								CreatedAt: time.Now().Add(-20 * time.Minute),
							},
							Info: archive.Info{
								"filename2": archive.ItemInfo{
									ID: "123455",
									Status: archive.ItemInfoStatusNew,
								},
							},
						},
					}, nil
				},
				mockRemove: func(id string) error {
					if id != "123456" {
						return fmt.Errorf("unexpected id “%s”", id)
					}
					return nil
				},
			},
		},
		{
			description: "it should remove a backup correctly (replacing references)",
			ids: []string{"123456"},
			cloud: mockCloud{
				mockRemove: func(id string) error {
					return nil
				},
			},
			storage: mockStorage{
				// The reference held by backup 123457 must be re-pointed to the
				// older backup (123455) that still contains the file.
				mockSave: func(b storage.Backup) error {
					if b.Backup.ID != "123457" {
						return fmt.Errorf("saving unexpected backup id “%s”", b.Backup.ID)
					}
					if itemInfo, ok := b.Info["filename1"]; !ok || itemInfo.ID != "123455" {
						return fmt.Errorf("unexpected archive information for backup 123457: %v", b.Info)
					}
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "123456",
								CreatedAt: time.Now().Add(-10 * time.Minute),
							},
							Info: archive.Info{
								"filename1": archive.ItemInfo{
									ID: "123456",
									Status: archive.ItemInfoStatusModified,
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID: "123457",
								CreatedAt: time.Now(),
							},
							Info: archive.Info{
								"filename1": archive.ItemInfo{
									ID: "123456",
									Status: archive.ItemInfoStatusUnmodified,
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID: "123455",
								CreatedAt: time.Now().Add(-20 * time.Minute),
							},
							Info: archive.Info{
								"filename1": archive.ItemInfo{
									ID: "123455",
									Status: archive.ItemInfoStatusNew,
								},
							},
						},
					}, nil
				},
				mockRemove: func(id string) error {
					if id != "123456" {
						return fmt.Errorf("unexpected id “%s”", id)
					}
					return nil
				},
			},
		},
		{
			description: "it should detect an error while removing the remote backup",
			ids: []string{"123456"},
			cloud: mockCloud{
				mockRemove: func(id string) error {
					return errors.New("error removing backup")
				},
			},
			storage: mockStorage{
				mockRemove: func(id string) error {
					return nil
				},
			},
			expectedError: errors.New("error removing backup"),
		},
		{
			description: "it should detect an error listing the backups",
			ids: []string{"123456"},
			cloud: mockCloud{
				mockRemove: func(id string) error {
					return nil
				},
			},
			storage: mockStorage{
				mockList: func() (storage.Backups, error) {
					return nil, errors.New("failed to list backups")
				},
				mockRemove: func(id string) error {
					return nil
				},
			},
			expectedError: errors.New("failed to list backups"),
		},
		{
			description: "it should detect an error saving the backup",
			ids: []string{"123456"},
			cloud: mockCloud{
				mockRemove: func(id string) error {
					return nil
				},
			},
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					return errors.New("could not save the backup")
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "123457",
								CreatedAt: time.Now(),
							},
							Info: archive.Info{
								"filename1": archive.ItemInfo{
									ID: "123456",
									Status: archive.ItemInfoStatusUnmodified,
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID: "123456",
								CreatedAt: time.Now().Add(-10 * time.Minute),
							},
							Info: archive.Info{
								"filename2": archive.ItemInfo{
									ID: "123454",
									Status: archive.ItemInfoStatusUnmodified,
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID: "123455",
								CreatedAt: time.Now().Add(-20 * time.Minute),
							},
							Info: archive.Info{
								"filename2": archive.ItemInfo{
									ID: "123455",
									Status: archive.ItemInfoStatusNew,
								},
							},
						},
					}, nil
				},
				mockRemove: func(id string) error {
					return nil
				},
			},
			expectedError: errors.New("could not save the backup"),
		},
		{
			description: "it should detect an error while removing the local backup",
			ids: []string{"123456"},
			cloud: mockCloud{
				mockRemove: func(id string) error {
					return nil
				},
			},
			storage: mockStorage{
				mockSave: func(b storage.Backup) error {
					return nil
				},
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID: "123457",
								CreatedAt: time.Now(),
							},
							Info: archive.Info{
								"filename1": archive.ItemInfo{
									ID: "123456",
									Status: archive.ItemInfoStatusUnmodified,
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID: "123456",
								CreatedAt: time.Now().Add(-10 * time.Minute),
							},
							Info: archive.Info{
								"filename2": archive.ItemInfo{
									ID: "123454",
									Status: archive.ItemInfoStatusUnmodified,
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID: "123455",
								CreatedAt: time.Now().Add(-20 * time.Minute),
							},
							Info: archive.Info{
								"filename2": archive.ItemInfo{
									ID: "123455",
									Status: archive.ItemInfoStatusNew,
								},
							},
						},
					}, nil
				},
				mockRemove: func(id string) error {
					return errors.New("error removing backup")
				},
			},
			expectedError: errors.New("error removing backup"),
		},
	}
	// Run every scenario as an isolated subtest.
	for _, scenario := range scenarios {
		t.Run(scenario.description, func(t *testing.T) {
			toGlacier := toglacier.ToGlacier{
				Context: context.Background(),
				Cloud: scenario.cloud,
				Storage: scenario.storage,
			}
			if err := toGlacier.RemoveBackups(scenario.ids...); !ErrorEqual(scenario.expectedError, err) {
				t.Errorf("errors don't match. expected “%v” and got “%v”", scenario.expectedError, err)
			}
		})
	}
}
// TestToGlacier_RemoveOldBackups exercises ToGlacier.RemoveOldBackups, which
// keeps only the `keepBackups` most recent backups and removes every older
// one from both the cloud and the local storage. The mocks assert that only
// the expected backup id ("123456", the oldest kept-eligible entry) is
// removed, and that listing/removal errors are propagated.
func TestToGlacier_RemoveOldBackups(t *testing.T) {
	now := time.Now()
	scenarios := []struct {
		description   string
		keepBackups   int             // number of most recent backups to retain
		cloud         cloud.Cloud     // mocked cloud backend
		storage       storage.Storage // mocked local storage
		expectedError error
	}{
		{
			// Happy path: of the four stored backups only "123456" should be
			// removed ("123459" is older but the oldest entries beyond the
			// keep window are handled by the removal loop under test).
			description: "it should remove all old backups correctly",
			keepBackups: 2,
			cloud: mockCloud{
				mockRemove: func(id string) error {
					if id != "123456" {
						return fmt.Errorf("unexpected id %s", id)
					}
					return nil
				},
			},
			storage: mockStorage{
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID:        "123456",
								CreatedAt: now,
								Checksum:  "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
								VaultName: "test",
							},
						},
						{
							Backup: cloud.Backup{
								ID:        "123457",
								CreatedAt: now.Add(time.Second),
								Checksum:  "0484ed70359cd1a4337d16a4143a3d247e0a3ecbce01482c318d709ed5161016",
								VaultName: "test",
							},
							Info: archive.Info{
								"file1": archive.ItemInfo{
									ID:       "123459",
									Status:   archive.ItemInfoStatusUnmodified,
									Checksum: "4c6733f2d51c5cde947835279ce9f031bcacaa2265988ef1353078810695fb20",
								},
							},
						},
						{
							Backup: cloud.Backup{
								ID:        "123458",
								CreatedAt: now.Add(time.Minute),
								Checksum:  "5f9c426fb1e150c1c09dda260bb962c7602b595df7586a1f3899735b839b138f",
								VaultName: "test",
							},
						},
						{
							Backup: cloud.Backup{
								ID:        "123459",
								CreatedAt: now.Add(-time.Hour),
								Checksum:  "9a16f6eaebe1a7a3c9e456c5a37063d712de11d839040e5963cf864feb16e114",
								VaultName: "test",
							},
						},
					}, nil
				},
				mockRemove: func(id string) error {
					if id != "123456" {
						return fmt.Errorf("removing unexpected id %s", id)
					}
					return nil
				},
			},
		},
		{
			// Listing the local storage fails before anything can be removed.
			description: "it should detect when there's an error listing the local backups",
			keepBackups: 2,
			storage: mockStorage{
				mockList: func() (storage.Backups, error) {
					return nil, errors.New("local storage corrupted")
				},
			},
			expectedError: errors.New("local storage corrupted"),
		},
		{
			// Cloud removal fails; the error must be surfaced to the caller.
			description: "it should detect when there is an error removing an old backup from the cloud",
			keepBackups: 2,
			cloud: mockCloud{
				mockRemove: func(id string) error {
					return errors.New("backup not found")
				},
			},
			storage: mockStorage{
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID:        "123456",
								CreatedAt: now,
								Checksum:  "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
								VaultName: "test",
							},
						},
						{
							Backup: cloud.Backup{
								ID:        "123457",
								CreatedAt: now.Add(time.Second),
								Checksum:  "0484ed70359cd1a4337d16a4143a3d247e0a3ecbce01482c318d709ed5161016",
								VaultName: "test",
							},
						},
						{
							Backup: cloud.Backup{
								ID:        "123458",
								CreatedAt: now.Add(time.Minute),
								Checksum:  "5f9c426fb1e150c1c09dda260bb962c7602b595df7586a1f3899735b839b138f",
								VaultName: "test",
							},
						},
					}, nil
				},
				mockRemove: func(id string) error {
					if id != "123456" {
						return fmt.Errorf("removing unexpected id %s", id)
					}
					return nil
				},
			},
			expectedError: errors.New("backup not found"),
		},
		{
			// Cloud removal succeeds but the local storage removal fails.
			description: "it should detect when there is an error removing an old backup from the local storage",
			keepBackups: 2,
			cloud: mockCloud{
				mockRemove: func(id string) error {
					if id != "123456" {
						return fmt.Errorf("unexpected id %s", id)
					}
					return nil
				},
			},
			storage: mockStorage{
				mockList: func() (storage.Backups, error) {
					return storage.Backups{
						{
							Backup: cloud.Backup{
								ID:        "123456",
								CreatedAt: now,
								Checksum:  "ca34f069795292e834af7ea8766e9e68fdddf3f46c7ce92ab94fc2174910adb7",
								VaultName: "test",
							},
						},
						{
							Backup: cloud.Backup{
								ID:        "123457",
								CreatedAt: now.Add(time.Second),
								Checksum:  "0484ed70359cd1a4337d16a4143a3d247e0a3ecbce01482c318d709ed5161016",
								VaultName: "test",
							},
						},
						{
							Backup: cloud.Backup{
								ID:        "123458",
								CreatedAt: now.Add(time.Minute),
								Checksum:  "5f9c426fb1e150c1c09dda260bb962c7602b595df7586a1f3899735b839b138f",
								VaultName: "test",
							},
						},
					}, nil
				},
				mockRemove: func(id string) error {
					return errors.New("backup not found")
				},
			},
			expectedError: errors.New("backup not found"),
		},
	}
	for _, scenario := range scenarios {
		t.Run(scenario.description, func(t *testing.T) {
			toGlacier := toglacier.ToGlacier{
				Context: context.Background(),
				Cloud:   scenario.cloud,
				Storage: scenario.storage,
			}
			// Compare only the root cause messages, so wrapped errors match.
			if err := toGlacier.RemoveOldBackups(scenario.keepBackups); !ErrorEqual(scenario.expectedError, err) {
				t.Errorf("errors don't match. expected “%v” and got “%v”", scenario.expectedError, err)
			}
		})
	}
}
// TestToGlacier_SendReport exercises ToGlacier.SendReport, verifying the
// SMTP envelope (address, from, to) and the rendered report body, as well as
// error propagation from report building and from the e-mail sender.
func TestToGlacier_SendReport(t *testing.T) {
	date := time.Date(2017, 3, 10, 14, 10, 46, 0, time.UTC)
	scenarios := []struct {
		description   string
		reports       []report.Report
		emailSender   toglacier.EmailSender
		emailServer   string
		emailPort     int
		emailUsername string
		emailPassword string
		emailFrom     string
		emailTo       []string
		format        report.Format
		expectedError error
	}{
		{
			description: "it should send an e-mail correctly",
			reports: []report.Report{
				func() report.Report {
					// Fixed creation date so the rendered header is stable.
					r := report.NewTest()
					r.CreatedAt = date
					r.Errors = append(r.Errors, errors.New("timeout connecting to aws"))
					return r
				}(),
			},
			emailSender: toglacier.EmailSenderFunc(func(addr string, a smtp.Auth, from string, to []string, msg []byte) error {
				if addr != "127.0.0.1:587" {
					return fmt.Errorf("unexpected “address” %s", addr)
				}
				if from != "<EMAIL>" {
					return fmt.Errorf("unexpected “from” %s", from)
				}
				if !reflect.DeepEqual(to, []string{"<EMAIL>"}) {
					return fmt.Errorf("unexpected “to” %v", to)
				}
				expectedMsg := `From: <EMAIL>
To: <EMAIL>
Subject: toglacier report
MIME-Version: 1.0
Content-Type: text/plain; charset=utf-8
[2017-03-10 14:10:46] Test report
Testing the notification mechanisms.
Errors
------
* timeout connecting to aws
`
				// Compare line by line with whitespace trimmed, so
				// indentation differences don't fail the test.
				msgLines := strings.Split(string(msg), "\n")
				for i := range msgLines {
					msgLines[i] = strings.TrimSpace(msgLines[i])
				}
				expectedLines := strings.Split(expectedMsg, "\n")
				for i := range expectedLines {
					expectedLines[i] = strings.TrimSpace(expectedLines[i])
				}
				if !reflect.DeepEqual(expectedLines, msgLines) {
					return fmt.Errorf("unexpected message\n%v", Diff(expectedLines, msgLines))
				}
				return nil
			}),
			emailServer:   "127.0.0.1",
			emailPort:     587,
			emailUsername: "user",
			emailPassword: "<PASSWORD>",
			emailFrom:     "<EMAIL>",
			emailTo: []string{
				"<EMAIL>",
			},
			format: report.FormatPlain,
		},
		{
			// The report itself fails to render; no e-mail should be needed.
			description: "it should fail to build the reports",
			reports: []report.Report{
				mockReport{
					mockBuild: func(report.Format) (string, error) {
						return "", errors.New("error generating report")
					},
				},
			},
			emailServer:   "127.0.0.1",
			emailPort:     587,
			emailUsername: "user",
			emailPassword: "<PASSWORD>",
			emailFrom:     "<EMAIL>",
			emailTo: []string{
				"<EMAIL>",
			},
			format:        report.FormatPlain,
			expectedError: errors.New("error generating report"),
		},
		{
			// The SMTP sender fails; the error must be surfaced.
			description: "it should detect an error while sending the e-mail",
			emailSender: toglacier.EmailSenderFunc(func(addr string, a smtp.Auth, from string, to []string, msg []byte) error {
				return errors.New("generic error while sending e-mail")
			}),
			emailServer:   "127.0.0.1",
			emailPort:     587,
			emailUsername: "user",
			emailPassword: "<PASSWORD>",
			emailFrom:     "<EMAIL>",
			emailTo: []string{
				"<EMAIL>",
			},
			format:        report.FormatPlain,
			expectedError: errors.New("generic error while sending e-mail"),
		},
	}
	for _, scenario := range scenarios {
		// The report package keeps global state; reset it between scenarios.
		report.Clear()
		t.Run(scenario.description, func(t *testing.T) {
			toGlacier := toglacier.ToGlacier{}
			for _, r := range scenario.reports {
				report.Add(r)
			}
			emailInfo := toglacier.EmailInfo{
				Sender:   scenario.emailSender,
				Server:   scenario.emailServer,
				Port:     scenario.emailPort,
				Username: scenario.emailUsername,
				Password: <PASSWORD>,
				From:     scenario.emailFrom,
				To:       scenario.emailTo,
				Format:   scenario.format,
			}
			if err := toGlacier.SendReport(emailInfo); !ErrorEqual(scenario.expectedError, err) {
				t.Errorf("errors don't match. expected “%v” and got “%v”", scenario.expectedError, err)
			}
		})
	}
}
// mockArchive is a test double for the archive dependency; every method
// simply delegates to the corresponding configurable function field.
type mockArchive struct {
	mockBuild        func(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error)
	mockExtract      func(filename string, filter []string) (archive.Info, error)
	mockFileChecksum func(filename string) (string, error)
}

// Build delegates to mockBuild.
func (m mockArchive) Build(lastArchiveInfo archive.Info, ignorePatterns []*regexp.Regexp, backupPaths ...string) (string, archive.Info, error) {
	return m.mockBuild(lastArchiveInfo, ignorePatterns, backupPaths...)
}

// Extract delegates to mockExtract.
func (m mockArchive) Extract(filename string, filter []string) (archive.Info, error) {
	return m.mockExtract(filename, filter)
}

// FileChecksum delegates to mockFileChecksum.
func (m mockArchive) FileChecksum(filename string) (string, error) {
	return m.mockFileChecksum(filename)
}
// mockEnvelop is a test double for the encryption dependency; each method
// delegates to the corresponding configurable function field.
type mockEnvelop struct {
	mockEncrypt func(filename, secret string) (string, error)
	mockDecrypt func(encryptedFilename, secret string) (string, error)
}

// Encrypt delegates to mockEncrypt.
func (m mockEnvelop) Encrypt(filename, secret string) (string, error) {
	return m.mockEncrypt(filename, secret)
}

// Decrypt delegates to mockDecrypt.
func (m mockEnvelop) Decrypt(encryptedFilename, secret string) (string, error) {
	return m.mockDecrypt(encryptedFilename, secret)
}
// mockCloud is a test double for the cloud backend. Each method delegates to
// a configurable function field; note that the context parameter required by
// the cloud.Cloud interface is accepted but ignored by the mock.
type mockCloud struct {
	mockSend   func(filename string) (cloud.Backup, error)
	mockList   func() ([]cloud.Backup, error)
	mockGet    func(id ...string) (filenames map[string]string, err error)
	mockRemove func(id string) error
	mockClose  func() error
}

// Send delegates to mockSend (ctx is ignored).
func (m mockCloud) Send(ctx context.Context, filename string) (cloud.Backup, error) {
	return m.mockSend(filename)
}

// List delegates to mockList (ctx is ignored).
func (m mockCloud) List(ctx context.Context) ([]cloud.Backup, error) {
	return m.mockList()
}

// Get delegates to mockGet (ctx is ignored).
func (m mockCloud) Get(ctx context.Context, id ...string) (filenames map[string]string, err error) {
	return m.mockGet(id...)
}

// Remove delegates to mockRemove (ctx is ignored).
func (m mockCloud) Remove(ctx context.Context, id string) error {
	return m.mockRemove(id)
}

// Close delegates to mockClose.
func (m mockCloud) Close() error {
	return m.mockClose()
}
// mockStorage is a test double for the local storage dependency; each method
// delegates to the corresponding configurable function field.
type mockStorage struct {
	mockSave   func(storage.Backup) error
	mockList   func() (storage.Backups, error)
	mockRemove func(id string) error
}

// Save delegates to mockSave.
func (m mockStorage) Save(b storage.Backup) error {
	return m.mockSave(b)
}

// List delegates to mockList.
func (m mockStorage) List() (storage.Backups, error) {
	return m.mockList()
}

// Remove delegates to mockRemove.
func (m mockStorage) Remove(id string) error {
	return m.mockRemove(id)
}
// mockReport is a test double for report.Report, delegating Build to a
// configurable function field.
type mockReport struct {
	mockBuild func(report.Format) (string, error)
}

// Build delegates to mockBuild.
func (r mockReport) Build(f report.Format) (string, error) {
	return r.mockBuild(f)
}
// mockLogger is a test double for the logger dependency; each log level
// delegates to the corresponding configurable function field.
type mockLogger struct {
	mockDebug    func(args ...interface{})
	mockDebugf   func(format string, args ...interface{})
	mockInfo     func(args ...interface{})
	mockInfof    func(format string, args ...interface{})
	mockWarning  func(args ...interface{})
	mockWarningf func(format string, args ...interface{})
}

// Debug delegates to mockDebug.
func (m mockLogger) Debug(args ...interface{}) {
	m.mockDebug(args...)
}

// Debugf delegates to mockDebugf.
func (m mockLogger) Debugf(format string, args ...interface{}) {
	m.mockDebugf(format, args...)
}

// Info delegates to mockInfo.
func (m mockLogger) Info(args ...interface{}) {
	m.mockInfo(args...)
}

// Infof delegates to mockInfof.
func (m mockLogger) Infof(format string, args ...interface{}) {
	m.mockInfof(format, args...)
}

// Warning delegates to mockWarning.
func (m mockLogger) Warning(args ...interface{}) {
	m.mockWarning(args...)
}

// Warningf delegates to mockWarningf.
func (m mockLogger) Warningf(format string, args ...interface{}) {
	m.mockWarningf(format, args...)
}
// ErrorEqual compares the errors messages. This is useful in unit tests to
// compare encapsulated error messages.
func ErrorEqual(first, second error) bool {
	// Unwrap to the root cause so wrapped errors compare by their origin.
	a, b := errors.Cause(first), errors.Cause(second)
	if a == nil || b == nil {
		// With at least one nil cause, equality holds only when both are nil.
		return a == b
	}
	return a.Error() == b.Error()
}
// Diff is useful to see the difference when comparing two complex types.
// Both values are pretty-printed with spew and compared line by line,
// producing a readable diff for test failure messages.
func Diff(a, b interface{}) []difflib.DiffRecord {
	return difflib.Diff(strings.SplitAfter(spew.Sdump(a), "\n"), strings.SplitAfter(spew.Sdump(b), "\n"))
}
|
from textblob import TextBlob

# Sample sentence whose sentiment we want to classify.
text = "I am feeling frustrated"

# Run TextBlob's sentiment analysis and keep only the polarity score
# (negative < 0, neutral == 0, positive > 0).
polarity = TextBlob(text).sentiment.polarity

if polarity > 0:
    print("Text has a positive sentiment")
elif polarity < 0:
    print("Text has a negative sentiment")
else:
    print("Text is neutral")
/**
 * Kinds of physical/link connections a provider network can use.
 * Note: members keep their implicit numeric values (Wired = 0, ...); do not
 * reorder without auditing persisted/serialized values.
 */
enum ConnectionType {
    Wired,
    Wireless,
    Satellite,
    FiberOptic
}
/**
 * Describes a single provider network entry.
 */
interface ProviderNetwork {
    /** Human-readable name shown in the UI. */
    displayName: string;
    /** How the network is physically connected. */
    connectionType: ConnectionType;
    /** Optional internal identifier name. */
    internalName?: string;
    /** Optional network address. */
    address?: string;
    /** Numeric network identifier. */
    networkID: number;
    /** URL of the network's explorer. */
    explorerLink: string;
}
function filterNetworks(networks: ProviderNetwork[], connectionType: ConnectionType): ProviderNetwork[] {
return networks.filter(network => network.connectionType === connectionType);
} |
require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
# Specs for VK::Auth login flow; the VK auth endpoint is stubbed with webmock.
describe VK::Auth do
  context "with valid email and password" do
    before do
      # Successful login: the endpoint answers 302 with the session id in the
      # Location header and the user id in the remixmid cookie.
      stub_request(:any, 'login.userapi.com/auth').with(:query => {
        "site" => "2",
        "pass" => "password",
        "login" => "force",
        "email" => "<EMAIL>"}).to_return(:headers => {
        "Location" => ";sid=123",
        "Set-Cookie" => "remixmid=123"},
        :status => 302)
    end

    it 'should #login! properly' do
      VK::Auth.login!("<EMAIL>", "password").should be(true)
    end

    context "should return proper" do
      before do
        VK::Auth.login!("<EMAIL>", "password")
      end

      # sid is parsed out of the stubbed Location header above.
      it '#sid' do
        VK::Auth.sid.should eql "123"
      end

      # user_id is parsed out of the stubbed remixmid cookie above.
      it '#user_id' do
        VK::Auth.user_id.should eql "123"
      end
    end
  end

  context "with invalid email and password" do
    before do
      # No .to_return here: the stub answers with webmock's default response,
      # which the client treats as a failed login.
      stub_request(:any, 'login.userapi.com/auth').with(:query => {
        "site" => "2",
        "pass" => "password",
        "login" => "force",
        "email" => "<EMAIL>"})
    end

    it "should error an exception" do
      lambda { VK::Auth.login!("<EMAIL>", "password") }.should raise_error(VK::AuthFail)
    end
  end
end
|
<reponame>zonesgame/StendhalArcClient<gh_stars>1-10
///***************************************************************************
// * (C) Copyright 2003-2011 - Stendhal *
// ***************************************************************************
// ***************************************************************************
// * *
// * This program is free software; you can redistribute it and/or modify *
// * it under the terms of the GNU General Public License as published by *
// * the Free Software Foundation; either version 2 of the License, or *
// * (at your option) any later version. *
// * *
// ***************************************************************************/
//package games.stendhal.client.gui;
//
//import java.awt.Color;
//
//import javax.swing.SwingUtilities;
//
//import games.stendhal.client.GameScreen;
//import games.stendhal.client.gui.j2d.BackgroundPainter;
//import games.stendhal.client.gui.j2d.TextBoxFactory;
//import games.stendhal.client.listener.PositionChangeListener;
//import games.stendhal.client.sprite.Sprite;
//import games.stendhal.common.NotificationType;
//
///**
// * A controller for isolating the out-of event dispatch thread calls to the game
// * screen.
// */
//class ScreenController implements PositionChangeListener {
// /** The maximum width of text in text boxes, speech bubbles and similar. */
// private static final int BUBBLE_TEXT_WIDTH = 240;
//
// /** Image used for drawing tutorial box backgrounds. */
// private static final String TUTORIAL_BACKGROUND = "data/gui/tutorial_background.png";
// /** Depends on TUTORIAL_BACKGROUND. */
// private static final int TUTORIAL_LEFT_TILE_WIDTH = 48;
// /** Depends on TUTORIAL_BACKGROUND. */
// private static final int TUTORIAL_CENTER_TILE_WIDTH = 8;
// /** Depends on TUTORIAL_BACKGROUND. */
// private static final int TUTORIAL_TOP_TILE_HEIGHT = 32;
// /** Depends on TUTORIAL_BACKGROUND. */
// private static final int TUTORIAL_CENTER_TILE_HEIGHT = 8;
//
// private final GameScreen screen;
// private TextBoxFactory textBoxFactory;
//
// /**
// * Create a new ScreenController.
// *
// * @param screen controlled screen
// */
// ScreenController(GameScreen screen) {
// this.screen = screen;
// }
// /**
// * Adds a text bubble at a give position of the specified type. For
// * non-talking boxes the coordinates are ignored, and the box is attached
// * to the bottom of the screen.
// *
// * @param x The screen X coordinate.
// * @param y The screen Y coordinate.
// * @param text The textual content
// * @param type The notificationType
// * @param isTalking Is it a talking text bubble
// * @see games.stendhal.common.NotificationType
// */
// void addText(final double x, final double y, final String text, final NotificationType type,
// final boolean isTalking) {
// // createTextBox is thread safe, the rest is not
// final Sprite sprite = createTextBox(text, type, isTalking);
// final int textLength = text.length();
//
// if (!isTalking) {
// final int priority = getPriority(type);
// SwingUtilities.invokeLater(new Runnable() {
// @Override
// public void run() {
// screen.addStaticText(sprite, textLength, priority);
// }
// });
// } else {
// SwingUtilities.invokeLater(new Runnable() {
// @Override
// public void run() {
// screen.addTextBox(sprite, x, y, textLength);
// }
// });
// }
// }
//
// /**
// * Get the importance of a message to keep it above others
// *
// * @param type type of the message
// * @return priority
// */
// private int getPriority(NotificationType type) {
// // Tutorial above most messages, admin messages above everything
// // else
// switch (type) {
// case TUTORIAL:
// return 1;
// case SUPPORT:
// return 2;
// default:
// return 0;
// }
// }
//
// /**
// * Set the offline status of the client.
// *
// * @param offline
// */
// void setOffline(final boolean offline) {
// SwingUtilities.invokeLater(new Runnable() {
// @Override
// public void run() {
// screen.setOffline(offline);
// }
// });
// }
//
// @Override
// public void positionChanged(final double x, final double y) {
// SwingUtilities.invokeLater(new Runnable() {
// @Override
// public void run() {
// screen.positionChanged(x, y);
// }
// });
// }
//
// /**
// * Create a text box with the appropriate text color for a notification
// * type.
// *
// * @param text
// * @param type
// * @param isTalking if <code>true</code> create a text box with a bubble
// * handle
// * @return text sprite
// */
// private Sprite createTextBox(final String text, final NotificationType type,
// final boolean isTalking) {
// // Special handling for pretty tutorial events
// if (type == NotificationType.TUTORIAL) {
// BackgroundPainter painter = new BackgroundPainter(TUTORIAL_BACKGROUND,
// TUTORIAL_LEFT_TILE_WIDTH, TUTORIAL_CENTER_TILE_WIDTH,
// TUTORIAL_TOP_TILE_HEIGHT, TUTORIAL_CENTER_TILE_HEIGHT);
// return getTextFactory().createFancyTextBox(text, type.getColor(),
// BUBBLE_TEXT_WIDTH, 45, 6, 6, 6, painter);
// }
// return getTextFactory().createTextBox(text, BUBBLE_TEXT_WIDTH, type.getColor(), Color.white, isTalking);
// }
//
//
// /**
// * Lazy initialize the text box factory.
// *
// * @return factory
// */
// private TextBoxFactory getTextFactory() {
// if (textBoxFactory == null) {
// textBoxFactory = new TextBoxFactory();
// }
//
// return textBoxFactory;
// }
//}
|
#!/usr/bin/env bash
# Launch the Adlik serving server for a compiled model.
# Ports are taken from SERVABLE_HTTP_PORT / SERVABLE_GRPC_PORT; when neither
# is set, the server falls back to grpc 8500 and http 8501.

model_dir="/model_repos/compiled_model"
http_port=$SERVABLE_HTTP_PORT
grpc_port=$SERVABLE_GRPC_PORT
model_name=$MODEL_NAME

echo "$model_dir"
echo "$http_port"
echo "$grpc_port"
echo "$model_name"

if [ -z "$http_port" ] && [ -z "$grpc_port" ]; then
    # Fixed typo: the variable is SERVABLE_HTTP_PORT, not SERVABALE_HTTP_PORT.
    echo "can't find the SERVABLE_HTTP_PORT or SERVABLE_GRPC_PORT"
    echo "the grpc port will be set as 8500 and http port will be set as 8501"
fi

# Build the command used to run the serving server. Unlike the previous
# if/elif chain, both port flags are appended when both env vars are set
# (the old code silently dropped the grpc port whenever the http port was
# present).
cmd_run_server="adlik-serving --model_base_path=${model_dir}"
if [ -n "$http_port" ]; then
    cmd_run_server="$cmd_run_server --http_port=${http_port}"
fi
if [ -n "$grpc_port" ]; then
    cmd_run_server="$cmd_run_server --grpc_port=${grpc_port}"
fi
if [ -z "$http_port" ] && [ -z "$grpc_port" ]; then
    # No ports configured: use the documented defaults.
    # NOTE(review): this branch points at /srv/adlik-serving rather than
    # ${model_dir} — kept as-is, but confirm it is intentional.
    cmd_run_server="adlik-serving --model_base_path=/srv/adlik-serving --grpc_port=8500 --http_port=8501"
fi

# Start the inference server (blocks until the server exits).
eval "$cmd_run_server"
if [ "$?" != "0" ]; then
    echo "servinglite server abort"
    exit 1
fi
/**
 * Simple value object describing one employee.
 */
class Employee {
  constructor(name, title, age) {
    // Assign all three fields in one shot; property insertion order
    // (name, title, age) matches the original field-by-field assignment.
    Object.assign(this, { name, title, age });
  }
}
/**
 * In-memory registry of Employee records.
 */
class EmployeeRecord {
  constructor() {
    // Backing store for all registered employees, in insertion order.
    this.employees = [];
  }

  /**
   * Create an Employee from the given fields and register it.
   * @param {string} name
   * @param {string} title
   * @param {number} age
   */
  addEmployee(name, title, age) {
    this.employees.push(new Employee(name, title, age));
  }

  /**
   * Look up an employee by exact name.
   * @param {string} name
   * @returns {Employee|undefined} the first match, or undefined when absent
   *   (same implicit-undefined behavior as the original manual loop).
   */
  getEmployee(name) {
    // Array.prototype.find is the idiomatic form of the original linear scan.
    return this.employees.find(employee => employee.name === name);
  }
}
package service
import (
"github.com/eddieowens/ranvier/server/app/exchange/response"
"github.com/eddieowens/ranvier/server/app/model"
)
// MappingServiceKey is the dependency-injection key under which the
// MappingService implementation is registered.
const MappingServiceKey = "MappingService"

// MappingService maps internal config models onto API response payloads.
type MappingService interface {
	ToLevelConfigMeta(config *model.Config) response.ConfigMeta
	ToLevelConfigMetaData(config *model.Config) *response.ConfigMetaData
	ToResponse(config *model.Config) *response.Config
}

// mappingServiceImpl is the stateless default implementation of
// MappingService.
type mappingServiceImpl struct {
}
// ToLevelConfigMetaData converts a config model into its response metadata,
// exposing only the config name. Returns nil when the input is nil.
func (m *mappingServiceImpl) ToLevelConfigMetaData(config *model.Config) *response.ConfigMetaData {
	if config == nil {
		return nil
	}
	return &response.ConfigMetaData{
		Name: config.Name,
	}
}
// ToResponse wraps a config model in the API response envelope, embedding
// the model itself as the payload. Returns nil when the input is nil.
func (m *mappingServiceImpl) ToResponse(config *model.Config) *response.Config {
	if config == nil {
		return nil
	}
	return &response.Config{
		Data: config,
	}
}
// ToLevelConfigMeta wraps the config metadata in the response envelope;
// a nil config yields a ConfigMeta with nil Data (see ToLevelConfigMetaData).
func (m *mappingServiceImpl) ToLevelConfigMeta(config *model.Config) response.ConfigMeta {
	return response.ConfigMeta{
		Data: m.ToLevelConfigMetaData(config),
	}
}
|
<reponame>521libingxin/erlongshan
/**
* @fileoverview This file is generated by the Angular 2 template compiler.
* Do not edit.
* @suppress {suspiciousCode,uselessCode,missingProperties}
*/
/* tslint:disable */
import * as import0 from '../../../app/setup/settings-menu.component';
import * as import1 from '@angular/core/src/linker/view';
import * as import2 from '@angular/core/src/render/api';
import * as import3 from '@angular/core/src/linker/view_utils';
import * as import4 from '@angular/core/src/metadata/view';
import * as import5 from '@angular/core/src/linker/view_type';
import * as import6 from '@angular/core/src/change_detection/constants';
import * as import7 from '@angular/core/src/linker/component_factory';
import * as import8 from '@angular/router/src/router';
import * as import9 from '@angular/router/src/router_state';
import * as import10 from '../../../app/service/navigation.service';
import * as import11 from '../../../app/service/menus.service';
import * as import12 from '@angular/core/src/linker/view_container';
import * as import13 from '../../node_modules/@angular/common/src/directives/ng_for.ngfactory';
import * as import14 from '@angular/core/src/linker/template_ref';
import * as import15 from '@angular/core/src/change_detection/differs/iterable_differs';
import * as import16 from '@angular/common/src/directives/ng_for';
import * as import17 from '../../../app/service/setting-button.component';
import * as import18 from '../service/setting-button.component.ngfactory';
// Compiler-generated wrapper around SettingsMenuComponent (Angular template
// compiler output — the file header says "Do not edit"; only comments added).
export class Wrapper_SettingsMenuComponent {
  /*private*/ _eventHandler:Function;
  context:import0.SettingsMenuComponent;
  /*private*/ _changed:boolean;
  constructor(p0:any,p1:any,p2:any,p3:any) {
    this._changed = false;
    this.context = new import0.SettingsMenuComponent(p0,p1,p2,p3);
  }
  ngOnDetach(view:import1.AppView<any>,componentView:import1.AppView<any>,el:any):void {
  }
  ngOnDestroy():void {
  }
  // Fires the component's ngOnInit on the first check and reports whether
  // any wrapped input changed since the previous check.
  ngDoCheck(view:import1.AppView<any>,el:any,throwOnChange:boolean):boolean {
    var changed:any = this._changed;
    this._changed = false;
    if (!throwOnChange) { if ((view.numberOfChecks === 0)) { this.context.ngOnInit(); } }
    return changed;
  }
  checkHost(view:import1.AppView<any>,componentView:import1.AppView<any>,el:any,throwOnChange:boolean):void {
  }
  handleEvent(eventName:string,$event:any):boolean {
    var result:boolean = true;
    return result;
  }
  subscribe(view:import1.AppView<any>,_eventHandler:any):void {
    this._eventHandler = _eventHandler;
  }
}
// Compiler-generated host view for SettingsMenuComponent ("Do not edit";
// only comments added).
var renderType_SettingsMenuComponent_Host:import2.RenderComponentType = import3.createRenderComponentType('',0,import4.ViewEncapsulation.None,([] as any[]),{});
class View_SettingsMenuComponent_Host0 extends import1.AppView<any> {
  _el_0:any;
  compView_0:import1.AppView<import0.SettingsMenuComponent>;
  _SettingsMenuComponent_0_3:Wrapper_SettingsMenuComponent;
  constructor(viewUtils:import3.ViewUtils,parentView:import1.AppView<any>,parentIndex:number,parentElement:any) {
    super(View_SettingsMenuComponent_Host0,renderType_SettingsMenuComponent_Host,import5.ViewType.HOST,viewUtils,parentView,parentIndex,parentElement,import6.ChangeDetectorStatus.CheckAlways);
  }
  // Creates the <settings-menu> host element and instantiates the component
  // with its injected dependencies (Router, ActivatedRoute, NavService,
  // menuService).
  createInternal(rootSelector:string):import7.ComponentRef<any> {
    this._el_0 = import3.selectOrCreateRenderHostElement(this.renderer,'settings-menu',import3.EMPTY_INLINE_ARRAY,rootSelector,(null as any));
    this.compView_0 = new View_SettingsMenuComponent0(this.viewUtils,this,0,this._el_0);
    this._SettingsMenuComponent_0_3 = new Wrapper_SettingsMenuComponent(this.injectorGet(import8.Router,this.parentIndex),this.injectorGet(import9.ActivatedRoute,this.parentIndex),this.injectorGet(import10.NavService,this.parentIndex),this.injectorGet(import11.menuService,this.parentIndex));
    this.compView_0.create(this._SettingsMenuComponent_0_3.context);
    this.init(this._el_0,((<any>this.renderer).directRenderer? (null as any): [this._el_0]),(null as any));
    return new import7.ComponentRef_<any>(0,this,this._el_0,this._SettingsMenuComponent_0_3.context);
  }
  injectorGetInternal(token:any,requestNodeIndex:number,notFoundResult:any):any {
    if (((token === import0.SettingsMenuComponent) && (0 === requestNodeIndex))) { return this._SettingsMenuComponent_0_3.context; }
    return notFoundResult;
  }
  detectChangesInternal(throwOnChange:boolean):void {
    this._SettingsMenuComponent_0_3.ngDoCheck(this,this._el_0,throwOnChange);
    this.compView_0.internalDetectChanges(throwOnChange);
  }
  destroyInternal():void {
    this.compView_0.destroy();
  }
  visitRootNodesInternal(cb:any,ctx:any):void {
    cb(this._el_0,ctx);
  }
}
// Compiler-generated component factory and component view ("Do not edit";
// only comments added).
export const SettingsMenuComponentNgFactory:import7.ComponentFactory<import0.SettingsMenuComponent> = new import7.ComponentFactory<import0.SettingsMenuComponent>('settings-menu',View_SettingsMenuComponent_Host0,import0.SettingsMenuComponent);
const styles_SettingsMenuComponent:any[] = ([] as any[]);
var renderType_SettingsMenuComponent:import2.RenderComponentType = import3.createRenderComponentType('',0,import4.ViewEncapsulation.None,styles_SettingsMenuComponent,{});
// Renders the component template: a <ul class="setting_right_ul"> whose <li>
// children are stamped out by an *ngFor over context.settings.
export class View_SettingsMenuComponent0 extends import1.AppView<import0.SettingsMenuComponent> {
  _text_0:any;
  _el_1:any;
  _text_2:any;
  _anchor_3:any;
  /*private*/ _vc_3:import12.ViewContainer;
  _TemplateRef_3_5:any;
  _NgFor_3_6:import13.Wrapper_NgFor;
  _text_4:any;
  _text_5:any;
  constructor(viewUtils:import3.ViewUtils,parentView:import1.AppView<any>,parentIndex:number,parentElement:any) {
    super(View_SettingsMenuComponent0,renderType_SettingsMenuComponent,import5.ViewType.COMPONENT,viewUtils,parentView,parentIndex,parentElement,import6.ChangeDetectorStatus.CheckAlways);
  }
  createInternal(rootSelector:string):import7.ComponentRef<any> {
    const parentRenderNode:any = this.renderer.createViewRoot(this.parentElement);
    this._text_0 = this.renderer.createText(parentRenderNode,'\n  ',(null as any));
    this._el_1 = import3.createRenderElement(this.renderer,parentRenderNode,'ul',new import3.InlineArray2(2,'class','setting_right_ul'),(null as any));
    this._text_2 = this.renderer.createText(this._el_1,'\n    ',(null as any));
    this._anchor_3 = this.renderer.createTemplateAnchor(this._el_1,(null as any));
    this._vc_3 = new import12.ViewContainer(3,1,this,this._anchor_3);
    this._TemplateRef_3_5 = new import14.TemplateRef_(this,3,this._anchor_3);
    this._NgFor_3_6 = new import13.Wrapper_NgFor(this._vc_3.vcRef,this._TemplateRef_3_5,this.parentView.injectorGet(import15.IterableDiffers,this.parentIndex),this.ref);
    this._text_4 = this.renderer.createText(this._el_1,'\n  ',(null as any));
    this._text_5 = this.renderer.createText(parentRenderNode,'\n  ',(null as any));
    this.init((null as any),((<any>this.renderer).directRenderer? (null as any): [
      this._text_0,
      this._el_1,
      this._text_2,
      this._anchor_3,
      this._text_4,
      this._text_5
    ]
    ),(null as any));
    return (null as any);
  }
  injectorGetInternal(token:any,requestNodeIndex:number,notFoundResult:any):any {
    if (((token === import14.TemplateRef) && (3 === requestNodeIndex))) { return this._TemplateRef_3_5; }
    if (((token === import16.NgFor) && (3 === requestNodeIndex))) { return this._NgFor_3_6.context; }
    return notFoundResult;
  }
  detectChangesInternal(throwOnChange:boolean):void {
    const currVal_3_0_0:any = this.context.settings;
    this._NgFor_3_6.check_ngForOf(currVal_3_0_0,throwOnChange,false);
    this._NgFor_3_6.ngDoCheck(this,this._anchor_3,throwOnChange);
    this._vc_3.detectChangesInNestedViews(throwOnChange);
  }
  destroyInternal():void {
    this._vc_3.destroyNestedViews();
  }
  createEmbeddedViewInternal(nodeIndex:number):import1.AppView<any> {
    if ((nodeIndex == 3)) { return new View_SettingsMenuComponent1(this.viewUtils,this,3,this._anchor_3,this._vc_3); }
    return (null as any);
  }
}
// Compiler-generated embedded view for one *ngFor iteration: an <li>
// containing a <nav-icon-button> wired to the component's go(...) handler
// ("Do not edit"; only comments added).
class View_SettingsMenuComponent1 extends import1.AppView<any> {
  _el_0:any;
  _text_1:any;
  _el_2:any;
  compView_2:import1.AppView<import17.SettingsButtonComponent>;
  _SettingsButtonComponent_2_3:import18.Wrapper_SettingsButtonComponent;
  _text_3:any;
  constructor(viewUtils:import3.ViewUtils,parentView:import1.AppView<any>,parentIndex:number,parentElement:any,declaredViewContainer:import12.ViewContainer) {
    super(View_SettingsMenuComponent1,renderType_SettingsMenuComponent,import5.ViewType.EMBEDDED,viewUtils,parentView,parentIndex,parentElement,import6.ChangeDetectorStatus.CheckAlways,declaredViewContainer);
  }
  createInternal(rootSelector:string):import7.ComponentRef<any> {
    this._el_0 = import3.createRenderElement(this.renderer,(null as any),'li',new import3.InlineArray2(2,'class','setting_right_li'),(null as any));
    this._text_1 = this.renderer.createText(this._el_0,'\n      ',(null as any));
    this._el_2 = import3.createRenderElement(this.renderer,this._el_0,'nav-icon-button',import3.EMPTY_INLINE_ARRAY,(null as any));
    this.compView_2 = new import18.View_SettingsButtonComponent0(this.viewUtils,this,2,this._el_2);
    this._SettingsButtonComponent_2_3 = new import18.Wrapper_SettingsButtonComponent();
    this.compView_2.create(this._SettingsButtonComponent_2_3.context);
    this._text_3 = this.renderer.createText(this._el_0,'\n    ',(null as any));
    var disposable_0:Function = import3.subscribeToRenderElement(this,this._el_2,new import3.InlineArray2(2,'click',(null as any)),this.eventHandler(this.handleEvent_2));
    this.init(this._el_0,((<any>this.renderer).directRenderer? (null as any): [
      this._el_0,
      this._text_1,
      this._el_2,
      this._text_3
    ]
    ),[disposable_0]);
    return (null as any);
  }
  injectorGetInternal(token:any,requestNodeIndex:number,notFoundResult:any):any {
    if (((token === import17.SettingsButtonComponent) && (2 === requestNodeIndex))) { return this._SettingsButtonComponent_2_3.context; }
    return notFoundResult;
  }
  detectChangesInternal(throwOnChange:boolean):void {
    const currVal_2_0_0:any = this.context.$implicit;
    this._SettingsButtonComponent_2_3.check_button(currVal_2_0_0,throwOnChange,false);
    this._SettingsButtonComponent_2_3.ngDoCheck(this,this._el_2,throwOnChange);
    this.compView_2.internalDetectChanges(throwOnChange);
  }
  destroyInternal():void {
    this.compView_2.destroy();
  }
  visitRootNodesInternal(cb:any,ctx:any):void {
    cb(this._el_0,ctx);
  }
  // Click on the button navigates to the current item's destination.
  handleEvent_2(eventName:string,$event:any):boolean {
    this.markPathToRootAsCheckOnce();
    var result:boolean = true;
    if ((eventName == 'click')) {
      const pd_sub_0:any = ((<any>this.parentView.context.go(this.context.$implicit.destination)) !== false);
      result = (pd_sub_0 && result);
    }
    return result;
  }
}
<gh_stars>0
// this file contains all the constants for action type
// Redux action type dispatched when the user's details are requested.
export const FETCH_USER_DETAILS = "FETCH_USER_DETAILS";
|
<gh_stars>0
package com.zhanyage.htmlparselib.api;
import android.content.Context;
import java.util.List;
/**
 * Listener interface for click events raised from tags inside parsed HTML
 * content (images, hyperlinks and videos).
 */
public interface OnTagClickListener {
    /**
     * Callback for an image click.
     *
     * @param context      context
     * @param imageUrlList list of all image URLs contained in this HTML fragment
     * @param position     index of the clicked image within {@code imageUrlList}
     */
    void onImageClick(Context context, List<String> imageUrlList, int position);

    /**
     * Callback for a hyperlink click.
     *
     * @param context context
     * @param url     the URL taken from the href attribute
     */
    void onLinkClick(Context context, String url);

    /**
     * Callback for a video click.
     *
     * @param context  context
     * @param videoUrl the URL of the video
     */
    void onVideoClick(Context context, String videoUrl);
}
# Initialize jenv (Java version manager): sets up shims and shell integration.
eval "$(jenv init -)"
# main.tf
# Registers a GitHub integration on an IBM Cloud Open Toolchain.

# Define the resource type and name for the IBM Cloud-GitHub integration
resource "opentoolchain_integration_ibm_github" "gh" {
  # All identifiers are supplied by the caller via the variables below.
  integration_id = var.integration_id
  toolchain_id   = var.toolchain_id
  env_id         = var.env_id
}

# Define the variables for integration_id, toolchain_id, and env_id
variable "integration_id" {
  description = "The unique identifier for the integration"
  type        = string
}

variable "toolchain_id" {
  description = "The unique identifier for the toolchain"
  type        = string
}

variable "env_id" {
  description = "The unique identifier for the environment"
  type        = string
}
/**
 * Reducer for the organizations slice of state.
 * State shape: { loading: boolean, orgsList: Array }.
 * Style unified: single-quoted action types and object-spread updates
 * throughout (the original mixed quote styles and Object.assign).
 */
export default function organizationsReducer(state = { loading: false, orgsList: [] }, action) {
  switch (action.type) {
    case 'ADD_ORGANIZATION':
      // Append without mutating the existing list.
      return { ...state, orgsList: state.orgsList.concat(action.payload) };
    case 'LOADING_ORGANIZATIONS':
      // A fetch is in flight; keep the current list.
      return { ...state, loading: true };
    case 'FETCH_ORGANIZATIONS':
      // Replace the list with the fetched payload and clear the flag.
      return { loading: false, orgsList: action.payload };
    default:
      return state;
  }
}
/** Prints the largest element of a fixed sample array. */
object LargestElement {
  def main(args: Array[String]): Unit = {
    val numbers = Array(314, 24, 5, 28, 9, 36, 99)
    // Idiomatic replacement for the original hand-rolled scan loop.
    val large = numbers.max
    println("Largest number in given array is: " + large)
  }
}
#!/bin/bash
# CI gate for the model-archiver package: lint, unit tests with coverage,
# then integration tests; fail the build if any stage failed.
cd model-archiver/

# Lint test
pylint -rn --rcfile=./model_archiver/tests/pylintrc model_archiver/.
PY_LINT_EXIT_CODE=$?

# Execute python unit tests
python -m pytest --cov-report html:result_units --cov=./ model_archiver/tests/unit_tests
PY_UNITS_EXIT_CODE=$?

# Execute integration tests
python -m pytest model_archiver/tests/integ_tests # ToDo - Report for Integration tests ?
PY_INTEG_EXIT_CODE=$?

# If any one of the steps fail, exit with error
if [ "$PY_LINT_EXIT_CODE" -ne 0 ] || [ "$PY_UNITS_EXIT_CODE" -ne 0 ] || [ "$PY_INTEG_EXIT_CODE" -ne 0 ]
then exit 1
fi
<filename>src/main/java/com/waflo/cooltimediaplattform/backend/model/Media.java
package com.waflo.cooltimediaplattform.backend.model;
import lombok.Data;
import javax.persistence.*;
import java.util.LinkedHashSet;
import java.util.Set;
/**
 * Base JPA entity for all media types; subclasses share the identity via
 * JOINED-table inheritance.
 */
@Entity
@Data
@Inheritance(strategy = InheritanceType.JOINED)
public class Media {
    // Auto-generated surrogate primary key.
    // NOTE(review): field name "Id" breaks Java naming convention ("id").
    @Id
    @GeneratedValue
    long Id;
    // Owning users (m-n relation), eagerly fetched.
    @ManyToMany(fetch = FetchType.EAGER)
    Set<User> owner = new LinkedHashSet<>();
}
|
# -*- coding: utf-8 -*-
from unittest import TestCase
import QUANTAXIS as QA
from QUANTAXIS.QAFetch import QATdx
from QUANTAXIS.QAFetch.QATdx import QA_fetch_get_stock_day, select_best_ip, ping
from QUANTAXIS.QAUtil.QASetting import QA_Setting
import datetime
class TestSelect_best_ip(TestCase):
    """Integration tests for TDX quote-server selection and daily-bar fetch.

    NOTE(review): these tests talk to live TDX servers, so they are
    network-dependent and may be slow or flaky.
    """

    def test_select_best_ip(self):
        # First pass: pick the lowest-latency server from the cached IP list.
        best_ip = select_best_ip()
        ip = best_ip['stock']['ip']
        port = best_ip['stock']['port']
        self.assertTrue(isinstance(ip, str), '未获取到ip')
        self.assertTrue(isinstance(port, int), '未获取到端口号')
        # The chosen server should answer a ping in under one second.
        self.assertTrue(ping(ip, port, 'stock') < datetime.timedelta(0, 1, 0), '地址ping不通: {} {} {}'.format(ip, port, ping(ip, port, 'stock')))
        # ip = best_ip['future']['ip']
        # port = best_ip['future']['port']
        # self.assertTrue(ping(ip, port, 'stock') < datetime.timedelta(0, 1, 0), '地址ping不通: {} {} {}'.format(ip, port, ping(ip, port, 'stock')))
        code = '000001'
        days = 300
        start = datetime.datetime.now().date() - datetime.timedelta(days)
        end = datetime.datetime.now().date() - datetime.timedelta(10)
        data = QA_fetch_get_stock_day(code, start_date=start, end_date=end)
        print(data)
        # Expect at least half the calendar days to be trading days.
        self.assertTrue(len(data) > (end - start).days / 2,
                        '返回数据个数不匹配,数据长度:{},天数(包含节假日):{}'.format(len(data), (end - start).days / 2))
        # Second pass: reset the cached IP list to empty defaults and verify
        # selection still works (or degrades to None gracefully).
        default_ip = {'stock': {'ip': None, 'port': None},
                      'future': {'ip': None, 'port': None}}
        qasetting = QA_Setting()
        qasetting.set_config(
            section='IPLIST', option='default', default_value=default_ip)
        best_ip = select_best_ip()
        ip = best_ip['stock']['ip']
        port = best_ip['stock']['port']
        self.assertTrue(isinstance(ip, str) or ip is None, '未获取到ip')
        self.assertTrue(isinstance(port, int) or port is None, '未获取到端口号')
        ip = best_ip['future']['ip']
        port = best_ip['future']['port']
        self.assertTrue(isinstance(ip, str) or ip is None, '未获取到ip')
        self.assertTrue(isinstance(port, int) or port is None, '未获取到端口号')
        # NOTE(review): positional (start, end) here vs keyword
        # (start_date=, end_date=) above — confirm the signature accepts both.
        data = QA_fetch_get_stock_day(code, start, end)
        self.assertTrue(len(data) > (end - start).days / 2,
                        '返回数据个数不匹配,数据长度:{},天数(包含节假日):{}'.format(len(data), (end - start).days / 2))
|
#!/bin/sh
#$ -S /bin/bash
#$ -cwd
#$ -l s_vmem=8G
#$ -l mem_req=8G
#$ -pe def_slot 1
#$ -t 1-95
# 1-95
# SGE array job: each task processes one Ensembl release-91 GTF, building
# masked (ncRNA/mito-free) cDNA and CDS references for kallisto.
source /lustre1/home/kfuku/.bashrc
ulimit -s unlimited
echo running on `hostname`
echo starting at `date`
#wget -r -c -nd -np -A .gtf.gz ftp://ftp.ensembl.org/pub/release-91/gtf/
#wget -r -c -nd -np -A .dna.toplevel.fa.gz ftp://ftp.ensembl.org/pub/release-91/fasta/
# Directory layout for inputs (gtf/cdna/cds/genome) and mask outputs.
dir_work=/lustre1/home/kfuku/my_project/convergence_duplication/20180207_kallisto
dir_gtf=/lustre1/home/kfuku/my_db/Ensembl/release-91/gtf
dir_cdna=/lustre1/home/kfuku/my_db/Ensembl/release-91/cdna
dir_cds=/lustre1/home/kfuku/my_db/Ensembl/release-91/cds
dir_genome=/lustre1/home/kfuku/my_db/Ensembl/release-91/genome
dir_mask=/lustre1/home/kfuku/my_db/Ensembl/release-91/mask
cd ${dir_work}
# Map this array task's index to one species GTF and its matching FASTAs.
ext=".gtf.gz"
files=( `ls ${dir_gtf}/*${ext}` )
file=${files[$[${SGE_TASK_ID}-1]]}
bn=`basename ${file} .gtf.gz`
sci_name=`echo ${bn} | sed -e "s/\..*//"`
cdna=`ls ${dir_cdna}/${sci_name}*.fa.gz`
genome=`ls ${dir_genome}/${sci_name}*.fa.gz`
echo file: ${file}
echo basename: ${bn}
echo sci_name: ${sci_name}
echo cdna: ${cdna}
echo genome: ${genome}
# Per-task scratch directory (removed at the end).
if [ ! -e ${dir_work}/${bn} ]; then
mkdir ${dir_work}/${bn}
fi
cd ${dir_work}/${bn}
# Step 1: extract the IDs/coordinates of non-coding and mitochondrial
# transcripts and build a bowtie2 index of their sequences (skip if done).
if [ -e ${dir_mask}/fasta/${bn}.mask.fasta.1.bt2 ]; then
echo ${file} is already processed, skipped.
else
python /lustre1/home/kfuku/my_script/ensembl_gtf2geneid.py \
--gtf ${file} \
--search_attrs 'gene_biotype|transcript_name' \
--search_values 'lincRNA|macro_lncRNA|miRNA|misc_RNA|Mt_rRNA|Mt_tRNA|rRNA|scaRNA|scRNA|snoRNA|snRNA|sRNA|tRNA|mt-.*' \
--out_attr 'transcript_id'
gunzip -c ${cdna} | sed -e "s/\..*//" -e "s/[[:space:]].*//" > tmp.cdna.fasta
fatt extract --file ensembl_gtf2geneid.out_attr.tsv tmp.cdna.fasta > ${dir_mask}/fasta.cdna_overlap/${bn}.mask.cdna_overlap.fasta
gunzip -c ${genome} > tmp.genome.fasta
bedtools getfasta \
-s \
-fi tmp.genome.fasta \
-bed ensembl_gtf2geneid.gtf \
-fo ${dir_mask}/fasta/${bn}.mask.fasta
bowtie2-build -f ${dir_mask}/fasta/${bn}.mask.fasta ${dir_mask}/fasta/${bn}.mask.fasta
cp ensembl_gtf2geneid.gtf ${dir_mask}/gtf/${bn}.mask.gtf
cp ensembl_gtf2geneid.out_attr.tsv ${dir_mask}/transcript_id/${bn}.mask.txt
fi
# Step 2: drop the masked transcript IDs from the cDNA FASTA.
if [ ! -e ${dir_cdna}.mask/${sci_name}.cdna.mask.fasta ]; then
transcript_id=`ls ${dir_mask}/transcript_id/${sci_name}.*`
cdna=`ls ${dir_cdna}/${sci_name}.*.fa.gz`
zcat ${cdna} | sed -e "s/[[:space:]].*//" > ${sci_name}.cdna.id_formatted.fasta
echo cDNA: num line before mask: `wc -l ${sci_name}.cdna.id_formatted.fasta`
python -c "import sys,re; keys = open(sys.argv[1]).read().split('\n'); entries = open(sys.argv[2]).read().split('>'); [ sys.stdout.write('>'+e) for e in entries if not re.sub('\\\..*','',e,flags=re.DOTALL) in keys ]" \
${transcript_id} ${sci_name}.cdna.id_formatted.fasta > ${sci_name}.cdna.mask.fasta
echo cDNA: num line after mask: `wc -l ${sci_name}.cdna.mask.fasta`
cp ${sci_name}.cdna.mask.fasta ${dir_cdna}.mask/${sci_name}.cdna.mask.fasta
fi
# Step 3: same masking for the CDS FASTA.
if [ ! -e ${dir_cds}.mask/${sci_name}.cds.mask.fasta ]; then
transcript_id=`ls ${dir_mask}/transcript_id/${sci_name}.*`
cds=`ls ${dir_cds}/${sci_name}.*.fa.gz`
zcat ${cds} | sed -e "s/[[:space:]].*//" > ${sci_name}.cds.id_formatted.fasta
echo CDS: num line before mask: `wc -l ${sci_name}.cds.id_formatted.fasta`
python -c "import sys,re; keys = open(sys.argv[1]).read().split('\n'); entries = open(sys.argv[2]).read().split('>'); [ sys.stdout.write('>'+e) for e in entries if not re.sub('\\\..*','',e,flags=re.DOTALL) in keys ]" \
${transcript_id} ${sci_name}.cds.id_formatted.fasta > ${sci_name}.cds.mask.fasta
echo CDS: num line after mask: `wc -l ${sci_name}.cds.mask.fasta`
cp ${sci_name}.cds.mask.fasta ${dir_cds}.mask/${sci_name}.cds.mask.fasta
fi
rm -rf ${dir_work}/${bn}
###################
echo ending at `date`
|
#!/system/bin/sh -x
#
# Copyright (C) 2015-2017 The Android Container Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Toggle which side is foreground by flipping the flag at
# /proc/container/active (0 = host foreground, 1 = guest foreground).
# Fixes: quoted the variable, modern $() substitution, and corrected the
# "backgroud" typo in the emitted message.
hostvar=$(cat /proc/container/active)
if [ "$hostvar" -eq 0 ] ; then
	echo "host was background"
	echo "1" > /proc/container/active
	echo "guest is foreground now"
else
	echo "guest was foreground"
	echo "0" > /proc/container/active
	echo "host is background now"
fi
|
// In-place Fisher–Yates shuffle: each permutation of `arr` is equally
// likely. Mutates and returns the same array instance.
const shuffleArray = (arr) => {
  for (let end = arr.length - 1; end > 0; end--) {
    const pick = Math.floor(Math.random() * (end + 1));
    [arr[end], arr[pick]] = [arr[pick], arr[end]];
  }
  return arr;
};
// Demo: shuffle a small array and print the result.
const arr = [1,2,3,4,5];
console.log(shuffleArray(arr));
#!/bin/bash
# Log in to Docker Hub non-interactively (credentials come from CI secrets).
echo "$DOCKERHUB_TOKEN" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin

#compile
# Bundle the service (and its Flask templates) into one self-contained binary.
pip3 install -r requirements.txt
pyinstaller --onefile --clean --add-data="rest/api/templates/:rest/api/templates/" main.py

#centos
# Build the CentOS-based image and publish it.
docker build -t estuaryoss/discovery:latest -f Dockerfile .
docker push estuaryoss/discovery:latest
# Fetch the HoloEverywhere source tree from GitHub.
git clone https://github.com/Prototik/HoloEverywhere.git
|
package com.a8plus1.seen;
public class TieZi {
//帖子ID
private String tieZiId;
//发帖人Id
private String userID;
//发帖人昵称
private String userNickName;
//标题
private String title;
//内容
private String context;
//图片
private String picString;
//已阅量
private int watchCount;
//点赞量
private int goodCount;
//发表时间
private String firstTime;
public String getImage1() {
return image1;
}
public void setImage1(String image1) {
this.image1 = image1;
}
private String image1;
public TieZi(String tieZiId, String userID, String userNickName, String title, String context, int watchCount, int goodCount, String firstTime ) {
this.tieZiId = tieZiId;
this.userID = userID;
this.userNickName = userNickName;
this.title = title;
this.context = context;
this.watchCount = watchCount;
this.goodCount = goodCount;
this.firstTime = firstTime;
}
public TieZi(String tieZiId, String userID, String userNickName, String title, String context, String picString, int watchCount, int goodCount, String firstTime , String image1) {
this.tieZiId = tieZiId;
this.userID = userID;
this.userNickName = userNickName;
this.title = title;
this.context = context;
this.picString = picString;
this.watchCount = watchCount;
this.goodCount = goodCount;
this.firstTime = firstTime;
this.image1 = image1;
}
public String getUserID() {
return userID;
}
public void setUserID(String userID) {
this.userID = userID;
}
public String getUserNickName() {
return userNickName;
}
public void setUserNickName(String userNickName) {
this.userNickName = userNickName;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getContext() {
return context;
}
public void setContext(String context) {
this.context = context;
}
public int getWatchCount() {
return watchCount;
}
public void setWatchCount(int watchCount) {
this.watchCount = watchCount;
}
public int getGoodCount() {
return goodCount;
}
public void setGoodCount(int goodCount) {
this.goodCount = goodCount;
}
public String getFirstTime() {
return firstTime;
}
public void setFirstTime(String firstTime) {
this.firstTime = firstTime;
}
public String getTieZiId() {
return this.tieZiId;
}
public void setTieZiId(String tieZiId) {
this.tieZiId = tieZiId;
}
public String getPicString() {
return picString;
}
public void setPicString(String picString) {
this.picString = picString;
}
@Override
public boolean equals(Object obj) {
return ((TieZi)obj).getTieZiId().equals( this.getTieZiId());
}
}
|
<gh_stars>0
import { Test, TestingModule } from '@nestjs/testing';
import { SignalTypesService } from './signal-types.service';
// Smoke test: the Nest DI container can construct SignalTypesService.
describe('SignalTypesService', () => {
  let service: SignalTypesService;

  beforeEach(async () => {
    // Build a fresh testing module per spec so state never leaks between tests.
    const module: TestingModule = await Test.createTestingModule({
      providers: [SignalTypesService],
    }).compile();

    service = module.get<SignalTypesService>(SignalTypesService);
  });

  it('should be defined', () => {
    expect(service).toBeDefined();
  });
});
|
<reponame>crislerwin/azeria-front
import { AppProps } from 'next/app';
import { ThemeProvider } from 'styled-components';
import { light } from '../themes/light';
import GlobalStyles from '@/styles/globalStyles';
// Custom Next.js App component: wraps every page with the light theme
// and injects the global stylesheet once.
export default function MyApp({ Component, pageProps }: AppProps) {
  return (
    <ThemeProvider theme={light}>
      <Component {...pageProps} />
      <GlobalStyles />
    </ThemeProvider>
  );
}
|
#!/bin/bash
# Refresh the TTV playlist: download the channel list (gzip, only if newer),
# regenerate the m3u, append radio channels, and publish a LAN-rewritten copy.
cd "$(dirname "$0")"
FNAME="/tmp/ttv.json"
URL="http://hmxuku36whbypzxi.onion/trash/ttv-list/as.json"
GNAME="${FNAME}.gz"
# -z: fetch only when the remote file is newer; the && chain stops on failure.
curl --fail -R -z "${GNAME}" -o "${GNAME}" -H "Accept-Encoding: gzip" "${URL}" && \
python gen_m3u.py ${GNAME} 1> ttv.m3u && \
tail -n+2 radio.m3u >> ttv.m3u && \
sed 's/127.0.0.1/odroid.lan/' ttv.m3u > out_ttv.m3u
# gunzip -c "${GNAME}" >"${FNAME}"
|
#!/bin/bash
#
# Copyright (C) 2013 Team XBMC
# http://kodi.tv
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with XBMC; see the file COPYING. If not, see
# <http://www.gnu.org/licenses/>.
#
# Build configuration — every value is overridable from the environment.
# RELEASEV "auto" means: read the version from version.txt.
RELEASEV=${RELEASEV:-"auto"}
VERSION_PREFIX=${VERSION_PREFIX:-""}
TAG=${TAG}
# Repository root: Jenkins WORKSPACE or three levels above this script.
REPO_DIR=${WORKSPACE:-$(cd "$(dirname $0)/../../../" ; pwd)}
# Target distributions and architectures default to the host's.
[[ $(which lsb_release) ]] && DISTS=${DISTS:-$(lsb_release -cs)} || DISTS=${DISTS:-"stable"}
ARCHS=${ARCHS:-$(dpkg --print-architecture)}
BUILDER=${BUILDER:-"debuild"}
DEBUILD_OPTS=${DEBUILD_OPTS:-""}
PDEBUILD_OPTS=${PDEBUILD_OPTS:-""}
PBUILDER_BASE=${PBUILDER_BASE:-"/var/cache/pbuilder"}
DPUT_TARGET=${DPUT_TARGET:-"local"}
# Where to obtain the debian/ packaging files (dir or tarball URL).
DEBIAN=${DEBIAN:-"https://github.com/xbmc/xbmc-packaging/archive/master.tar.gz"}
BUILD_DATE=$(date '+%Y%m%d.%H%M')
# Print a short help text followed by the current environment summary.
function usage {
	echo "$0: this script builds a Kodi debian package from a git repository."
	echo "The build is controlled by ENV variables, which van be overridden as appropriate:"
	echo "BUILDER is either debuild(default) or pdebuild(needs a proper pbuilder setup)"
	checkEnv
}
# Print the effective build configuration and abort if the selected
# builder binary cannot be found (or pbuilder's base dir is missing).
function checkEnv {
	echo "#------ build environment ------#"
	echo "REPO_DIR: $REPO_DIR"
	getVersion
	echo "RELEASEV: $RELEASEV"
	[[ -n $TAG ]] && echo "TAG: $TAG"
	echo "DISTS: $DISTS"
	echo "ARCHS: $ARCHS"
	echo "DEBIAN: $DEBIAN"
	echo "BUILDER: $BUILDER"
	echo "CONFIGURATION: $Configuration"
	if ! [[ $(which $BUILDER) ]]
	then
		echo "Error: can't find ${BUILDER}, consider using full path to [debuild|pdebuild]"
		exit 1
	fi
	if [[ "$BUILDER" =~ "pdebuild" ]]
	then
		if ! [[ -d $PBUILDER_BASE ]] ; then echo "Error: $PBUILDER_BASE does not exist"; exit 1; fi
		echo "PBUILDER_BASE: $PBUILDER_BASE"
		echo "PDEBUILD_OPTS: $PDEBUILD_OPTS"
	else
		echo "DEBUILD_OPTS: $DEBUILD_OPTS"
	fi
	echo "#-------------------------------#"
}
# Resolve RELEASEV (reading version.txt when set to "auto") and derive the
# full Debian package version string PACKAGEVERSION.
# Fix: the original's first if/else also assigned PACKAGEVERSION in its
# else-branch — dead code, since the second if assigns it unconditionally.
function getVersion {
	getGitRev
	if [[ $RELEASEV == "auto" ]]
	then
		local MAJORVER=$(grep VERSION_MAJOR $REPO_DIR/version.txt | awk '{ print $2 }')
		local MINORVER=$(grep VERSION_MINOR $REPO_DIR/version.txt | awk '{ print $2 }')
		RELEASEV=${MAJORVER}.${MINORVER}
	fi
	# Optional epoch prefix, e.g. "2" -> "2:18.0~git20180101.1200-abc1234".
	if [[ -n ${VERSION_PREFIX} ]]
	then
		PACKAGEVERSION="${VERSION_PREFIX}:${RELEASEV}~git${BUILD_DATE}-${TAG}"
	else
		PACKAGEVERSION="${RELEASEV}~git${BUILD_DATE}-${TAG}"
	fi
}
# Resolve the short git revision; default TAG to it and TAGREV to 0.
function getGitRev {
	cd $REPO_DIR || exit 1
	REV=$(git log -1 --pretty=format:"%h")
	[[ -z $TAG ]] && TAG=$REV
	[[ -z $TAGREV ]] && TAGREV=0
}
# Create the .orig tarball for the Debian build: clean the tree, record the
# git revision, pre-fetch ffmpeg sources, archive the repo, then pull in
# the debian/ packaging files.
function archiveRepo {
	cd $REPO_DIR || exit 1
	git clean -xfd
	echo $REV > VERSION
	tools/depends/target/ffmpeg/autobuild.sh -d
	DEST="kodi-${RELEASEV}~git${BUILD_DATE}-${TAG}"
	[[ -d debian ]] && rm -rf debian
	cd ..
	# -h dereferences symlinks so the archive is self-contained.
	tar -czf ${DEST}.tar.gz -h --exclude .git $(basename $REPO_DIR)
	ln -s ${DEST}.tar.gz ${DEST/-/_}.orig.tar.gz
	echo "Output Archive: ${DEST}.tar.gz"
	cd $REPO_DIR || exit 1
	getDebian
}
# Fetch the debian/ packaging directory: copied from a local path when
# $DEBIAN is a directory, otherwise downloaded and unpacked as a tarball.
function getDebian {
	if [[ -d $DEBIAN ]]
	then
		cp -r $DEBIAN .
	else
		mkdir tmp && cd tmp
		curl -L -s $DEBIAN -o debian.tar.gz
		tar xzf debian.tar.gz
		cd xbmc-packaging-*
		# Rename *.unified templates into their final debian/ locations.
		for FILE in *.unified; do mv $FILE debian/${FILE%.unified}; done
		mv debian $REPO_DIR
		cd $REPO_DIR ; rm -rf tmp
	fi
}
# Build the package for every DIST x ARCH combination, templating the
# changelog per dist. pdebuild results are uploaded; debuild results stay
# in the parent directory.
function buildDebianPackages {
	archiveRepo
	cd $REPO_DIR || exit 1
	sed -e "s/#PACKAGEVERSION#/${PACKAGEVERSION}/g" -e "s/#TAGREV#/${TAGREV}/g" debian/changelog.in > debian/changelog.tmp
	# Debug builds flip the XBMC_RELEASE flag in debian/rules.
	[ "$Configuration" == "Debug" ] && sed -i "s/XBMC_RELEASE = yes/XBMC_RELEASE = no/" debian/rules
	for dist in $DISTS
	do
		sed "s/#DIST#/${dist}/g" debian/changelog.tmp > debian/changelog
		for arch in $ARCHS
		do
			cd $REPO_DIR
			echo "building: DIST=$dist ARCH=$arch"
			if [[ "$BUILDER" =~ "pdebuild" ]]
			then
				DIST=$dist ARCH=$arch $BUILDER $PDEBUILD_OPTS
				[ $? -eq 0 ] && uploadPkg || exit 1
			else
				$BUILDER $DEBUILD_OPTS
				echo "output directory: $REPO_DIR/.."
			fi
		done
	done
}
# Sign the freshly built .changes file and upload it with dput; records
# the upload result in UPLOAD_DONE for cleanup().
function uploadPkg {
	PKG="${PBUILDER_BASE}/${dist}-${arch}/result/${DEST/-/_}-${TAGREV}_${arch}.changes"
	echo "signing package"
	debsign $PKG
	echo "uploading $PKG to $DPUT_TARGET"
	dput $DPUT_TARGET $PKG
	UPLOAD_DONE=$?
}
# Remove the generated tarballs, but only after a successful pdebuild upload.
function cleanup {
	if [[ $UPLOAD_DONE -eq 0 ]] && [[ "$BUILDER" =~ "pdebuild" ]]
	then
		cd $REPO_DIR/.. || exit 1
		rm ${DEST}*
		rm ${DEST/-/_}*
	fi
}
###
# main
###
# -h/--help prints usage; otherwise validate the environment, build, clean up.
if [[ $1 = "-h" ]] || [[ $1 = "--help" ]]
then
	usage
	exit
fi
checkEnv
buildDebianPackages
cleanup
|
package api
import (
"context"
"encoding/json"
"github.com/gorilla/mux"
"github.com/sakiib/apiServer/auth"
"github.com/sakiib/apiServer/data"
"github.com/sakiib/apiServer/model"
"log"
"net/http"
"os"
"os/signal"
"syscall"
"time"
)
// parseID extracts the "id" value for a request, preferring the mux path
// variable and falling back to the "id" query parameter. Returns "" when
// neither is present.
func parseID(request *http.Request) string {
	if id := mux.Vars(request)["id"]; len(id) > 0 {
		return id
	}
	if vals, ok := request.URL.Query()["id"]; ok && len(vals) > 0 {
		return vals[0]
	}
	return ""
}
//@route GET /api/users
//@desc Gets all the available users
// GetUsers writes the full in-memory user list as JSON with status 200.
func GetUsers(response http.ResponseWriter, request *http.Request) {
	response.Header().Set("Content-Type", "application/json")
	log.Println("getUsers")
	log.Println("Authentication successful!")
	response.WriteHeader(http.StatusOK)
	if err := json.NewEncoder(response).Encode(data.Users); err != nil {
		log.Println(err.Error())
		return
	}
}
//@route GET /api/user/id
//@desc Gets a single user with the given id
// GetUser returns the matching user as JSON (200) or 204 when not found.
func GetUser(response http.ResponseWriter, request *http.Request) {
	response.Header().Set("Content-Type", "application/json")
	log.Println("getUser")
	log.Println("Authentication successful!")
	ID := parseID(request)
	for _, user := range data.Users {
		if user.ID == ID {
			response.WriteHeader(http.StatusOK)
			if err := json.NewEncoder(response).Encode(user); err != nil {
				log.Println(err.Error())
				return
			}
			return
		}
	}
	// No match: signal "no content" rather than 404.
	response.WriteHeader(http.StatusNoContent)
}
//@route POST /api/user/id
//@desc Create a new user with given info
// AddUser decodes the request body into a User, rejects the request with
// 409 when the URL id already exists, otherwise appends and echoes the
// updated list with 201.
// NOTE(review): the URL id is only used for the duplicate check; the
// stored user keeps whatever ID came in the body — confirm this is intended.
func AddUser(response http.ResponseWriter, request *http.Request) {
	response.Header().Set("Content-Type", "application/json")
	log.Println("addUser")
	log.Println("Authentication successful!")
	newUser := model.User{}
	if err := json.NewDecoder(request.Body).Decode(&newUser); err != nil {
		log.Println(err.Error())
		return
	}
	ID := parseID(request)
	for _, user := range data.Users {
		if ID == user.ID {
			response.WriteHeader(http.StatusConflict)
			return
		}
	}
	data.Users = append(data.Users, newUser)
	response.WriteHeader(http.StatusCreated)
	if err := json.NewEncoder(response).Encode(data.Users); err != nil {
		log.Println(err.Error())
		return
	}
}
//@route PUT /api/user/id
//@desc Update a user details with given id
// UpdateUser replaces the user with the URL id by the decoded body and
// returns the updated list with 201, or 204 when the id is unknown.
// NOTE(review): the replace is delete-then-append, so the updated user
// moves to the end of the slice — confirm ordering does not matter.
func UpdateUser(response http.ResponseWriter, request *http.Request) {
	response.Header().Set("Content-Type", "application/json")
	log.Println("updateUser")
	log.Println("Authentication successful!")
	newUser := model.User{}
	if err := json.NewDecoder(request.Body).Decode(&newUser); err != nil {
		log.Println(err.Error())
		return
	}
	ID := parseID(request)
	for index, user := range data.Users {
		if user.ID == ID {
			data.Users = append(data.Users[:index], data.Users[index+1:]...)
			data.Users = append(data.Users, newUser)
			response.WriteHeader(http.StatusCreated)
			if err := json.NewEncoder(response).Encode(data.Users); err != nil {
				log.Println(err.Error())
				return
			}
			return
		}
	}
	response.WriteHeader(http.StatusNoContent)
}
//@route DELETE /api/user/id
//@desc Delete an user with the given ID
// DeleteUser removes the matching user and returns the remaining list
// with 200, or 204 when the id is unknown.
func DeleteUser(response http.ResponseWriter, request *http.Request) {
	response.Header().Set("Content-Type", "application/json")
	log.Println("deleteUser")
	log.Println("Authentication successful!")
	ID := parseID(request)
	for index, user := range data.Users {
		if user.ID == ID {
			data.Users = append(data.Users[:index], data.Users[index+1:]...)
			response.WriteHeader(http.StatusOK)
			if err := json.NewEncoder(response).Encode(data.Users); err != nil {
				log.Println(err.Error())
				return
			}
			return
		}
	}
	response.WriteHeader(http.StatusNoContent)
}
// LogIn issues a fresh JWT after the BasicAuthentication middleware has
// already validated the caller's credentials. The token is returned both
// in the Authorization header and in the body.
func LogIn(response http.ResponseWriter, request *http.Request) {
	response.Header().Set("Content-Type", "application/json")
	log.Println("LogIn")
	log.Println("Authentication successful!")
	log.Println("successfully logged in!")
	token, err := auth.GetToken()
	if err != nil {
		response.WriteHeader(http.StatusInternalServerError)
		_, _ = response.Write([]byte("Error generating JWT token: " + err.Error()))
	} else {
		response.Header().Set("Authorization", "Bearer "+token)
		response.WriteHeader(http.StatusOK)
		_, _ = response.Write([]byte("Token: " + token))
	}
}
// HandleRoutes registers all API routes (login uses Basic auth, the CRUD
// routes require a JWT), starts the HTTP server on the given port, and
// shuts it down gracefully on SIGINT/SIGTERM with a 5-second drain window.
func HandleRoutes(port string) {
	log.Println("in HandleRoutes!")
	router := mux.NewRouter().StrictSlash(true)
	router.HandleFunc("/api/login", auth.BasicAuthentication(LogIn)).Methods("POST")
	router.HandleFunc("/api/users", auth.JWTAuthentication(GetUsers)).Methods("GET")
	router.HandleFunc("/api/user/{id}", auth.JWTAuthentication(GetUser)).Methods("GET")
	router.HandleFunc("/api/user/{id}", auth.JWTAuthentication(AddUser)).Methods("POST")
	router.HandleFunc("/api/user/{id}", auth.JWTAuthentication(UpdateUser)).Methods("PUT")
	router.HandleFunc("/api/user/{id}", auth.JWTAuthentication(DeleteUser)).Methods("DELETE")
	//log.Fatal(http.ListenAndServe(":"+port, router))
	// gracefully shutdown the server
	server := &http.Server{
		Addr:    ":" + port,
		Handler: router,
	}
	// Serve in the background; block the main goroutine until a signal arrives.
	done := make(chan os.Signal, 1)
	signal.Notify(done, os.Interrupt, syscall.SIGINT, syscall.SIGTERM)
	go func() {
		if err := server.ListenAndServe(); err != nil && err != http.ErrServerClosed {
			log.Printf("listen: %s\n", err)
		}
	}()
	log.Print("Server Started")
	<-done
	log.Print("Server Stopped")
	// Give in-flight requests up to 5 seconds to finish.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer func() {
		cancel()
	}()
	if err := server.Shutdown(ctx); err != nil {
		log.Printf("Server Shutdown Failed:%+v", err)
	}
	log.Print("Server Exited Properly")
}
|
/**
* Copyright 2015-现在 广州市领课网络科技有限公司
*/
package com.roncoo.education.course.service.api.callback;
import javax.servlet.http.HttpServletRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;
import com.roncoo.education.course.service.biz.callback.CallbackPolyvBiz;
import com.roncoo.education.util.base.BaseController;
import com.roncoo.education.util.polyv.PolyvAuth;
import com.roncoo.education.util.polyv.PolyvVideo;
import io.swagger.annotations.ApiOperation;
/**
 * Callback endpoints invoked by the Polyv video platform
 * (upload completion and playback authorization).
 *
 * @author wujing123
 */
@RestController
@RequestMapping(value = "/callback/polyv")
public class CallbackPolyvController extends BaseController {

    @Autowired
    private CallbackPolyvBiz biz;

    /**
     * Polyv callback fired when a video upload has been processed.
     */
    @ApiOperation(value = "保利威视,视频上传回调接口", notes = "保利威视,视频上传回调接口")
    @RequestMapping(value = "/video", method = { RequestMethod.POST, RequestMethod.GET })
    public String callbackPolyvVideo(PolyvVideo polyvVideo) {
        return biz.video(polyvVideo);
    }

    /**
     * Polyv callback used to authorize playback of a video.
     */
    @ApiOperation(value = "保利威视,视频授权播放回调接口", notes = "保利威视,视频授权播放回调接口")
    @RequestMapping(value = "/auth", method = { RequestMethod.POST, RequestMethod.GET })
    public String callbackPolyvAuth(PolyvAuth polyvAuth, HttpServletRequest request) {
        if (StringUtils.isEmpty(polyvAuth.getCallback())) {
            // Reconstruct the signature code from the raw query string by
            // stripping the vid/t/code parameter names; '+' must survive
            // URL handling, hence the %2B re-encoding.
            // NOTE(review): request.getQueryString() can be null — confirm
            // Polyv always sends query parameters on this endpoint.
            String sourceParam = request.getQueryString();
            sourceParam = sourceParam.replaceAll("vid=" + polyvAuth.getVid(), "");
            sourceParam = sourceParam.replaceAll("&t=" + polyvAuth.getT(), "");
            sourceParam = sourceParam.replaceAll("&code=", "").replace("+", "%2B");
            polyvAuth.setCode(sourceParam);
        }
        return biz.auth(polyvAuth);
    }
}
|
<gh_stars>0
/*
* Copyright 2015 lixiaobo
*
* VersionUpgrade project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.cats.version.perference;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Properties;
import com.cats.version.utils.Utils;
/**
 * Singleton access to version-upgrade user settings persisted in
 * {@code version_cfg.properties} under the workspace directory.
 * Fix: the original leaked the FileWriter/FileReader used to store/load
 * the properties file; both are now closed via try-with-resources.
 * (FileNotFoundException is an IOException subclass, so the single catch
 * preserves the original printStackTrace behavior.)
 *
 * @author xblia2
 * Jun 10, 2015
 */
public class UserPreference
{
    private static final String KEY_URL = "URL";
    private static final String KEY_VESIONCHECK_INTERVAL = "check_interval";

    private static final UserPreference PREFERENCE = new UserPreference();

    // Backing properties file; created with defaults on first run.
    private File file = new File(Utils.getWorkSpace() + File.separator + "version_cfg.properties");
    private Properties properties = new Properties();

    public UserPreference()
    {
        initCfg();
    }

    /**
     * Load the settings file, writing default values when it does not exist.
     */
    private void initCfg()
    {
        if (!file.exists())
        {
            properties.setProperty(KEY_URL, "http://xblia2-OptiPlex-9020:8023");
            properties.setProperty(KEY_VESIONCHECK_INTERVAL, "30000");
            try (FileWriter writer = new FileWriter(file))
            {
                properties.store(writer, "init");
            } catch (IOException e)
            {
                e.printStackTrace();
            }
        } else
        {
            try (FileReader reader = new FileReader(file))
            {
                properties.load(reader);
            } catch (IOException e)
            {
                e.printStackTrace();
            }
        }
    }

    /** @return the configured update-server URL. */
    public String getUrl()
    {
        return properties.getProperty(KEY_URL);
    }

    /**
     * @return the version-check polling interval in milliseconds; falls
     *         back to 30000 when the setting is missing or malformed.
     */
    public long getVersionCheckInterval()
    {
        long lInterval = 30000;
        try
        {
            String strInterval = properties.getProperty(KEY_VESIONCHECK_INTERVAL);
            if (null != strInterval && !strInterval.isEmpty())
            {
                lInterval = Long.parseLong(strInterval);
            }
        } catch (NumberFormatException e)
        {
            e.printStackTrace();
        }
        return lInterval;
    }

    /** @return the shared singleton instance. */
    public static UserPreference getInstance()
    {
        return PREFERENCE;
    }
}
|
<filename>src/network/home.js
import {request} from './request'
// Fetch the home-page aggregate payload (banners, recommends, ...).
export function getHomeMultidata() {
  return request({
    // url:'/test/getBanners'
    url:'/home/multidata',
    // baseurl:"http://172.16.17.32:8000/api/z8",
  });
}
// Fetch one page of home-page goods for the given category type.
export function getHomeGoods(type,page){
  return request({
    // url:'/api/z8',
    url:'/home/data',
    params:{
      type,
      page
    }
  })
}
/* function test(){
const names = [
]
} */
/* export function getGoodsData(type, page) {
return request({
url:'/test/getGoods',
params:{
type,
page
}
});
} */
|
# Lib imports
import nltk
from nltk.classify import NaiveBayesClassifier
# Tokenize words
def tokenize_words(text):
    """Split *text* into word tokens using NLTK's default tokenizer."""
    return nltk.word_tokenize(text)
# Convert the words into features
def convert_features(words):
    """Map each word to True, producing the NLTK-style feature dict.

    Duplicate words collapse to a single key, matching the original
    loop's behavior; a dict comprehension replaces the manual loop.
    """
    return {word: True for word in words}
# Prepare the training instances: each feature dict maps a token to True,
# paired with its category label. Tokens must match what tokenize_words /
# convert_features produce (no leading spaces) — the original keys all
# carried a stray leading space, so training features could never overlap
# with the article's features at prediction time.
training_data = [
    ({"President": True, "Trump": True, "immigration": True}, "Politics"),
    ({"Apple": True, "share": True, "market": True}, "Business"),
    ({"Singer": True, "concert": True, "stage": True}, "Entertainment"),
    ({"Player": True, "goal": True, "team": True}, "Sports")
]

# Train a Naive Bayes classifier on the labelled feature dicts.
model = NaiveBayesClassifier.train(training_data)

article = "President Trump is expected to unveil a new immigration plan on Wednesday."
article_tokens = tokenize_words(article)
article_features = convert_features(article_tokens)
category = model.classify(article_features)
print("The article is classified into the category: ", category)  # -> Politics
# clear window
alias c='clear'

# reload / source ~/.zshrc
alias reload='source ~/.zshrc'

# v for vim
alias v='vim'

# update ls defaults
alias ls='ls -FG' # add file type tips to ls command by default
alias ll='ls -lrth' # reverse time long list file with human readable sizes
alias la='ll -A' # as per ll but show dotfiles, excluding '.' and '..'

# make rm -rf shorter
alias rmf='rm -rf'

# filesystem usage, default human readable sizes
alias df='df -h'
alias du='du -h'

# git, see ~/.gitconfig for git aliases
alias g='git'
|
# Run the LULESH proxy app with a 50^3 problem size per domain.
./lulesh -s 50
|
#!/bin/bash
#
# James Walters
# james_b_walters@yahoo.com
# 30 APR 2020
#
# This is a GUI to call the Logitech C290 setup script,
# as well as a couple other programs I use.
# The following programs must be installed for this GUI to work.
# The user can replace my choice of additional applications
# with their personal preferences.
#
# This script will require the following programs to work:
# YAD
# NOTE(review): $DEVICE_REPORT is never set in this script — it must be
# exported by the caller or the dialog text will be empty.
DEVICE_CONFIG_CMD=$( yad --title="Video Setup" \
	--text "$DEVICE_REPORT" \
	--image "/usr/share/icons/C920.jpeg" \
	--form --separator="," --item-separator="," \
	--field="C290 Setup":fbtn "c290setup.sh" \
	--field="Cheese":fbtn "cheese" \
	--field="Screen Capture":fbtn "simplescreenrecorder --logfile" \
	--field="ZOOM":fbtn "/usr/bin/flatpak run --branch=stable --arch=x86_64 --command=zoom --file-forwarding us.zoom.Zoom @@u %U @@" \
	--button="gtk-quit":0 )
exit 0
|
-- Orders whose line items sum to exactly <amount>.
-- Replace <amount> with a literal or bind parameter before executing.
SELECT orders.*
FROM orders
INNER JOIN order_items
ON order_items.order_id = orders.id
GROUP BY orders.id
HAVING SUM(order_items.item_price) = <amount>;
<gh_stars>0
import React from "react";
import {makeStyles} from "@material-ui/core/styles";
import GraphLink from "./GraphLink";
import EditorLink from "./EditorLink";
// Layout styles for the app-selection navigation row.
const useStyles = makeStyles({
  appsNav: {
    display: "flex",
    justifyContent: "center",
    alignItems: "flex-start",
    gap: "20%"
  },
})

// Renders one navigation link per available sub-app (graph, editor).
// NOTE(review): list items lack a React `key` prop — harmless for this
// static list but it triggers a dev-mode warning.
export default function AppsNav() {
  const classes = useStyles();
  const appLinks = [GraphLink, EditorLink]
  return (
    <div
      className={classes.appsNav}
    >
      {
        appLinks.map((AppLink) => {
          return (
            <AppLink />
          )
        })
      }
    </div>
  )
}
package io.opensphere.analysis.table.model;
import java.util.List;
import javax.swing.table.TableModel;
import io.opensphere.mantle.data.element.DataElement;
/**
 * Interface to a table model that contains {@link DataElement} and also
 * contains additional {@link MetaColumn} pertaining to the individual
 * {@link DataElement}.
 */
public interface MetaColumnsTableModel extends TableModel
{
    /**
     * Returns a column index given its name. Implementation is naive (linear
     * scan) so this should be overridden if this method is to be called
     * often. This method is not in the <code>TableModel</code> interface and
     * is not used by the <code>JTable</code>.
     *
     * @param columnName string containing name of column to be located
     * @return the index of the column named <code>columnName</code>, or -1 if not found
     */
    int findColumn(String columnName);

    /**
     * Gets the data object backing the given row.
     *
     * @param rowIndex the row index
     * @return the data object
     */
    DataElement getDataAt(int rowIndex);

    /**
     * Gets the extra (non-data) meta columns exposed by this model.
     *
     * @return the metaColumns
     */
    List<MetaColumn<?>> getMetaColumns();
}
|
import { PubSub } from 'apollo-server';
import * as HOTEL_EVENTS from './hotel';
// Registry of GraphQL subscription event names, grouped by domain.
export const EVENTS = {
  HOTEL: HOTEL_EVENTS,
};

// Single shared in-memory PubSub instance used for all subscriptions.
export default new PubSub();
|
/*
* Copyright (c) 2011 Intel Corporation. All Rights Reserved.
* Copyright (c) Imagination Technologies Limited, UK
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sub license, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial portions
* of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
* OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
* IN NO EVENT SHALL PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR
* ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Authors:
* <NAME> <<EMAIL>>
*
*/
#include <stdio.h>
#include "JPEGMasterFirmware_bin.h"
#include "JPEGSlaveFirmware_bin.h"
#include "H263MasterFirmware_bin.h"
#include "H263MasterFirmwareCBR_bin.h"
#include "H263MasterFirmwareVBR_bin.h"
#include "H264MasterFirmware_bin.h"
#include "H264MasterFirmwareCBR_bin.h"
#include "H264MasterFirmwareVCM_bin.h"
#include "H264MasterFirmwareVBR_bin.h"
#include "MPG4MasterFirmware_bin.h"
#include "MPG4MasterFirmwareCBR_bin.h"
#include "MPG4MasterFirmwareVBR_bin.h"
#include "H263SlaveFirmware_bin.h"
#include "H263SlaveFirmwareCBR_bin.h"
#include "H263SlaveFirmwareVBR_bin.h"
#include "H264SlaveFirmware_bin.h"
#include "H264SlaveFirmwareCBR_bin.h"
#include "H264SlaveFirmwareVBR_bin.h"
#include "H264SlaveFirmwareVCM_bin.h"
#include "MPG4SlaveFirmware_bin.h"
#include "MPG4SlaveFirmwareCBR_bin.h"
#include "MPG4SlaveFirmwareVBR_bin.h"
/* Version stamp written into each firmware item header. */
#define FW_VER 0x5E
/* Name of the combined firmware image produced by this tool. */
#define FW_FILE_NAME "topazsc_fw.bin"

/* Builds a fw_table_s initializer for a master-core codec firmware,
 * pasting the codec id and the generated symbol prefix together. */
#define FW_MASTER_INFO(codec,prefix) \
    { FW_MASTER_##codec,\
      { FW_VER,\
        FW_MASTER_##codec,\
        ui32##prefix##_MasterMTXTOPAZFWTextSize,\
        ui32##prefix##_MasterMTXTOPAZFWDataSize,\
        ui32##prefix##_MasterMTXTOPAZFWDataOrigin\
      },\
      aui32##prefix##_MasterMTXTOPAZFWText, aui32##prefix##_MasterMTXTOPAZFWData \
    }

/* Same as FW_MASTER_INFO but for the slave-core firmware variant. */
#define FW_SLAVE_INFO(codec,prefix) \
    { FW_SLAVE_##codec,\
      { FW_VER,\
        FW_SLAVE_##codec,\
        ui32##prefix##_SlaveMTXTOPAZFWTextSize,\
        ui32##prefix##_SlaveMTXTOPAZFWDataSize,\
        ui32##prefix##_SlaveMTXTOPAZFWDataOrigin\
      },\
      aui32##prefix##_SlaveMTXTOPAZFWText, aui32##prefix##_SlaveMTXTOPAZFWData \
    }

/* On-disk header preceding each firmware payload in the output file.
 * text_size/data_size are generated in 32-bit words and converted to
 * bytes before being written (see main). */
struct topaz_fw_info_item_s {
    unsigned short ver;
    unsigned short codec;
    unsigned int text_size;
    unsigned int data_size;
    unsigned int data_location;
};
typedef struct topaz_fw_info_item_s topaz_fw_info_item_t;
/* Identifier for every (core, codec, rate-control) firmware variant.
 * Master/slave pairs are interleaved so iterating 0..FW_NUM-1 visits
 * each variant exactly once. */
enum topaz_fw_codec_e {
    FW_MASTER_JPEG = 0,
    FW_SLAVE_JPEG,
    FW_MASTER_H264_NO_RC,
    FW_SLAVE_H264_NO_RC,
    FW_MASTER_H264_VBR,
    FW_SLAVE_H264_VBR,
    FW_MASTER_H264_CBR,
    FW_SLAVE_H264_CBR,
    FW_MASTER_H263_NO_RC,
    FW_SLAVE_H263_NO_RC,
    FW_MASTER_H263_VBR,
    FW_SLAVE_H263_VBR,
    FW_MASTER_H263_CBR,
    FW_SLAVE_H263_CBR,
    FW_MASTER_MPEG4_NO_RC,
    FW_SLAVE_MPEG4_NO_RC,
    FW_MASTER_MPEG4_VBR,
    FW_SLAVE_MPEG4_VBR,
    FW_MASTER_MPEG4_CBR,
    FW_SLAVE_MPEG4_CBR,
    FW_MASTER_H264_VCM,
    FW_SLAVE_H264_VCM,
    FW_NUM /* number of variants; used as the iteration bound */
};
typedef enum topaz_fw_codec_e topaz_fw_codec_t;

/* One firmware image: its index, the on-disk header that precedes it in
 * the output file, and pointers to the text/data payload arrays. */
struct fw_table_s {
    topaz_fw_codec_t index;
    topaz_fw_info_item_t header;
    unsigned long *fw_text;
    unsigned long *fw_data;
};
typedef struct fw_table_s fw_table_t;
/*
 * Concatenates every firmware variant (header + text + data) into a
 * single binary image file named FW_FILE_NAME.
 *
 * Returns 0 on success, -1 if the output file cannot be opened or a
 * write fails.
 */
int main()
{
    FILE *fp = NULL;
    topaz_fw_codec_t iter = FW_MASTER_JPEG;
    unsigned int size = 0;
    fw_table_t topaz_fw_table[] = {
        /* index header
         * { ver, codec, text_size, data_size, date_location }
         * fw_text fw_data */
        FW_MASTER_INFO(JPEG, JPEG),
        FW_SLAVE_INFO(JPEG, JPEG),
        FW_MASTER_INFO(H264_NO_RC, H264),
        FW_SLAVE_INFO(H264_NO_RC, H264),
        FW_MASTER_INFO(H264_VBR, H264VBR),
        FW_SLAVE_INFO(H264_VBR, H264VBR),
        FW_MASTER_INFO(H264_CBR, H264CBR),
        FW_SLAVE_INFO(H264_CBR, H264CBR),
        FW_MASTER_INFO(H263_NO_RC, H263),
        FW_SLAVE_INFO(H263_NO_RC, H263),
        FW_MASTER_INFO(H263_VBR, H263VBR),
        FW_SLAVE_INFO(H263_VBR, H263VBR),
        FW_MASTER_INFO(H263_CBR, H263CBR),
        FW_SLAVE_INFO(H263_CBR, H263CBR),
        FW_MASTER_INFO(MPEG4_NO_RC, MPG4),
        FW_SLAVE_INFO(MPEG4_NO_RC, MPG4),
        FW_MASTER_INFO(MPEG4_VBR, MPG4VBR),
        FW_SLAVE_INFO(MPEG4_VBR, MPG4VBR),
        FW_MASTER_INFO(MPEG4_CBR, MPG4CBR),
        FW_SLAVE_INFO(MPEG4_CBR, MPG4CBR),
        FW_MASTER_INFO(H264_VCM, H264VCM),
        FW_SLAVE_INFO(H264_VCM, H264VCM),
    };

    /* Open in binary mode: the payload is raw data and must not be
     * subject to text-mode newline translation on any platform. */
    fp = fopen(FW_FILE_NAME, "wb");
    if (NULL == fp)
        return -1;

    /* Write each firmware item: header, then text, then data. */
    while (iter < FW_NUM) {
        /* Generated sizes count 32-bit words; convert to bytes. */
        topaz_fw_table[iter].header.data_size *= 4;
        topaz_fw_table[iter].header.text_size *= 4;

        if (fwrite(&(topaz_fw_table[iter].header),
                   sizeof(topaz_fw_table[iter].header), 1, fp) != 1)
            goto write_error;

        size = topaz_fw_table[iter].header.text_size;
        if (fwrite(topaz_fw_table[iter].fw_text, 1, size, fp) != size)
            goto write_error;

        size = topaz_fw_table[iter].header.data_size;
        if (fwrite(topaz_fw_table[iter].fw_data, 1, size, fp) != size)
            goto write_error;

        ++iter;
    }

    fclose(fp);
    return 0;

write_error:
    fclose(fp);
    return -1;
}
|
<reponame>soulcutter/eye
# Implements "chained" group commands: a command is applied to the group's
# processes sequentially, with a configurable grace period between
# processes and an optional synchronous (wait-for-completion) mode.
module Eye::Group::Chain

private

  # Entry point: resolve the chain [type, grace] for the command, then run
  # the call across the group.
  def chained_call(call)
    type, grace = chain_options(call[:command])
    chain_schedule(type, grace, call)
  end

  # Apply +call+ to every process in the group, sleeping +grace+ seconds
  # between processes. @chain_breaker aborts the chain early; progress is
  # tracked in @chain_processes_current/@chain_processes_count.
  def chain_schedule(type, grace, call)
    command = call[:command]
    args = call[:args]
    info "starting #{type} with #{grace}s chain #{command} #{args}"
    @chain_processes_count = @processes.size
    @chain_processes_current = 0
    @chain_breaker = false
    started_at = Time.now
    @processes.each do |process|
      if process.skip_group_action?(command)
        @chain_processes_current = @chain_processes_current.to_i + 1
        next
      end
      chain_schedule_process(process, type, call)
      @chain_processes_current = @chain_processes_current.to_i + 1
      # to skip last sleep
      break if @chain_processes_current.to_i == @chain_processes_count.to_i
      break if @chain_breaker
      # wait next process
      sleep grace.to_f
      break if @chain_breaker
    end
    debug { "chain finished #{Time.now - started_at}s" }
    @chain_processes_count = nil
    @chain_processes_current = nil
  end

  # Send +call+ to a single process, waiting for the completion signal
  # when the chain type is :sync.
  def chain_schedule_process(process, type, call)
    debug { "chain_schedule_process #{process.name} #{type} #{call[:command]}" }
    if type == :sync
      # sync command, with waiting
      Eye::Utils.wait_signal(call[:signal_timeout]) do |signal|
        process.send_call(call.merge(signal: signal))
      end
    else
      # async command
      process.send_call(call)
    end
  end

  # Grace periods configured for the :start and :restart chains, or nil
  # when no chain is configured at all.
  def chain_status
    if @config[:chain]
      [:start, :restart].map { |c| @config[:chain][c].try(:[], :grace) }
    end
  end

  # Default delay (seconds) between chained processes.
  DEFAULT_CHAIN = 0.2

  # Resolve the [type, grace] pair for +command+ from the group config,
  # falling back to an async chain with DEFAULT_CHAIN seconds.
  def chain_options(command)
    command = :start if command == :monitor # HACK: for monitor command, work as start
    if @config[:chain] && @config[:chain][command]
      type = @config[:chain][command].try :[], :type
      type = [:async, :sync].include?(type) ? type : :async
      grace = @config[:chain][command].try :[], :grace
      grace = (grace || DEFAULT_CHAIN).to_f rescue DEFAULT_CHAIN
      [type, grace]
    else
      # default chain case
      [:async, DEFAULT_CHAIN]
    end
  end
end
|
#!/bin/bash
# Download the ROS2 source tree for a distribution into $ROS2_PATH.

ROS2_PATH=~/ros2_cc_ws

# You can specify a distribution, otherwise the default one will be used
distribution="eloquent"
if [ -z "$1" ]; then
    echo "Using default ROS2 $distribution distribution sources"
else
    distribution="$1"
    echo "Using ROS2 $distribution distribution sources"
fi

# Refuse to clobber an existing checkout.
if [ -d "$ROS2_PATH" ]; then
    echo "Error: Directory $ROS2_PATH already exists, remove it to get new sources."
    exit 1
fi

# Download sources for distribution
echo "Downloading ROS2 sources in directory: $ROS2_PATH"
mkdir -p "$ROS2_PATH/src"
cd "$ROS2_PATH" || exit 1

# Abort if the repo list cannot be fetched so vcs doesn't import from a
# missing or partial file.
if ! wget "https://raw.githubusercontent.com/ros2/ros2/$distribution/ros2.repos"; then
    echo "Error: failed to download ros2.repos for distribution '$distribution'."
    exit 1
fi
vcs import src < ros2.repos
#ifndef PARSIMON_INTERNAL_POW10_H
#define PARSIMON_INTERNAL_POW10_H
#include <limits>
#include <array>
namespace anpa::internal {

// Compile-time lookup table of powers of ten spanning the full base-10
// exponent range of the floating point type `Floating`.
template <typename Floating>
struct pow_table {
    // Smallest representable base-10 exponent; used to bias table indices
    // so that index 0 corresponds to 10^min.
    static constexpr int min = std::numeric_limits<Floating>::min_exponent10;

    // table[-min + e] == 10^e for every e in [min, max].
    static constexpr auto table = []() {
        constexpr int max = std::numeric_limits<Floating>::max_exponent10;
        constexpr int size = max - min + 1;
        std::array<Floating, size> table{};
        // Fill non-negative exponents upward from 10^0 == 1.
        for (int i = 0; i <= max; ++i) {
            if (i == 0) {
                table[-min + i] = 1;
            } else {
                table[-min + i] = table[-min + i-1] * 10;
            }
        }
        // Fill negative exponents downward from 10^0.
        for (int i = -1; i >= min; --i) {
            table[-min + i] = table[-min + i + 1] / 10;
        }
        return table;
    }();

    // Returns 10^n.
    // NOTE(review): n is unsigned, so only non-negative exponents can be
    // requested here even though the table also stores the negative range
    // -- confirm whether callers ever need n < 0.
    static constexpr auto pow(std::size_t n) {
        return table[-min + n];
    }
};

}
#endif // PARSIMON_INTERNAL_POW10_H
|
import { Entity, PrimaryGeneratedColumn, Column, OneToMany } from 'typeorm';
@Entity()
export class User {
  // Auto-generated surrogate primary key.
  @PrimaryGeneratedColumn()
  id: number;

  @Column()
  username: string;

  // Creation time, defaulted by the database at insert.
  @Column({ type: 'timestamp', default: () => 'CURRENT_TIMESTAMP' })
  createdAt: Date;

  // NOTE(review): this relation points User back at itself via `user.id`;
  // a one-to-many normally targets another entity with a matching
  // @ManyToOne inverse side. Looks unintentional -- confirm the intended
  // target entity.
  @OneToMany(type => User, user => user.id)
  users: User[];
}
#!/bin/bash
set -o nounset
set -o errexit
set -o pipefail

echo "************ telco5g cnf-tests commands ************"

# Work in a throwaway temp directory so repeated runs don't collide.
cnf_dir=$(mktemp -d -t cnf-XXXXX)
cd "$cnf_dir" || exit 1

repo="https://github.com/openshift-kni/cnf-features-deploy.git"
branch="${PULL_BASE_REF}"
dir="cnf-features-deploy"

# Check out the PR's base branch so the tests match the code under review.
echo "cloning branch ${PULL_BASE_REF}"
git clone -b $branch $repo $dir
cd $dir

# Run the CI functional tests against the typical baremetal feature set.
FEATURES_ENVIRONMENT="typical-baremetal" FEATURES="performance xt_u32 vrf sctp ovn" make functests-on-ci
|
<reponame>dbatten5/dagster
from dagster_aws.emr.utils import subset_run_config
def test_subset_run_config():
    """subset_run_config keeps solid config only for the requested solid."""
    pyspark_config = {
        "config": {
            "pipeline_file": "dagster_aws_tests/emr_tests/test_pyspark.py",
            "pipeline_fn_name": "pipe",
            "cluster_id": "j-272P42200OZ0Q",
            "staging_bucket": "dagster-scratch-80542c2",
            "region_name": "us-west-1",
        }
    }
    run_config = {
        "solids": {"blah": {"config": {"foo": "a string", "bar": 123}}},
        "resources": {"pyspark": pyspark_config},
    }

    # A solid that exists keeps the config untouched.
    assert subset_run_config(run_config, "blah") == run_config

    # An unknown solid has its solid config stripped away.
    assert subset_run_config(run_config, "not_here")["solids"] == {}
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Resolve the MXNet repo root relative to this script's location.
MXNET_ROOT=$(cd "$(dirname $0)/../../.."; pwd)

# Assembled scala-package jars plus the example classes and their libs.
CLASS_PATH=$MXNET_ROOT/scala-package/assembly/linux-x86_64-gpu/target/*:$MXNET_ROOT/scala-package/examples/target/*:$MXNET_ROOT/scala-package/examples/target/classes/lib/*

# which gpu card to use, -1 means cpu
GPU=$1

# the mnist data path
# you can get the mnist data using the script core/scripts/get_mnist_data.sh
DATA_PATH=$2

java -Xmx4G -cp $CLASS_PATH \
    org.apache.mxnetexamples.multitask.ExampleMultiTask \
    --data-path $DATA_PATH \
    --gpu $GPU \
|
import os
import random
def manage_temp_files(num_files, operations):
    """Simulate read/write/delete operations over a set of temp files.

    Args:
        num_files: Number of candidate files, named ``temp_file_1`` ..
            ``temp_file_<num_files>``.
        operations: Sequence of operation names. Each of ``"read"``,
            ``"write"`` and ``"delete"`` acts on a randomly chosen
            candidate file; other values are ignored. Operations on files
            that do not exist on disk are silently skipped.

    Returns:
        List of file names that were removed, including any candidate
        files removed by the final cleanup pass.
    """
    temp_files = [f"temp_file_{i}" for i in range(1, num_files + 1)]
    deleted_files = []
    # No candidates: nothing to operate on or clean up (also avoids
    # random.choice raising on an empty sequence).
    if not temp_files:
        return deleted_files
    for operation in operations:
        # Pick a random candidate; missing files make the op a no-op.
        target = random.choice(temp_files)
        if not os.path.exists(target):
            continue
        if operation == "read":
            with open(target, 'r') as file:
                print(f"Reading from {target}: {file.read()}")
        elif operation == "write":
            with open(target, 'w') as file:
                file.write("Sample content")
            print(f"Writing to {target}")
        elif operation == "delete":
            os.remove(target)
            deleted_files.append(target)
            print(f"Deleting {target}")
    # Cleanup: remove any candidate files still present on disk.
    for leftover in temp_files:
        if os.path.exists(leftover):
            os.remove(leftover)
            deleted_files.append(leftover)
    return deleted_files
<reponame>KPMP/portal-ui
import React from 'react';
// Signature of the action creator that opens/closes the global modal.
export type TSetModalFunc = (
  component: React.Component | null,
  autoClose?: boolean
) => IModalAction;

// Redux action shape for toggling the modal.
export interface IModalAction {
  type: string;
  payload: {
    component: React.Component | null;
    autoClose?: boolean;
  };
}

// Action creator: pass a component to open the modal, null to close it.
// NOTE(review): the parameter default is `false`, but the non-boolean
// fallback below resolves to `true` -- confirm which default is intended.
export const setModal: TSetModalFunc = (component, autoClose = false) => ({
  type: 'TOGGLE_MODAL',
  payload: {
    component,
    autoClose: typeof autoClose === 'boolean' ? autoClose : true,
  },
});

// Reducer holding the currently displayed modal payload (or null).
function modal(state = null, action: IModalAction) {
  switch (action.type) {
    case 'TOGGLE_MODAL':
      return action.payload;
    default:
      return state;
  }
}

/*----------------------------------------------------------------------------*/

export default modal;
|
# -*- coding: utf-8 -*-
from wallabag.commands.command import Command
from wallabag.commands.params import Params
from wallabag.api.get_entry import GetEntry
from wallabag.api.update_entry import UpdateEntry, Params as UpdateEntryParams
from wallabag.entry import Entry
class UpdateCommandParams(Params):
    """Parameter set for UpdateCommand.

    Holds either explicit states (set_read_state / set_star_state),
    toggles (toggle_read / toggle_star) or a new title; at least one must
    be supplied for the parameter set to validate.
    """

    toggle_read = None
    toggle_star = None
    new_title = None
    set_read_state = None
    set_star_state = None
    force = False
    quiet = False
    check_toggle_options = True

    def __init__(self, check_toggle_options=True):
        self.check_toggle_options = check_toggle_options

    def validate(self):
        """Return (True, None) if any update parameter is present.

        Explicit set_* states win over the corresponding toggles: when
        both are given the toggle is cleared. Returns (False, message)
        when no parameter was supplied at all.
        """
        # An explicit state makes the corresponding toggle redundant.
        if self.set_star_state and self.toggle_star:
            self.toggle_star = None
        if self.set_read_state and self.toggle_read:
            self.toggle_read = None
        given = [
            bool(self.new_title),
            self.set_read_state is not None,
            self.set_star_state is not None,
        ]
        if self.check_toggle_options:
            given.extend([bool(self.toggle_read), bool(self.toggle_star)])
        if any(given):
            return True, None
        return False, 'No parameter given'
class UpdateCommand(Command):
    """Update an entry's title, read state and/or starred state."""

    def __init__(self, config, entry_id, params):
        Command.__init__(self)
        self.config = config
        self.entry_id = entry_id
        self.params = params or UpdateCommandParams()

    def _run(self):
        """Fetch the entry, resolve toggles against its current state and
        push the update to the server.

        Returns (True, message), with message None in quiet mode.
        """
        params = self.params
        read_value = params.set_read_state
        star_value = params.set_star_state

        # Toggles need the entry's current state to compute the new value.
        entry = Entry(GetEntry(self.config, self.entry_id).request().response)
        if params.toggle_read:
            read_value = not entry.read
        if params.toggle_star:
            star_value = not entry.starred

        UpdateEntry(self.config, self.entry_id, {
            UpdateEntryParams.TITLE: params.new_title,
            UpdateEntryParams.STAR: star_value,
            UpdateEntryParams.READ: read_value
        }).request()

        if not params.quiet:
            return True, "Entry successfully updated."
        return True, None
|
#!/data/data/com.termux/files/usr/bin/bash
# YouTer: download the YouTube URL given as $1 as audio or video via
# youtube-dl, with an interactive format menu.
echo -e "\e[031m" "──────▄▀▄─────▄▀▄"
echo -e "\e[031m" "─────▄█░░▀▀▀▀▀░░█▄"
echo -e "\e[031m" "─▄▄──█░░░░░░░░░░░█──▄▄"
echo -e "\e[031m" "█▄▄█─█░░▀░░┬░░▀░░█─█▄▄█"
echo -e "\e[036m" "╔════════════════════════════════════════╗"
echo -e "\e[032m" "║ ♚ Project Name : YouTer ║"
echo -e "\e[032m" "║ ♚ Author : richardson_ddos ║"
echo -e "\e[032m" "║ ♚ Channel : t.me/libernet_15 ║"
echo -e "\e[036m" "╠════════════════════════════════════════╝"
echo -e "\e[036m" "╠═▶ Please subscribe to @libernet_15 ;)"
echo -e "\e[036m" "╠═▶ 𝗦𝗲𝗹𝗲𝗰𝘁 𝗔 𝗙𝗼𝗿𝗺𝗮𝘁 ➳"
echo -e "\e[032m" "╠═▶ 1. Music Mp3♫"
echo -e "\e[032m" "╠═▶ 2. Video 360p"
echo -e "\e[032m" "╠═▶ 3. Video 480p"
echo -e "\e[032m" "╠═▶ 4. Video 720p"
echo -e "\e[032m" "╠═▶ 5. Video 1080p"

# Shared youtube-dl options: keep mtime off, save under shared storage.
command='-no-mtime -o /data/data/com.termux/files/home/storage/shared/Youtube/%(title)s.%(ext)s -f'
read -p " ╚═➤ " option

# Map the menu choice to a youtube-dl format selector. A case statement
# also tolerates non-numeric input, unlike the previous [ -eq ] tests.
case "$option" in
    1) format='140' ;;
    2) format='"best[height<=360]"' ;;
    3) format='"best[height<=480]"' ;;
    4) format='"best[height<=720]"' ;;
    5) format='"best[height<=1080]"' ;;
    *) exit 0 ;;  # unknown choice: do nothing, like before
esac

# Make sure the config directory exists before writing into it.
mkdir -p ~/.config/youtube-dl
echo "$command $format" > ~/.config/youtube-dl/config
youtube-dl "$1"
'use strict';
import Reflux from 'reflux';
// Reflux actions for the media dialog: cast-state updates, CRUD on rules,
// wizard navigation and content editing/upload state.
const MediaActions = Reflux.createActions({
  stateCast: {},
  insertRule: {},
  deleteRule: {},
  saveRule: {},
  editRule: {},
  cancelEditRule: {},
  gotoStep: {},
  save: {},
  nextStep: {},
  close: {},
  openNew: {},
  openUpdate: {},
  setUrl: {},
  setContentTabIndex: {},
  setUploadError: {},
  updateContentURL: {},
  updateContentObject: {},
  setInnerContentTabIndex: {},
  setInnerStatus: {}
});

module.exports = MediaActions;
|
<reponame>avdempsey/benthos<filename>lib/stream/manager/from_directory.go
package manager
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/Jeffail/benthos/v3/lib/config"
"github.com/Jeffail/benthos/v3/lib/stream"
)
//------------------------------------------------------------------------------
// LoadStreamConfigsFromDirectory reads a map of stream ids to configurations
// by walking a directory of .json and .yaml files.
//
// The stream id is the file path relative to dir with path separators
// replaced by '_' and the extension stripped. A missing or non-directory
// dir yields an empty map rather than an error.
//
// NOTE(review): the replaceEnvVars parameter is ignored; config.Read is
// always called with true -- confirm whether it should be threaded through.
func LoadStreamConfigsFromDirectory(replaceEnvVars bool, dir string) (map[string]stream.Config, error) {
	streamMap := map[string]stream.Config{}

	dir = filepath.Clean(dir)
	if info, err := os.Stat(dir); err != nil {
		if os.IsNotExist(err) {
			return streamMap, nil
		}
		return nil, err
	} else if !info.IsDir() {
		return streamMap, nil
	}

	err := filepath.Walk(dir, func(path string, info os.FileInfo, werr error) error {
		if werr != nil {
			return werr
		}
		// Only .yaml and .json files are considered stream configs.
		if info.IsDir() ||
			(!strings.HasSuffix(info.Name(), ".yaml") &&
				!strings.HasSuffix(info.Name(), ".json")) {
			return nil
		}

		// Derive the stream id from the path relative to dir.
		var id string
		if id, werr = filepath.Rel(dir, path); werr != nil {
			return werr
		}
		id = strings.Trim(id, string(filepath.Separator))
		id = strings.Replace(id, string(filepath.Separator), "_", -1)
		if strings.HasSuffix(info.Name(), ".yaml") {
			id = strings.TrimSuffix(id, ".yaml")
		} else {
			id = strings.TrimSuffix(id, ".json")
		}

		// Two files must not map onto the same stream id.
		if _, exists := streamMap[id]; exists {
			return fmt.Errorf("stream id (%v) collision from file: %v", id, path)
		}

		conf := config.New()
		if _, readerr := config.Read(path, true, &conf); readerr != nil {
			// TODO: Read and report linting errors.
			return readerr
		}

		streamMap[id] = conf.Config
		return nil
	})

	return streamMap, err
}
//------------------------------------------------------------------------------
|
<reponame>yura-chaikovsky/api-decorator
/**
 * Thin fetch-based API client generated from a declaration object.
 *
 * `declarations` maps api names to endpoint declarations; each endpoint
 * becomes a method on `this[apiName]` (see buildApi). `options` carries
 * shared fetch settings (cors, cache, credentials, headers, ...).
 */
class ApiFactory {
    constructor(baseUrl, declarations, options = {}) {
        // The base url is joined with endpoint urls that must start with
        // "/", so a trailing slash here would produce "//" in requests.
        if(!baseUrl || baseUrl.substr(-1) === "/") {
            throw new Error(`Base url should be set and should not have "/" in the end, given "${baseUrl}".`);
        }
        this._baseUrl = baseUrl;
        this._options = options;
        this.headers = {"Content-Type": "application/json"};
        buildApi.call(this, declarations);
    }

    /**
     * Perform the HTTP request and resolve to {headers, body} with the
     * response headers flattened into a plain object and the body parsed
     * as JSON.
     */
    _call(method, url, payload) {
        return fetch(url,{
            method,
            mode: this._options.cors,
            cache: this._options.cache,
            credentials: this._options.credentials,
            // Instance headers override the shared option headers.
            headers: Object.assign({}, this._options.headers, this.headers),
            redirect: this._options.redirect,
            referrer: this._options.referrer,
            body: payload === null? undefined : JSON.stringify(payload)
        })
        .then(response => Promise.all([response, response.json()]))
        .then(([response, body]) => ({
            headers: Array.from(response.headers).reduce((obj, [name, value]) => (obj[name] = value, obj), {}),
            body
        }));
    }
}
/**
 * Populate `this[apiName][method]` endpoint functions from the declaration
 * object. Keys starting with "_" are collected as metadata attached to
 * every response of that api; other keys declare endpoints whose HTTP verb
 * defaults to the key name when it is get/post/put/delete, else GET.
 * Must be invoked with the ApiFactory instance as `this`.
 */
function buildApi(declarations) {
    Object.entries(declarations).forEach(([apiName, declaration]) => {
        this[apiName] = {};
        const metaData = {meta: {}};
        Object.entries(declaration).forEach(([method, execParams]) => {
            // Leading underscore marks metadata, not an endpoint.
            if(method[0] === "_") {
                metaData.meta[method.substring(1)] = execParams;
                return;
            }
            if(!execParams.url || execParams.url[0] !== "/") {
                throw new Error(`Endpoint url should be set and should start with "/", given "${execParams.url}".`);
            }
            this[apiName][method] = createEndpoint(
                execParams,
                execParams.method? execParams.method : (["get", "post", "put", "delete"].includes(method)? method : "get"),
                metaData).bind(this);
        });
    });
}
/**
 * Build one endpoint function: (options, payload) -> Promise of the
 * response object with the api's metadata merged in. Bound to the
 * ApiFactory instance by buildApi.
 */
function createEndpoint(execParams, httpMethod, meta) {
    return function (options, payload = null) {
        return this._call(httpMethod, buildUrl(this._baseUrl, execParams, options), payload)
            .then(response => Object.assign(response, meta));
    }
}
/**
 * Assemble a request URL: substitute {placeholder}s in the declared path
 * from `options`, and append any declared query-string keys. Throws when
 * an option is not declared as a query key (while query keys are
 * declared) or a path placeholder has no value.
 */
function buildUrl(baseUrl, execParams, options) {
    const query = [];
    if (execParams.options) {
        for (const [key, value] of Object.entries(options)) {
            if (!execParams.options.includes(key)) {
                throw new Error(`Passed query string key "${key}" does not exists in declaration for "${execParams.url}".`);
            }
            query.push(`${key}=${encodeURIComponent(value)}`);
        }
    }
    const path = execParams.url.replace(/{(\w+)}/g, (match, key, offset, url) => {
        if (!options.hasOwnProperty(key)) {
            throw new Error(`Missing required parameter "${key}" in url "${url}".`);
        }
        return options[key];
    });
    return baseUrl + path + (query.length ? `?${query.join("&")}` : "");
}
/**
 * Extract the street name from a flat address record: strip the
 * "<num> <rep> " prefix and the " <cp> <commune>" suffix from `ident`.
 * NOTE(review): when both num and rep are absent the prefix is empty but
 * one leading character is still skipped -- confirm inputs always carry
 * a number.
 */
function nomVoie(data) {
    const address = data._default;
    const head = [address.num, address.rep].map((part) => part || '').join(' ').trim();
    const tail = `${address.cp} ${address.commune}`;
    const start = head.length + 1;
    const end = address.ident.length - tail.length - 1;
    return address.ident.substring(start, end);
}

module.exports = {nomVoie};
|
import { FetchWSWatchDataResponse, RegisterWatchResponse } from './types';
import { all, call, put, takeLatest, takeLeading } from 'redux-saga/effects';
import callApi from 'global/services/api';
import {
fetchWSWatchData,
fetchWSWatchDataFailed,
fetchWSWatchDataSuccess,
registerWatch,
} from '.';
import { wsWatchDataAdapter } from '../adapter';
// Posts the watch-registration form data and forwards the raw API
// response to the action's optional callback.
function* callRegisterWatch({ payload }: any) {
  const response: RegisterWatchResponse = yield call(callApi, {
    method: 'post',
    route: '/watches',
    headers: {
      // The registration payload carries files, hence multipart.
      'Content-Type': 'multipart/form-data',
    },
    data: payload.params,
  });
  if (payload.callback) {
    payload.callback(response);
  }
}
// Fetches watch-signal data for a reference number, adapts the first
// result into store shape on success, and dispatches success/failure.
function* callFetchWSWatchData({ payload }: any) {
  const { referenceNumber, callback } = payload;
  const response: FetchWSWatchDataResponse = yield call(callApi, {
    method: 'get',
    route: `/watch_signals/watch/reference_number/${referenceNumber}`,
  });
  // Only the first returned record is used.
  if (response.success && response.response.data?.[0]) {
    const data = wsWatchDataAdapter(response.response.data[0]);
    yield put(fetchWSWatchDataSuccess({ refNum: referenceNumber, data }));
  } else {
    yield put(fetchWSWatchDataFailed({}));
  }
  if (callback) {
    callback(response);
  }
}
// Root saga: fetches are collapsed to the first one in flight
// (takeLeading) while only the latest registration wins (takeLatest).
export function* registerWatchSaga() {
  yield all([
    takeLeading(fetchWSWatchData.type, callFetchWSWatchData),
    takeLatest(registerWatch.type, callRegisterWatch),
  ]);
}
|
#!/bin/sh
# Build and assemble the 5.x module for the Travis-selected Scala version.
# Abort if the module directory is missing so sbt never runs in the wrong
# working directory.
cd 5.x/ || exit 1
sbt ++$TRAVIS_SCALA_VERSION clean update assembly
cd ../
|
<reponame>Mamuya7/datrastoco-springboot-api
package com.mamuya.datrastocospringbootapi.controller;
import com.mamuya.datrastocospringbootapi.utility.Response;
import org.springframework.http.ResponseEntity;
import java.util.LinkedHashMap;
/**
 * Generic CRUD contract for REST controllers operating on request type
 * {@code T}; every operation wraps its result in a {@link Response} entity.
 *
 * @param <T> the request/payload type handled by the controller
 */
public interface CRUDController<T> {
    /** Creates a new resource from the given request. */
    ResponseEntity<Response> create(T request);

    /** Returns the resource with the given id. */
    ResponseEntity<Response> readOne(Integer id);

    /** Returns all resources. */
    ResponseEntity<Response> readAll();

    /** Replaces the resource with the given id using the request data. */
    ResponseEntity<Response> update(Integer id, T request);

    /** Deletes the resource with the given id. */
    ResponseEntity<Response> delete(Integer id);
}
|
def validate_and_format_url(url: str, format: str) -> str:
    """Validate *url* against *format* and return the formatted URL.

    *format* is a '/'-separated pattern whose components are either
    literals or ``{placeholder}`` markers, e.g. ``/api/{id}/items``.
    Each literal component must match the corresponding component of
    *url*; each placeholder is filled with the URL component at the same
    position.

    The previous implementation extracted placeholders with
    ``format.split('}')`` + ``strip('{}')``, which left path prefixes
    attached to the placeholder names, so any realistic format raised
    ValueError; the index mapping into the URL components was also
    positionally wrong. This version validates component-by-component.

    Raises:
        ValueError: if *url* has a different number of components than
            *format* or a literal component does not match.
    """
    components = url.split('/')
    format_components = format.split('/')
    # A structural mismatch can never satisfy the format.
    if len(components) != len(format_components):
        raise ValueError(f"{url} needs to contain {format}")
    formatted_components = []
    for actual, expected in zip(components, format_components):
        if expected.startswith('{') and expected.endswith('}'):
            # Placeholder: take the value from the URL.
            formatted_components.append(actual)
        elif actual != expected:
            raise ValueError(f"{url} needs to contain {format}")
        else:
            formatted_components.append(expected)
    return '/'.join(formatted_components)
import { Client, Message, MessageEmbed } from "discord.js";
module.exports = {
  name: "roleinfo",
  description: "Shows Information About a Role",
  category: "Information",
  memberpermissions: ["VIEW_CHANNEL"],
  cooldown: 2,
  usage: "roleinfo <role mentioned or typed>",
  // Replies with an embed describing the requested role.
  run: async (client: Client, message: Message, args: string[]) => {
    if (!args[0]) return message.reply("Role Was Not Specified");

    // Prefer an explicit mention; otherwise resolve by case-insensitive
    // name. Merging the two paths removes the duplicated embed code.
    const role =
      message.mentions.roles.first() ??
      message.guild?.roles.cache.find(
        (r) => r.name.toLowerCase() === args.join(" ").toLowerCase()
      );
    if (!role) return message.channel.send("Invalid Role");

    // Discord's <t:...> markup expects UNIX seconds while
    // createdTimestamp is in milliseconds; the mention branch previously
    // skipped this conversion and produced a broken timestamp.
    const createdAt = Math.floor(role.createdTimestamp / 1000);

    const embed = new MessageEmbed()
      .setTitle("Role Information for " + role.name)
      .addField("Name", role.name, true)
      .addField("ID", role.id.toString(), true)
      .addField("Created At", `<t:${createdAt}:R>`, true)
      .addField("Color", role.color.toString(), true)
      .addField("Display Seperate", role.hoist ? "Yes" : "No", true)
      .addField(
        "Members Who Have This Role",
        role.members.size.toString(),
        true
      )
      .addField("Mentionable", role.mentionable ? "Yes" : "No", true)
      .setColor("LUMINOUS_VIVID_PINK")
      .setFooter(`Requested By ${message.author.tag}`);
    return message.reply({ embeds: [embed] });
  },
};
|
The algorithm will follow these steps:
1. If the array has only one element, it is already sorted, so return.
2. Select an element as the pivot. The pivot can be chosen in several ways, for example the first or the last element of the array.
3. Partition the array into two sub-arrays: elements less than the pivot element and elements greater than the pivot element.
4. Recursively sort the two sub arrays created in the previous step.
5. Join the two sorted sub arrays and the pivot element to form the sorted array.
// Sort arr[low..high] in place using quick sort (Lomuto partition scheme
// with the last element as pivot).
void QuickSort(int arr[], int low, int high)
{
    // A sub-array with fewer than two elements is already sorted. The
    // previous guard (low > high) recursed needlessly on single elements.
    if (low >= high) {
        return;
    }

    // Choose the last element as the pivot.
    int pivot = arr[high];
    int i = (low - 1);  // end of the "<= pivot" region

    // Move every element that is <= pivot in front of the boundary.
    for (int j = low; j <= high - 1; j++)
    {
        if (arr[j] <= pivot)
        {
            i++;
            std::swap(arr[i], arr[j]);
        }
    }
    // Place the pivot directly after the "<= pivot" region.
    std::swap(arr[i + 1], arr[high]);

    // Recursively sort the partitions on both sides of the pivot.
    int p = i + 1;
    QuickSort(arr, low, p - 1);
    QuickSort(arr, p + 1, high);
}
import { Context, Injectable, NestInterceptor, Next } from "../../mod.ts";
@Injectable()
export class LoggingInterceptor implements NestInterceptor {
  /**
   * Measures the downstream handler's duration and exposes it via the
   * X-Response-Time response header.
   */
  async intercept(ctx: Context, next: Next) {
    const startedAt = Date.now();
    const outcome = await next();
    const elapsed = Date.now() - startedAt;
    ctx.response.headers.set("X-Response-Time", `${elapsed}ms`);
    // The interceptor contract requires forwarding the handler's result.
    return outcome;
  }
}
|
#!/bin/bash
# Builds the list of Hyperledger repositories to operate on from the
# command line. Intended to be driven by wrapper scripts that define
# script_dir, short_description and the INCLUDE_PROJECT_OPTIONS /
# INCLUDE_DATE_OPTIONS switches.
all_specified=FALSE

# Pull in the per-project repository arrays (fabric_repositories, ...).
. ${script_dir}/../common/repositories.sh

# Handle command line arguments
while [[ $# -gt 0 ]]
do
key="$1"

# Each project option appends that project's repositories and extends the
# output filename, unless --all was already selected. The
# ${INCLUDE_PROJECT_OPTIONS:+--x} expansion disables the option entirely
# when project options are not enabled by the calling script.
case $key in
    ${INCLUDE_PROJECT_OPTIONS:+--fabric})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${fabric_repositories[@]}" )
        filename+="-fabric"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--sawtooth})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${sawtooth_repositories[@]}" )
        filename+="-sawtooth"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--iroha})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${iroha_repositories[@]}" )
        filename+="-iroha"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--burrow})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${burrow_repositories[@]}" )
        filename+="-burrow"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--indy})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${indy_repositories[@]}" )
        filename+="-indy"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--composer})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${composer_repositories[@]}" )
        filename+="-composer"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--cello})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${cello_repositories[@]}" )
        filename+="-cello"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--explorer})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${explorer_repositories[@]}" )
        filename+="-explorer"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--quilt})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${quilt_repositories[@]}" )
        filename+="-quilt"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--caliper})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${caliper_repositories[@]}" )
        filename+="-caliper"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--ursa})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${ursa_repositories[@]}" )
        filename+="-ursa"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--grid})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${grid_repositories[@]}" )
        filename+="-grid"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--projects})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${project_repositories[@]}" )
        filename+="-projects"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--labs})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${labs_repositories[@]}" )
        filename+="-labs"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--other})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${other_repositories[@]}" )
        filename+="-other"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--gerrit})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${gerrit_repositories[@]}" )
        filename+="-gerrit"
    fi
    ;;
    ${INCLUDE_PROJECT_OPTIONS:+--github})
    if [[ "$all_specified" == FALSE ]] ; then
        repositories+=( "${github_repositories[@]}" )
        filename+="-github"
    fi
    ;;
    # --all replaces anything accumulated so far and suppresses any
    # later per-project options.
    ${INCLUDE_PROJECT_OPTIONS:+--all})
    all_specified=TRUE
    filename+="-all"
    repositories="${all_repositories[@]}"
    ;;
    ${INCLUDE_DATE_OPTIONS:+--since})
    since="$2"
    shift # past argument or value. 2nd shift below
    ;;
    ${INCLUDE_DATE_OPTIONS:+--until})
    until="$2"
    shift # past argument or value. 2nd shift below
    ;;
    --output-dir)
    output_dir=$2
    shift # past argument or value. 2nd shift below
    ;;
    --help)
    cat << EOM
$0 [options]
${short_description}
Options:
EOM
    if [[ -n "$INCLUDE_PROJECT_OPTIONS" ]]
    then
    cat << EOM_PROJECT
--fabric: Include Fabric repositories
--sawtooth: Include Sawtooth repositories
--iroha: Include Iroha repositories
--burrow: Include Burrow repositories
--indy: Include Indy repositories
--composer: Include Composer repositories
--cello: Include Cello repositories
--explorer: Include Explorer repositories
--quilt: Include Quilt repositories
--caliper: Include Caliper repositories
--ursa: Include Ursa repositories
--grid: Include Grid repositories
--projects: Include Project repositories
--labs: Include Labs repositories
--other: Include Other repositories
--gerrit: Include Gerrit repositories
--github: Include Github repositories
--all: Include all repositories (default)
EOM_PROJECT
    fi
    if [[ -n "$INCLUDE_DATE_OPTIONS" ]]
    then
    cat << EOM_DATE
--since: Includes commits more recent than this date (mm/dd/yyyy).
By default starts from the start of the repo.
--until: Includes commits older than this date (mm/dd/yyyy).
By default ends at the end of the repo.
EOM_DATE
    fi
    cat << EOM_STD_OPTIONS
--output-dir <dir>: Where should output be placed. (Default: /tmp)
--help: Shows this help message
EOM_STD_OPTIONS
    if [[ -n "$INCLUDE_PROJECT_OPTIONS" ]]
    then
    cat << EOM_NOTES
NOTE: If no options are specified, it is as if you had specified --all
NOTE: Multiple repository options can be specified
NOTE: --all will override all commands for individual projects
EOM_NOTES
    fi
    exit;
    ;;
    *)
    echo "Unknown option $key"
    exit 1
    ;;
esac
shift # past argument or value
done

# if no repositories were specified, then act as if --all was specified
if [ "$repositories" == "" ]
then
    repositories="${all_repositories[@]}"
    filename+="-all"
fi
|
<reponame>sebastien-d-me/nice-looking-select
/*! `php-template` grammar compiled for Highlight.js 11.3.1 */
// NOTE: machine-generated, minified Highlight.js grammar bundle -- do not
// edit by hand; regenerate from the highlight.js build instead.
var hljsGrammar=(()=>{"use strict";return n=>({name:"PHP template",
subLanguage:"xml",contains:[{begin:/<\?(php|=)?/,end:/\?>/,subLanguage:"php",
contains:[{begin:"/\\*",end:"\\*/",skip:!0},{begin:'b"',end:'"',skip:!0},{
begin:"b'",end:"'",skip:!0},n.inherit(n.APOS_STRING_MODE,{illegal:null,
className:null,contains:null,skip:!0}),n.inherit(n.QUOTE_STRING_MODE,{
illegal:null,className:null,contains:null,skip:!0})]}]})})()
;export default hljsGrammar;
#! /bin/bash
# Tests for custom SQLite string functions, run through the drive_sql
# harness: run_test executes the statement and check_output /
# check_error_output diff the result against the here-doc that follows.
# The here-doc bodies are expected output and must not be edited.

# --- startswith() ---
run_test ./drive_sql "select startswith('.foo', '.')"
check_output "" <<EOF
Row 0:
Column startswith('.foo', '.'): 1
EOF
run_test ./drive_sql "select startswith('foo', '.')"
check_output "" <<EOF
Row 0:
Column startswith('foo', '.'): 0
EOF

# --- endswith() ---
run_test ./drive_sql "select endswith('foo', '.')"
check_output "" <<EOF
Row 0:
Column endswith('foo', '.'): 0
EOF
run_test ./drive_sql "select endswith('foo.', '.')"
check_output "" <<EOF
Row 0:
Column endswith('foo.', '.'): 1
EOF
run_test ./drive_sql "select endswith('foo.txt', '.txt')"
check_output "" <<EOF
Row 0:
Column endswith('foo.txt', '.txt'): 1
EOF
run_test ./drive_sql "select endswith('a', '.txt')"
check_output "" <<EOF
Row 0:
Column endswith('a', '.txt'): 0
EOF

# --- regexp(): boolean match ---
run_test ./drive_sql "select regexp('abcd', 'abcd')"
check_output "" <<EOF
Row 0:
Column regexp('abcd', 'abcd'): 1
EOF
run_test ./drive_sql "select regexp('bc', 'abcd')"
check_output "" <<EOF
Row 0:
Column regexp('bc', 'abcd'): 1
EOF
run_test ./drive_sql "select regexp('[e-z]+', 'abcd')"
check_output "" <<EOF
Row 0:
Column regexp('[e-z]+', 'abcd'): 0
EOF
run_test ./drive_sql "select regexp('[e-z]+', 'ea')"
check_output "" <<EOF
Row 0:
Column regexp('[e-z]+', 'ea'): 1
EOF

# --- regexp_replace() ---
run_test ./drive_sql "select regexp_replace('test 1 2 3', '\\d+', 'N')"
check_output "" <<EOF
Row 0:
Column regexp_replace('test 1 2 3', '\d+', 'N'): test N N N
EOF

# --- regexp_match(): capture extraction and result typing ---
# A pattern without any capture groups is an error.
run_test ./drive_sql "select regexp_match('abc', 'abc')"
check_error_output "" <<EOF
error: sqlite3_exec failed -- regular expression does not have any captures
EOF
# NULL in either argument yields NULL.
run_test ./drive_sql "select regexp_match(null, 'abc')"
check_output "" <<EOF
Row 0:
Column regexp_match(null, 'abc'): (null)
EOF
run_test ./drive_sql "select regexp_match('abc', null) as result"
check_output "" <<EOF
Row 0:
Column result: (null)
EOF
# A single capture returns the captured value directly (text here).
run_test ./drive_sql "select typeof(result), result from (select regexp_match('(\d*)abc', 'abc') as result)"
check_output "" <<EOF
Row 0:
Column typeof(result): text
Column result:
EOF
# Multiple captures come back as a JSON object keyed col_0, col_1, ...
run_test ./drive_sql "select typeof(result), result from (select regexp_match('(\d*)abc(\d*)', 'abc') as result)"
check_output "" <<EOF
Row 0:
Column typeof(result): text
Column result: {"col_0":"","col_1":""}
EOF
# Numeric-looking captures are coerced to integer/real.
run_test ./drive_sql "select typeof(result), result from (select regexp_match('(\d+)', '123') as result)"
check_output "" <<EOF
Row 0:
Column typeof(result): integer
Column result: 123
EOF
run_test ./drive_sql "select typeof(result), result from (select regexp_match('a(\d+\.\d+)a', 'a123.456a') as result)"
check_output "" <<EOF
Row 0:
Column typeof(result): real
Column result: 123.456
EOF
# Named captures keep their names; unnamed ones fall back to col_N.
run_test ./drive_sql "select regexp_match('foo=(?<foo>\w+); (\w+)', 'foo=abc; 123') as result"
check_output "" <<EOF
Row 0:
Column result: {"foo":"abc","col_0":123}
EOF
run_test ./drive_sql "select regexp_match('foo=(?<foo>\w+); (\w+\.\w+)', 'foo=abc; 123.456') as result"
check_output "" <<EOF
Row 0:
Column result: {"foo":"abc","col_0":123.456}
EOF

# --- extract(): parse key/value text into JSON ---
run_test ./drive_sql "select extract('foo=1') as result"
check_output "" <<EOF
Row 0:
Column result: {"foo":1}
EOF
run_test ./drive_sql "select extract('foo=1; bar=2') as result"
check_output "" <<EOF
Row 0:
Column result: {"foo":1,"bar":2}
EOF
run_test ./drive_sql "select extract(null) as result"
check_output "" <<EOF
Row 0:
Column result: (null)
EOF
run_test ./drive_sql "select extract(1) as result"
check_output "" <<EOF
Row 0:
Column result: {"col_0":1}
EOF

# --- regexp_capture(): table-valued; one row per match/capture pair ---
run_test ./drive_sql "SELECT * FROM regexp_capture('foo bar', '\w+ (\w+)')"
check_output "" <<EOF
Row 0:
Column match_index: 0
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 2
Column range_start: 0
Column range_stop: 7
Column content: foo bar
Row 1:
Column match_index: 0
Column capture_index: 1
Column capture_name:
Column capture_count: 2
Column range_start: 4
Column range_stop: 7
Column content: bar
EOF
run_test ./drive_sql "SELECT * FROM regexp_capture('foo bar', '\w+ \w+')"
check_output "" <<EOF
Row 0:
Column match_index: 0
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 1
Column range_start: 0
Column range_stop: 7
Column content: foo bar
EOF
run_test ./drive_sql "SELECT * FROM regexp_capture('foo bar', '\w+ (?<word>\w+)')"
check_output "" <<EOF
Row 0:
Column match_index: 0
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 2
Column range_start: 0
Column range_stop: 7
Column content: foo bar
Row 1:
Column match_index: 0
Column capture_index: 1
Column capture_name: word
Column capture_count: 2
Column range_start: 4
Column range_stop: 7
Column content: bar
EOF
# A capture that does not participate in the match reports range -1/-1.
run_test ./drive_sql "SELECT * FROM regexp_capture('foo bar', '(bar)|\w+ (?<word>\w+)')"
check_output "" <<EOF
Row 0:
Column match_index: 0
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 3
Column range_start: 0
Column range_stop: 7
Column content: foo bar
Row 1:
Column match_index: 0
Column capture_index: 1
Column capture_name:
Column capture_count: 3
Column range_start: -1
Column range_stop: -1
Column content: (null)
Row 2:
Column match_index: 0
Column capture_index: 2
Column capture_name: word
Column capture_count: 3
Column range_start: 4
Column range_stop: 7
Column content: bar
EOF
# Missing arguments produce an empty result set.
run_test ./drive_sql "SELECT * FROM regexp_capture()"
check_output "" <<EOF
EOF
run_test ./drive_sql "SELECT * FROM regexp_capture('foo bar')"
check_output "" <<EOF
EOF
# An invalid pattern is reported as an error.
run_test ./drive_sql "SELECT * FROM regexp_capture('foo bar', '(')"
check_error_output "" <<EOF
error: sqlite3_exec failed -- Invalid regular expression: missing )
EOF
# Multiple matches increment match_index.
run_test ./drive_sql "SELECT * FROM regexp_capture('1 2 3 45', '(\d+)')"
check_output "" <<EOF
Row 0:
Column match_index: 0
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 2
Column range_start: 0
Column range_stop: 1
Column content: 1
Row 1:
Column match_index: 0
Column capture_index: 1
Column capture_name:
Column capture_count: 2
Column range_start: 0
Column range_stop: 1
Column content: 1
Row 2:
Column match_index: 1
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 2
Column range_start: 2
Column range_stop: 3
Column content: 2
Row 3:
Column match_index: 1
Column capture_index: 1
Column capture_name:
Column capture_count: 2
Column range_start: 2
Column range_stop: 3
Column content: 2
Row 4:
Column match_index: 2
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 2
Column range_start: 4
Column range_stop: 5
Column content: 3
Row 5:
Column match_index: 2
Column capture_index: 1
Column capture_name:
Column capture_count: 2
Column range_start: 4
Column range_stop: 5
Column content: 3
Row 6:
Column match_index: 3
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 2
Column range_start: 6
Column range_stop: 8
Column content: 45
Row 7:
Column match_index: 3
Column capture_index: 1
Column capture_name:
Column capture_count: 2
Column range_start: 6
Column range_stop: 8
Column content: 45
EOF
# An anchored pattern only matches once.
run_test ./drive_sql "SELECT * FROM regexp_capture('foo foo', '^foo')"
check_output "" <<EOF
Row 0:
Column match_index: 0
Column capture_index: 0
Column capture_name: (null)
Column capture_count: 1
Column range_start: 0
Column range_stop: 3
Column content: foo
EOF
|
// repo: OhFinance/oh-app
import { Box, Grid } from "@material-ui/core";
import { Button, Flex, Heading, Text } from "@ohfinance/oh-ui";
import { Balance } from "components/Balance";
import { Bank } from "config/constants/types";
import { FC } from "react";
import { BackButton } from "components/BackButton/BackButton";
import { Skeleton } from "@material-ui/lab";
/** Props for the withdraw-confirmation step of the Earn flow. */
export interface EarnWithdrawConfirmationProps {
  /** Bank whose tokens are being redeemed. */
  bank: Bank;
  /** Raw user input; used only to keep the Withdraw button disabled when empty. */
  input: string;
  /** Bank-token amount being returned (display string). */
  withdrawAmount: string;
  /** Underlying-token amount the user receives (display string). */
  receiveAmount: string;
  /** Underlying tokens per bank token (display string). */
  exchangeRate: string;
  /** Invoked by the back button. */
  onBack: () => void;
  /** Invoked by the Cancel button. */
  onDismiss: () => void;
  /** Invoked by the Withdraw button. */
  onWithdraw: () => void;
}
/**
 * Confirmation step shown before executing a withdrawal from a bank:
 * summarises the bank tokens returned, the underlying tokens received and
 * the current bank-token exchange rate, with Cancel / Withdraw actions.
 *
 * NOTE(review): the amount props are typed `string` but are compared
 * against undefined below — presumably they can be undefined while data
 * loads (skeletons are shown then); consider marking them optional.
 */
export const EarnWithdrawConfirmation: FC<EarnWithdrawConfirmationProps> = ({
  bank,
  input,
  withdrawAmount,
  receiveAmount,
  exchangeRate,
  onBack,
  onDismiss,
  onWithdraw,
}) => {
  return (
    <Grid container direction="column" spacing={2}>
      {/* Back Button */}
      <Grid item>
        <BackButton onClick={onBack} />
      </Grid>
      {/* Headline: underlying amount to be received, with token image */}
      <Grid item>
        <Box mb={1}>
          {receiveAmount !== undefined ? (
            <Flex align="center">
              <Heading gutterBottom={false}>
                <b>
                  <Balance value={receiveAmount} />
                </b>
              </Heading>
              <Flex ml={1} center>
                <img
                  src={bank.underlying.image}
                  alt={bank.alt}
                  height={40}
                  width="auto"
                />
              </Flex>
            </Flex>
          ) : (
            <Skeleton width={120} height={60} />
          )}
        </Box>
        <Heading>{bank.underlying.symbol} Tokens</Heading>
      </Grid>
      <Grid item>
        {/* <Text color="textSecondary">Outputs are estimated</Text> */}
      </Grid>
      {/* Summary rows: bank tokens returned, underlying received, rate */}
      <Grid item>
        <Flex align="center" justify="space-between">
          <Text>Return</Text>
          {withdrawAmount !== undefined ? (
            <Text>
              <Balance value={withdrawAmount} /> {bank.symbol}
            </Text>
          ) : (
            <Skeleton width={80} height={30} />
          )}
        </Flex>
        <Flex align="center" justify="space-between">
          <Text>Withdraw</Text>
          {receiveAmount !== undefined ? (
            <Text>
              <Balance value={receiveAmount} /> {bank.underlying.symbol}
            </Text>
          ) : (
            <Skeleton width={80} height={30} />
          )}
        </Flex>
        <Flex align="center" justify="space-between">
          <Text>Bank Token Rate</Text>
          {exchangeRate !== undefined ? (
            <Text>
              1 {bank.symbol} ={" "}
              <Balance
                value={exchangeRate}
                decimals={bank.underlying.decimals}
              />{" "}
              {bank.underlying.symbol}
            </Text>
          ) : (
            <Skeleton width={80} height={30} />
          )}
        </Flex>
      </Grid>
      {/* Cancel / Withdraw actions; Withdraw disabled until input is non-empty */}
      <Grid item>
        <Grid container spacing={2}>
          <Grid item xs={6}>
            <Button fullWidth variant="contained" onClick={onDismiss}>
              Cancel
            </Button>
          </Grid>
          <Grid item xs={6}>
            <Button
              fullWidth
              variant="contained"
              color="primary"
              disabled={!input}
              onClick={onWithdraw}
            >
              Withdraw
            </Button>
          </Grid>
        </Grid>
      </Grid>
    </Grid>
  );
};
|
#!/bin/bash
# Deploy the current directory (recursively) to the personal web space on
# the IME-USP Linux server via rsync over SSH.
# NOTE(review): no --delete flag, so files removed locally remain on the server.
rsync -r . gabrielrc@linux.ime.usp.br:~/www/mac0499
|
package cn.zhangjingyao;
import org.apache.dubbo.config.spring.context.annotation.EnableDubbo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cache.annotation.EnableCaching;
import org.springframework.transaction.annotation.EnableTransactionManagement;
/**
 * Application entry point for the system-consumer service.
 *
 * Boots Spring Boot with caching enabled.
 * NOTE(review): EnableDubbo and EnableTransactionManagement are imported at
 * the top of this file but not applied here — confirm whether they should be.
 */
@EnableCaching
@EnableAutoConfiguration
@SpringBootApplication
public class SystemConsumerApplication {

    public static void main(String[] args) {
        SpringApplication.run(SystemConsumerApplication.class,args);
    }
}
|
// filename: src/routing/Router.js
import Joi from 'joi';
/**
 * Maintains an ordered list of routes ({ matcher: RegExp, transform: fn })
 * and resolves which route should handle a message, matching on its topic.
 */
class Router {
  constructor(routes = []) {
    // Routes are stored most-recently-added first (see addRoute's unshift).
    this.routes = [];
    this.addRoutes(routes);
  }

  /**
   * Validate and register one route.
   *
   * The raw route is validated with Joi: `matcher` must be a RegExp and
   * `transform` a function; an identity transform is supplied by default.
   * New routes are prepended, so later additions take lookup priority.
   *
   * @param {{matcher: RegExp, transform?: Function}} rawRoute
   * @returns {Router} this, for chaining
   */
  addRoute(rawRoute) {
    const route = Joi.attempt(
      {
        transform: message => message,
        ...rawRoute,
      },
      Joi.object()
        .keys({
          matcher: Joi.object().type(RegExp),
          transform: Joi.func(),
        })
        .unknown()
    );
    this.routes.unshift(route);
    return this;
  }

  /** Register several routes in order; each one goes through addRoute. */
  addRoutes(routes) {
    routes.forEach(this.addRoute.bind(this));
  }

  /** @returns {Array} the registered routes, most recently added first. */
  getRoutes() {
    return this.routes;
  }

  /**
   * Find the route that should handle the given message.
   *
   * NOTE(review): when exactly one route is registered it is returned
   * WITHOUT testing its matcher — presumably a default-route shortcut;
   * confirm this is intended.
   *
   * @returns the first route whose matcher tests true against
   *          message.topic, or undefined when none matches.
   */
  findRoute(message) {
    if (!this.routes.length) {
      return undefined;
    }
    if (this.routes.length === 1) {
      return this.routes[0];
    }
    return this.routes.find(({ matcher }) => matcher.test(message.topic));
  }
}
export default Router;
|
def remove_duplicates(nums):
    """Return the unique elements of *nums* as a new list.

    Uses dict.fromkeys() rather than set() so the first-seen order of the
    elements is preserved (set() iteration order is arbitrary), while still
    de-duplicating in O(n). Elements must be hashable.
    """
    return list(dict.fromkeys(nums))
// SPDX-License-Identifier: MIT
package mux
import (
"net/http"
"github.com/issue9/mux/v5/middleware"
)
// Resource is a router view scoped to a single resource path.
//
//  srv := NewRouter("")
//  r, _ := srv.Resource("/api/users/{id}")
//  r.Get(h) // equivalent to srv.Get("/api/users/{id}")
//  r.Post(h) // equivalent to srv.Post("/api/users/{id}")
//  url := r.URL(map[string]string{"id":5}) // yields /api/users/5
type Resource struct {
	router  *Router
	pattern string
	ms      []middleware.Func
}
// Handle is shorthand for Router.Handle(pattern, h, methods...), applying
// this resource's middlewares to h first.
func (r *Resource) Handle(h http.Handler, methods ...string) *Resource {
	r.router.Handle(r.pattern, middleware.Apply(h, r.ms...), methods...)
	return r
}

// Get is shorthand for Router.Get(pattern, h).
func (r *Resource) Get(h http.Handler) *Resource {
	return r.Handle(h, http.MethodGet)
}

// Post is shorthand for Router.Post(pattern, h).
func (r *Resource) Post(h http.Handler) *Resource {
	return r.Handle(h, http.MethodPost)
}

// Delete is shorthand for Router.Delete(pattern, h).
func (r *Resource) Delete(h http.Handler) *Resource {
	return r.Handle(h, http.MethodDelete)
}

// Put is shorthand for Router.Put(pattern, h).
func (r *Resource) Put(h http.Handler) *Resource {
	return r.Handle(h, http.MethodPut)
}

// Patch is shorthand for Router.Patch(pattern, h).
func (r *Resource) Patch(h http.Handler) *Resource {
	return r.Handle(h, http.MethodPatch)
}

// Any is shorthand for Router.Any(pattern, h): registers h for all methods.
func (r *Resource) Any(h http.Handler) *Resource { return r.Handle(h) }

// HandleFunc is the http.HandlerFunc variant of Handle; see
// Router.HandleFunc(pattern, fun, ...).
func (r *Resource) HandleFunc(f http.HandlerFunc, methods ...string) *Resource {
	return r.Handle(f, methods...)
}

// GetFunc is shorthand for Router.GetFunc(pattern, func).
func (r *Resource) GetFunc(f http.HandlerFunc) *Resource {
	return r.HandleFunc(f, http.MethodGet)
}

// PutFunc is shorthand for Router.PutFunc(pattern, func).
func (r *Resource) PutFunc(f http.HandlerFunc) *Resource {
	return r.HandleFunc(f, http.MethodPut)
}

// PostFunc is shorthand for Router.PostFunc(pattern, func).
func (r *Resource) PostFunc(f http.HandlerFunc) *Resource {
	return r.HandleFunc(f, http.MethodPost)
}

// DeleteFunc is shorthand for Router.DeleteFunc(pattern, func).
func (r *Resource) DeleteFunc(f http.HandlerFunc) *Resource {
	return r.HandleFunc(f, http.MethodDelete)
}

// PatchFunc is shorthand for Router.PatchFunc(pattern, func).
func (r *Resource) PatchFunc(f http.HandlerFunc) *Resource {
	return r.HandleFunc(f, http.MethodPatch)
}

// AnyFunc is shorthand for Router.AnyFunc(pattern, func).
func (r *Resource) AnyFunc(f http.HandlerFunc) *Resource { return r.HandleFunc(f) }
// Remove deletes the routes registered for this pattern with the given methods.
func (r *Resource) Remove(methods ...string) { r.router.Remove(r.pattern, methods...) }

// Clean removes every route registered for this resource's pattern.
func (r *Resource) Clean() { r.router.Remove(r.pattern) }

// URL builds a URL from the given parameters.
//
// params supplies values for the identically-named parameters of the route
// pattern; for plain string patterns without parameters it has no effect.
//  res, _ := m.Resource("/posts/{id}")
//  res.URL(map[string]string{"id": "1"}, "") // /posts/1
//
//  res, _ := m.Resource("/posts/{id}/{path}")
//  res.URL(map[string]string{"id": "1","path":"author/profile"}) // /posts/1/author/profile
func (r *Resource) URL(strict bool, params map[string]string) (string, error) {
	return r.router.URL(strict, r.pattern, params)
}

// Resource creates a resource route item.
//
// pattern is the resource path;
// m are middleware functions, invoked in order.
func (r *Router) Resource(pattern string, m ...middleware.Func) *Resource {
	return &Resource{router: r, pattern: pattern, ms: m}
}

// Resource creates a resource route item under this prefix.
//
// pattern is the resource path (appended to the prefix);
// m are middleware functions, invoked in order; the prefix's own middlewares
// are inherited and placed before m.
func (p *Prefix) Resource(pattern string, m ...middleware.Func) *Resource {
	ms := make([]middleware.Func, 0, len(p.ms)+len(m))
	ms = append(ms, p.ms...)
	ms = append(ms, m...)
	return p.router.Resource(p.prefix+pattern, ms...)
}

// Router returns the *Router instance associated with this resource.
func (r *Resource) Router() *Router { return r.router }
|
import * as fs from "fs";
import * as path from "path";
import * as winston from "winston";
import { ILogger } from "./Interfaces";
import { Environment } from "../../shared/Enums";
import EnvironmentProvider from "./EnvironmentProvider";
import { injectable } from "inversify";
import AppConstants from "./AppConstants";
/**
* Simple logger wrapper around a concrete logger instance, Winston in this case
* Ref: https://github.com/winstonjs/winston
*/
@injectable()
export default class Logger implements ILogger {
    private internalLogger: winston.LoggerInstance;
    private environment: EnvironmentProvider;
    // Log directory: the NODE_LOG environment variable wins, app default otherwise.
    private logDir: string = process.env.NODE_LOG || AppConstants.LOG_DIR;

    public constructor() {
        this.environment = new EnvironmentProvider();
        this.ensureDirectory();
        this.initializeLogger();
    }

    /** Debug-level logging; suppressed entirely in production. */
    public debug(...params: any[]): void {
        if (!this.environment.isProduction()) {
            this.internalLogger.debug(this.paramsToString(params));
        }
    }

    /** Error-level logging. */
    public error(...params: any[]): void {
        this.internalLogger.error(this.paramsToString(params));
    }

    /** Info-level logging. */
    public info(...params: any[]): void {
        this.internalLogger.info(this.paramsToString(params));
    }

    /** Warn-level logging. */
    public warn(...params: any[]): void {
        this.internalLogger.warn(this.paramsToString(params));
    }

    /** Fatal logging; winston has no "fatal" level, so error is used with a marker. */
    public fatal(...params: any[]): void {
        this.internalLogger.error("FATAL! >> " + this.paramsToString(params));
    }

    /** Create the log directory if it does not exist yet. */
    private ensureDirectory(): void {
        if (!fs.existsSync(this.logDir)) {
            fs.mkdirSync(this.logDir);
        }
    }

    /** Configure winston with a console transport and a per-environment log file. */
    private initializeLogger(): void {
        this.internalLogger = new winston.Logger({
            timestamp: true,
            transports: [
                new (winston.transports.Console)({
                    colorize: true,
                    level: "debug",
                    prettyPrint: true,
                    silent: false,
                    timestamp: false,
                }),
                new (winston.transports.File)({
                    filename: path.join(this.logDir, Environment[this.environment.getEnvironment()] + ".log"),
                }),
            ],
        });
    }

    /**
     * Join log arguments into a single space-separated message.
     *
     * Fix: the previous version declared a rest parameter but was always
     * called with the already-collected array, so the array got re-wrapped
     * and Array#toString comma-joined the values instead of the intended
     * space join. String() also tolerates null/undefined, which calling
     * .toString() on the value did not.
     */
    private paramsToString(params: any[]): string {
        return params.map((value) => String(value)).join(" ");
    }
}
|
import React from 'react';
import PropTypes from 'prop-types';
import { observer } from 'mobx-react';
import styled from 'react-emotion';
import { Table as AntdTable, Button } from 'antd';
// Styled antd table: light border, grey header, white body rows with a
// pointer cursor (rows are clickable), padded empty-state placeholder.
const Table = styled(AntdTable)`
  border: 1px solid #ebedf0;
  .ant-table-thead > tr > th {
    background-color: #f6f6f6;
  }
  .ant-table-tbody {
    background-color: #ffffff;
    .ant-table-row {
      cursor: pointer;
    }
  }
  .ant-table-placeholder {
    padding: 50px;
  }
`;

// Borderless button used for the per-row Edit/Delete actions.
const ActionButton = styled(Button)`
  border: none;
  padding: 0;
  &:first-child {
    margin-right: 16px;
  }
`;

// Small circular avatar thumbnail.
const Image = styled('img')`
  width: 35px;
  height: 35px;
  border-radius: 100%;
  border: 1px solid rgba(0, 0, 0, 0.1);
`;
/**
 * Paginated user listing with avatar and contact columns plus per-row
 * Edit/Delete actions. Clicking a row calls onClickRow(id); the action
 * buttons stop propagation so they do not also trigger the row click.
 */
const UserTable = observer(
  ({
    loading, users, pageSize, total, onEdit, onDelete, onClickRow, onPageChange,
  }) => {
    const columns = [
      {
        title: 'Id',
        dataIndex: 'id',
        key: 'id',
        width: 60,
      },
      {
        title: 'Avatar',
        dataIndex: 'avatar',
        key: 'avatar',
        width: 80,
        render: (text, record) => <Image src={record.avatar} />,
      },
      {
        title: 'Full name',
        dataIndex: 'full_name',
        key: 'full_name',
      },
      {
        title: 'Email',
        dataIndex: 'email',
        key: 'email',
      },
      {
        title: 'Phone',
        dataIndex: 'phone',
        key: 'phone',
      },
      {
        title: 'Job title',
        dataIndex: 'job_title',
        key: 'job_title',
      },
      {
        // Edit/Delete buttons; each handler receives the row's id.
        title: 'Action',
        dataIndex: '',
        key: 'actions',
        render: (text, record) => (
          <div>
            <ActionButton
              type="primary"
              ghost
              icon="edit"
              onClick={(e) => {
                e.stopPropagation();
                onEdit(record.id);
              }}
            >
              Edit
            </ActionButton>
            <ActionButton
              type="danger"
              ghost
              icon="delete"
              onClick={(e) => {
                e.stopPropagation();
                onDelete(record.id);
              }}
            >
              Delete
            </ActionButton>
          </div>
        ),
      },
    ];
    return (
      <Table
        columns={columns}
        dataSource={users}
        pagination={{
          hideOnSinglePage: true,
          showQuickJumper: true,
          pageSize,
          total,
          onChange: onPageChange,
        }}
        rowKey="id"
        onRow={record => ({
          onClick: () => onClickRow(record.id),
        })}
        loading={loading}
      />
    );
  },
);
UserTable.propTypes = {
  loading: PropTypes.bool,
  users: PropTypes.arrayOf(PropTypes.object),
  // NOTE(review): currentPage is declared but never read by the component —
  // presumably antd's pagination tracks the page internally; confirm intent.
  currentPage: PropTypes.number,
  pageSize: PropTypes.number,
  total: PropTypes.number,
  onEdit: PropTypes.func,
  onDelete: PropTypes.func,
  onClickRow: PropTypes.func,
  onPageChange: PropTypes.func,
};

// Safe no-op defaults so the component renders without any handlers wired up.
UserTable.defaultProps = {
  users: [],
  onEdit: () => {},
  onDelete: () => {},
  onClickRow: () => {},
  onPageChange: () => {},
};

export default UserTable;
|
class CustomDateTime:
    """A lightweight date-time value holder with ISO-8601-style rendering.

    Stores the six calendar/clock components verbatim (no validation or
    timezone handling) and renders them zero-padded on demand.
    """

    def __init__(self, year, month, day, hour, minute, second):
        self.year = year
        self.month = month
        self.day = day
        self.hour = hour
        self.minute = minute
        self.second = second

    def _render(self, separator):
        """Return 'YYYY-MM-DD<separator>HH:MM:SS' with zero padding."""
        date_part = f"{self.year:04d}-{self.month:02d}-{self.day:02d}"
        time_part = f"{self.hour:02d}:{self.minute:02d}:{self.second:02d}"
        return f"{date_part}{separator}{time_part}"

    def to_isoformat(self):
        """Render in ISO 8601 form, e.g. '2022-10-15T12:30:45'."""
        return self._render("T")

    def to_string_repr(self):
        """Render with a space separator, e.g. '2022-10-15 12:30:45'."""
        return self._render(" ")
# Smoke-check the CustomDateTime formatters against fixed reference strings.
dt = CustomDateTime(2022, 10, 15, 12, 30, 45)
isoformat = "2022-10-15T12:30:45"
string_repr = "2022-10-15 12:30:45"

assert dt.to_isoformat() == isoformat
assert dt.to_string_repr() == string_repr
import React from "react"
import FindingCard from "./FindingCard"
import brim from "../../brim"
import loginTo from "../../test/helpers/loginTo"
import provide from "../../test/helpers/provide"
import {Finding} from "src/js/state/Investigation/types"
let store

// Log in to a fixture workspace/space before each test so the store carries
// the state FindingCard expects.
beforeEach(async () => {
  const setup = await loginTo("workspace1", "space1")
  store = setup.store
})

// Collect just the dispatched action types, for easy containment checks.
function getActionTypes() {
  return store.getActions().map((a) => a.type)
}

// Minimal finding fixture targeting space1 with no pins.
const finding: Finding = {
  ts: brim.time().toTs(),
  search: {
    program: "finding card test",
    pins: [],
    spanArgs: [brim.time().toTs(), brim.time().toTs()],
    spaceId: "1",
    spaceName: "space1",
    target: "events"
  }
}

test("Clicking the history submits the search", () => {
  const el = provide(
    store,
    <FindingCard spaceId="1" workspaceId="1" finding={finding} />
  )
  // Clear login-time actions so only the click's dispatches are inspected.
  store.clearActions()
  el.simulate("click")
  expect(getActionTypes()).toContain("SEARCH_BAR_SUBMIT")
})
|
#! /bin/sh
#
# Copyright (c) 2016 Nat! - Mulle kybernetiK
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of Mulle kybernetiK nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# Put mulle-bootstrap's libexec directory first so its helper scripts win.
PATH="/usr/local/libexec/mulle-bootstrap:$PATH"
export PATH

# Source the MinGW support functions unless they were already loaded.
[ -z "${MULLE_BOOTSTRAP_MINGW_SH}" ] && . mulle-bootstrap-mingw.sh

# Run mingw32-make with PATH rewritten into a MinGW-compatible build path.
PATH="`mingw_buildpath "${PATH}"`" mingw32-make.exe "$@"
|
// repo metadata: gh_stars 10-100
package infoblox
import "fmt"
// RecordHost returns a Resource handle for the WAPI "record:host" object type.
//
// Reference: https://192.168.2.200/wapidoc/objects/record.host.html
func (c *Client) RecordHost() *Resource {
	return &Resource{
		conn:       c,
		wapiObject: "record:host",
	}
}

// RecordHostObject models an Infoblox DNS host record (record:host).
type RecordHostObject struct {
	Object
	Comment         string         `json:"comment,omitempty"`
	ConfigureForDNS bool           `json:"configure_for_dns,omitempty"`
	Ipv4Addrs       []HostIpv4Addr `json:"ipv4addrs,omitempty"`
	Ipv6Addrs       []HostIpv6Addr `json:"ipv6addrs,omitempty"`
	Name            string         `json:"name,omitempty"`
	Ttl             int            `json:"ttl,omitempty"`
	View            string         `json:"view,omitempty"`
}
// HostIpv4Addr is a single IPv4 address entry of a host record.
type HostIpv4Addr struct {
	Object           `json:"-"`
	ConfigureForDHCP bool   `json:"configure_for_dhcp,omitempty"`
	Host             string `json:"host,omitempty"`
	Ipv4Addr         string `json:"ipv4addr,omitempty"`
	MAC              string `json:"mac,omitempty"`
}

// HostIpv6Addr is a single IPv6 address entry of a host record.
type HostIpv6Addr struct {
	Object           `json:"-"`
	ConfigureForDHCP bool   `json:"configure_for_dhcp,omitempty"`
	Host             string `json:"host,omitempty"`
	Ipv6Addr         string `json:"ipv6addr,omitempty"`
	MAC              string `json:"mac,omitempty"`
}
// RecordHostObject wraps an existing object reference in a RecordHostObject
// bound to this client; it does not fetch anything.
func (c *Client) RecordHostObject(ref string) *RecordHostObject {
	host := RecordHostObject{}
	host.Object = Object{
		Ref: ref,
		r:   c.RecordHost(),
	}
	return &host
}
// GetRecordHost fetches the host record identified by ref.
//
// opts are passed through to the underlying WAPI GET request.
func (c *Client) GetRecordHost(ref string, opts *Options) (*RecordHostObject, error) {
	resp, err := c.RecordHostObject(ref).get(opts)
	if err != nil {
		// Fix: the previous message claimed a "created" host record, but this
		// function performs a plain fetch. Message follows Go conventions
		// (lowercase, no trailing punctuation).
		return nil, fmt.Errorf("could not get host record: %s", err)
	}

	var out RecordHostObject
	err = resp.Parse(&out)
	if err != nil {
		return nil, err
	}
	return &out, nil
}
// FindRecordHost searches for host records whose name equals the given value.
func (c *Client) FindRecordHost(name string) ([]RecordHostObject, error) {
	field := "name"
	conditions := []Condition{{Field: &field, Value: name}}

	resp, err := c.RecordHost().find(conditions, nil)
	if err != nil {
		return nil, err
	}

	var out []RecordHostObject
	if err := resp.Parse(&out); err != nil {
		return nil, err
	}
	return out, nil
}
|
package com.zlikun.open.api;
import com.zlikun.open.configure.AppProperties;
import lombok.extern.slf4j.Slf4j;
import okhttp3.FormBody;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
import java.io.IOException;
import static org.junit.Assert.assertTrue;
/**
 * Exercises the OSChina "publish blog" open API.
 * http://www.oschina.net/openapi/docs/blog_pub
 *
 * @author zlikun
 * @date 2017/11/17 21:59
 */
@Slf4j
@RunWith(SpringRunner.class)
@SpringBootTest
public class BlogPubTest {

    @Autowired
    AppProperties properties ;
    @Autowired
    OkHttpClient client ;

    @Value("${http.header.user_agent}")
    String userAgent ;

    @Test
    public void request() throws IOException {

        // Build the form parameters for the blog_pub endpoint
        FormBody body = new FormBody.Builder()
                .add("access_token" ,properties.getAccessToken())
                .add("dataType" ,"json")
                .add("title" ,"测试文章-标题")
                .add("content" ,"测试文章-正文")
                // Save as draft — yes: 1, no: 0
                .add("save_as_draft" ,"0")
                // User-defined blog category
                .add("catalog" ,"zlikun")
                // Blog abstract
                .add("abstracts" ,"测试文章-摘要")
                // Blog tags, comma separated
                .add("tags" ,"xxx,yyy,zzz")
                // System blog classification id
                .add("classification" ,"1575308")
                // Original: 1, repost: 4
                .add("type" ,"1")
                // Source URL when reposting
                .add("origin_url" ,"https://zlikun.com/")
                // Public: 0, private: 1
                .add("privacy" ,"1")
                // Allow comments: 0, disable comments: 1
                .add("deny_comment" ,"0")
                // Auto-generate table of contents: 0, do not generate: 1
                .add("auto_content" ,"1")
                // Not pinned: 0, pinned: 1
                .add("as_top" ,"1")
                .build() ;

        // Build and execute the HTTP request
        Request request = new Request.Builder()
                .url("https://www.oschina.net/action/openapi/blog_pub")
                .addHeader("User-Agent" ,userAgent)
                .post(body)
                .build() ;

        Response response = client.newCall(request).execute() ;
        log.info("user response code = {} ,message = {}" ,response.code() ,response.message());
        assertTrue(response.isSuccessful()) ;
        // Expected body: {"error_description":"操作成功完成","error":"200"}
        log.info("/--------\n{}\n-----------------------------------------/" ,response.body().string());
    }
}
|
#!/bin/sh
set -e
set -u
set -o pipefail

# Print the failing script path and line number when any command errors.
# NOTE(review): this file declares #!/bin/sh but relies on bash features
# (trap ... ERR, arrays, the `function` keyword) — it effectively needs bash.
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${FRAMEWORKS_FOLDER_PATH+x} ]; then
  # If FRAMEWORKS_FOLDER_PATH is not set, then there's nowhere for us to copy
  # frameworks to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

# Allow parallel code-signing unless the caller overrides it.
COCOAPODS_PARALLEL_CODE_SIGN="${COCOAPODS_PARALLEL_CODE_SIGN:-false}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# Used as a return value for each invocation of `strip_invalid_archs` function.
STRIP_BINARY_RETVAL=0

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies and strips a vendored framework
# $1: path (or basename) of the .framework to embed into the app bundle.
install_framework()
{
# Resolve the framework source: prefer the full path under BUILT_PRODUCTS_DIR,
# then its basename there, then $1 as-is.
# NOTE(review): if none of the three tests match, `source` stays unset and the
# later "${source}" expansion aborts under `set -u` — presumably intentional
# fail-fast for a missing framework; confirm.
if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
local source="${BUILT_PRODUCTS_DIR}/$1"
elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
elif [ -r "$1" ]; then
local source="$1"
fi
local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Follow a symlinked source so rsync copies the real framework directory.
if [ -L "${source}" ]; then
echo "Symlinked..."
source="$(readlink "${source}")"
fi
# Use filter instead of exclude so missing patterns don't throw errors.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
# Locate the framework's executable inside the copied bundle, falling back to
# a bare binary, and resolving a symlinked binary to its target.
local basename
basename="$(basename -s .framework "$1")"
binary="${destination}/${basename}.framework/${basename}"
if ! [ -r "$binary" ]; then
binary="${destination}/${basename}"
elif [ -L "${binary}" ]; then
echo "Destination binary is symlinked..."
dirname="$(dirname "${binary}")"
binary="${dirname}/$(readlink "${binary}")"
fi
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
strip_invalid_archs "$binary"
fi
# Resign the code if required by the build settings to avoid unstable apps
code_sign_if_enabled "${destination}/$(basename "$1")"
# Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
local swift_runtime_libs
# List the @rpath Swift dylibs the binary links against (otool -LX), keep the
# unique names, and copy each from the toolchain's stdlib into the app.
swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u)
for lib in $swift_runtime_libs; do
echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
code_sign_if_enabled "${destination}/${lib}"
done
fi
}
# Copies and strips a vendored dSYM
# $1: path to a .framework.dSYM bundle; copied into DERIVED_FILES_DIR, stripped
# of architectures not being built, then moved to DWARF_DSYM_FOLDER_PATH.
install_dsym() {
local source="$1"
if [ -r "$source" ]; then
# Copy the dSYM into a the targets temp dir.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DERIVED_FILES_DIR}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DERIVED_FILES_DIR}"
# Derive the DWARF binary path inside the copied dSYM bundle.
local basename
basename="$(basename -s .framework.dSYM "$source")"
binary="${DERIVED_FILES_DIR}/${basename}.framework.dSYM/Contents/Resources/DWARF/${basename}"
# Strip invalid architectures so "fat" simulator / device frameworks work on device
if [[ "$(file "$binary")" == *"Mach-O "*"dSYM companion"* ]]; then
strip_invalid_archs "$binary"
fi
# strip_invalid_archs reports success via the STRIP_BINARY_RETVAL global
# (1 = stripped/usable, 0 = no matching architectures).
if [[ $STRIP_BINARY_RETVAL == 1 ]]; then
# Move the stripped file into its final destination.
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${DERIVED_FILES_DIR}/${basename}.framework.dSYM\" \"${DWARF_DSYM_FOLDER_PATH}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${DERIVED_FILES_DIR}/${basename}.framework.dSYM" "${DWARF_DSYM_FOLDER_PATH}"
else
# The dSYM was not stripped at all, in this case touch a fake folder so the input/output paths from Xcode do not reexecute this script because the file is missing.
touch "${DWARF_DSYM_FOLDER_PATH}/${basename}.framework.dSYM"
fi
fi
}
# Copies the bcsymbolmap files of a vendored framework
# $1: path to a .bcsymbolmap file; copied into BUILT_PRODUCTS_DIR.
install_bcsymbolmap() {
local bcsymbolmap_path="$1"
local destination="${BUILT_PRODUCTS_DIR}"
# Escape the inner quotes in the logged command so it matches the rsync
# invocation actually executed (the sibling install_framework/install_dsym
# functions already escape them; the original line here did not, so the echoed
# command dropped its quoting).
echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${bcsymbolmap_path}\" \"${destination}\""
rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${bcsymbolmap_path}" "${destination}"
}
# Signs a framework with the provided identity
# $1: path to the bundle/dylib to sign. Runs only when an identity is expanded
# and code signing is neither disallowed nor not-required by the build settings.
code_sign_if_enabled() {
# NOTE(review): `-a` inside `[ ... ]` is an obsolescent test operator — works in
# bash but is flagged by POSIX; confirm before porting this script.
if [ -n "${EXPANDED_CODE_SIGN_IDENTITY:-}" -a "${CODE_SIGNING_REQUIRED:-}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
# Use the current code_sign_identity
echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
# The command is built as a string so "&" can be appended for parallel signing,
# then run via eval.
# NOTE(review): $1 is embedded in single quotes inside an eval'd string — a
# path containing a single quote would break the command; paths here come from
# the build system, but verify if inputs can ever be attacker-controlled.
local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS:-} --preserve-metadata=identifier,entitlements '$1'"
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
# Background the codesign call; the top-level script `wait`s for all of them.
code_sign_cmd="$code_sign_cmd &"
fi
echo "$code_sign_cmd"
eval "$code_sign_cmd"
fi
}
# Strip invalid architectures
# $1: path to a fat Mach-O binary. Removes every architecture slice not present
# in $ARCHS, in place. Communicates the outcome through the global
# STRIP_BINARY_RETVAL (1 = binary usable, 0 = no matching architectures).
strip_invalid_archs() {
binary="$1"
# Get architectures for current target binary
binary_archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | awk '{$1=$1;print}' | rev)"
# Intersect them with the architectures we are building for
intersected_archs="$(echo ${ARCHS[@]} ${binary_archs[@]} | tr ' ' '\n' | sort | uniq -d)"
# If there are no archs supported by this binary then warn the user
if [[ -z "$intersected_archs" ]]; then
echo "warning: [CP] Vendored binary '$binary' contains architectures ($binary_archs) none of which match the current build architectures ($ARCHS)."
STRIP_BINARY_RETVAL=0
return
fi
stripped=""
for arch in $binary_archs; do
if ! [[ "${ARCHS}" == *"$arch"* ]]; then
# Strip non-valid architectures in-place
lipo -remove "$arch" -output "$binary" "$binary"
stripped="$stripped $arch"
fi
done
if [[ "$stripped" ]]; then
echo "Stripped $binary of architectures:$stripped"
fi
STRIP_BINARY_RETVAL=1
}
# Embed the vendored framework; the pod set is the same for both build
# configurations, so a single case statement dispatches on CONFIGURATION.
case "$CONFIGURATION" in
Debug|Release)
install_framework "${BUILT_PRODUCTS_DIR}/MIRToolTip/MIRToolTip.framework"
;;
esac
# When signing in parallel, the codesign invocations were backgrounded by
# code_sign_if_enabled; block here until all of them finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
wait
fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.