text stringlengths 1 1.05M |
|---|
/**
* @author ooooo
* @date 2021/5/22 11:37
*/
#ifndef CPP_1442__SOLUTION2_H_
#define CPP_1442__SOLUTION2_H_
#include <iostream>
#include <unordered_map>
#include <vector>
using namespace std;
class Solution {
public:
int countTriplets(vector<int> &arr) {
int n = arr.size();
vector<int> pre(n + 1);
for (int i = 0; i < n; i++) {
pre[i + 1] = pre[i] ^ arr[i];
}
// pre[j] ^ pre[i] = pre[k+1] ^ pre[j]
int ans = 0;
for (int i = 0; i < n; i++) {
for (int k = i + 1; k < n; k++) {
if (pre[i] == pre[k + 1]) {
ans += (k - i);
}
}
}
return ans;
}
};
#endif //CPP_1442__SOLUTION2_H_
|
def linkedlist_to_array(linked_list):
arr = []
node = linked_list
while node is not None:
arr.append(node.val)
node = node.next
return arr |
#!/bin/bash
CODE_PATH=$HOME/git
OUT_PATH_BASE="/storage/groups/ml01/workspace/${USER}/ncem"
GS_PATH="${OUT_PATH_BASE}/grid_searches/"
DATA_PATH="/storage/groups/ml01/workspace/${USER}/ncem/data"
SBATCH_P="gpu_p"
SBATCH_QOS="gpu"
SBATCH_GRES="gpu:1"
SBATCH_TIME="2-00:00:00"
SBATCH_MEM="50G"
SBATCH_C="4"
SBATCH_NICE="1000"
SBATCH_NODELIST="supergpu02pxe"
MODEL_CLASS="INTERACTIONS"
DATA_SET="10xvisium"
OPTIMIZER="ADAM"
DOMAIN_TYPE="PATIENT"
LR_KEYS=("1")
L1_KEY=("1")
L2_KEYS=("1")
BATCH_SIZE=("S")
N_RINGS_KEYS=("0")
N_EVAL_KEYS=("10")
GS_KEY="$(date '+%y%m%d')c_${MODEL_CLASS}_${DOMAIN_TYPE}_${DATA_SET}"
OUT_PATH=${GS_PATH}/${GS_KEY}
# dummy values for this model class have hard-encoded default values in this grid search
rm -rf ${OUT_PATH}/jobs
rm -rf ${OUT_PATH}/logs
rm -rf ${OUT_PATH}/results
mkdir -p ${OUT_PATH}/jobs
mkdir -p ${OUT_PATH}/logs
mkdir -p ${OUT_PATH}/results
for rd in ${N_RINGS_KEYS[@]}; do
for l1 in ${L1_KEY[@]}; do
for bs in ${BATCH_SIZE[@]}; do
sleep 0.1
job_file="${OUT_PATH}/jobs/run_${MODEL_CLASS}_${DATA_SET}_${OPTIMIZER}_${LR_KEYS}_${l1}_${L2_KEYS}_${bs}_${rd}_${N_EVAL_KEYS}_${GS_KEY}.cmd"
echo "#!/bin/bash
#SBATCH -J ${MODEL_CLASS}_${DATA_SET}_${OPTIMIZER}_${LR_KEYS}_${l1}_${L2_KEYS}_${bs}_${rd}_${N_EVAL_KEYS}_${GS_KEY}
#SBATCH -o ${OUT_PATH}/jobs/run_${MODEL_CLASS}_${DATA_SET}_${OPTIMIZER}_${LR_KEYS}_${l1}_${L2_KEYS}_${bs}_${rd}_${N_EVAL_KEYS}_${GS_KEY}.out
#SBATCH -e ${OUT_PATH}/jobs/run_${MODEL_CLASS}_${DATA_SET}_${OPTIMIZER}_${LR_KEYS}_${l1}_${L2_KEYS}_${bs}_${rd}_${N_EVAL_KEYS}_${GS_KEY}.err
#SBATCH -p gpu_p
#SBATCH --qos=gpu
#SBATCH --gres=gpu:1
#SBATCH -t 2-00:00:00
#SBATCH --mem=50G
#SBATCH -c 4
#SBATCH --nice=1000
#SBATCH --nodelist=supergpu05
source "$HOME"/.bashrc
conda activate ncem
python3 ${CODE_PATH}/ncem_benchmarks/scripts/train_script_linear.py ${DATA_SET} ${OPTIMIZER} ${DOMAIN_TYPE} ${LR_KEYS} ${l1} ${L2_KEYS} ${bs} ${rd} ${N_EVAL_KEYS} ${MODEL_CLASS} ${GS_KEY} ${DATA_PATH} ${OUT_PATH}
" > ${job_file}
sbatch $job_file
done
done
done |
def delete_element(array, element)
array.delete(element)
return array
end
delete_element([3, 5, 12, 6, 9], 5) |
<reponame>hchimachi/sites<filename>wordpress/wp-content/plugins/pdf-builder-for-wpforms/js/lib/velocityAsync/velocityAsync.ts<gh_stars>0
(jQuery.fn as any).velocityAsync=function(property:any, duration,easing:'easeInExp'|'easeOutExp'|'linear'){
let $element=this;
return new Promise((resolve => {
$element.velocity(property,duration,easing,resolve);
}));
}; |
import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing';
import { FormItemRadioComponent } from './form-item-radio.component';
import { MatInputModule } from '@angular/material/input';
import { MatListModule } from '@angular/material/list';
import { NoopAnimationsModule } from '@angular/platform-browser/animations';
import { FormsModule } from '@angular/forms';
import { testCreditClaimForm } from 'app/modules/qa/testing';
import * as _ from 'lodash';
describe('FormItemRadioComponent', () => {
let component: FormItemRadioComponent;
let fixture: ComponentFixture<FormItemRadioComponent>;
let item=_.cloneDeep(testCreditClaimForm.sections[3]);
beforeEach(waitForAsync(() => {
TestBed.configureTestingModule({
imports: [
MatInputModule,
NoopAnimationsModule,
MatListModule,
FormsModule
],
declarations: [ FormItemRadioComponent ],
providers: [
],
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(FormItemRadioComponent);
component = fixture.componentInstance;
item=_.cloneDeep(testCreditClaimForm.sections[3]);
component.item=item;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('should render '+item.items.length+' list items', () => {
const compiled = fixture.debugElement.nativeElement;
const options = [...compiled.querySelector('mat-selection-list').children];
expect(options.length).toEqual(item.items.length);
options.forEach(option=>{
expect(option.querySelector('.mat-list-text').textContent.trim()).toEqual(item.items[options.indexOf(option)].title);
})
});
it('should render active list option in accordance field value', () => {
const compiled = fixture.debugElement.nativeElement;
const options = [...compiled.querySelector('mat-selection-list').children];
expect(options.filter(option=>option.getAttribute('aria-selected')==="true").length).toEqual(0);
item.value=item.items[1].optionValue;
fixture.detectChanges();
expect(options[1].getAttribute('aria-selected')).toEqual('true');
expect(options.filter(option=>option.getAttribute('aria-selected')==="true").length).toEqual(1);
});
it('should render active list with MULTIPLE option selected in accordance field value', () => {
const compiled = fixture.debugElement.nativeElement;
const options = [...compiled.querySelector('mat-selection-list').children];
expect(options.filter(option=>option.getAttribute('aria-selected')==="true").length).toEqual(0);
item.multiple=true;
item.value=[item.items[1].optionValue, item.items[3].optionValue];
fixture.detectChanges();
expect(options[1].getAttribute('aria-selected')).toEqual('true');
expect(options[3].getAttribute('aria-selected')).toEqual('true');
expect(options.filter(option=>option.getAttribute('aria-selected')==="true").length).toEqual(2);
});
it('should render 2 error messages', waitForAsync(() => {
const compiled = fixture.debugElement.nativeElement;
const errors=[
{message: 'First Error Message'},
{message: 'Second Error Message'},
];
expect(compiled.querySelector('mat-error')).toEqual(null);
item.errors=errors;
fixture.detectChanges();
errors.forEach(err=>{
expect(compiled.querySelector('mat-error').children[errors.indexOf(err)].textContent).toEqual(err.message);
});
}));
it('should change the field value by click on list item', () => {
const compiled = fixture.debugElement.nativeElement;
const options = [...compiled.querySelector('mat-selection-list').children];
expect(options.filter(option=>option.getAttribute('aria-selected')==="true").length).toEqual(0);
options[0].click();
fixture.detectChanges();
expect(item.value).toEqual(item.items[0].optionValue);
options[1].click();
fixture.detectChanges();
expect(item.value).toEqual(item.items[1].optionValue);
});
it('should change the field value by click on list items in MULTIPLE options mode', () => {
const compiled = fixture.debugElement.nativeElement;
const options = [...compiled.querySelector('mat-selection-list').children];
expect(options.filter(option=>option.getAttribute('aria-selected')==="true").length).toEqual(0);
item.multiple=true;
options[0].click();
fixture.detectChanges();
expect(item.value).toEqual([item.items[0].optionValue]);
options[2].click();
fixture.detectChanges();
expect(item.value).toEqual([item.items[0].optionValue, item.items[2].optionValue]);
options[0].click();
fixture.detectChanges();
expect(item.value).toEqual([item.items[2].optionValue]);
options[1].click();
fixture.detectChanges();
expect(item.value).toEqual([item.items[2].optionValue, item.items[1].optionValue]);
options[3].click();
fixture.detectChanges();
expect(item.value).toEqual([item.items[2].optionValue, item.items[1].optionValue, item.items[3].optionValue]);
});
});
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.14.0
// source: session.proto
// protoc --go_out=. *.proto
package proto
import (
proto "github.com/golang/protobuf/proto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
timestamppb "google.golang.org/protobuf/types/known/timestamppb"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type Data struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
UserID uint64 `protobuf:"varint,1,opt,name=UserID,proto3" json:"UserID,omitempty"`
UserName string `protobuf:"bytes,2,opt,name=UserName,proto3" json:"UserName,omitempty"`
ExpirationTokenTime *timestamppb.Timestamp `protobuf:"bytes,3,opt,name=ExpirationTokenTime,proto3" json:"ExpirationTokenTime,omitempty"`
}
func (x *Data) Reset() {
*x = Data{}
if protoimpl.UnsafeEnabled {
mi := &file_session_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Data) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Data) ProtoMessage() {}
func (x *Data) ProtoReflect() protoreflect.Message {
mi := &file_session_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Data.ProtoReflect.Descriptor instead.
func (*Data) Descriptor() ([]byte, []int) {
return file_session_proto_rawDescGZIP(), []int{0}
}
func (x *Data) GetUserID() uint64 {
if x != nil {
return x.UserID
}
return 0
}
func (x *Data) GetUserName() string {
if x != nil {
return x.UserName
}
return ""
}
func (x *Data) GetExpirationTokenTime() *timestamppb.Timestamp {
if x != nil {
return x.ExpirationTokenTime
}
return nil
}
type Session struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
ID uint64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
UserID uint64 `protobuf:"varint,2,opt,name=UserID,proto3" json:"UserID,omitempty"`
Token string `protobuf:"bytes,3,opt,name=Token,proto3" json:"Token,omitempty"`
User *User `protobuf:"bytes,4,opt,name=User,proto3" json:"User,omitempty"`
Data *Data `protobuf:"bytes,5,opt,name=Data,proto3" json:"Data,omitempty"`
}
func (x *Session) Reset() {
*x = Session{}
if protoimpl.UnsafeEnabled {
mi := &file_session_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *Session) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*Session) ProtoMessage() {}
func (x *Session) ProtoReflect() protoreflect.Message {
mi := &file_session_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use Session.ProtoReflect.Descriptor instead.
func (*Session) Descriptor() ([]byte, []int) {
return file_session_proto_rawDescGZIP(), []int{1}
}
func (x *Session) GetID() uint64 {
if x != nil {
return x.ID
}
return 0
}
func (x *Session) GetUserID() uint64 {
if x != nil {
return x.UserID
}
return 0
}
func (x *Session) GetToken() string {
if x != nil {
return x.Token
}
return ""
}
func (x *Session) GetUser() *User {
if x != nil {
return x.User
}
return nil
}
func (x *Session) GetData() *Data {
if x != nil {
return x.Data
}
return nil
}
var File_session_proto protoreflect.FileDescriptor
var file_session_proto_rawDesc = []byte{
0x0a, 0x0d, 0x73, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12,
0x05, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d,
0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0a, 0x75, 0x73, 0x65, 0x72, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x22, 0x88, 0x01, 0x0a, 0x04, 0x44, 0x61, 0x74, 0x61, 0x12, 0x16, 0x0a, 0x06,
0x55, 0x73, 0x65, 0x72, 0x49, 0x44, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x55, 0x73,
0x65, 0x72, 0x49, 0x44, 0x12, 0x1a, 0x0a, 0x08, 0x55, 0x73, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65,
0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x08, 0x55, 0x73, 0x65, 0x72, 0x4e, 0x61, 0x6d, 0x65,
0x12, 0x4c, 0x0a, 0x13, 0x45, 0x78, 0x70, 0x69, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x6f,
0x6b, 0x65, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e,
0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e,
0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x13, 0x45, 0x78, 0x70, 0x69, 0x72,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x54, 0x69, 0x6d, 0x65, 0x22, 0x89,
0x01, 0x0a, 0x07, 0x53, 0x65, 0x73, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x0e, 0x0a, 0x02, 0x49, 0x44,
0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x02, 0x49, 0x44, 0x12, 0x16, 0x0a, 0x06, 0x55, 0x73,
0x65, 0x72, 0x49, 0x44, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x55, 0x73, 0x65, 0x72,
0x49, 0x44, 0x12, 0x14, 0x0a, 0x05, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28,
0x09, 0x52, 0x05, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x12, 0x1f, 0x0a, 0x04, 0x55, 0x73, 0x65, 0x72,
0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x55,
0x73, 0x65, 0x72, 0x52, 0x04, 0x55, 0x73, 0x65, 0x72, 0x12, 0x1f, 0x0a, 0x04, 0x44, 0x61, 0x74,
0x61, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e,
0x44, 0x61, 0x74, 0x61, 0x52, 0x04, 0x44, 0x61, 0x74, 0x61, 0x42, 0x09, 0x5a, 0x07, 0x2e, 0x3b,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
file_session_proto_rawDescOnce sync.Once
file_session_proto_rawDescData = file_session_proto_rawDesc
)
func file_session_proto_rawDescGZIP() []byte {
file_session_proto_rawDescOnce.Do(func() {
file_session_proto_rawDescData = protoimpl.X.CompressGZIP(file_session_proto_rawDescData)
})
return file_session_proto_rawDescData
}
var file_session_proto_msgTypes = make([]protoimpl.MessageInfo, 2)
var file_session_proto_goTypes = []interface{}{
(*Data)(nil), // 0: proto.Data
(*Session)(nil), // 1: proto.Session
(*timestamppb.Timestamp)(nil), // 2: google.protobuf.Timestamp
(*User)(nil), // 3: proto.User
}
var file_session_proto_depIdxs = []int32{
2, // 0: proto.Data.ExpirationTokenTime:type_name -> google.protobuf.Timestamp
3, // 1: proto.Session.User:type_name -> proto.User
0, // 2: proto.Session.Data:type_name -> proto.Data
3, // [3:3] is the sub-list for method output_type
3, // [3:3] is the sub-list for method input_type
3, // [3:3] is the sub-list for extension type_name
3, // [3:3] is the sub-list for extension extendee
0, // [0:3] is the sub-list for field type_name
}
func init() { file_session_proto_init() }
func file_session_proto_init() {
if File_session_proto != nil {
return
}
file_user_proto_init()
if !protoimpl.UnsafeEnabled {
file_session_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Data); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
file_session_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
switch v := v.(*Session); i {
case 0:
return &v.state
case 1:
return &v.sizeCache
case 2:
return &v.unknownFields
default:
return nil
}
}
}
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
RawDescriptor: file_session_proto_rawDesc,
NumEnums: 0,
NumMessages: 2,
NumExtensions: 0,
NumServices: 0,
},
GoTypes: file_session_proto_goTypes,
DependencyIndexes: file_session_proto_depIdxs,
MessageInfos: file_session_proto_msgTypes,
}.Build()
File_session_proto = out.File
file_session_proto_rawDesc = nil
file_session_proto_goTypes = nil
file_session_proto_depIdxs = nil
}
|
#!/usr/bin/env bash
# @since 2019-04-16 04:01
# @author vivaxy
npx tsc
|
<gh_stars>0
import { NgModule } from '@angular/core';
import { BrowserModule } from '@angular/platform-browser';
import { BrowserAnimationsModule } from '@angular/platform-browser/animations';
import { AppComponent, AppComponentModule } from './app-component';
import {
getChunkStrategyCredentialsMap,
getConcurrentSchedulerStrategyCredentialsMap, getFreStrategyCredentialsMap,
PriorityNameToLevel,
RX_CUSTOM_STRATEGIES,
RX_PRIMARY_STRATEGY,
} from './rx-angular-pocs';
import { ENVIRONMENT_SETTINGS } from './shared/environment.token';
import { environment } from '../environments/environment';
import { HttpClientModule } from '@angular/common/http';
import { HomeComponent } from './features/home/home.component';
import { tap } from 'rxjs/operators';
import { observeOnPriority } from './rx-angular-pocs/cdk/render-strategies/scheduling/operators';
import { concurrent } from './rx-angular-pocs/cdk/render-strategies/scheduling/scheduler/react-concurrent-scheduler/concurrent';
@NgModule({
imports: [
BrowserModule,
BrowserAnimationsModule,
HttpClientModule,
AppComponentModule,
],
providers: [
{
provide: ENVIRONMENT_SETTINGS,
useValue: environment,
},
{
provide: RX_CUSTOM_STRATEGIES,
useValue: {
...getConcurrentSchedulerStrategyCredentialsMap(),
...getChunkStrategyCredentialsMap(),
...getFreStrategyCredentialsMap(),
test: {
name: 'test',
work: (cdRef) => {
cdRef.detectChanges();
},
behavior: (work: any, context: any) => {
return (o$) =>
o$.pipe(
observeOnPriority(concurrent(PriorityNameToLevel.low)),
tap(work)
);
},
},
},
multi: true,
},
{
provide: RX_PRIMARY_STRATEGY,
useValue: 'normal',
},
],
declarations: [HomeComponent],
exports: [],
bootstrap: [AppComponent],
})
export class AppModule {}
|
<gh_stars>0
package it.madlabs.patternrec.web.rest.controllers.common;
import it.madlabs.patternrec.web.rest.controllers.common.ApiException;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ResponseStatus;
@ResponseStatus(value = HttpStatus.NOT_FOUND)
public class NotFoundException extends ApiException {
public NotFoundException (String msg) {
super(404, msg);
}
}
|
x = 5
y = 8
sum = x + y
puts "The sum of x and y is #{sum}" |
#!/usr/bin/env bash
set -o nounset -o errexit -o pipefail
cat - <<EOF
<h1>ICFP Contest 2021</h1>
<p>
ICFP Contest 2021 took place 12:00 PM Friday 9 July - 12:00 PM Monday 12 July UTC.
</p>
<img style="width: 80%; max-width: 200px" alt="ICFP Contest 2021" src="images/logo.svg"><br>
EOF
for i in $(ls -r updates); do
TITLE="$(head -n1 "updates/$i")"
FORMAT="+%a, %d %B %H:%M %Z"
DATE="$(date --date="$(tail +2 "updates/$i" | head -n1)" -u "$FORMAT")"
SLUG="$(echo "$i" | sed 's/\..*//')"
cat - <<EOF
<section>
<h2 id="$SLUG">$TITLE</h2>
<strong>$DATE</strong>
$(tail +3 "updates/$i")
</section>
EOF
done
|
export { default } from 'ember-medium-editor/components/me-image-dragging';
|
#!/usr/bin/env bash
# Copyright 2017 DigitalOcean
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o pipefail
set -o nounset
readonly KOPS_VERSION=v1.10.0
declare -rx KOPS_FEATURE_FLAGS=AlphaAllowDO
readonly REQUIRED_ENV_VARS='KOPS_STATE_STORE S3_ENDPOINT S3_ACCESS_KEY_ID S3_SECRET_ACCESS_KEY KOPS_CLUSTER_NAME'
# check_envs verifies that all required environment variables are set.
check_envs() {
for REQ_ENV_VAR in ${REQUIRED_ENV_VARS}; do
check_env "${REQ_ENV_VAR}"
done
}
# check_env verifies that the given environment variable is set.
check_env() {
declare -r _env="$1"
if [[ -z "${!_env:-}" ]]; then
echo "environment variable ${_env} must be set" >&2
exit 1
fi
}
ensure_deps() {
ensure_kubectl
ensure_kops
}
# ensure_kubectl makes sure kubectl is installed and, if not, installs it when
# running on the CI.
ensure_kubectl() {
if ! type kubectl > /dev/null 2>&1; then
if [[ -z ${CI:-} ]]; then
echo "please install missing dependency: kubectl" >&2
return 1
fi
echo "==> installing kubectl"
curl --fail --location --remote-name "https://storage.googleapis.com/kubernetes-release/release/$(curl --fail --silent --show-error https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl"
chmod +x ./kubectl
sudo mv ./kubectl /usr/local/bin/kubectl
fi
}
# ensure_kops makes sure kops is installed and, if not, installs a specific
# version when running on the CI.
ensure_kops() {
if ! type kops > /dev/null 2>&1; then
if [[ -z ${CI:-} ]]; then
echo "please install missing dependency: kops" >&2
return 1
fi
echo "==> installing kops"
(
# kops' Makefile leverages the CI variable; unset it to make sure we
# build a production release.
unset CI
go get -u k8s.io/kops
cd "${GOPATH}/src/k8s.io/kops"
git checkout "${KOPS_VERSION}"
sed -i 's#image: digitalocean/digitalocean-cloud-controller-manager:.*#image: digitalocean/digitalocean-cloud-controller-manager:v0.1.8#' upup/models/cloudup/resources/addons/digitalocean-cloud-controller.addons.k8s.io/k8s-1.8.yaml.template
make kops
chmod u+x .build/local/kops
sudo cp .build/local/kops /usr/local/bin/
)
fi
}
|
#!/bin/bash
cd /backup # where the backups are located
echo -e "\n---------------------------------------------------------------------------------\n"
echo -e "What Backup you want to unzip ?\n"
files=$(ls *.tar.gz)
i=1
for j in $files
do
echo "$i.$j"
file[i]=$j
i=$(( i + 1 ))
done
echo -e "\n---------------------------------------------------------------------------------\n"
echo -e "Enter number\n"
read input
re='^[0-9]+$'
while :; do
if ! [[ $input =~ $re ]] ; then
if [ "$input" == "l" ] || [ "$input" == "exit" ] || [ "$input" == "q" ]; then
echo "This script will exit now."
exit 1
else
echo "Error: Not a number"
echo -e "Enter number"
read input
fi
else
break
fi
done
read -r -p "Please insert 'YES' to allow the installation. Otherwise this script will exit here: " ANSWER
if [ "$ANSWER" == "YES" ] || [ "$ANSWER" == "y" ]; then
tar xf ${file[$input]}
echo "The backup get's unzipped..."
else
echo "Your answer was \"$ANSWER\" and not YES. So this script will exit now."
exit 1
fi
|
const { ApolloClient } = require('apollo-client');
const { createHttpLink } = require('apollo-link-http');
const { InMemoryCache } = require('apollo-cache-inmemory');
// Initialize Apollo Client
const client = new ApolloClient({
link: createHttpLink({
uri: 'http://localhost:4000/graphql',
}),
cache: new InMemoryCache(),
});
// Execute your query
client
.query({
query: gql`
query {
Customer(id: 1) {
name
email
}
}
`,
}).then(console.log); |
#ifndef _INTERPOLATION_CUDA_KERNEL
#define _INTERPOLATION_CUDA_KERNEL
#include <torch/serialize/tensor.h>
#include <vector>
#include <ATen/cuda/CUDAContext.h>
void nearestneighbor_cuda(int b, int n, int m, at::Tensor unknown_tensor, at::Tensor known_tensor, at::Tensor dist2_tensor, at::Tensor idx_tensor);
void interpolation_forward_cuda(int b, int c, int m, int n, at::Tensor points_tensor, at::Tensor idx_tensor, at::Tensor weight_tensor, at::Tensor out_tensor);
void interpolation_backward_cuda(int b, int c, int n, int m, at::Tensor grad_out_tensor, at::Tensor idx_tensor, at::Tensor weight_tensor, at::Tensor grad_points_tensor);
void nearestneighbor_cuda_fast(int b, int n, int m, at::Tensor unknown_tensor, at::Tensor known_tensor, at::Tensor dist2_tensor, at::Tensor idx_tensor);
void interpolation_forward_cuda_fast(int b, int c, int m, int n, at::Tensor points_tensor, at::Tensor idx_tensor, at::Tensor weight_tensor, at::Tensor out_tensor);
#ifdef __cplusplus
extern "C" {
#endif
void nearestneighbor_cuda_launcher(int b, int n, int m, const float *unknown, const float *known, float *dist2, int *idx);
void interpolation_forward_cuda_launcher(int b, int c, int m, int n, const float *points, const int *idx, const float *weight, float *out);
void interpolation_backward_cuda_launcher(int b, int c, int n, int m, const float *grad_out, const int *idx, const float *weight, float *grad_points);
void nearestneighbor_cuda_launcher_fast(int b, int n, int m, const float *unknown, const float *known, float *dist2, int *idx);
void interpolation_forward_cuda_launcher_fast(int b, int c, int m, int n, const float *points, const int *idx, const float *weight, float *out);
#ifdef __cplusplus
}
#endif
#endif
|
builddir="/musl-cross-build"
outputdir="/musl-cross"
decho () {
echo $(TZ='America/Toronto' date "+%Y-%m-%d %H:%M:%S") $@
}
secho () {
if [ $? -eq 0 ]; then
decho "--success!"
else
decho "--failure!"
fi
}
# build cross compiler for given target ($1) and gcc_options ($2)
build_cross () {
cd $builddir/musl-cross-make-0.9.8/
mkdir -p logs
echo "# $1 soft float setup" > config.mak
echo "TARGET = $1" >> config.mak
echo "OUTPUT = $outputdir" >> config.mak
echo "STAT = -static --static" >> config.mak
echo "FLAG = -g0 -O2 -fno-align-functions -fno-align-jumps -fno-align-loops -fno-align-labels" >> config.mak
echo "COMMON_CONFIG += --disable-nls CFLAGS=\"\${FLAG}\" CXXFLAGS=\"\${FLAG}\" FFLAGS=\"\${FLAG}\" LDFLAGS=\"-s \${STAT}\"" >> config.mak
echo "GCC_CONFIG += --enable-languages=c,c++ --disable-libquadmath --disable-decimal-float --disable-multilib $2" >> config.mak
decho BUILDING TARGET $1 WITH config.mak:
cat config.mak
decho MAKE
make > /dev/null #logs/$1-build.log 2>&1
secho
decho INSTALL
make install > /dev/null #logs/$1-install.log 2>&1
secho
decho CLEAN
make clean >> /dev/null #logs/$1-install.log 2>&1
secho
decho DONE BUILDING TARGET $1
echo
}
# build a compiler which is just a driver calling another compiler with extra arguments
# args: compiler-name-prefix original-compiler-name-prefix args-before args-after
# prefixes should be given without dash, e.g 'aarch64-linux-musl' to create 'aarch64-linux-musl-gcc'
# creates gcc, g++, cpp
build_driver () {
decho CREATE DRIVER $1 "->" $2
outdir=$outputdir/bin
for cmd in gcc g++ cpp; do
echo '#!/bin/bash' > $outdir/$1-$cmd
echo 'eval $(dirname $0)/'$2-$cmd $3' "$@" '$4 >> $outdir/$1-$cmd
chmod +x $outdir/$1-$cmd
done
}
build_cross i486-linux-musl "--with-arch=i486 --with-cpu=i486 --with-tune=i486"
build_driver i586-linux-musl i486-linux-musl "-march=pentium-mmx -mtune=pentium-mmx -mmmx" " "
build_driver i686-linux-musl i486-linux-musl "-march=pentium3 -mtune=pentium3 -mmmx -msse -mfpmath=sse" " "
build_driver i786-linux-musl i486-linux-musl "-march=pentium4 -mtune=pentium4 -mmmx -msse -msse2 -mfpmath=sse" " "
build_cross arm-linux-musleabi "--with-arch=armv6"
build_cross arm-linux-musleabihf "--with-arch=armv6"
build_driver armv6sf-linux-musl arm-linux-musleabi "-march=armv6k -mfloat-abi=soft "
build_driver armv6hf-linux-musl arm-linux-musleabihf "-march=armv6k -mfpu=vfp -mfloat-abi=hard"
build_driver armv7hf-linux-musl arm-linux-musleabihf "-mthumb -march=armv7 -mfpu=neon -mfloat-abi=hard"
build_cross aarch64-linux-musl "--enable-neon"
build_cross x86_64-linux-musl "--with-fpmath=sse --enable-sse --enable-sse2 --enable-mmx"
|
import React, { useState } from 'react';
import { Platform, StyleSheet, SafeAreaView, View, Text, FlatList, TouchableOpacity, TextInput, KeyboardAvoidingView, Alert, ActivityIndicator } from 'react-native';
import Post from '../components/Post';
import { useRoute } from '@react-navigation/native';
import { RouteProp, useNavigation } from '@react-navigation/core';
import Comment from '../components/Comment';
import Colors from '../constants/Colors';
import { Ionicons } from '@expo/vector-icons';
import { CommentType, PostType } from '../types';
import { useEffect } from 'react';
import { axiosHandler, getData, tokenName, tokenType } from '../helper';
import { COMMENT_URL } from '../urls';
import { StoreStateType } from '../store/types';
import { useDispatch, useSelector } from 'react-redux';
import { updateCommentsList } from '../store/comments/actionCreators';
export default function SinglePostScreen() {
const [commentText, setCommentText] = useState<string>('');
const [error, setError] = useState<string | null>(null);
const [loading, setLoading] = useState<boolean>(false);
const [post, setPost] = useState<PostType | null>(null);
const route: RouteProp<{ params: { postId: string } }, 'params'> = useRoute();
const navigation = useNavigation();
const { posts, comments } = useSelector(mapStateToProps);
const dispatch = useDispatch();
const onSubmitComment = async () => {
if (!commentText || commentText.trim() === '') {
Alert.alert(
'Error',
'The Comment field is empty!',
[{
text: 'Ok',
}]
);
return;
}
setLoading(true);
const tokenString = await getData(tokenName);
if (!tokenString) {
navigation.navigate('Login');
return;
}
const token: tokenType = JSON.parse(tokenString);
const response = await axiosHandler({
url: `${COMMENT_URL}`,
method: 'POST',
data: {
post_id: route.params.postId, comment: commentText
},
token: token.access_token,
})?.catch(e => setError(e.response.data));
if (response) {
setCommentText('');
await getComments();
}
setLoading(false);
}
const onCancel = () => {
navigation.goBack();
}
const getComments = async () => {
const tokenString = await getData(tokenName);
if (!tokenString) {
navigation.navigate('Login');
return;
}
const token: tokenType = JSON.parse(tokenString);
const response = await axiosHandler({
url: `${COMMENT_URL}/${route.params.postId}`,
method: 'GET',
token: token.access_token,
})?.catch(e => setError(e.response.data));
if (response) {
dispatch(updateCommentsList(response.data.results.reverse()));
}
}
useEffect(() => {
dispatch(updateCommentsList([]));
const currentPost: PostType | undefined = posts.find(post => post.id === route.params.postId);
if(!currentPost){
navigation.goBack();
return;
}
setPost(currentPost);
getComments();
}, [posts])
useEffect(() => {
if (error) {
Alert.alert(
'Error',
error,
[{
text: 'Ok',
onPress: () => setError(null)
}]
);
}
}, [error])
if (!post) {
return <Text>Loader</Text>
}
return (
<SafeAreaView style={styles.container}>
<View style={styles.header}>
<TouchableOpacity onPress={onCancel} activeOpacity={0.8}>
<Ionicons color={Colors.light.tabIconSelected} size={30} name='md-chevron-back' />
</TouchableOpacity>
<Text style={styles.headerTitle}>Post</Text>
</View>
<FlatList
ListHeaderComponent={() => <Post post={post} single />}
data={comments}
renderItem={({ item }) => <Comment key={item.id} comment={item} postId={post.id} />}
style={styles.commentSection}
/>
<KeyboardAvoidingView
style={styles.newCommentSection}
behavior={Platform.OS === 'ios' ? 'padding' : undefined}
// keyboardVerticalOffset={Platform.OS === 'ios' ? 40 : 0}
>
<TextInput
style={styles.commentInput}
onChangeText={text => setCommentText(text)}
value={commentText}
placeholder='Write a comment'
/>
<TouchableOpacity onPress={onSubmitComment} activeOpacity={0.8}>
{
loading ? <ActivityIndicator /> :
<Ionicons color={commentText === '' ? Colors.light.tabIconDefault : Colors.light.tabIconSelected} size={35} name='arrow-up-circle' />
}
</TouchableOpacity>
</KeyboardAvoidingView>
</SafeAreaView>
);
}
const mapStateToProps = (state: StoreStateType) => ({
posts: state.posts.posts,
comments: state.comments.comments
});
// Styles for the post-detail screen.  Greys/whites are hard-coded; accent
// colors come from the Colors.light palette used in the JSX above.
const styles = StyleSheet.create({
  // Full-screen white container.
  container: {
    flex: 1,
    alignItems: 'flex-start',
    backgroundColor: '#fff',
  },
  // Top bar: back button on the left, title absolutely centred.
  header: {
    flexDirection: 'row',
    width: '100%',
    padding: 10,
    alignItems: 'center',
    borderBottomWidth: 0.3,
    borderColor: '#d0d0d0',
  },
  headerTitle: {
    fontWeight: 'bold',
    fontSize: 17,
    position: 'absolute',
    // left:50% anchors the text's left edge at centre; the translate
    // nudges it back left so it reads as visually centred.
    left: '50%',
    transform: [{ translateX: -10 }]
  },
  // Scrollable list of comments under the post.
  commentSection: {
    width: '100%',
    padding: 10
  },
  // Input row pinned at the bottom (wrapped in KeyboardAvoidingView).
  newCommentSection: {
    width: '100%',
    flexDirection: 'row',
    alignItems: 'center',
    borderTopWidth: 0.5,
    padding: 5,
    borderTopColor: '#d0d0d0'
  },
  // Rounded grey text field that stretches to fill the input row.
  commentInput: {
    padding: 10,
    flex: 1,
    backgroundColor: '#ebebeb',
    borderRadius: 20,
    margin: 5,
  }
});
|
package apps;
import org.jooby.Jooby;
import org.jooby.Results;
/**
 * Declares GET /some/path/:id and GET /some/path/:id/foo, each returning the
 * {@code id} path parameter as an int.
 *
 * NOTE(review): this class lives in the {@code apps} package with a numbered
 * name, which suggests it is a fixture for Jooby's route/Javadoc extraction
 * tooling; the route comments below are likely parsed verbatim, so the
 * executable code and existing comments are intentionally left untouched.
 */
public class App946 extends Jooby {
  {
    /**
     * Top.
     */
    path("/some/path", () -> {
      path("/:id", () -> {
        /**
         * GET.
         * @param id Param ID.
         */
        get(req -> {
          return req.param("id").intValue();
        });
        /**
         * GET foo.
         * @param id Param ID.
         */
        get("/foo", req -> {
          return req.param("id").intValue();
        });
      });
    });
  }
}
|
# User account record.
#
# NOTE: `has_secure_password` itself validates presence of the password on
# create; the explicit :password presence validation is kept so existing
# error-message behavior is unchanged.
class User < ActiveRecord::Base
  has_secure_password

  # Modern validation syntax (`validates_presence_of` is the legacy form).
  validates :username, :email, :password, presence: true

  has_many :drawings
end
#!/bin/bash
# Package the freshly built BitMoney binaries for the current build host and
# stage the archive under $OUTDIR/zip for later upload.
# Expects TRAVIS_BUILD_DIR, TRAVIS_PULL_REQUEST, TRAVIS_JOB_NUMBER and HOST.

OUTDIR="$TRAVIS_BUILD_DIR/out/$TRAVIS_PULL_REQUEST/$TRAVIS_JOB_NUMBER-$HOST"
mkdir -p "$OUTDIR/bin"

# Choose archive name/tool per target triple: Windows and macOS ship zips,
# Linux/ARM targets ship gzipped tarballs.  An array (instead of a string
# later passed through eval) keeps the command and its arguments separate.
ARCHIVE_CMD=(zip)
if [[ $HOST = "i686-w64-mingw32" ]]; then
  ARCHIVE_NAME="windows-x86.zip"
elif [[ $HOST = "x86_64-w64-mingw32" ]]; then
  ARCHIVE_NAME="windows-x64.zip"
elif [[ $HOST = "arm-linux-gnueabihf" ]]; then
  ARCHIVE_NAME="arm-x86.tar.gz"
  ARCHIVE_CMD=(tar -czf)
elif [[ $HOST = "aarch64-linux-gnu" ]]; then
  ARCHIVE_NAME="arm-x64.tar.gz"
  ARCHIVE_CMD=(tar -czf)
elif [[ $HOST = "x86_64-unknown-linux-gnu" ]]; then
  ARCHIVE_NAME="linux-x64.tar.gz"
  ARCHIVE_CMD=(tar -czf)
elif [[ $HOST = "x86_64-apple-darwin11" ]]; then
  ARCHIVE_NAME="osx-x64.zip"
fi

# Copy whichever flavour of each binary exists (native or .exe).
# The QT wallet is optional; the daemon and CLI are expected to exist.
cp "$TRAVIS_BUILD_DIR/src/qt/bitmoney-qt" "$OUTDIR/bin/" || cp "$TRAVIS_BUILD_DIR/src/qt/bitmoney-qt.exe" "$OUTDIR/bin/" || echo "no QT Wallet"
cp "$TRAVIS_BUILD_DIR/src/bitmoneyd" "$OUTDIR/bin/" || cp "$TRAVIS_BUILD_DIR/src/bitmoneyd.exe" "$OUTDIR/bin/"
cp "$TRAVIS_BUILD_DIR/src/bitmoney-cli" "$OUTDIR/bin/" || cp "$TRAVIS_BUILD_DIR/src/bitmoney-cli.exe" "$OUTDIR/bin/"

strip "$OUTDIR/bin"/* || echo "nothing to strip"
ls -lah "$OUTDIR/bin"

# Archive from inside bin/ so entries are flat; abort rather than archive
# the wrong directory if the cd fails (original proceeded regardless).
cd "$OUTDIR/bin" || exit 1
"${ARCHIVE_CMD[@]}" "$ARCHIVE_NAME" *
mkdir -p "$OUTDIR/zip"
mv "$ARCHIVE_NAME" "$OUTDIR/zip"

# Stagger parallel jobs by 1-6 seconds; $(( )) replaces the deprecated $[ ].
sleep "$(( RANDOM % 6 + 1 ))s"
|
<reponame>FelixSeptem/itsrisky<gh_stars>0
package itsrisky
import (
"crypto/sha1"
"reflect"
"strconv"
"testing"
"time"
)
// TestSigner verifies that a Sign/Unsign round trip returns the original
// payload unchanged.
func TestSigner(t *testing.T) {
	signer := Signer{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	const payload = "something information quite long"

	token, err := signer.Sign(payload)
	if err != nil {
		t.Fatal(err)
	}
	got, err := signer.Unsign(token)
	if err != nil {
		t.Fatal(err)
	}
	if got != payload {
		t.Errorf("expect %s got %s", payload, got)
	}
}
// TestSignerWithTimeout verifies a timed round trip succeeds while the
// signature is fresh and yields *ErrDataExpired once the TTL has passed.
func TestSignerWithTimeout(t *testing.T) {
	signer := SignerWithTimeout{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	const payload = "something information quite long"

	token, err := signer.Sign(payload, time.Second)
	if err != nil {
		t.Fatal(err)
	}
	got, err := signer.Unsign(token)
	if err != nil {
		t.Fatal(err)
	}
	if got != payload {
		t.Errorf("expect %s got %s", payload, got)
	}

	// Let the one-second TTL lapse; Unsign must now fail with the typed error.
	time.Sleep(time.Second * 2)
	_, err = signer.Unsign(token)
	if _, expired := err.(*ErrDataExpired); !expired {
		t.Errorf("expect got expired err got %v", err)
	}
}
// TestSerialization round-trips a mixed struct through Dumps/Loads with a
// time-derived salt and checks deep equality of the result.
func TestSerialization(t *testing.T) {
	s := Serialization{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	// Nanosecond-resolution salt makes each run's tokens unique.
	s.WithSalt(strconv.FormatInt(time.Now().UnixNano(), 10))
	// Representative payload covering ints, strings, bools and slices.
	type tokenData struct {
		UserId        uint64
		UserName      string
		UserAvatar    string
		UserCreatedAt int64
		IsVIP         bool
		UserLevel     []int
	}
	data := tokenData{
		UserId:        12580,
		UserName:      "UserName",
		UserAvatar:    "www.fakeuser.org/user/12580.png",
		UserCreatedAt: time.Now().Unix(),
		IsVIP:         false,
		UserLevel: []int{
			1,
			2,
			3,
		},
	}
	// 72h expiry is far in the future, so Loads must succeed.
	signed, err := s.Dumps(data, time.Hour*72)
	if err != nil {
		t.Fatal(err)
	}
	receiveData := new(tokenData)
	err = s.Loads(signed, receiveData)
	if err != nil {
		t.Fatal(err)
	}
	if !reflect.DeepEqual(data, *receiveData) {
		t.Errorf("expect %v got %v", data, *receiveData)
	}
}
// BenchmarkSigner_Sign measures signing throughput; fixture setup is
// excluded from the timed region via StopTimer/StartTimer.
func BenchmarkSigner_Sign(b *testing.B) {
	b.StopTimer()
	signer := Signer{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	const payload = "something information quite long"
	b.StartTimer()

	for n := 0; n < b.N; n++ {
		if _, err := signer.Sign(payload); err != nil {
			b.Log(err)
		}
	}
}
// BenchmarkSigner_Unsign measures verification throughput for a token
// produced once during (untimed) setup.
func BenchmarkSigner_Unsign(b *testing.B) {
	b.StopTimer()
	signer := Signer{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	const payload = "something information quite long"
	token, err := signer.Sign(payload)
	if err != nil {
		b.Fatal(err)
	}
	b.StartTimer()

	for n := 0; n < b.N; n++ {
		if _, unsignErr := signer.Unsign(token); unsignErr != nil {
			b.Log(unsignErr)
		}
	}
}
// BenchmarkSignerWithTimeout_Sign measures timed-token signing throughput
// with a 72h TTL; setup cost is excluded from the timer.
func BenchmarkSignerWithTimeout_Sign(b *testing.B) {
	b.StopTimer()
	signer := SignerWithTimeout{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	const payload = "something information quite long"
	b.StartTimer()

	for n := 0; n < b.N; n++ {
		if _, err := signer.Sign(payload, time.Hour*72); err != nil {
			b.Fatal(err)
		}
	}
}
// BenchmarkSignerWithTimeout_Unsign measures verification throughput for a
// timed token (1h TTL, so it never expires during the run).
func BenchmarkSignerWithTimeout_Unsign(b *testing.B) {
	b.StopTimer()
	signer := SignerWithTimeout{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	const payload = "something information quite long"
	token, err := signer.Sign(payload, time.Hour)
	if err != nil {
		b.Fatal(err)
	}
	b.StartTimer()

	for n := 0; n < b.N; n++ {
		if _, unsignErr := signer.Unsign(token); unsignErr != nil {
			b.Log(unsignErr)
		}
	}
}
// BenchmarkSerialization_Dumps measures Dumps throughput for a mixed struct
// payload; fixture construction is excluded via StopTimer/StartTimer.
func BenchmarkSerialization_Dumps(b *testing.B) {
	b.StopTimer()
	type tokenData struct {
		UserId        uint64
		UserName      string
		UserAvatar    string
		UserCreatedAt time.Time
		IsVIP         bool
		UserLevel     []int
	}
	data := tokenData{
		UserId:        12580,
		UserName:      "UserName",
		UserAvatar:    "www.fakeuser.org/user/12580.png",
		UserCreatedAt: time.Now(),
		IsVIP:         false,
		UserLevel: []int{
			1,
			2,
			3,
		},
	}
	s := Serialization{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	// Second-resolution salt is fine here; per-run uniqueness is not needed
	// for a benchmark.
	s.WithSalt(strconv.FormatInt(time.Now().Unix(), 10))
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		_, err := s.Dumps(data, time.Hour*72)
		if err != nil {
			b.Log(err)
		}
	}
}
// BenchmarkSerialization_Loads measures Loads throughput: one token is
// produced during untimed setup and repeatedly deserialized into the same
// destination struct.
func BenchmarkSerialization_Loads(b *testing.B) {
	b.StopTimer()
	type tokenData struct {
		UserId        uint64
		UserName      string
		UserAvatar    string
		UserCreatedAt time.Time
		IsVIP         bool
		UserLevel     []int
	}
	data := tokenData{
		UserId:        12580,
		UserName:      "UserName",
		UserAvatar:    "www.fakeuser.org/user/12580.png",
		UserCreatedAt: time.Now(),
		IsVIP:         false,
		UserLevel: []int{
			1,
			2,
			3,
		},
	}
	s := Serialization{
		SecretKey: GenerateSecretKey(32),
		Hash:      sha1.New(),
	}
	s.WithSalt(strconv.FormatInt(time.Now().Unix(), 10))
	dumpsData, err := s.Dumps(data, time.Hour*72)
	if err != nil {
		b.Log(err)
	}
	receivedData := new(tokenData)
	b.StartTimer()
	for i := 0; i < b.N; i++ {
		err := s.Loads(dumpsData, receivedData)
		if err != nil {
			b.Log(err)
		}
	}
}
|
#!/bin/bash
# Mirror a pruned Red Hat operator catalog into a local disconnected registry.
# NOTE: this file is an Ansible/Jinja2 template -- the {{ ... }} expressions
# are substituted before the script ever runs.
# Variables to set, suit to your installation
cd /root
export PATH=/root/bin:$PATH
export OCP_RELEASE="{{ disconnected_operators_version|default(openshift_version|default(4.7)) }}"
export OCP_PULLSECRET_AUTHFILE='/root/openshift_pull.json'
# Work out this host's registry name: prefer the reverse-DNS name of the
# primary eth0 address (with an IPv6-aware fallback lookup), else the FQDN.
IP=$(ip -o addr show eth0 |head -1 | awk '{print $4}' | cut -d'/' -f1)
REVERSE_NAME=$(dig -x $IP +short | sed 's/\.[^\.]*$//')
echo $IP | grep -q ':' && SERVER6=$(grep : /etc/resolv.conf | grep -v fe80 | cut -d" " -f2) && REVERSE_NAME=$(dig -6x $IP +short @$SERVER6 | sed 's/\.[^\.]*$//')
REGISTRY_NAME=${REVERSE_NAME:-$(hostname -f)}
export LOCAL_REGISTRY=$REGISTRY_NAME:5000
export LOCAL_REGISTRY_INDEX_TAG=olm-index/redhat-operator-index:v$OCP_RELEASE
export LOCAL_REGISTRY_IMAGE_TAG=olm
# Login registries
podman login -u '{{ disconnected_user if disconnected_user != None else "dummy" }}' -p '{{ disconnected_password if disconnected_password != None else "dummy" }}' $LOCAL_REGISTRY
#podman login registry.redhat.io --authfile /root/openshift_pull.json
# Extract registry.redhat.io credentials ("user:password") from the pull secret.
REDHAT_CREDS=$(cat /root/openshift_pull.json | jq .auths.\"registry.redhat.io\".auth -r | base64 -d)
RHN_USER=$(echo $REDHAT_CREDS | cut -d: -f1)
RHN_PASSWORD=$(echo $REDHAT_CREDS | cut -d: -f2)
podman login -u "$RHN_USER" -p "$RHN_PASSWORD" registry.redhat.io
# Install opm from the latest non-rc operator-registry release if missing.
which opm >/dev/null 2>&1
if [ "$?" != "0" ] ; then
  export REPO="operator-framework/operator-registry"
  export VERSION=$(curl -s https://api.github.com/repos/$REPO/releases | grep tag_name | grep -v -- '-rc' | head -1 | awk -F': ' '{print $2}' | sed 's/,//' | xargs)
  echo "Using Opm Version $VERSION"
  curl -Lk https://github.com/operator-framework/operator-registry/releases/download/$VERSION/linux-amd64-opm > /usr/bin/opm
  chmod u+x /usr/bin/opm
fi
# Set these values to true for the catalog and mirror to be created
export RH_OP='true'
export RH_OP_INDEX="registry.redhat.io/redhat/redhat-operator-index:v${OCP_RELEASE}"
export CERT_OP_INDEX="registry.redhat.io/redhat/certified-operator-index:v${OCP_RELEASE}"
export COMM_OP_INDEX="registry.redhat.io/redhat/community-operator-index:v${OCP_RELEASE}"
export MARKETPLACE_OP_INDEX="registry.redhat.io/redhat-marketplace-index:v${OCP_RELEASE}"
#export RH_OP_PACKAGES='local-storage-operator,performance-addon-operator,ptp-operator,sriov-network-operator'
export RH_OP_PACKAGES='{{ disconnected_operators|join(",") }}'
# Prune the index down to the selected packages, push it, then mirror every
# referenced image and keep the generated manifests for cluster configuration.
opm index prune --from-index $RH_OP_INDEX --packages $RH_OP_PACKAGES --tag $LOCAL_REGISTRY/$LOCAL_REGISTRY_INDEX_TAG
podman push $LOCAL_REGISTRY/$LOCAL_REGISTRY_INDEX_TAG --authfile $OCP_PULLSECRET_AUTHFILE
oc adm catalog mirror $LOCAL_REGISTRY/$LOCAL_REGISTRY_INDEX_TAG $LOCAL_REGISTRY/$LOCAL_REGISTRY_IMAGE_TAG --registry-config=$OCP_PULLSECRET_AUTHFILE
cp /root/manifests-redhat-operator-index-*/imageContentSourcePolicy.yaml /root
cp /root/manifests-redhat-operator-index-*/catalogSource.yaml /root
|
// Babel-compiled CommonJS output (generated -- avoid editing by hand).
// Runtime helpers come from babel-runtime; createStyleSheet is the only export.
'use strict';

Object.defineProperty(exports, "__esModule", {
  value: true
});

var _assign = require('babel-runtime/core-js/object/assign');

var _assign2 = _interopRequireDefault(_assign);

var _keys = require('babel-runtime/core-js/object/keys');

var _keys2 = _interopRequireDefault(_keys);

var _extends2 = require('babel-runtime/helpers/extends');

var _extends3 = _interopRequireDefault(_extends2);

exports.createStyleSheet = createStyleSheet;
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Build a style-sheet descriptor whose rules can be resolved against a
 * theme, honouring per-component overrides found at `theme.overrides[name]`.
 * `callback` may be a rules object or a function of the theme.
 */
function createStyleSheet(name, callback) {
  var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};

  function createRules() {
    var theme = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : {};
    var baseRules = typeof callback === 'function' ? callback(theme) : callback;
    var overrides = theme.overrides && theme.overrides[name];
    if (!overrides) {
      return baseRules;
    }
    // Shallow-merge each overridden rule on top of the base rule.
    var merged = (0, _extends3.default)({}, baseRules);
    (0, _keys2.default)(overrides).forEach(function (ruleName) {
      merged[ruleName] = (0, _assign2.default)(merged[ruleName] || {}, overrides[ruleName]);
    });
    return merged;
  }

  return {
    name: name,
    options: options,
    createRules: createRules
  };
}
#!/bin/bash
# Approval gate for Paddle PRs: changes to sensitive files/patterns must be
# signed off by designated reviewers.  Review data comes from the GitHub API
# and is evaluated by tools/check_pr_approval.py (numbers are GitHub user ids).

# Base branch to diff against (defaults to develop).
if [ -z ${BRANCH} ]; then
  BRANCH="develop"
fi

# Repository root, derived from this script's own location.
PADDLE_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}")/../" && pwd )"

# Files that require owner approval when modified.
API_FILES=("CMakeLists.txt"
           "paddle/fluid/API.spec"
           "paddle/fluid/op_use_default_grad_op_maker.spec"
           "paddle/fluid/framework/operator.h"
           "paddle/fluid/framework/tensor.h"
           "paddle/fluid/framework/details/op_registry.h"
           "paddle/fluid/framework/grad_op_desc_maker.h"
           "paddle/fluid/framework/lod_tensor.h"
           "paddle/fluid/framework/selected_rows.h"
           "paddle/fluid/framework/op_desc.h"
           "paddle/fluid/framework/block_desc.h"
           "paddle/fluid/framework/var_desc.h"
           "paddle/fluid/framework/scope.h"
           "paddle/fluid/framework/ir/node.h"
           "paddle/fluid/framework/ir/graph.h"
           "paddle/fluid/framework/framework.proto"
           "python/requirements.txt"
           "python/paddle/fluid/__init__.py"
           "python/paddle/fluid/compiler.py"
           "python/paddle/fluid/parallel_executor.py"
           "python/paddle/fluid/framework.py"
           "python/paddle/fluid/backward.py"
           "paddle/fluid/operators/distributed/send_recv.proto.in")

# All reviews on this PR (per_page=10000 comfortably covers real PRs).
approval_line=`curl -H "Authorization: token ${GITHUB_API_TOKEN}" https://api.github.com/repos/PaddlePaddle/Paddle/pulls/${GIT_PR_ID}/reviews?per_page=10000`

# Diff size: changed-file count and total added lines.
git_files=`git diff --numstat upstream/$BRANCH| wc -l`
git_count=`git diff --numstat upstream/$BRANCH| awk '{sum+=$1}END{print sum}'`

failed_num=0
echo_list=()
# Very large PRs (20+ files or 1000+ added lines) need Dianhai's approval.
if [[ $git_files -gt 19 || $git_count -gt 999 ]];then
  APPROVALS=`echo ${approval_line}|python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 38231817`
  if [ "${APPROVALS}" == "FALSE" ]; then
    failed_num=`expr $failed_num + 1`
    echo_line="You must have Dianhai approval for change 20+ files or add than 1000+ lines of content\n"
    echo_list=(${echo_list[@]}$failed_num "." $echo_line)
  fi
fi
# For each watched file touched by the PR, require approval from that file's
# designated owners; collect a human-readable message per failure.
for API_FILE in ${API_FILES[*]}; do
  API_CHANGE=`git diff --name-only upstream/$BRANCH | grep "${API_FILE}" | grep -v "/CMakeLists.txt" || true`
  echo "checking ${API_FILE} change, PR: ${GIT_PR_ID}, changes: ${API_CHANGE}"
  if [ "${API_CHANGE}" ] && [ "${GIT_PR_ID}" != "" ]; then
    # NOTE: per_page=10000 should be ok for all cases, a PR review > 10000 is not human readable.
    # approval_user_list: XiaoguangHu01 46782768,chengduoZH 30176695,Xreki 12538138,luotao1 6836917,sneaxiy 32832641,tensor-tang 21351065,xsrobin 50069408,qingqing01 7845005,guoshengCS 14105589,heavengate 12605721,kuke 3064195,Superjomn 328693,lanxianghit 47554610,cyj1986 39645414,hutuxian 11195205,frankwhzhang 20274488,nepeplwu 45024560,Dianhai 38231817,JiabinYang 22361972,chenwhql 22561442.
    # Pick the reviewer set (and required approval count) for this file.
    if [ "${API_FILE}" == "paddle/fluid/API.spec" ];then
      APPROVALS=`echo ${approval_line}|python ${PADDLE_ROOT}/tools/check_pr_approval.py 2 46782768 7534971 14105589 12605721 3064195 328693 47554610 39645414 11195205 20274488 45024560 `
    elif [ "${API_FILE}" == "paddle/fluid/op_use_default_grad_op_maker.spec" ];then
      APPROVALS=`echo ${approval_line}|python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 32832641 6836917`
    elif [ "${API_FILE}" == "CMakeLists.txt" ];then
      APPROVALS=`echo ${approval_line}|python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 6836917 46782768 30176695`
    elif [ "${API_FILE}" == "python/paddle/fluid/__init__.py" ];then
      APPROVALS=`echo ${approval_line}|python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 6836917 47554610`
    elif [ "${API_FILE}" == "python/requirements.txt" ];then
      APPROVALS=`echo ${approval_line}|python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 6836917 22361972`
    else
      APPROVALS=`echo ${approval_line}|python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 21351065 3048612 46782768 30176695 12538138 6836917 32832641`
    fi
    echo "current pr ${GIT_PR_ID} got approvals: ${APPROVALS}"
    if [ "${APPROVALS}" == "FALSE" ]; then
      # Record a per-file failure message naming the acceptable approvers.
      if [ "${API_FILE}" == "paddle/fluid/API.spec" ];then
        failed_num=`expr $failed_num + 1`
        echo_line="You must have two RD (XiaoguangHu01 or wanghaoshuang or guoshengCS or heavengate or kuke or Superjomn or lanxianghit or cyj1986 or hutuxian or frankwhzhang or nepeplwu) approval for the api change! ${API_FILE} for the management reason of API interface and API document.\n"
        echo_list=(${echo_list[@]}$failed_num "." $echo_line)
      elif [ "${API_FILE}" == "paddle/fluid/op_use_default_grad_op_maker.spec" ];then
        failed_num=`expr $failed_num + 1`
        echo_line="You must have one RD (sneaxiy (Recommend) or luotao1) approval for op_use_default_grad_op_maker.spec, which manages the grad_op memory optimization.\n"
        echo_list=(${echo_list[@]}$failed_num "." $echo_line)
      elif [ "${API_FILE}" == "CMakeLists.txt" ];then
        failed_num=`expr $failed_num + 1`
        echo_line="You must have one RD (luotao1 or chengduoZH or XiaoguangHu01) approval for CMakeLists.txt, which manages the compilation parameter.\n"
        echo_list=(${echo_list[@]}$failed_num "." $echo_line)
      elif [ "${API_FILE}" == "python/requirements.txt" ];then
        failed_num=`expr $failed_num + 1`
        echo_line="You must have one RD (JiabinYang (Recommend) or luotao1) approval for python/requirements.txt, which manages the third-party python package.\n"
        echo_list=(${echo_list[@]}$failed_num "." $echo_line)
      elif [ "${API_FILE}" == "python/paddle/fluid/__init__.py" ];then
        failed_num=`expr $failed_num + 1`
        echo_line="You must have one RD (lanxianghit (Recommend) or luotao1) approval for the python/paddle/fluid/init.py, which manages the environment variables.\n"
        echo_list=(${echo_list[@]}$failed_num "." $echo_line)
      else
        failed_num=`expr $failed_num + 1`
        echo_line="You must have one RD (XiaoguangHu01,chengduoZH,Xreki,luotao1,sneaxiy,tensor-tang) approval for ${API_FILE}, which manages the underlying code for fluid.\n"
        echo_list=(${echo_list[@]}$failed_num "." $echo_line)
      fi
    fi
  fi
done
# Any added/removed const_cast requires approval from a core reviewer.
HAS_CONST_CAST=`git diff -U0 upstream/$BRANCH |grep -o -m 1 "const_cast" || true`
if [ ${HAS_CONST_CAST} ] && [ "${GIT_PR_ID}" != "" ]; then
  APPROVALS=`curl -H "Authorization: token ${GITHUB_API_TOKEN}" https://api.github.com/repos/PaddlePaddle/Paddle/pulls/${GIT_PR_ID}/reviews?per_page=10000 | \
  python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 21351065 3048612 46782768 30176695 12538138 6836917 32832641`
  echo "current pr ${GIT_PR_ID} got approvals: ${APPROVALS}"
  if [ "${APPROVALS}" == "FALSE" ]; then
    failed_num=`expr $failed_num + 1`
    echo_line="You must have one RD (XiaoguangHu01,chengduoZH,Xreki,luotao1,sneaxiy,tensor-tang) approval for the usage (either add or delete) of const_cast.\n"
    echo_list=(${echo_list[@]}$failed_num "." $echo_line)
  fi
fi
# Detect any newly touched gflags definition.  BUG FIX: the original chained
# three greps (`grep -o "DEFINE_int32" | grep -o "DEFINE_bool" | ...`), but
# each grep only sees the previous grep's *matched text* (e.g. the literal
# string "DEFINE_int32"), which can never contain "DEFINE_bool" -- so the
# check could never fire.  A single alternation matches any of the three.
HAS_DEFINE_FLAG=`git diff -U0 upstream/$BRANCH |grep -oE -m 1 "DEFINE_int32|DEFINE_bool|DEFINE_string" || true`
if [ "${HAS_DEFINE_FLAG}" ] && [ "${GIT_PR_ID}" != "" ]; then
  APPROVALS=`curl -H "Authorization: token ${GITHUB_API_TOKEN}" https://api.github.com/repos/PaddlePaddle/Paddle/pulls/${GIT_PR_ID}/reviews?per_page=10000 | \
  python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 47554610`
  echo "current pr ${GIT_PR_ID} got approvals: ${APPROVALS}"
  if [ "${APPROVALS}" == "FALSE" ]; then
    failed_num=`expr $failed_num + 1`
    echo_line="You must have one RD lanxianghit approval for the usage (either add or delete) of DEFINE_int32/DEFINE_bool/DEFINE_string flag.\n"
    echo_list=(${echo_list[@]}$failed_num "." $echo_line)
  fi
fi
# Bare PADDLE_ENFORCE additions need explicit approval; typed variants such
# as PADDLE_ENFORCE_EQ are exempted by the `grep -v "PADDLE_ENFORCE_"`.
HAS_PADDLE_ENFORCE_FLAG=`git diff -U0 upstream/$BRANCH |grep "+" |grep -v "PADDLE_ENFORCE_" |grep -o -m 1 "PADDLE_ENFORCE" || true`
if [ ${HAS_PADDLE_ENFORCE_FLAG} ] && [ "${GIT_PR_ID}" != "" ]; then
  APPROVALS=`curl -H "Authorization: token ${GITHUB_API_TOKEN}" https://api.github.com/repos/PaddlePaddle/Paddle/pulls/${GIT_PR_ID}/reviews?per_page=10000 | \
  python ${PADDLE_ROOT}/tools/check_pr_approval.py 1 6836917 47554610 22561442`
  echo "current pr ${GIT_PR_ID} got approvals: ${APPROVALS}"
  if [ "${APPROVALS}" == "FALSE" ]; then
    failed_num=`expr $failed_num + 1`
    echo_line="PADDLE_ENFORCE is not recommended. Please use PADDLE_ENFORCE_EQ/NE/GT/GE/LT/LE or PADDLE_ENFORCE_NOT_NULL or PADDLE_ENFORCE_CUDA_SUCCESS instead.\nYou must have one RD (chenwhql (Recommend) , luotao1 (Recommend) or lanxianghit) approval for the usage (either add or delete) of PADDLE_ENFORCE.\n"
    echo_list=(${echo_list[@]}$failed_num "." $echo_line)
  fi
fi

# Report all accumulated failures at once and fail the build.
if [ -n "${echo_list}" ];then
  echo "****************"
  echo -e ${echo_list[@]}
  # Show the offending bare PADDLE_ENFORCE lines for context.
  git diff -U0 upstream/$BRANCH |grep "+" |grep -v "PADDLE_ENFORCE_" |grep "PADDLE_ENFORCE"
  echo "There are ${failed_num} approved errors."
  echo "****************"
  exit 1
fi
|
/*
Retrieve bytes to the leading address of a word to wrap words.
*/
# define CAR
# include "../../../incl/config.h"
/*
 * cue2l -- recursively scans the NUL-terminated byte string argp against
 * the delimiter set sym and returns a count of bytes consumed before a
 * boundary condition is met (both exits at the bottom return >= 1).
 *
 * NOTE(review): the precise word-wrap semantics are hard to pin down from
 * this fragment alone; the comments below describe only what the code
 * visibly does.  `!(a^b)` is this file's idiom for `a == b`.
 */
signed(__cdecl cue2l(signed char(*sym),signed char(*argp))) {
	/* **** DATA, BSS and STACK */
	auto signed char HT = ('\t');	/* tab delimiter */
	auto signed char SP = (' ');	/* space delimiter */
	auto signed char *p;		/* NOTE(review): p and r are declared */
	auto signed i,r;		/* but never used in this revision.   */
	auto signed short flag;
	auto signed char c,old;
	/* **** CODE/TEXT */
	/* Null guards: no delimiter set, no argument, or empty string. */
	if(!sym) return(0x00);
	if(!argp) return(0x00);
	c = (*argp);
	if(!c) return(0x00);
	old = (c);
	flag = (0x00);
	/* Set flag if the current byte is one of the delimiters in sym. */
	i = (0x00);
	while(*(sym+(i))) {
		if(!(c^(*(sym+(i++))))) flag = (0x01);
	}
	/* Look ahead one byte; a tab terminates the scan immediately. */
	argp++;
	c = (*argp);
	if(!(HT^(c))) return(0x01);
	/* Clear flag again if the lookahead byte is itself a delimiter. */
	i = (0x00);
	while(*(sym+(i))) {
		if(!(c^(*(sym+(i++))))) flag = (0x00);
	}
	/* Whitespace transitions: a space lookahead clears the flag; a tab
	   or space as the *previous* byte sets it (boundary just passed). */
	if(!(SP^(c))) flag = (0x00);
	else {
		if(!(HT^(old))) flag = (0x01);
		if(!(SP^(old))) flag = (0x01);
	}
	if(flag) return(0x01);
	/* Otherwise count this byte and continue down the string. */
	return(0x01+(cue2l(sym,argp)));
}
|
# Escape all characters that are special in an extended regular expression
# so the joined arguments can be matched literally by grep -E.
escape_grep_regex() {
  printf '%s\n' "$*" | sed 's/[][\.|$(){}?+*^]/\\&/g'
}
# Append $1 to .gitignore (with a warning banner) unless an equivalent
# entry already exists; the name is regex-escaped so the duplicate check
# matches it literally.
function add_to_gitignore {
  touch .gitignore
  # BUG FIX: "$1" must be quoted, otherwise names containing spaces or
  # glob characters are word-split/expanded before being escaped.
  escaped_name="$(escape_grep_regex "$1")"
  grep -E -- "$escaped_name$" .gitignore &>/dev/null || echo "
# this next line was auto-added, and may be very important (passwords/auth etc)
# comment it out if dont want it to be ignored (and you know what you're doing)
$1" >> .gitignore
}
<gh_stars>10-100
export default {
  init() {
    //=====Grid/List View change in Facilities=====
    // Default to grid view whenever the view switcher is on the page.
    if($("#switchViewBtn").length) {
      $(".listView").hide();
    }
    // NOTE(review): jQuery's .toggle(handler, handler) signature was removed
    // in jQuery 1.9 -- confirm the bundled jQuery still supports it, else
    // this alternating click behaviour silently does nothing.
    $("#switchViewBtn").toggle(
      function(){
        $(".gridView").hide();
        $(".listView").show();
        $("#switchViewBtn").html('<i class="fa fa-th-large" aria-hidden="true"></i> Switch to Grid View');
      },
      function() {
        $(".gridView").show();
        $(".listView").hide();
        $("#switchViewBtn").html('<i class="fa fa-list" aria-hidden="true"></i> Switch to List View');
      }
    );
    //=============================================
    // JavaScript to be fired on all pages
    // Build in-page anchors and side navigation from document headings.
    var headings = $('.anchorific').data('headings');
    headings = headings ? headings : 'h2,h3,h4';
    $('.usa-layout-docs').anchorific({
      headers: headings,
      anchorText: false,
      top: false,
      spyOffset: 2,
      exclude: '.screen-reader-text',
    });
    $('.anchorific > ul').addClass('usa-sidenav-list');
    $('.anchorific li > ul').addClass('usa-sidenav-sub_list');
    // media query event handler
    if (matchMedia) {
      var mq = window.matchMedia("(min-width: 1201px)");
      mq.addListener(WidthChange);
      WidthChange(mq);
    }
    // media query change: reposition the sidenav depending on viewport width.
    function WidthChange(mq) {
      if (mq.matches) {
        // window width is at least 1201px
        // move aside before .usa-layout-docs-main_content
        $('aside.usa-layout-docs-sidenav').insertBefore( $('.usa-layout-docs-main_content') );
        $('.usa-layout-docs-sidenav').addClass('sticky usa-width-one-fourth');
      } else {
        // window width is less than 1201px
        //move aside under .usa-font-lead if exists or under header if it doesn't
        if($('aside.usa-layout-docs-sidenav').insertAfter( $('.usa-layout-docs-main_content').find('header').first().siblings('.usa-font-lead').first() ).length == 0) {
          $('aside.usa-layout-docs-sidenav').insertAfter( $('.usa-layout-docs-main_content').find('header').first() );
        }
        $('.usa-layout-docs-sidenav').css('height', '');
        $('.usa-layout-docs-sidenav').removeClass('sticky usa-width-one-fourth');
      }
    }
    // Size the sidenav to the viewport, allowing for the WP admin bar and a
    // fixed 110px offset (presumably the site header height -- verify).
    var adminBarHeight = $('#wpadminbar').height();
    var navHeight = function (mq) {
      if (mq.matches) {
        $('.usa-layout-docs-sidenav').css('height', (window.innerHeight - 110 - adminBarHeight));
      }
    };
    navHeight(mq);
    $( window ).resize(function () {
      navHeight(mq);
    });
    // Polyfill position:sticky where unsupported.
    $('.sticky').Stickyfill();
    // Show the compact header logo once the page has scrolled past 55px.
    var didScroll = false;
    var topNav = $('header.usa-header');
    window.onscroll = doThisStuffOnScroll;
    function doThisStuffOnScroll() {
      var scroll = $(window).scrollTop();
      var scrollHeight = 55;
      if (scroll >= scrollHeight) {
        topNav.addClass('show-logo');
      }
      else {
        topNav.removeClass('show-logo');
      }
      didScroll = true;
    }
    setInterval(function() {
      if(didScroll) {
        didScroll = false;
      }
    }, 500);
  },
  finalize() {
    // JavaScript to be fired on all pages, after page specific JS is fired
  },
};
|
#!/bin/bash
# Update bosh-cli's vendored Go dependencies with dep and commit the result.
set -e

# Work on a clone so the input stays pristine; the bumped repo is exposed
# to later pipeline steps as bumped-bosh-cli.
git clone bosh-cli bumped-bosh-cli

# dep expects the project under $GOPATH/src/<import path>, so build a
# throwaway GOPATH and symlink the clone into place.
mkdir -p workspace/src/github.com/cloudfoundry/
ln -s $PWD/bumped-bosh-cli workspace/src/github.com/cloudfoundry/bosh-cli
export GOPATH=$PWD/workspace
cd workspace/src/github.com/cloudfoundry/bosh-cli

dep ensure -v -update

# Commit only when the update actually changed something.
if [ "$(git status --porcelain)" != "" ]; then
  git status
  git add vendor Gopkg.lock
  git config user.name "CI Bot"
  git config user.email "cf-bosh-eng@pivotal.io"
  git commit -m "Update vendored dependencies"
fi
|
<reponame>malte0811/ControlEngineering
package malte0811.controlengineering.controlpanels.cnc;
import com.google.common.collect.ImmutableList;
import malte0811.controlengineering.bus.BusSignalRef;
import malte0811.controlengineering.controlpanels.PanelComponentInstance;
import malte0811.controlengineering.controlpanels.PanelComponents;
import malte0811.controlengineering.controlpanels.PlacedComponent;
import malte0811.controlengineering.controlpanels.components.config.ColorAndSignal;
import malte0811.controlengineering.controlpanels.components.config.ColorAndText;
import malte0811.controlengineering.crafting.noncrafting.ServerFontRecipe;
import malte0811.controlengineering.util.math.Vec2d;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
/**
 * Unit tests for {@code CNCInstructionParser}: valid panel programs,
 * malformed component lines, coordinate/bounds errors, overlap detection
 * and quoted/unquoted string arguments.
 */
public class CNCInstructionParserTest {
    // Shared fixtures matching the `button(..)`/`indicator(..)` source helpers below.
    private static final PanelComponentInstance<?, ?> BUTTON = PanelComponents.BUTTON.newInstanceFromCfg(
            new ColorAndSignal(0xffffff, new BusSignalRef(1, 2))
    );
    private static final PanelComponentInstance<?, ?> INDICATOR = PanelComponents.INDICATOR.newInstanceFromCfg(
            new ColorAndSignal(0xff00, new BusSignalRef(2, 1))
    );

    // Asserts the parse succeeded and yielded exactly the expected placements.
    private void assertSuccess(
            CNCInstructionParser.ParserResult result,
            PlacedComponent... expected
    ) {
        Assert.assertFalse(result.isError());
        Assert.assertEquals(ImmutableList.copyOf(expected), result.components());
    }

    // Asserts the parse failed; `expected` lists components parsed before the error.
    private void assertFailure(
            CNCInstructionParser.ParserResult result,
            PlacedComponent... expected
    ) {
        Assert.assertTrue(result.isError());
        Assert.assertEquals(ImmutableList.copyOf(expected), result.components());
    }

    @BeforeClass
    public static void preload() {
        //Load all classes before the actual tests since some have slow static init
        ServerFontRecipe.IN_UNIT_TEST = true;
        CNCInstructionParser.parse(null, button(1, 2) + ";" + indicator(2, 3));
    }

    // Two well-formed components separated by ';' parse into two placements.
    @Test
    public void testBasicPanel() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(
                null, button(1, 2) + ";" + indicator(2, 3)
        );
        assertSuccess(
                result,
                new PlacedComponent(BUTTON, new Vec2d(1, 2)),
                new PlacedComponent(INDICATOR, new Vec2d(2, 3))
        );
    }

    // An unknown component keyword is rejected.
    @Test
    public void testInvalidComponent() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(null, "this is not a component");
        assertFailure(result);
    }

    // Too few config arguments after the position is an error.
    @Test
    public void testMissingArgs() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(null, "button 1 2 ff");
        assertFailure(result);
    }

    // A missing Y coordinate is an error.
    @Test
    public void testNoPos() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(null, "button 1 ");
        assertFailure(result);
    }

    // A non-numeric coordinate is an error.
    @Test
    public void testPosNumberFormatXCP() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(null, "button 1 foo");
        assertFailure(result);
    }

    // A non-numeric config value is an error.
    @Test
    public void testDeserializerNumberFormatXCP() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(null, "button 1 2 foo");
        assertFailure(result);
    }

    // Components placed outside the panel bounds are rejected.
    @Test
    public void testOutOfPanel() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(
                null, button(16.5, 1)
        );
        assertFailure(result);
    }

    // Overlapping components fail; the first placement is still reported.
    @Test
    public void testDisjoint() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(
                null, button(3, 1) + ";" + indicator(3, 1)
        );
        assertFailure(result, new PlacedComponent(BUTTON, new Vec2d(3, 1)));
    }

    // Quoted label text may contain spaces.
    @Test
    public void testQuotedString() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(null, "label 1 1 ff \"This is a test\"");
        assertSuccess(
                result,
                new PlacedComponent(
                        PanelComponents.LABEL.newInstanceFromCfg(new ColorAndText(0xff, "This is a test")),
                        new Vec2d(1, 1)
                )
        );
    }

    // Unquoted label text is a single token.
    @Test
    public void testUnquotedString() {
        CNCInstructionParser.ParserResult result = CNCInstructionParser.parse(null, "label 1 1 0 test");
        assertSuccess(
                result,
                new PlacedComponent(
                        PanelComponents.LABEL.newInstanceFromCfg(new ColorAndText(0, "test")),
                        new Vec2d(1, 1)
                )
        );
    }

    // Source-line builders matching the BUTTON/INDICATOR fixtures above.
    private static String button(double x, double y) {
        return "button " + x + " " + y + " ffffff 1 2";
    }

    private static String indicator(double x, double y) {
        return "indicator " + x + " " + y + " ff00 2 1";
    }
}
#!/bin/sh
#
# Because git subtree doesn't provide an easy way to automatically merge changes
# from upstream, this shell script will do the job instead.
# If you don't have a POSIX-compatible shell on your system, feel free to use
# this as a reference for what commands to run, rather than running it directly.
# Change to the directory containing this script, or exit with failure
# to prevent git subtree scrawling over some other repo.
cd "$(dirname "$0")" || exit 1

# Merge changes from the upstream parallel-rdp repository.
git subtree pull --prefix=ares/n64/vulkan/parallel-rdp https://github.com/Themaister/parallel-rdp-standalone.git master --squash
# Likewise pull the upstream sljit repository into thirdparty/sljit.
git subtree pull --prefix=thirdparty/sljit https://github.com/zherczeg/sljit.git master --squash
|
/// Decode a 128-bit custom floating-point word into an `f64`.
///
/// Layout as implemented here: bit 127 = sign, bits 112..119 (7 bits,
/// bias 63) = exponent, low 63 bits = fraction (implicit leading 1).
///
/// NOTE(review): only the low 63 bits are used as the mantissa even though
/// bits 63..112 exist in the word -- confirm against the format spec that
/// those middle bits are genuinely padding.
fn custom_float_to_f64(input: u128) -> f64 {
    let is_negative = (input >> 127) & 1 == 1;
    let exponent = ((input >> 112) & 0x7F) as i32 - 63;
    let fraction = (input & 0x7FFF_FFFF_FFFF_FFFF) as f64 / (1u64 << 63) as f64;

    let sign = if is_negative { -1.0 } else { 1.0 };
    let value = sign * (1.0 + fraction) * 2.0_f64.powi(exponent);

    // Preserve the original's explicit mapping of overflow to a signed infinity.
    if value.is_infinite() {
        if is_negative {
            f64::NEG_INFINITY
        } else {
            f64::INFINITY
        }
    } else {
        value
    }
}
<reponame>zmb3/om
package generator
import (
"fmt"
"strings"
"gopkg.in/yaml.v2"
)
// NewMetadata parses raw product-metadata YAML into a Metadata value.
func NewMetadata(fileBytes []byte) (*Metadata, error) {
	parsed := &Metadata{}
	if err := yaml.Unmarshal(fileBytes, parsed); err != nil {
		return nil, err
	}
	return parsed, nil
}
// Metadata models the subset of a product template (metadata YAML) that
// this generator reads: product identity, configuration forms, property
// blueprints, jobs and lifecycle errands.
type Metadata struct {
	Name               string              `yaml:"name"`
	Version            string              `yaml:"product_version"`
	FormTypes          []FormType          `yaml:"form_types"`
	OpsManagerSyslog   bool                `yaml:"opsmanager_syslog"`
	PropertyBlueprints []PropertyBlueprint `yaml:"property_blueprints"`
	JobTypes           []JobType           `yaml:"job_types"`
	PostDeployErrands  []ErrandMetadata    `yaml:"post_deploy_errands"`
	PreDeleteErrands   []ErrandMetadata    `yaml:"pre_delete_errands"`
}
// Errands returns the post-deploy and pre-delete errands as a single list.
//
// A fresh slice is always allocated: the original appended directly onto
// m.PostDeployErrands, which can reuse (and overwrite) that field's backing
// array whenever it has spare capacity.
func (m *Metadata) Errands() []ErrandMetadata {
	combined := make([]ErrandMetadata, 0, len(m.PostDeployErrands)+len(m.PreDeleteErrands))
	combined = append(combined, m.PostDeployErrands...)
	return append(combined, m.PreDeleteErrands...)
}
// ErrandMetadata identifies a single errand by name.
type ErrandMetadata struct {
	Name string `yaml:"name"`
}
// ProductName returns the product name declared in the metadata file.
func (m *Metadata) ProductName() string {
	return m.Name
}
// ProductVersion returns the product_version declared in the metadata file.
func (m *Metadata) ProductVersion() string {
	return m.Version
}
// matchesType reports whether t is one of the service-network AZ selector
// property types.
func matchesType(t string) bool {
	return t == "service_network_az_multi_select" ||
		t == "service_network_az_single_select"
}
// UsesServiceNetwork reports whether any property blueprint -- job-level,
// product-level, nested, or inside an option template -- is one of the
// service-network AZ selector types.
func (m *Metadata) UsesServiceNetwork() bool {
	for _, jobType := range m.JobTypes {
		for _, blueprint := range jobType.PropertyBlueprint {
			if matchesType(blueprint.Type) {
				return true
			}
		}
	}
	for _, blueprint := range m.PropertyBlueprints {
		if matchesType(blueprint.Type) {
			return true
		}
		for _, nested := range blueprint.PropertyBlueprints {
			if matchesType(nested.Type) {
				return true
			}
		}
		for _, template := range blueprint.OptionTemplates {
			for _, nested := range template.PropertyBlueprints {
				if matchesType(nested.Type) {
					return true
				}
			}
		}
	}
	return false
}
// GetJob looks up a job type by name.  It returns a pointer to a copy of
// the matching entry, so callers cannot mutate m.JobTypes through it
// (matching the original's range-variable semantics).
func (m *Metadata) GetJob(jobName string) (*JobType, error) {
	for i := range m.JobTypes {
		if m.JobTypes[i].Name == jobName {
			match := m.JobTypes[i]
			return &match, nil
		}
	}
	return nil, fmt.Errorf("job %s not found", jobName)
}
// GetPropertyBlueprint resolves a property reference (dot-separated, e.g.
// ".job-name.property-name") to its blueprint.  A job-qualified lookup is
// tried first; otherwise the top-level blueprints are searched using the
// reference's final path segment.
func (m *Metadata) GetPropertyBlueprint(propertyReference string) (*PropertyBlueprint, error) {
	propertyParts := strings.Split(propertyReference, ".")
	simplePropertyName := propertyParts[len(propertyParts)-1]
	// BUG FIX: the original indexed propertyParts[1] unconditionally, which
	// panics with "index out of range" on a reference containing no dot.
	if len(propertyParts) > 1 {
		jobName := propertyParts[1]
		if job, err := m.GetJob(jobName); err == nil {
			return job.GetPropertyBlueprint(propertyReference)
		}
	}
	for _, property := range m.PropertyBlueprints {
		if property.Name == simplePropertyName {
			return &property, nil
		}
	}
	return nil, fmt.Errorf("property %s not found", propertyReference)
}
// PropertyInputs flattens every form type's property inputs into a single
// list, preserving form order.
func (m *Metadata) PropertyInputs() []PropertyInput {
	var inputs []PropertyInput
	for _, formType := range m.FormTypes {
		inputs = append(inputs, formType.PropertyInputs...)
	}
	return inputs
}
// UsesOpsManagerSyslogProperties reports whether the product delegates
// syslog configuration to Ops Manager (opsmanager_syslog in the YAML).
func (m *Metadata) UsesOpsManagerSyslogProperties() bool {
	return m.OpsManagerSyslog
}
|
<gh_stars>1-10
/* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.sound.facade;
import java.util.Arrays;
import games.stendhal.common.math.Algebra;
import games.stendhal.common.math.Geometry;
/**
 * AudibleCircleArea defines a circular area for audible objects.
 * @author silvio
 */
public class AudibleCircleArea implements AudibleArea
{
    // Center of the circle; defensively copied in the constructor.
    private final float[] mCenter;
    // Full intensity inside this radius.
    private float mInnerRadius;
    // Zero intensity outside this radius.
    private float mOuterRadius;

    public AudibleCircleArea(float[] center, float innerRadius, float outerRadius)
    {
        mCenter = new float[center.length];
        System.arraycopy(center, 0, mCenter, 0, center.length);
        mInnerRadius = innerRadius;
        mOuterRadius = outerRadius;
    }

    /** Replaces both radii; the center is left unchanged. */
    public void setArea(float innerRadius, float outerRadius)
    {
        mInnerRadius = innerRadius;
        mOuterRadius = outerRadius;
    }

    public void setPosition (float[] position) { Algebra.mov_Vecf(mCenter, position); }
    public void getPosition (float[] result) { Algebra.mov_Vecf(result, mCenter); }
    public float getInnerRadius() { return mInnerRadius; }
    public float getOuterRadius() { return mOuterRadius; }

    /**
     * Intensity is 1 inside the inner radius, 0 outside the outer radius,
     * and falls off linearly in between.
     *
     * NOTE(review): the value from Algebra.distanceSqrt_Vecf is compared
     * against squared radii and later square-rooted, so it is treated here
     * as a *squared* distance despite the "Sqrt" in the name — confirm
     * against Algebra's contract.
     */
    @Override
    public float getHearingIntensity(float[] hearerPos)
    {
        float distance = Algebra.distanceSqrt_Vecf(hearerPos, mCenter);

        if(distance > (mOuterRadius * mOuterRadius)) {
            return 0.0f;
        } else if(distance < (mInnerRadius * mInnerRadius)) {
            return 1.0f;
        }

        // Linear falloff between inner and outer radius.
        distance = (float)Math.sqrt(distance) - mInnerRadius;
        return 1.0f - distance / (mOuterRadius - mInnerRadius);
    }

    /** Writes the point on the inner sphere closest to hearerPos into result. */
    @Override
    public void getClosestPoint(float[] result, float[] hearerPos)
    {
        Geometry.closestPoint_SpherePointf(result, mCenter, mInnerRadius, hearerPos);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("AudibleCircleArea [mCenter=");
        sb.append(Arrays.toString(mCenter));
        sb.append(", mInnerRadius=" + mInnerRadius);
        sb.append(", mOuterRadius=");
        sb.append(mOuterRadius + "]");
        return sb.toString();
    }
}
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Download kubectl, which is a requirement for using minikube.
# NOTE(review): this always fetches the latest *stable* kubectl; pin a
# version for reproducible CI runs.
curl -Lo kubectl https://storage.googleapis.com/kubernetes-release/release/$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)/bin/linux/amd64/kubectl && chmod +x kubectl && sudo mv kubectl /usr/local/bin/
# Download minikube.
curl -Lo minikube https://storage.googleapis.com/minikube/releases/v0.25.2/minikube-linux-amd64 && chmod +x minikube && sudo mv minikube /usr/local/bin/
# --vm-driver=none runs the cluster directly on the host (needs root).
sudo minikube start --vm-driver=none --kubernetes-version=v1.9.0
# Fix the kubectl context, as it's often stale.
# NOTE(review): start ran under sudo but update-context runs unprivileged —
# confirm both operate on the same kube config.
minikube update-context
# Wait for Kubernetes to be up and ready.
JSONPATH='{range .items[*]}{@.metadata.name}:{range @.status.conditions[*]}{@.type}={@.status};{end}{end}'; until kubectl get nodes -o jsonpath="$JSONPATH" 2>&1 | grep -q "Ready=True"; do sleep 1; done
|
#!/usr/bin/env bash
# Will Bash Prompt, inspired by theme " Axim", "Sexy" and "Bobby"
# thanks to them
# Pick colors via terminfo when available, falling back to raw ANSI codes.
if tput setaf 1 &> /dev/null; then
    # 256-color terminals get the theme's custom palette.
    if [[ $(tput colors) -ge 256 ]] 2>/dev/null; then
        MAGENTA=$(tput setaf 9)
        ORANGE=$(tput setaf 172)
        GREEN=$(tput setaf 190)
        PURPLE=$(tput setaf 141)
        # NOTE(review): setaf 0 is terminfo color 0 (black), not white —
        # confirm this is intentional for the 256-color branch.
        WHITE=$(tput setaf 0)
    else
        # 8/16-color fallback palette.
        MAGENTA=$(tput setaf 5)
        ORANGE=$(tput setaf 4)
        GREEN=$(tput setaf 2)
        PURPLE=$(tput setaf 1)
        WHITE=$(tput setaf 7)
    fi
    BOLD=$(tput bold)
    RESET=$(tput sgr0)
else
    # No usable terminfo: hard-coded ANSI escape sequences.
    MAGENTA="\033[1;31m"
    ORANGE="\033[1;33m"
    GREEN="\033[1;32m"
    PURPLE="\033[1;35m"
    WHITE="\033[1;37m"
    BOLD=""
    RESET="\033[m"
fi
# Rebuilt before every prompt; composes PS1 from scm/clock helpers.
function prompt_command() {
    # \[ \] must wrap ONLY non-printing escape sequences. The original put
    # the printable " env: " text and ${ENV} inside the brackets, which
    # makes readline miscount the prompt width and garble line editing.
    PS1="\[$GREEN\]\w\[$WHITE\]\[$SCM_THEME_PROMPT_PREFIX\]$(clock_prompt) \[$ORANGE\] env: \[$MAGENTA\]${ENV}\[$PURPLE\]$(scm_prompt_info) \n\$ \[$RESET\]"
}

# NOTE(review): ${white} (lowercase) is presumably a bash-it color var
# distinct from the uppercase WHITE above — confirm it is defined.
THEME_CLOCK_COLOR=${THEME_CLOCK_COLOR:-"${white}"}

safe_append_prompt_command prompt_command
|
<reponame>wuximing/dsshop<gh_stars>1-10
// Compiled (CommonJS) output — edit the TypeScript source, not this file.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.INTERACTION_MAP = void 0;
var tslib_1 = require("tslib");
var drillDown_1 = tslib_1.__importDefault(require("./drillDown"));
// Maps interaction names to their implementations.
exports.INTERACTION_MAP = {
    drilldown: drillDown_1.default,
};
//# sourceMappingURL=index.js.map
<filename>cache.go
package disgord

// Cacher is a placeholder interface for cache implementations; it
// requires no methods yet.
type Cacher interface{}

// NewCache returns an empty Cache.
func NewCache() *Cache {
	return &Cache{}
}

// Cache is a stub with no state yet.
type Cache struct{}
|
#!/usr/bin/env bash
# Copyright 2020-2021 Johan Thorén <johan@thoren.xyz>
# Licensed under the ISC license:
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
# Optional, set these vars here instead of passing them as options.
#FOLDER="$HOME/some_folder"
#lat=""
#lon=""

# Defaults; each can be overridden by a getopts flag near the bottom.
cmd="feh --bg-fill"
gnome=0
purge=0
verbose=0
user=$(whoami)

# GNU date features (--date/--iso) are required; on OpenBSD the GNU
# version is installed as gdate.
if [[ $(uname) == OpenBSD ]];then
    date_cmd="gdate"
else
    date_cmd="date"
fi
# Print an error message (colored when the shell is interactive, judged
# from $-) and abort the whole script with status 1.
die() {
    case "${-}" in
        (*i*) printf -- '\e[38;5;9mERROR: %s\e[m\n' "${0}:(${LINENO}): ${*}" >&2 ;;
        (*) printf -- 'ERROR: %s\n' "${0}:(${LINENO}): ${*}" >&2 ;;
    esac
    exit 1
}
# Log $1 with a timestamp, but only when -v (verbose) was given.
print_v() {
    # Use a constant printf FORMAT: the original interpolated
    # "$(timestamp)" into the format string, which would misbehave if the
    # timestamp ever contained a '%'.
    [ "$verbose" -eq 1 ] && printf '%s %s\n' "$(timestamp)" "$1"
    # Always succeed so the && chain above cannot leak status 1 when
    # verbose mode is off.
    return 0
}
# Print usage and exit with the status given as $1 (0 for -h, 1 on error).
usage() {
    cat <<EOF
Usage: $0 -h | [-c COMMAND] [-g] [-p] [-v] [-x LATITUDE -y LONGITUDE] -f FOLDER
-c Command to use to set the wallpaper instead of feh
(assuming that the path to the wallpaper should follow the command)
-f Folder containing the wallpapers
-g Use GNOME tools to set wallpaper and screensaver (experimental)
-h Print this text
-p Purge old files
-v Verbose output
-x Latitude -- must be used together with -y
-y Longitude -- must be used together with -x
The FOLDER needs to contain the following images:
- night.jpg
- nautical_dawn.jpg
- civil_dawn.jpg
- morning.jpg
- noon.jpg
- late_afternoon.jpg
- civil_dusk.jpg
- nautical_dusk.jpg
EOF
    exit "$1"
}
# "[YYYY-MM-DD HH:MM:SS TZ]" prefix for log lines.
timestamp() {
    printf '%s' "[$("$date_cmd" +"%Y-%m-%d %T %Z")]"
}

# Convert a unix timestamp ($1) to "HH:MM:SS".
to_time() {
    $date_cmd --date "@${1}" +"%T"
}

# When -g was given: export the GNOME session's D-Bus address (scraped
# from the gnome-session process environment — presumably so the script
# also works outside the session, e.g. from cron; confirm) and switch
# $cmd to gsettings. Note $cmd then ends in "file:///" so the wallpaper
# path is concatenated directly in set_wallpaper.
on_gnome() {
    if [ $gnome -eq 1 ]; then
        pid=$(pgrep gnome-session | head -n1)
        DBUS_SESSION_BUS_ADDRESS=$(grep -z DBUS_SESSION_BUS_ADDRESS \
            /proc/"$pid"/environ |\
            tr '\0' '\n' |\
            sed -e s/DBUS_SESSION_BUS_ADDRESS=//)
        export DBUS_SESSION_BUS_ADDRESS
        cmd="gsettings set org.gnome.desktop.background picture-uri file:///"
    fi
}
# Delete old files in /tmp.
# Provide either "geo" or "sun" as first argument to delete files of that type.
delete_old_files() {
    print_v "Deleting old ${1}_data files."
    find /tmp -maxdepth 1 -name "${1}_data*.json" -user "$user" -delete
}

# Purge all old files (both kinds) when -p was given.
purge_old_files() {
    if [ "$purge" -eq 1 ]; then
        print_v "Purging old files."
        delete_old_files "geo"
        delete_old_files "sun"
    fi
}

# Fail early on missing commands or inconsistent options.
verify_requirements() {
    # Make sure that all required commands are available.
    command -v curl > /dev/null 2>&1 || die "curl is not installed."
    command -v jq > /dev/null 2>&1 || die "jq is not installed."
    # feh is only needed when it is still the configured setter.
    if [[ $cmd == feh* ]]; then
        command -v feh > /dev/null 2>&1 || die "feh is not installed."
    fi
    # Make sure that all required constants are set.
    [ -z "$FOLDER" ] && usage 1
    # If you use options x or y, you have to use both.
    { [ -n "$lat" ] && [ -z "$lon" ]; } || { [ -z "$lat" ] && [ -n "$lon" ]; } \
        && die "When using options -x or -y, both options must be used."
}
# Set the boundaries of the current day as unix timestamps.
define_day() {
    day_begin="$("$date_cmd" --date "$("$date_cmd" --iso)" +"%s")"
    print_v "The day begins at $day_begin."
    day_end="$("$date_cmd" --date "$("$date_cmd" --iso) 23:59:59" +"%s")"
    print_v "The day ends at $day_end."
}

# Fetch geolocation information based on IP from ip-api.com and cache the
# raw JSON response in /tmp for the rest of the day.
fetch_geo_data() {
    delete_old_files "geo"
    print_v "Fetching new geolocation data from the API."
    geo_data="$(curl -s \
        http://ip-api.com/json/\?fields=status,lat,lon,country,regionName,city)"
    new_geo_data_file="/tmp/geo_data_$("$date_cmd" +"%s").json"
    print_v "Saving geolocation data to file: $new_geo_data_file."
    printf '%s\n' "$geo_data" > "$new_geo_data_file"
}

# Fetch the Sunrise and Sunset data from https://sunrise-sunset.org/api,
# resolving lat/lon from the geo cache first when they were not given.
fetch_sun_data() {
    delete_old_files "sun"
    print_v "Fetching new sunrise and sunset data from the API"
    if [ -z "$lat" ] || [ -z "$lon" ]; then
        check_local_geo_data
        validate_geo_data
        populate_geo_vars
    fi
    sun_data="$(curl -s \
        https://api.sunrise-sunset.org/json\?lat="$lat"\&lng="$lon"\&formatted=0)"
    new_sun_data_file="/tmp/sun_data_$("$date_cmd" +"%s").json"
    print_v "Saving sunrise and sunset data to file: $new_sun_data_file."
    printf '%s\n' "$sun_data" > "$new_sun_data_file"
}

# Find data files in /tmp.
# Provide either "geo" or "sun" as first argument to find files of that type.
find_data_files() {
    find /tmp -maxdepth 1 -name "${1}_data*.json" -user "$user"
}
# Check to see if there is already local geo_data saved from a previous run.
# Re-fetches unless exactly one cache file exists and is from today.
check_local_geo_data() {
    geo_data_files="$(find_data_files "geo")"
    number_of_geo_data_files="$(find_data_files "geo" | wc -l)"
    print_v "The following old geo_data files were found:"
    print_v "$geo_data_files"
    print_v "Number of geo_data files: $number_of_geo_data_files"
    if [ "$number_of_geo_data_files" -ne 1 ]; then
        print_v "No geo_data file was found, or more than one was found."
        fetch_geo_data
        return
    fi
    # geo_data_files is a plain string, not an array; [0] yields the whole
    # value — safe here only because the count above is exactly 1.
    local_geo_data_file_name="${geo_data_files[0]}"
    # Extract the unix timestamp embedded in "geo_data_<ts>.json".
    geo_data_file_time_with_ending="${local_geo_data_file_name##*\_}"
    geo_data_file_time="${geo_data_file_time_with_ending%\.*}"
    # NOTE(review): the log line below says "sun_data" but this is the
    # geo cache — message is runtime output, flagged here only.
    print_v "The local sun_data file time is $geo_data_file_time."
    # If there already is a file that has been fetched the last day,
    # then use it to avoid using the API.
    if [ "$geo_data_file_time" -ge "$day_begin" ] && \
        [ "$geo_data_file_time" -lt "$day_end" ]
    then
        print_v "geo_data file time is within current day."
        geo_data="$(cat "$local_geo_data_file_name")"
    else
        print_v "geo_data file time is NOT within current day."
        fetch_geo_data
    fi
}

# Same caching logic as check_local_geo_data, for the sun API response.
check_local_sun_data() {
    sun_data_files="$(find_data_files "sun")"
    number_of_sun_data_files="$(find_data_files "sun" | wc -l)"
    print_v "The following old sun_data files were found:"
    print_v "$sun_data_files"
    print_v "Number of sun_data files: $number_of_sun_data_files"
    if [ ! "$number_of_sun_data_files" -eq 1 ]; then
        print_v "No sun_data file was found, or more than one was found."
        fetch_sun_data
        return
    fi
    local_sun_data_file_name="${sun_data_files[0]}"
    sun_data_file_time_with_ending="${local_sun_data_file_name##*\_}"
    sun_data_file_time="${sun_data_file_time_with_ending%\.*}"
    print_v "The local sun_data file time is $sun_data_file_time."
    # If there already is a file that has been fetched the last day,
    # then use it to avoid using the API.
    if [ "$sun_data_file_time" -ge "$day_begin" ] && \
        [ "$sun_data_file_time" -lt "$day_end" ]
    then
        print_v "sun_data file time is within current day."
        sun_data="$(cat "$local_sun_data_file_name")"
    else
        print_v "sun_data file time is NOT within current day."
        fetch_sun_data
    fi
}

# Extract a top-level field from the cached geo JSON (quotes stripped).
parse_geo_data_response() {
    jq --arg x "$1" '.[$x]' <<< "$geo_data" | sed 's/\"//g'
}

# One arg: top-level field of the sun JSON. Two args: nested field,
# converted from ISO-8601 to a unix timestamp for easy math.
parse_sun_data_response() {
    if [ "$#" -eq 1 ]; then
        jq --arg x "$1" '.[$x]' <<< "$sun_data" | sed 's/\"//g'
    elif [ "$#" -eq 2 ]; then
        date_time="$(jq --arg x "$1" --arg y "$2" '.[$x][$y]' \
            <<< "$sun_data" | sed 's/\"//g')"
        # Transform to unix timestamp for easy math.
        "$date_cmd" --date "$date_time" +"%s"
    else
        die "Illegal number of parameters to parse_sun_data_response."
    fi
}
# Build the wallpaper path for the current $period and hand it to $cmd.
set_wallpaper() {
    # Remove any trailing slash before trying to use the folder.
    trimmed_folder="${FOLDER%/}"
    wallpaper="${trimmed_folder}/${period}.jpg"
    print_v "Setting the wallpaper: $wallpaper."
    if [ $gnome -eq 1 ]; then
        # $cmd ends in "picture-uri file:///"; the quoted path is
        # concatenated onto the last word so gsettings gets one argument.
        ${cmd}"${wallpaper}"
    else
        # $cmd is deliberately unquoted (it may carry its own arguments),
        # but the path is quoted so folders with spaces survive splitting.
        ${cmd} "${wallpaper}"
    fi
}
# Point the GNOME screensaver at the same image as the wallpaper.
set_gnome_screensaver() {
    # Remove any trailing slash before trying to use the folder.
    trimmed_folder="${FOLDER%/}"
    wallpaper="${trimmed_folder}/${period}.jpg"
    print_v "Setting the GNOME screensaver: $wallpaper."
    gsettings set org.gnome.desktop.screensaver picture-uri file:///"${wallpaper}"
}
# Fallback when the APIs are unusable: pick a period from the local
# wall-clock hour. %H is zero-padded ("04".."23"), which the glob
# patterns below rely on.
take_a_guess() {
    hour="$("$date_cmd" +"%H")"
    case "$hour" in
        0[4-5])
            period="nautical_dawn" ;;
        0[6-7])
            period="civil_dawn" ;;
        0[8-9]|1[0-1])
            period="morning" ;;
        1[2-4])
            period="noon" ;;
        1[5-7])
            period="late_afternoon" ;;
        18)
            period="civil_dusk" ;;
        19|20)
            period="nautical_dusk" ;;
        *)
            # 21-23 and 00-03.
            period="night" ;;
    esac
}
# Error handling if the geo_data status is not "success".
# Retries up to 3 times (10 s apart), setting a guessed wallpaper in the
# meantime; gives up (exit 1) after the third failure.
validate_geo_data() {
    i=1
    while [ "$i" -le 3 ]; do
        print_v "Validation try: $i/3"
        geo_api_status="$(parse_geo_data_response status)"
        print_v "Geo API Status: $geo_api_status"
        if [ "$geo_api_status" != "success" ]; then
            print_v "The geo API request did not finish with a \"success\" status."
            print_v "Taking a guess on what time it could be."
            take_a_guess
            print_v "I think it might be: $period"
            set_wallpaper
            if [ "$i" -eq 3 ]; then
                print_v "Too many failed geo validation attempts."
                delete_old_files "geo"
                exit 1
            fi
            print_v "Trying again in 10 seconds."
            sleep 10
            fetch_geo_data
            ((++i))
        else
            break
        fi
    done
}
# Error handling if the sun_data is not "OK".
# Retries up to 3 times (10 s apart), setting a guessed wallpaper in the
# meantime; gives up (exit 1) after the third failure.
validate_sun_data() {
    i=1
    while [ "$i" -le 3 ]; do
        print_v "Validation try: $i/3"
        api_status="$(parse_sun_data_response status)"
        print_v "Sun API Status: $api_status"
        naut_begin="$(parse_sun_data_response results nautical_twilight_begin)"
        # Invalid when the status is not OK, OR when nautical_twilight_begin
        # predates today without being the API's "does not occur" sentinel
        # (1). The original chained `[a] || [b] && [c]`, which the shell
        # groups left-to-right as `( a || b ) && c`, so a bad status
        # combined with the sentinel value was wrongly accepted — the
        # braces below restore the intended grouping.
        if [ "$api_status" != "OK" ] || \
            { [ "$naut_begin" -lt "$day_begin" ] && [ "$naut_begin" -ne 1 ]; }
        then
            print_v "Unable to determine the time based on the API response."
            print_v "Taking a guess on what time it could be."
            take_a_guess
            print_v "I think it might be: $period"
            set_wallpaper
            if [ "$i" -eq 3 ]; then
                print_v "Too many failed sun validation attempts."
                delete_old_files "sun"
                exit 1
            fi
            print_v "Trying again in 10 seconds."
            sleep 10
            fetch_sun_data
            ((++i))
        else
            break
        fi
    done
}
# Populate $lat and $lon (plus human-readable location) from the geo JSON.
populate_geo_vars() {
    lat="$(parse_geo_data_response lat)"
    lon="$(parse_geo_data_response lon)"
    country="$(parse_geo_data_response country)"
    regionName="$(parse_geo_data_response regionName)"
    city="$(parse_geo_data_response city)"
    print_v "I think that I'm in $city, $regionName, $country."
    print_v "Using latitude: $lat"
    print_v "Using longitude: $lon"
}

# Populate the vars to compare against. In chronological order.
# All values are unix timestamps taken from the sun JSON.
populate_time_vars() {
    naut_twi_begin="$(parse_sun_data_response results nautical_twilight_begin)"
    civ_twi_begin="$(parse_sun_data_response results civil_twilight_begin)"
    sunrise="$(parse_sun_data_response results sunrise)"
    noon="$(parse_sun_data_response results solar_noon)"
    sunset="$(parse_sun_data_response results sunset)"
    # I want to switch to another wallpaper when the afternoon is starting to
    # head toward sunset: late_afternoon starts 3/4 of the way from noon
    # to sunset.
    length_of_afternoon=$((sunset-noon))
    late_afternoon=$((noon+(length_of_afternoon/4)*3))
    civ_twi_end="$(parse_sun_data_response results civil_twilight_end)"
    naut_twi_end="$(parse_sun_data_response results nautical_twilight_end)"
    # The local time as a unix timestamp.
    time="$("$date_cmd" +"%s")"
}
# Map $time onto one wallpaper period using the timestamps from
# populate_time_vars; dies if nothing matched.
determine_period() {
    # The sun API returns 1 if a state doesn't begin or end within the limits
    # of the current day. In other words, if civil_twi_begin is 1, that means
    # that either it began before the start of the day, ie. that 'night' never
    # occured, or that it never begins at all. In some northern and southern
    # locations this will be the case during the summer months.
    if [ "$civ_twi_begin" -eq 1 ] && [ "$civ_twi_end" -eq 1 ]; then
        [ "$time" -lt "$sunrise" ] && period="civil_dawn" && return
        [ "$time" -ge "$sunset" ] && period="civil_dusk" && return
    fi
    if [ "$naut_twi_begin" -eq 1 ] && [ "$naut_twi_end" -eq 1 ]; then
        [ "$time" -lt "$civ_twi_begin" ] && period="nautical_dawn" && return
        [ "$time" -ge "$civ_twi_end" ] && period="nautical_dusk" && return
    fi
    # Note: `[a] || [b] && cmd` groups as `( a || b ) && cmd` in shell —
    # here that is the intended meaning (outside both twilight bounds).
    [ "$time" -ge "$naut_twi_end" ] || [ "$time" -lt "$naut_twi_begin" ] && \
        period="night" && return
    [ "$time" -ge "$naut_twi_begin" ] && [ "$time" -lt "$civ_twi_begin" ] && \
        period="nautical_dawn" && return
    [ "$time" -ge "$civ_twi_begin" ] && [ "$time" -lt "$sunrise" ] && \
        period="civil_dawn" && return
    [ "$time" -ge "$sunrise" ] && [ "$time" -lt "$noon" ] && \
        period="morning" && return
    [ "$time" -ge "$noon" ] && [ "$time" -lt "$late_afternoon" ] && \
        period="noon" && return
    [ "$time" -ge "$late_afternoon" ] && [ "$time" -lt "$sunset" ] && \
        period="late_afternoon" && return
    [ "$time" -ge "$sunset" ] && [ "$time" -lt "$civ_twi_end" ] && \
        period="civil_dusk" && return
    [ "$time" -ge "$civ_twi_end" ] && [ "$time" -lt "$naut_twi_end" ] && \
        period="nautical_dusk" && return
    [ -z "$period" ] && die "Unable to determine period"
}
# Dump all computed timestamps (raw and human-readable) when -v was given.
verbose_summary() {
    if [ $verbose -eq 1 ]; then
        cat <<EOF
$(timestamp) Here follows a summary:
Nautical dawn begins at: $naut_twi_begin / $(to_time "$naut_twi_begin")
Civil dawn begins at: $civ_twi_begin / $(to_time "$civ_twi_begin")
Sunrise is at: $sunrise / $(to_time "$sunrise")
Noon is at: $noon / $(to_time "$noon")
Late afternoon begins at: $late_afternoon / $(to_time "$late_afternoon")
Sunset is at: $sunset / $(to_time "$sunset")
Civil dusk ends at: $civ_twi_end / $(to_time "$civ_twi_end")
Nautical dusk ends at: $naut_twi_end / $(to_time "$naut_twi_end")
The time is now: $time / $(to_time "$time")
It's currently: $period
EOF
    fi
}

# Command-line parsing; see usage() for the meaning of each flag.
while getopts "c:ghpvx:y:f:" opt
do
    case $opt in
        x) lat=$OPTARG;;
        y) lon=$OPTARG;;
        f) FOLDER=$OPTARG;;
        g) gnome=1;;
        c) cmd=$OPTARG;;
        p) purge=1;;
        v) verbose=1;;
        h) usage 0;;
        *) usage 1;;
    esac
done
# Orchestrates one full run; the order matters (e.g. define_day must run
# before the cache checks, which compare file timestamps against it).
main() {
    on_gnome
    verify_requirements
    purge_old_files
    define_day
    check_local_sun_data
    validate_sun_data
    populate_time_vars
    determine_period
    verbose_summary
    set_wallpaper
    # The screensaver is only touched in GNOME mode.
    if [ $gnome -eq 1 ]; then
        set_gnome_screensaver
    fi
}

main
|
import collections


def third_most_common(words):
    """Return the third most common element of *words*.

    Ties are broken by first-seen order (``Counter.most_common``).
    Raises IndexError if there are fewer than three distinct elements.
    """
    ranked = collections.Counter(words).most_common()
    return ranked[2][0]


if __name__ == "__main__":
    # The original module called third_most_common(words) with an
    # undefined name, raising NameError on import; run a self-contained
    # example instead, and only when executed as a script.
    sample = ["Cat", "Dog", "Cat", "Bird", "Dog", "Cat", "Bird", "Dog", "Bird", "Fish"]
    print(third_most_common(sample))  # 'Bird'
#Copyright 2018 The CDI Authors.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
# Component names shared by build and deploy scripts.
CONTROLLER="cdi-controller"
IMPORTER="cdi-importer"
CLONER="cdi-cloner"
APISERVER="cdi-apiserver"
UPLOADPROXY="cdi-uploadproxy"
UPLOADSERVER="cdi-uploadserver"
FUNC_TEST_INIT="cdi-func-test-file-host-init"
FUNC_TEST_HTTP="cdi-func-test-file-host-http"
# NOTE(review): BINARIES omits cmd/${CLONER} while DOCKER_IMAGES includes
# it — confirm whether the cloner is intentionally not built as a binary.
BINARIES="cmd/${CONTROLLER} cmd/${IMPORTER} cmd/${APISERVER} cmd/${UPLOADPROXY} cmd/${UPLOADSERVER} tools/${FUNC_TEST_INIT}"
CDI_PKGS="cmd/ pkg/ test/"
CONTROLLER_MAIN="cmd/${CONTROLLER}"
IMPORTER_MAIN="cmd/${IMPORTER}"
CLONER_MAIN="cmd/${CLONER}"
APISERVER_MAIN="cmd/${APISERVER}"
UPLOADPROXY_MAIN="cmd/${UPLOADPROXY}"
UPLOADSERVER_MAIN="cmd/${UPLOADSERVER}"
DOCKER_IMAGES="cmd/${CONTROLLER} cmd/${IMPORTER} cmd/${CLONER} cmd/${APISERVER} cmd/${UPLOADPROXY} cmd/${UPLOADSERVER} tools/${FUNC_TEST_INIT} tools/${FUNC_TEST_HTTP}"
# Image naming/tagging defaults; each honors a pre-set environment value.
DOCKER_REPO=${DOCKER_REPO:-kubevirt}
CONTROLLER_IMAGE_NAME=${CONTROLLER_IMAGE_NAME:-cdi-controller}
IMPORTER_IMAGE_NAME=${IMPORTER_IMAGE_NAME:-cdi-importer}
CLONER_IMAGE_NAME=${CLONER_IMAGE_NAME:-cdi-cloner}
APISERVER_IMAGE_NAME=${APISERVER_IMAGE_NAME:-cdi-apiserver}
UPLOADPROXY_IMAGE_NAME=${UPLOADPROXY_IMAGE_NAME:-cdi-uploadproxy}
UPLOADSERVER_IMAGE_NAME=${UPLOADSERVER_IMAGE_NAME:-cdi-uploadserver}
DOCKER_TAG=${DOCKER_TAG:-latest}
VERBOSITY=${VERBOSITY:-1}
PULL_POLICY=${PULL_POLICY:-IfNotPresent}
NAMESPACE=${NAMESPACE:-kube-system}
# Pinned provider images (name@sha256 digest).
KUBERNETES_IMAGE="k8s-1.10.4@sha256:c340a67190364b0e0c5864a8ce8edf38ccc35af6c4284a56118b2c38adf619cd"
OPENSHIFT_IMAGE="os-3.10.0@sha256:b026dba96571a6732171c45b1f9fbdbbb3c3bbb1aa2118e99e12368c15ffb6f6"
KUBEVIRT_PROVIDER=${KUBEVIRT_PROVIDER:-k8s-1.10.4}
# List all Go packages, rewriting the module path to the on-disk CDI_DIR
# (assumed to be set by the caller — confirm) and dropping pkg/client.
function allPkgs() {
    ret=$(sed "s,kubevirt.io/containerized-data-importer,${CDI_DIR},g" <(go list ./... | grep -v "pkg/client" | sort -u))
    echo "$ret"
}

# Split the remaining CLI args into $pkgs (./...-style package specs) and
# $test_args (from --test-args=...); aborts on anything else.
function parseTestOpts() {
    pkgs=""
    test_args=""
    while [[ $# -gt 0 ]] && [[ $1 != "" ]]; do
        case "${1}" in
            --test-args=*)
                test_args="${1#*=}"
                shift 1
                ;;
            ./*...)
                pkgs="${pkgs} ${1}"
                shift 1
                ;;
            *)
                echo "ABORT: Unrecognized option \"$1\""
                exit 1
                ;;
        esac
    done
}

# Echo the pinned provider image for $KUBEVIRT_PROVIDER (empty for
# unknown providers).
function getClusterType() {
    local image
    case "${KUBEVIRT_PROVIDER}" in
        "k8s-1.10.4")
            image=$KUBERNETES_IMAGE
            ;;
        "os-3.10.0")
            image=$OPENSHIFT_IMAGE
            ;;
    esac
    echo "$image"
}
|
<reponame>lanpinguo/rootfs_build
/**
* sunxi-eh-test.c - SUNXI Embedded Host Test Support Driver
*
* This program is free software: you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published by
* the Free Software Foundation.
*
* ALTERNATIVELY, this software may be distributed under the terms of the
* GNU General Public License ("GPL") version 2, as published by the Free
* Software Foundation.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
* IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*/
#include <linux/delay.h>
#include <linux/fs.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/usb.h>
#include <linux/usb/ch11.h>
#include <linux/usb/ch9.h>
#define EHCI0_SYSFS_PATH "/devices/soc.0/1c1a000.ehci0-controller"
#define EHCI1_SYSFS_PATH "/devices/soc.0/1c1b000.ehci1-controller"
#define EHCI0_SUN8IW11_SYSFS_PATH "/devices/soc.0/1c14000.ehci0-controller"
#define EHCI1_SUN8IW11_SYSFS_PATH "/devices/soc.0/1c19000.ehci1-controller"
#define EHCI2_SUN8IW11_SYSFS_PATH "/devices/soc.0/1c1c000.ehci2-controller"
#define EHCI0_ED_TEST_PATH "/sys/bus/platform/devices/1c1a000.ehci0" \
"-controller/ed_test"
#define EHCI1_ED_TEST_PATH "/sys/bus/platform/devices/1c1b000.ehci1" \
"-controller/ed_test"
#define EHCI0_SUN8IW11_ED_TEST_PATH "/sys/bus/platform/devices/1c14000.ehci0" \
"-controller/ed_test"
#define EHCI1_SUN8IW11_ED_TEST_PATH "/sys/bus/platform/devices/1c19000.ehci1" \
"-controller/ed_test"
#define EHCI2_SUN8IW11_ED_TEST_PATH "/sys/bus/platform/devices/1c1c000.ehci2" \
"-controller/ed_test"
#define USB_IF_TEST_VID 0x1a0a
#define USB_IF_TEST_SE0_NAK 0x0101
#define USB_IF_TEST_J 0x0102
#define USB_IF_TEST_K 0x0103
#define USB_IF_TEST_PACKET 0x0104
#define USB_IF_HS_HOST_PORT_SUSPEND_RESUME 0x0106
#define USB_IF_SINGLE_STEP_GET_DEV_DESC 0x0107
#define USB_IF_SINGLE_STEP_GET_DEV_DESC_DATA 0x0108
#define USB_IF_PROTOCOL_OTG_ELECTRICAL_TEST 0x0200
#define BUFLEN 32
/*
 * Map the interface's sysfs device path to the matching EHCI controller's
 * "ed_test" attribute path. Returns a static string, or NULL when the
 * device is not attached to a known controller for this SoC config.
 */
static char *sunxi_eh_get_ed_test_path(struct usb_interface *intf)
{
	char *udev_sysfs_path = NULL;
	char *ed_test_path = NULL;
	struct usb_device *udev;

	udev = interface_to_usbdev(intf);
	/*
	 * kobject_get_path() returns a kmalloc'd string; the original never
	 * freed it, leaking on every probe.
	 */
	udev_sysfs_path = kobject_get_path(&udev->dev.kobj, GFP_KERNEL);
	if (udev_sysfs_path == NULL)
		return NULL;

#if defined(CONFIG_ARCH_SUN50IW1P1)
	if (!strncmp(udev_sysfs_path, EHCI0_SYSFS_PATH,
			strlen(EHCI0_SYSFS_PATH))) /* usb0 */
		ed_test_path = EHCI0_ED_TEST_PATH;
	else if (!strncmp(udev_sysfs_path, EHCI1_SYSFS_PATH,
			strlen(EHCI1_SYSFS_PATH))) /* usb1 */
		ed_test_path = EHCI1_ED_TEST_PATH;
#elif defined(CONFIG_ARCH_SUN8IW10)
	if (!strncmp(udev_sysfs_path, EHCI0_SYSFS_PATH,
			strlen(EHCI0_SYSFS_PATH))) /* usb0 */
		ed_test_path = EHCI0_ED_TEST_PATH;
#elif defined(CONFIG_ARCH_SUN8IW11)
	if (!strncmp(udev_sysfs_path, EHCI0_SUN8IW11_SYSFS_PATH,
			strlen(EHCI0_SUN8IW11_SYSFS_PATH))) /* usb0 */
		ed_test_path = EHCI0_SUN8IW11_ED_TEST_PATH;
	else if (!strncmp(udev_sysfs_path, EHCI1_SUN8IW11_SYSFS_PATH,
			strlen(EHCI1_SUN8IW11_SYSFS_PATH))) /* usb1 */
		ed_test_path = EHCI1_SUN8IW11_ED_TEST_PATH;
	else if (!strncmp(udev_sysfs_path, EHCI2_SUN8IW11_SYSFS_PATH,
			strlen(EHCI2_SUN8IW11_SYSFS_PATH))) /* usb2 */
		ed_test_path = EHCI2_SUN8IW11_ED_TEST_PATH;
#endif

	kfree(udev_sysfs_path);
	return ed_test_path;
}
/*
 * Write @test_mode into the controller's "ed_test" sysfs attribute to put
 * the downstream port into the requested electrical test mode. Errors are
 * logged and swallowed; there is nothing the caller can do about them.
 */
static void sunxi_eh_set_ed_test(struct usb_interface *intf, char *test_mode)
{
	struct file *filep;
	loff_t pos;
	char *ed_test_path = NULL;

	dev_info(&intf->dev, "sunxi_eh_set_ed_test, test_mode:%s\n", test_mode);

	ed_test_path = sunxi_eh_get_ed_test_path(intf);
	if (ed_test_path == NULL) {
		dev_err(&intf->dev, "ed test path is NULL\n");
		return;
	}

	filep = filp_open(ed_test_path, O_RDWR, 0);
	if (IS_ERR(filep)) {
		dev_err(&intf->dev, "open status fail\n");
		return;
	}

	pos = 0;
	/*
	 * Write only the string itself: the original wrote BUFLEN (32)
	 * bytes, reading past the end of the short test-mode literals.
	 *
	 * NOTE(review): test_mode is a kernel buffer cast to __user, which
	 * relies on set_fs-era semantics; on kernels providing
	 * kernel_write() that should be used instead — confirm against the
	 * target kernel version.
	 */
	vfs_write(filep, (char __user *)test_mode, strlen(test_mode), &pos);
	filp_close(filep, NULL);
}
/*
 * Select the electrical test mode based on the USB-IF test device's PID.
 * Only the four port test modes are implemented; the remaining PIDs are
 * acknowledged with an error message.
 */
static int sunxi_eh_probe(struct usb_interface *intf,
		const struct usb_device_id *id)
{
	struct usb_device *udev;

	udev = interface_to_usbdev(intf);

	switch (id->idProduct) {
	case USB_IF_TEST_SE0_NAK:
		/*
		 * Upon enumerating VID 0x1A0A/PID 0x0101, the host's
		 * downstream port shall enter a high-speed receive mode as
		 * described in Section 7.1.20 [USB2.0] and drives an SE0 until
		 * the controller is reset.
		 */
		sunxi_eh_set_ed_test(intf, "test_se0_nak");
		break;
	case USB_IF_TEST_J:
		/*
		 * Upon enumerating VID 0x1A0A/PID 0x0102, the host's
		 * downstream port shall enter a high-speed J state as
		 * described in Section 7.1.20 of [USB2.0] until the host
		 * controller is reset.
		 */
		sunxi_eh_set_ed_test(intf, "test_j_state");
		break;
	case USB_IF_TEST_K:
		/*
		 * Upon enumerating VID 0x1A0A/PID 0x0103, the host's
		 * downstream port shall enter a high-speed K state as
		 * described in Section 7.1.20 of [USB2.0] until the host
		 * controller is reset.
		 */
		sunxi_eh_set_ed_test(intf, "test_k_state");
		break;
	case USB_IF_TEST_PACKET:
		/*
		 * Upon enumerating VID 0x1A0A/PID 0x0104, the host shall begin
		 * sending test packets as described in Section 7.1.20 of
		 * [USB2.0] until the host controller is reset.
		 */
		sunxi_eh_set_ed_test(intf, "test_pack");
		break;
	case USB_IF_HS_HOST_PORT_SUSPEND_RESUME:
		/*
		 * Upon enumerating VID:0x1A0A/PID 0x0106, the host shall
		 * continue sending SOFs for 15 seconds, then suspend the
		 * downstream port under test per Section 7.1.7.6.1 of
		 * [USB2.0]. After 15 seconds has elapsed, the host shall issue
		 * a ResumeK state on the bus, then continue sending SOFs.
		 */
		dev_err(&intf->dev, "Unsupport hs host port suspend resume\n");
		break;
	case USB_IF_SINGLE_STEP_GET_DEV_DESC:
		/*
		 * When the host discovers a device with VID:0x1A0A/PID 0x0107,
		 * the following steps are executed by the host and the device.
		 *
		 * 1. The host enumerates the test device, reads VID:0x1A0A/PID
		 * 0x0107, then completes its enumeration procedure.
		 *
		 * 2. The host issues SOFs for 15 seconds allowing the test
		 * engineer to raise the scope trigger just above the SOF
		 * voltage level.
		 *
		 * 3. The host sends a complete GetDescriptor(Device) transfer
		 *
		 * 4. The device ACKs the request, triggering the scope. (Note:
		 * SOFs continue.)
		 */
		dev_err(&intf->dev, "Unsupport single step get dev desc\n");
		break;
	case USB_IF_SINGLE_STEP_GET_DEV_DESC_DATA:
		/*
		 * When the host discovers a device with VID:0x1A0A/PID 0x0108,
		 * the following steps are executed by the host and the device.
		 *
		 * 1. The host enumerates the test device and reads
		 * VID:0x1A0A/PID 0x0108, then completes its enumeration
		 * procedure
		 *
		 * 2. After enumerating the device, the host sends
		 * GetDescriptor(Device)
		 *
		 * 3. The device ACKs the request
		 *
		 * 4. The host issues SOFs for 15 seconds allowing the test
		 * engineer to raise the scope trigger just above the SOF
		 * voltage level
		 *
		 * 5. The host sends an IN packet
		 *
		 * 6. The device sends data in response to the IN packet,
		 * triggering the scope
		 *
		 * 7. The host sends an ACK in response to the data. (Note:
		 * SOFs may follow the IN transaction).
		 */
		dev_err(&intf->dev, "Unsupport single step get dev desc data\n");
		break;
	case USB_IF_PROTOCOL_OTG_ELECTRICAL_TEST:
		/* OTG-A Device */
		dev_err(&intf->dev, "Unsupport protocol otg electrical test\n");
		break;
	default:
		dev_err(&intf->dev, "Unsupported device\n");
	}
	return 0;
}
/* Nothing to tear down: probe only pokes a sysfs attribute. */
static void sunxi_eh_disconnect(struct usb_interface *intf)
{
	return;
}

/* USB-IF test device IDs (one per supported test mode). */
static const struct usb_device_id sunxi_eh_id_table[] = {
	{ USB_DEVICE(USB_IF_TEST_VID, USB_IF_TEST_SE0_NAK), },
	{ USB_DEVICE(USB_IF_TEST_VID, USB_IF_TEST_J), },
	{ USB_DEVICE(USB_IF_TEST_VID, USB_IF_TEST_K), },
	{ USB_DEVICE(USB_IF_TEST_VID, USB_IF_TEST_PACKET), },
	{ USB_DEVICE(USB_IF_TEST_VID, USB_IF_HS_HOST_PORT_SUSPEND_RESUME), },
	{ USB_DEVICE(USB_IF_TEST_VID, USB_IF_SINGLE_STEP_GET_DEV_DESC), },
	{ USB_DEVICE(USB_IF_TEST_VID, USB_IF_SINGLE_STEP_GET_DEV_DESC_DATA), },
	{ USB_DEVICE(USB_IF_TEST_VID, USB_IF_PROTOCOL_OTG_ELECTRICAL_TEST), },
	{ } /* Terminating Entry */
};
MODULE_DEVICE_TABLE(usb, sunxi_eh_id_table);

static struct usb_driver sunxi_eh_driver = {
	.name = "sunxi-eh-test",
	.probe = sunxi_eh_probe,
	.disconnect = sunxi_eh_disconnect,
	.id_table = sunxi_eh_id_table,
	.supports_autosuspend = true,
};

module_usb_driver(sunxi_eh_driver);

MODULE_LICENSE("GPL v2");
MODULE_DESCRIPTION("USB SUNXI EH Test Driver");
|
def findMinMax(array):
    """Return a ``(minimum, maximum)`` tuple for a non-empty sequence.

    Raises IndexError for an empty sequence (``array[0]`` fails).
    """
    # Seed both extremes with the first element, then sweep once.
    lowest = array[0]
    highest = array[0]
    for value in array:
        if value < lowest:
            lowest = value
        elif value > highest:
            highest = value
    return (lowest, highest)
inputArray = [2, 3, 4, 7, 8, 1, 9]
# findMinMax returns (minimum, maximum); the original unpacked into
# `max, min` — reversed order — so the labels below printed swapped
# values (and shadowed the min/max builtins).
minimum, maximum = findMinMax(inputArray)
print("Maximum:", maximum)  # 9
print("Minimum:", minimum)  # 1
# Build the 'nogdb' target (presumably a GDB-less build — confirm in the
# Makefile) with terse output (V=0).
make nogdb V=0
|
<reponame>MckenzieSkullKid/GoldSiege<gh_stars>0
package com.pixelyeti.goldsiege.Util;
import org.bukkit.Bukkit;
import org.bukkit.World;
import org.bukkit.WorldCreator;
import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
/**
 * World-folder copy utilities.
 *
 * Created by Callum on 30/10/2016.
 */
public class FileHandler {

    /**
     * Copies an existing world's folder under a new name inside the server's
     * world container and loads the copy through a {@code WorldCreator}.
     */
    public static void copyWorld(World originalWorld, String newWorldName) {
        copyFileStructure(originalWorld.getWorldFolder(), new File(Bukkit.getWorldContainer(), newWorldName));
        new WorldCreator(newWorldName).createWorld();
    }

    /**
     * Recursively copies {@code source} into {@code target}, skipping the
     * per-world identity/lock files that must not be duplicated.
     * Any {@link IOException} is rethrown as a {@link RuntimeException}.
     */
    private static void copyFileStructure(File source, File target) {
        try {
            ArrayList<String> ignore = new ArrayList<>(Arrays.asList("uid.dat", "session.lock"));
            if (!ignore.contains(source.getName())) {
                if (source.isDirectory()) {
                    if (!target.exists())
                        if (!target.mkdirs())
                            throw new IOException("Couldn't create world directory!");
                    String[] files = source.list();
                    // list() returns null on I/O errors or permission problems;
                    // the original dereferenced it unchecked (NPE).
                    if (files == null)
                        throw new IOException("Couldn't list directory: " + source);
                    for (String file : files) {
                        File srcFile = new File(source, file);
                        File destFile = new File(target, file);
                        copyFileStructure(srcFile, destFile);
                    }
                } else {
                    // try-with-resources closes both streams even when a
                    // read/write fails (the original leaked them on error).
                    try (InputStream in = new FileInputStream(source);
                         OutputStream out = new FileOutputStream(target)) {
                        byte[] buffer = new byte[1024];
                        int length;
                        while ((length = in.read(buffer)) > 0)
                            out.write(buffer, 0, length);
                    }
                }
            }
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }
}
|
<reponame>timeo-app/timeo-api
package main
import (
"github.com/gofiber/fiber/v2"
"go-fiber-todos/router"
)
type ToDo struct {
Id int `json:"id"`
Name string `json:"name"`
Completed bool `json:"completed"`
}
var todos = []ToDo{
{Id: 1, Name: "Walk the Dog", Completed: false},
{Id: 2, Name: "Walk the Cat", Completed: false},
}
func main() {
app := fiber.New()
router.SetupRoutes(app)
app.Listen(":3000")
}
func GetToDos(ctx *fiber.Ctx){
ctx.Status(fiber.StatusOK).JSON(todos)
} |
"use strict";

// Test bootstrap: expose sinon/chai helpers as globals and wire the
// sinon-chai assertion plugin into chai.
global.sinon = require("sinon");
global.chai = require("chai");
global.should = global.chai.should();
global.chai.use(require("sinon-chai"));
|
#!/bin/bash
# Run every line of ../sql/insert.sql against the clickhouse-master
# container, one clickhouse-client invocation per line.
set -euo pipefail

SCRIPT_DIR=$(dirname "$0")
# Quote the path and abort if the cd fails (the original would otherwise
# pipe the wrong file, or none, from the current directory).
cd "$SCRIPT_DIR" || exit 1

# NOTE(review): xargs -I% feeds one line at a time, so every SQL statement
# in insert.sql must fit on a single line.
cat ../sql/insert.sql | xargs -I% docker exec clickhouse-master clickhouse-client --query=%
|
class Warp {
// Implementation of Warp class
}
function createWarp(opts: CreateWarpOpts): Warp {
const warp = new Warp();
// Configure the Warp instance using the provided options
warp.controllers = opts.controllers;
if (opts.middleware) {
warp.middleware = opts.middleware;
}
if (opts.authenticator) {
warp.authenticator = opts.authenticator;
}
if (opts.express) {
warp.express = opts.express;
}
if (opts.logger) {
warp.logger = opts.logger;
}
if (opts.errorHandler) {
warp.errorHandler = opts.errorHandler;
}
if (opts.basePath) {
warp.basePath = opts.basePath;
}
return warp;
} |
#!/bin/bash
#
# ElasTest backup utility
#
# Test fixture: emits two KEY=VALUE pairs on stdout, one diagnostic line on
# stderr, and exits with status 2.  The explicit >&1 redirects are no-ops
# kept to make the stdout/stderr split obvious to the reader.
echo "FILE=/tmp/marika" >&1
echo "VAR1=value1" >&1
echo "This is an error" >&2
exit 2
|
<filename>mod.ts
// Public entry point: re-export the denock API and its public types.
export { denock } from "./src/index.ts";
export type { HTTPMethods, DenockOptions, Interceptor, RequestData } from "./src/type.ts";
|
from typing import List
class MigrationOperation:
    """Value object describing a single schema change for an app."""

    def __init__(self, app: str, operation_type: str, field: str, details: str):
        # App label the operation belongs to.
        self.app = app
        # Kind of change, e.g. "AddField" / "RemoveField".
        self.operation_type = operation_type
        # Name of the affected model field.
        self.field = field
        # Free-form description of the change.
        self.details = details


def process_migrations(dependencies: List[tuple], operations: List[MigrationOperation]) -> str:
    """Render a human-readable report of migrations and their operations.

    Each ``(app, migration)`` dependency is listed, followed by every
    operation whose ``app`` matches that dependency's app.
    """
    lines = ["Migrations to be performed:"]
    for app, migration in dependencies:
        lines.append(f"- App: {app}, Migration: {migration}")
        lines.extend(
            f"  - {op.operation_type}: {op.field}, {op.details}"
            for op in operations
            if op.app == app
        )
    return "\n".join(lines) + "\n"
import { handleActions } from 'redux-actions'
const initialState = {
  detail: {},
};

// Reducer map: NOTICE_PUSH stores the pushed notice payload under `detail`
// and clears any pending flag.
export default handleActions(
  {
    NOTICE_PUSH(state, action) {
      return Object.assign({ pending: false }, state, {
        detail: action.payload,
      });
    },
  },
  initialState
);
// Thunk: log the error to the console and push it onto the notice queue so
// the UI can surface it.
export const showError = (error) => (dispatch, getState) => {
  console.error(error);
  const notice = { type: 'error', error };
  dispatch({ type: 'NOTICE_PUSH', payload: notice });
};
|
// Userstyle metadata: author name and canonical URLs.
export const AUTHOR = "Soitora" as const;
export const URL = {
  // Hosted, compiled stylesheet.
  stylesheet: "https://soitora.com/SweClockers-Dark/sweclockers-dark.css",
  // SweClockers forum thread describing the style.
  info: "/forum/trad/1515628",
} as const;
|
#!/bin/bash
# Kaggle/Colab bootstrap: install Java 8, download a Spark distribution
# matching the requested PySpark version, and pip-install pyspark/spark-nlp.
#
# Options:
#   -s <version>  Spark NLP version (default 3.1.2)
#   -p <version>  PySpark version  (default 3.0.2)

# default values for pyspark, spark-nlp, and SPARK_HOME
SPARKNLP="3.1.2"
PYSPARK="3.0.2"
SPARKHOME="spark-3.1.2-bin-hadoop2.7"

while getopts s:p: option
do
  case "${option}"
  in
    s) SPARKNLP=${OPTARG};;
    p) PYSPARK=${OPTARG};;
  esac
done

echo "setup Kaggle for PySpark $PYSPARK and Spark NLP $SPARKNLP"

apt-get install -y openjdk-8-jdk-headless -qq > /dev/null

if [[ "$PYSPARK" == "3.1"* ]]; then
  wget -q "https://downloads.apache.org/spark/spark-3.1.2/spark-3.1.2-bin-hadoop2.7.tgz" > /dev/null
  tar -xvf spark-3.1.2-bin-hadoop2.7.tgz > /dev/null
  SPARKHOME="/content/spark-3.1.2-bin-hadoop2.7"
elif [[ "$PYSPARK" == "3.0"* ]]; then
  wget -q "https://downloads.apache.org/spark/spark-3.0.3/spark-3.0.3-bin-hadoop2.7.tgz" > /dev/null
  tar -xvf spark-3.0.3-bin-hadoop2.7.tgz > /dev/null
  SPARKHOME="/content/spark-3.0.3-bin-hadoop2.7"
elif [[ "$PYSPARK" == "2"* ]]; then
  wget -q "https://downloads.apache.org/spark/spark-2.4.8/spark-2.4.8-bin-hadoop2.7.tgz" > /dev/null
  # BUG FIX: the original extracted "...hadoop2.8.tgz", a file that was never
  # downloaded, so the Spark 2.x branch always failed.
  tar -xvf spark-2.4.8-bin-hadoop2.7.tgz > /dev/null
  SPARKHOME="/content/spark-2.4.8-bin-hadoop2.7"
else
  wget -q "https://downloads.apache.org/spark/spark-3.1.2/spark-3.1.2-bin-hadoop2.7.tgz" > /dev/null
  tar -xvf spark-3.1.2-bin-hadoop2.7.tgz > /dev/null
  SPARKHOME="/content/spark-3.1.2-bin-hadoop2.7"
fi

export SPARK_HOME=$SPARKHOME
export JAVA_HOME="/usr/lib/jvm/java-8-openjdk-amd64"

# Install pyspark and spark-nlp.  BUG FIX: the original line began with "!"
# (Jupyter cell syntax), which is not valid in a plain bash script.
pip install --upgrade -q pyspark==$PYSPARK spark-nlp==$SPARKNLP findspark
|
/**
 * Serializes the tree rooted at {@code root} into a pre-order {@code int[]}.
 *
 * The recursive helper records a {@code null} marker for every missing
 * child; an {@code int[]} cannot represent those markers, so they are
 * filtered out here.  BUG FIX: the original unboxed them directly
 * ({@code mapToInt(i -> i)}), throwing a NullPointerException for any tree
 * with at least one missing child.
 */
public static int[] treeToArray(Node root) {
    ArrayList<Integer> arr = new ArrayList<>();
    treeToArrayRecur(arr, root);
    return arr.stream()
            .filter(v -> v != null)
            .mapToInt(Integer::intValue)
            .toArray();
}
/**
 * Pre-order traversal helper: appends {@code node.data}, then recurses into
 * the left subtree, then the right.  A {@code null} entry is appended as an
 * explicit marker for each missing child so the flattened list preserves
 * the tree's shape.
 */
public static void treeToArrayRecur(ArrayList<Integer> arr, Node node) {
    if (node == null) {
        arr.add(null);
        return;
    }
    arr.add(node.data);
    treeToArrayRecur(arr, node.left);
    treeToArrayRecur(arr, node.right);
}
<reponame>ooooo-youwillsee/leetcode
//
// Created by ooooo on 2020/4/3.
//
#ifndef CPP_047__SOLUTION1_H_
#define CPP_047__SOLUTION1_H_
#include <iostream>
#include <vector>
using namespace std;
/**
* dfs timeout
*/
/**
 * Max-value path from top-left to bottom-right of a grid, moving only
 * right or down.
 *
 * The original DFS enumerated every path (exponential; the file itself
 * labels it "dfs timeout").  maxValue() now uses O(m*n) dynamic
 * programming with an O(n) rolling row; the public interface and the
 * public members are kept for compatibility.
 */
class Solution {
 public:
  // Retained for interface compatibility; no longer used by maxValue().
  void dfs(int i, int j, int sum) {
    if (i >= m || j >= n) return;
    sum += grid[i][j];
    if (i == m - 1 && j == n - 1) {
      ans = max(ans, sum);
      return;
    }
    dfs(i, j + 1, sum);
    dfs(i + 1, j, sum);
  }

  vector<vector<int>> grid;
  int m = 0, n = 0, ans = 0;

  int maxValue(vector<vector<int>> &grid) {
    if (grid.empty() || grid[0].empty()) return 0;
    this->grid = grid;
    this->m = grid.size();
    this->n = grid[0].size();
    // dp[j] = best value of any right/down path ending at column j of the
    // row processed so far.
    vector<int> dp(n, 0);
    // First row: can only arrive from the left.
    for (int j = 0; j < n; ++j) {
      dp[j] = (j > 0 ? dp[j - 1] : 0) + grid[0][j];
    }
    for (int i = 1; i < m; ++i) {
      dp[0] += grid[i][0];  // first column: can only arrive from above
      for (int j = 1; j < n; ++j) {
        dp[j] = max(dp[j], dp[j - 1]) + grid[i][j];
      }
    }
    ans = dp[n - 1];
    return ans;
  }
};
#endif //CPP_047__SOLUTION1_H_
|
package io.opensphere.core.preferences;
import java.text.SimpleDateFormat;
/**
* A utility method to get list tool preferences.
*
*/
/**
 * A utility class holding list tool preference keys and date/time format
 * helpers.
 */
public final class ListToolPreferences
{
    /** The Constant LIST_TOOL_TIME_PRECISION_DIGITS. */
    public static final String LIST_TOOL_TIME_PRECISION_DIGITS = "ListToolTimePrecision";

    /** The Constant SNAP_TO_HIGHLIGHTED_PREFERENCE. */
    public static final String SNAP_TO_HIGHLIGHTED_PREFERENCE = "SNAP_TO_HIGHLIGHTED_PREFERENCE";

    /** The Constant SWITCH_FOCUS_ON_CURRENT_TYPE_CHANGE_PREFERENCE. */
    public static final String SWITCH_FOCUS_ON_CURRENT_TYPE_CHANGE_PREFERENCE = "SWITCH_FOCUS_ON_CURRENT_TYPE_CHANGE_PREFERENCE";

    /**
     * Gets the date-plus-time format for the given precision.
     *
     * @param precision the number of fractional-second digits (0-3)
     * @return a format such as "yyyy-MM-dd HH:mm:ss.SSS"
     */
    public static SimpleDateFormat getSimpleDateFormatForPrecision(int precision)
    {
        return new SimpleDateFormat("yyyy-MM-dd " + getTimeFormat(precision));
    }

    /**
     * Gets the time-only format for the given precision.
     *
     * @param precision the number of fractional-second digits (0-3)
     * @return a format such as "HH:mm:ss.SSS"
     */
    public static SimpleDateFormat getSimpleTimeFormatForPrecision(int precision)
    {
        return new SimpleDateFormat(getTimeFormat(precision));
    }

    /**
     * Builds the time pattern for the given precision by appending one 'S'
     * per fractional digit.  Out-of-range precisions fall back to whole
     * seconds, matching the original switch statement's default branch.
     *
     * @param precision the number of fractional-second digits (0-3)
     * @return the time pattern
     */
    private static String getTimeFormat(int precision)
    {
        String base = "HH:mm:ss";
        if (precision < 1 || precision > 3)
        {
            return base;
        }
        StringBuilder pattern = new StringBuilder(base).append('.');
        for (int i = 0; i < precision; i++)
        {
            pattern.append('S');
        }
        return pattern.toString();
    }

    /** Not constructible. */
    private ListToolPreferences()
    {
        // Don't allow construction.
    }
}
|
// Render the Google Sign-in button into the #gSignIn element and wire up
// the success/failure callbacks defined below.
function renderButton() {
    gapi.signin2.render('gSignIn', {
        'scope': 'profile email',
        'width': 240,
        'height': 50,
        'longtitle': true,
        'theme': 'dark',
        'onsuccess': onSuccess,
        'onfailure': onFailure
    });
}
// Sign-in success callback: load the OAuth2 client, fetch the signed-in
// user's account info, and swap the sign-in button for a profile card.
function onSuccess(googleUser) {
    // Get the Google profile data (basic)
    //var profile = googleUser.getBasicProfile();
    // Retrieve the Google account data via the OAuth2 userinfo endpoint.
    gapi.client.load('oauth2', 'v2', function () {
        var request = gapi.client.oauth2.userinfo.get({
            'userId': 'me'
        });
        request.execute(function (resp) {
            console.log(resp);
            // Display the user details.
            // NOTE(review): resp fields are inserted into innerHTML without
            // escaping — confirm upstream that they cannot contain markup.
            var profileHTML = '<h3>Welcome '+resp.given_name+'! <a href="javascript:void(0);" onclick="signOut();">Sign out</a></h3>';
            profileHTML += '<img src="'+resp.picture+'"/><p><b>Google ID: </b>'+resp.id+'</p><p><b>Name: </b>'+resp.name+'</p><p><b>Email: </b>'+resp.email+'</p><p><b>Gender: </b>'+resp.gender+'</p><p><b>Locale: </b>'+resp.locale+'</p><p><b>Google Profile:</b> <a target="_blank" href="'+resp.link+'">click to view profile</a></p>';
            document.getElementsByClassName("userContent")[0].innerHTML = profileHTML;
            document.getElementById("gSignIn").style.display = "none";
            document.getElementsByClassName("userContent")[0].style.display = "block";
        });
    });
}
// Sign-in failure callback: surface the error object to the user.
function onFailure(error) {
    alert(error);
}
// Sign the user out, then revoke the granted scopes and restore the
// sign-in UI.
function signOut() {
    var auth2 = gapi.auth2.getAuthInstance();
    auth2.signOut().then(function () {
        document.getElementsByClassName("userContent")[0].innerHTML = '';
        document.getElementsByClassName("userContent")[0].style.display = "none";
        document.getElementById("gSignIn").style.display = "block";
        // BUG FIX: disconnect() was previously called immediately after
        // scheduling signOut(), racing the pending sign-out request.
        // Revoke the grants only once sign-out has completed.
        auth2.disconnect();
    });
}
//function ongoogleSignIn(googleUser) {
// var profile = googleUser.getBasicProfile();
// console.log(profile);
// console.log('Name: ' + profile.getName());
// console.log('Email: ' + profile.getEmail());
//}
|
<filename>src/main/java/de/thro/inf/prg3/a11/App.java
package de.thro.inf.prg3.a11;
import de.thro.inf.prg3.a11.openmensa.OpenMensaAPI;
import de.thro.inf.prg3.a11.openmensa.OpenMensaAPIService;
import de.thro.inf.prg3.a11.openmensa.model.Canteen;
import de.thro.inf.prg3.a11.openmensa.model.Meal;
import de.thro.inf.prg3.a11.openmensa.model.PageInfo;
import de.thro.inf.prg3.a11.utils.ListUtil;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.IntStream;
/**
* @author <NAME>
* Created on 12/16/17.
*/
public class App {

	private static final String OPEN_MENSA_DATE_FORMAT = "yyyy-MM-dd";

	/* NOTE(review): SimpleDateFormat is not thread-safe; fine here because
	 * the app is single-threaded on the main loop. */
	private static final SimpleDateFormat dateFormat = new SimpleDateFormat(OPEN_MENSA_DATE_FORMAT, Locale.getDefault());
	private static final Scanner inputScanner = new Scanner(System.in);
	private static final OpenMensaAPI openMensaAPI = OpenMensaAPIService.getInstance().getOpenMensaAPI();
	private static final Calendar currentDate = Calendar.getInstance();
	private static int currentCanteenId = -1;

	public static void main(String[] args) throws ExecutionException, InterruptedException {
		MenuSelection selection;
		/* loop while true to get back to the menu every time an action was performed */
		do {
			selection = menu();
			switch (selection) {
				case SHOW_CANTEENS:
					printCanteens();
					break;
				case SET_CANTEEN:
					readCanteen();
					break;
				case SHOW_MEALS:
					printMeals();
					break;
				case SET_DATE:
					readDate();
					break;
				case QUIT:
					System.exit(0);
			}
		} while (true);
	}

	/**
	 * Retrieve all canteens synchronously
	 *
	 * @throws ExecutionException   thrown by the `get` call of `Future`
	 * @throws InterruptedException thrown by the `get` call of `Future`
	 */
	private static void printCanteens() throws ExecutionException, InterruptedException {
		System.out.print("Fetching canteens [");
		openMensaAPI.getCanteens().thenApply(response -> {
			System.out.print("#");
			PageInfo pageInfo = PageInfo.extractFromResponse(response);
			List<Canteen> allCanteens;
			/* unwrapping the response body */
			if (response.body() == null) {
				/* fallback to empty list if response body was empty */
				allCanteens = new LinkedList<>();
			} else {
				allCanteens = response.body();
			}
			/* declare variable to be able to use `thenCombine` */
			CompletableFuture<List<Canteen>> remainingCanteensFuture = null;
			for (int i = 2; i <= pageInfo.getTotalCountOfPages(); i++) {
				System.out.print("#");
				/* if we're fetching the first page the future is null and has to be assigned */
				if (remainingCanteensFuture == null) {
					remainingCanteensFuture = openMensaAPI.getCanteens(i);
				} else {
					/* from the second page on the futures are combined
					 * to combine a future with another you have to provide a function to combine the results */
					remainingCanteensFuture = remainingCanteensFuture.thenCombine(openMensaAPI.getCanteens(i), ListUtil::mergeLists);
				}
			}
			/* BUG FIX: when there is only a single page the future stays
			 * null; the original dereferenced it unconditionally (NPE). */
			if (remainingCanteensFuture != null) {
				try {
					/* collect all retrieved in one list */
					allCanteens.addAll(remainingCanteensFuture.get());
				} catch (InterruptedException | ExecutionException e) {
					e.printStackTrace();
				}
			}
			System.out.println("]");
			/* sort the retrieved canteens by their ids and return them */
			allCanteens.sort(Comparator.comparing(Canteen::getId));
			return allCanteens;
		}).thenAccept(canteens -> {
			/* print all canteens to STDOUT */
			for (Canteen c : canteens) {
				System.out.println(c);
			}
		}).get(); /* block the thread by calling `get` to ensure that all results are retrieved when the method is completed */
	}

	/**
	 * Retrieve all meals served at the currently selected canteen at the currently selected date
	 *
	 * @throws ExecutionException   thrown by the `get` call of `Future`
	 * @throws InterruptedException thrown by the `get` call of `Future`
	 */
	private static void printMeals() throws ExecutionException, InterruptedException {
		if (currentCanteenId < 0) {
			System.out.println("No canteen selected.");
			return;
		}
		final String dateString = dateFormat.format(currentDate.getTime());
		/* fetch the state of the canteen */
		openMensaAPI.getCanteenState(currentCanteenId, dateString).thenApply(state -> {
			/* if canteen is open fetch the meals */
			if (state != null && !state.isClosed()) {
				try {
					return openMensaAPI.getMeals(currentCanteenId, dateString).get();
				} catch (InterruptedException | ExecutionException e) {
					/* BUG FIX: the original swallowed this exception
					 * silently; at least report it before falling through
					 * to the empty list. */
					e.printStackTrace();
				}
			} else {
				/* if canteen is not open - print a message and return */
				System.out.println(String.format("Seems like the canteen has closed on this date: %s", dateFormat.format(currentDate.getTime())));
			}
			return new LinkedList<Meal>();
		}).thenAccept(meals -> {
			/* print the retrieved meals to the STDOUT */
			for (Meal m : meals) {
				System.out.println(m);
			}
		}).get(); /* block the thread by calling `get` to ensure that all results are retrieved when the method is completed */
	}

	/**
	 * Utility method to select a canteen
	 */
	private static void readCanteen() {
		/* typical input reading pattern */
		boolean readCanteenId = false;
		do {
			try {
				System.out.println("Enter canteen id:");
				currentCanteenId = inputScanner.nextInt();
				readCanteenId = true;
			} catch (Exception e) {
				System.out.println("Sorry could not read the canteen id");
				/* BUG FIX: nextInt() leaves the offending token in the
				 * scanner, so without consuming it the loop spins forever
				 * on non-numeric input. */
				inputScanner.next();
			}
		} while (!readCanteenId);
	}

	/**
	 * Utility method to read a date and update the calendar
	 */
	private static void readDate() {
		/* typical input reading pattern */
		boolean readDate = false;
		do {
			try {
				/* BUG FIX: corrected "Pleae" typo in the prompt. */
				System.out.println("Please enter date in the format yyyy-mm-dd:");
				Date d = dateFormat.parse(inputScanner.next());
				currentDate.setTime(d);
				readDate = true;
			} catch (ParseException p) {
				System.out.println("Sorry, the entered date could not be parsed.");
			}
		} while (!readDate);
	}

	/**
	 * Utility method to print menu and read the user selection
	 *
	 * @return user selection as MenuSelection
	 */
	private static MenuSelection menu() {
		IntStream.range(0, 20).forEach(i -> System.out.print("#"));
		System.out.println();
		System.out.println("1) Show canteens");
		System.out.println("2) Set canteen");
		System.out.println("3) Show meals");
		System.out.println("4) Set date");
		System.out.println("5) Quit");
		IntStream.range(0, 20).forEach(i -> System.out.print("#"));
		System.out.println();
		switch (inputScanner.nextInt()) {
			case 1:
				return MenuSelection.SHOW_CANTEENS;
			case 2:
				return MenuSelection.SET_CANTEEN;
			case 3:
				return MenuSelection.SHOW_MEALS;
			case 4:
				return MenuSelection.SET_DATE;
			default:
				return MenuSelection.QUIT;
		}
	}
}
|
<gh_stars>0
# Copyright 2016-present CERN – European Organization for Nuclear Research
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest.mock import Mock
import unittest
from urllib.parse import urljoin
from qf_lib.data_providers.bloomberg.exceptions import BloombergError
from qf_lib.data_providers.bloomberg_beap_hapi.bloomberg_beap_hapi_request_provider import BloombergBeapHapiRequestsProvider
class TestBloombergBeapHapiRequestProvider(unittest.TestCase):
    """Unit tests for BloombergBeapHapiRequestsProvider error handling."""

    def setUp(self):
        # Fake HTTP session: POSTs return a configurable mock response.
        self.session_mock = Mock()
        self.post_response = Mock()
        self.session_mock.post.return_value = self.post_response
        self.address_url = '/eap/catalogs/address_url_id/'
        self.request_id = 'sOmwhEReOveRTHeRainBOW'
        self.host = 'https://api.bloomberg.com'
        self.account_url = urljoin(self.host, self.address_url)
        self.trigger_url = urljoin(self.host, '{}triggers/ctaAdhocTrigger/'.format(self.address_url))

    def test_create_request__unknown_get_response(self):
        # A 404 from the provider's GET must surface as a BloombergError.
        self.session_mock.get.return_value.status_code = 404
        provider = BloombergBeapHapiRequestsProvider(self.host, self.session_mock, self.account_url, self.trigger_url)
        self.assertRaises(BloombergError, provider.create_request, self.request_id, 'some_universe_url', 'some_field_list_url', True)

    def test_create_request__unknown_post_response(self):
        # Even with a successful POST (200), a failing GET still raises.
        self.session_mock.get.return_value.status_code = 404
        self.post_response.status_code = 200
        provider = BloombergBeapHapiRequestsProvider(self.host, self.session_mock, self.account_url, self.trigger_url)
        self.assertRaises(BloombergError, provider.create_request, self.request_id, 'some_universe_url', 'some_field_list_url', True)
|
using System;
/// <summary>
/// Counts the vowels and consonants in a fixed sample string and prints
/// both totals.  Non-letter characters are ignored.
/// </summary>
public class VowelCount
{
    public static void Main(string[] args)
    {
        string str = "Hello";
        int vowels = 0, consonants = 0;

        // Classify each character of the lower-cased string.
        foreach (char c in str.ToLower())
        {
            if ("aeiou".IndexOf(c) >= 0)
            {
                vowels++;
            }
            else if (c >= 'a' && c <= 'z')
            {
                consonants++;
            }
        }

        Console.WriteLine("Vowels: " + vowels);
        Console.WriteLine("Consonants: " + consonants);
    }
}
<gh_stars>0
package server
import (
"crypto/rand"
"encoding/base64"
"fmt"
"io/ioutil"
"net/http"
"strings"
"testing"
"github.com/fnproject/fn/api/datastore"
"github.com/fnproject/fn/api/logs"
"github.com/fnproject/fn/api/models"
"github.com/fnproject/fn/api/mqs"
)
// TestBadRequests verifies that invoking a non-existent function returns
// 404 with the expected "not found" error message.
func TestBadRequests(t *testing.T) {
	buf := setLogBuffer()

	app := &models.App{ID: "app_id", Name: "myapp", Config: models.Config{}}
	fn := &models.Fn{ID: "fn_id", AppID: "app_id"}
	ds := datastore.NewMockInit(
		[]*models.App{app},
		[]*models.Fn{fn},
	)
	rnr, cancel := testRunner(t, ds)
	defer cancel()
	logDB := logs.NewMock()
	srv := testServer(ds, &mqs.Mock{}, logDB, rnr, ServerTypeFull)

	// Table-driven: each case is POSTed and checked for status code and,
	// optionally, an error-message substring.
	for i, test := range []struct {
		path          string
		contentType   string
		body          string
		expectedCode  int
		expectedError error
	}{
		{"/invoke/notfn", "", "", http.StatusNotFound, models.ErrFnsNotFound},
	} {
		request := createRequest(t, http.MethodPost, test.path, strings.NewReader(test.body))
		request.Header = map[string][]string{"Content-Type": []string{test.contentType}}
		_, rec := routerRequest2(t, srv.Router, request)

		if rec.Code != test.expectedCode {
			t.Log(buf.String())
			t.Fatalf("Test %d: Expected status code for path %s to be %d but was %d",
				i, test.path, test.expectedCode, rec.Code)
		}

		if test.expectedError != nil {
			resp := getErrorResponse(t, rec)
			if !strings.Contains(resp.Message, test.expectedError.Error()) {
				t.Log(buf.String())
				t.Errorf("Test %d: Expected error message to have `%s`, but got `%s`",
					i, test.expectedError.Error(), resp.Message)
			}
		}
	}
}
// TestFnInvokeRunnerExecEmptyBody invokes a hot function that deliberately
// returns an empty body and checks a 200 response with zero response bytes.
// Each hot case runs twice to rule out a hot container corrupting the next
// request.
func TestFnInvokeRunnerExecEmptyBody(t *testing.T) {
	buf := setLogBuffer()
	isFailure := false

	// Log the buffer only on failure so the event flow can be inspected.
	defer func() {
		if isFailure {
			t.Log(buf.String())
		}
	}()

	rCfg := map[string]string{"ENABLE_HEADER": "yes", "ENABLE_FOOTER": "yes"} // enable container start/end header/footer
	rImg := "fnproject/fn-test-utils"

	app := &models.App{ID: "app_id", Name: "soup"}
	f1 := &models.Fn{ID: "hothttpstream", Name: "hothttpstream", AppID: app.ID, Image: rImg, ResourceConfig: models.ResourceConfig{Memory: 64, Timeout: 10, IdleTimeout: 20}, Config: rCfg}
	ds := datastore.NewMockInit(
		[]*models.App{app},
		[]*models.Fn{f1},
	)
	ls := logs.NewMock()
	rnr, cancelrnr := testRunner(t, ds, ls)
	defer cancelrnr()

	srv := testServer(ds, &mqs.Mock{}, ls, rnr, ServerTypeFull)

	emptyBody := `{"echoContent": "_TRX_ID_", "isDebug": true, "isEmptyBody": true}`

	// Test hot cases twice to rule out hot-containers corrupting next request.
	testCases := []struct {
		path string
	}{
		{"/invoke/hothttpstream"},
		{"/invoke/hothttpstream"},
	}

	for i, test := range testCases {
		t.Run(fmt.Sprintf("%d_%s", i, strings.Replace(test.path, "/", "_", -1)), func(t *testing.T) {
			trx := fmt.Sprintf("_trx_%d_", i)
			body := strings.NewReader(strings.Replace(emptyBody, "_TRX_ID_", trx, 1))
			_, rec := routerRequest(t, srv.Router, http.MethodPost, test.path, body)
			respBytes, _ := ioutil.ReadAll(rec.Body)
			respBody := string(respBytes)
			maxBody := len(respBody)
			if maxBody > 1024 {
				maxBody = 1024
			}

			if rec.Code != http.StatusOK {
				isFailure = true
				t.Errorf("Test %d: Expected status code to be %d but was %d. body: %s",
					i, http.StatusOK, rec.Code, respBody[:maxBody])
			} else if len(respBytes) != 0 {
				isFailure = true
				t.Errorf("Test %d: Expected empty body but got %d. body: %s",
					i, len(respBytes), respBody[:maxBody])
			}
		})
	}
}
// TestFnInvokeRunnerExecution is the main invoke matrix: good responses,
// content-type overrides, crashing containers, oversized output/input, OOM,
// missing images/registries, and oversized memory requests.  Log output is
// checked afterwards for the cases that declare expected log substrings.
func TestFnInvokeRunnerExecution(t *testing.T) {
	buf := setLogBuffer()
	isFailure := false
	tweaker := envTweaker("FN_MAX_RESPONSE_SIZE", "2048")
	defer tweaker()

	// Log once after we are done, flow of events are important (hot containers, idle timeout, etc.)
	// for figuring out why things failed.
	defer func() {
		if isFailure {
			t.Log(buf.String())
		}
	}()

	rCfg := map[string]string{"ENABLE_HEADER": "yes", "ENABLE_FOOTER": "yes"} // enable container start/end header/footer
	rImg := "fnproject/fn-test-utils"
	rImgBs1 := "fnproject/imagethatdoesnotexist"
	rImgBs2 := "localhost:5050/fnproject/imagethatdoesnotexist"

	app := &models.App{ID: "app_id", Name: "myapp"}

	models.MaxMemory = uint64(1024 * 1024 * 1024) // 1024 TB
	hugeMem := uint64(models.MaxMemory - 1)

	dneFn := &models.Fn{ID: "dne_fn_id", Name: "dne_fn", AppID: app.ID, Image: rImgBs1, ResourceConfig: models.ResourceConfig{Memory: 64, Timeout: 30, IdleTimeout: 30}, Config: rCfg}
	dneRegistryFn := &models.Fn{ID: "dnereg_fn_id", Name: "dnereg_fn", AppID: app.ID, Image: rImgBs2, ResourceConfig: models.ResourceConfig{Memory: 64, Timeout: 30, IdleTimeout: 30}, Config: rCfg}
	httpStreamFn := &models.Fn{ID: "http_stream_fn_id", Name: "http_stream_fn", AppID: app.ID, Image: rImg, ResourceConfig: models.ResourceConfig{Memory: 64, Timeout: 30, IdleTimeout: 30}, Config: rCfg}
	bigMemHotFn := &models.Fn{ID: "bigmem", Name: "bigmemhot", AppID: app.ID, Image: "fnproject/fn-test-utils", ResourceConfig: models.ResourceConfig{Memory: hugeMem, Timeout: 4, IdleTimeout: 30}}

	// TODO consider removing this instead of satisfying this test. it was here to help user experience during a transitional time in our lives where we decided to cut all our hair off and we hope you'll forget it.
	// TODO also note that fnproject/hello should get killed whenever you do that. it is only here for the purposes of failing.
	oldDefaultFn := &models.Fn{ID: "fail_fn", Name: "fail_fn", AppID: app.ID, Image: "fnproject/hello", ResourceConfig: models.ResourceConfig{Memory: 64, Timeout: 30, IdleTimeout: 30}, Config: rCfg}

	ds := datastore.NewMockInit(
		[]*models.App{app},
		[]*models.Fn{dneFn, dneRegistryFn, httpStreamFn, oldDefaultFn, bigMemHotFn},
	)
	ls := logs.NewMock()

	rnr, cancelrnr := testRunner(t, ds, ls)
	defer cancelrnr()

	srv := testServer(ds, &mqs.Mock{}, ls, rnr, ServerTypeFull, LimitRequestBody(32256))

	expHeaders := map[string][]string{"Content-Type": {"application/json; charset=utf-8"}}
	expCTHeaders := map[string][]string{"Content-Type": {"foo/bar"}}

	// Checking for EndOfLogs currently depends on scheduling of go-routines (in docker/containerd) that process stderr & stdout.
	// Therefore, not testing for EndOfLogs for hot containers (which has complex I/O processing) anymore.
	multiLogExpectHot := []string{"BeginOfLogs" /*, "EndOfLogs" */}

	crasher := `{"echoContent": "_TRX_ID_", "isDebug": true, "isCrash": true}`             // crash container
	oomer := `{"echoContent": "_TRX_ID_", "isDebug": true, "allocateMemory": 120000000}` // ask for 120MB

	// XXX(reed): do we have an invalid http response? no right?
	ok := `{"echoContent": "_TRX_ID_", "responseContentType": "application/json; charset=utf-8", "isDebug": true}` // good response / ok
	respTypeLie := `{"echoContent": "_TRX_ID_", "responseContentType": "foo/bar", "isDebug": true}`                // Content-Type: foo/bar

	// sleep between logs and with debug enabled, fn-test-utils will log header/footer below:
	multiLog := `{"echoContent": "_TRX_ID_", "sleepTime": 1000, "isDebug": true}`

	//over sized request
	var bigbufa [32257]byte
	// NOTE(review): rand.Read's error is ignored here; acceptable for test
	// fixture data, since any bytes will do.
	rand.Read(bigbufa[:])
	bigbuf := base64.StdEncoding.EncodeToString(bigbufa[:]) // this will be > bigbufa, but json compatible

	bigoutput := `{"echoContent": "_TRX_ID_", "isDebug": true, "trailerRepeat": 1000}`                                                       // 1000 trailers to exceed 2K
	smalloutput := `{"echoContent": "_TRX_ID_", "isDebug": true, "responseContentType":"application/json; charset=utf-8", "trailerRepeat": 1}` // 1 trailer < 2K

	testCases := []struct {
		path               string
		body               string
		method             string
		expectedCode       int
		expectedHeaders    map[string][]string
		expectedErrSubStr  string
		expectedLogsSubStr []string
	}{
		{"/invoke/http_stream_fn_id", ok, http.MethodPost, http.StatusOK, expHeaders, "", nil},
		// NOTE: we can't test bad response framing anymore easily (eg invalid http response), should we even worry about it?
		{"/invoke/http_stream_fn_id", respTypeLie, http.MethodPost, http.StatusOK, expCTHeaders, "", nil},
		{"/invoke/http_stream_fn_id", crasher, http.MethodPost, http.StatusBadGateway, expHeaders, "error receiving function response", nil},
		// XXX(reed): we could stop buffering function responses so that we can stream things?
		{"/invoke/http_stream_fn_id", bigoutput, http.MethodPost, http.StatusBadGateway, nil, "function response too large", nil},
		{"/invoke/http_stream_fn_id", smalloutput, http.MethodPost, http.StatusOK, expHeaders, "", nil},
		// XXX(reed): meh we really should try to get oom out, but maybe it's better left to the logs?
		{"/invoke/http_stream_fn_id", oomer, http.MethodPost, http.StatusBadGateway, nil, "error receiving function response", nil},
		{"/invoke/http_stream_fn_id", bigbuf, http.MethodPost, http.StatusRequestEntityTooLarge, nil, "", nil},
		{"/invoke/dne_fn_id", ``, http.MethodPost, http.StatusNotFound, nil, "pull access denied", nil},
		{"/invoke/dnereg_fn_id", ``, http.MethodPost, http.StatusBadGateway, nil, "connection refused", nil},
		// XXX(reed): what are these?
		{"/invoke/http_stream_fn_id", multiLog, http.MethodPost, http.StatusOK, nil, "", multiLogExpectHot},
		// TODO consider removing this, see comment above the image
		{"/invoke/fail_fn", ok, http.MethodPost, http.StatusBadGateway, nil, "container failed to initialize", nil},
		{"/invoke/fn_id", ok, http.MethodPut, http.StatusMethodNotAllowed, nil, "Method not allowed", nil},
		{"/invoke/bigmem", ok, http.MethodPost, http.StatusBadRequest, nil, "cannot be allocated", nil},
	}

	callIds := make([]string, len(testCases))

	for i, test := range testCases {
		t.Run(fmt.Sprintf("Test_%d_%s", i, strings.Replace(test.path, "/", "_", -1)), func(t *testing.T) {
			trx := fmt.Sprintf("_trx_%d_", i)
			body := strings.NewReader(strings.Replace(test.body, "_TRX_ID_", trx, 1))
			_, rec := routerRequest(t, srv.Router, test.method, test.path, body)
			respBytes, _ := ioutil.ReadAll(rec.Body)
			respBody := string(respBytes)
			maxBody := len(respBody)
			if maxBody > 1024 {
				maxBody = 1024
			}

			callIds[i] = rec.Header().Get("Fn-Call-Id")
			cid := callIds[i]

			if rec.Code == 200 && cid == "" {
				isFailure = true
				t.Errorf("Test %d call_id %s: Expected successful call id to be non-empty but was %s. body: %s",
					i, cid, cid, respBody[:maxBody])
			}

			if rec.Code != test.expectedCode {
				isFailure = true
				t.Errorf("Test %d call_id %s: Expected status code to be %d but was %d. body: %s",
					i, cid, test.expectedCode, rec.Code, respBody[:maxBody])
			}

			if rec.Code == http.StatusOK && !strings.Contains(respBody, trx) {
				isFailure = true
				t.Errorf("Test %d call_id %s: Expected response to include %s but got body: %s",
					i, cid, trx, respBody[:maxBody])
			}

			if test.expectedErrSubStr != "" && !strings.Contains(respBody, test.expectedErrSubStr) {
				isFailure = true
				t.Errorf("Test %d call_id %s: Expected response to include %s but got body: %s",
					i, cid, test.expectedErrSubStr, respBody[:maxBody])
			}

			if test.expectedHeaders != nil {
				for name, header := range test.expectedHeaders {
					if header[0] != rec.Header().Get(name) {
						isFailure = true
						t.Errorf("Test %d call_id %s: Expected header `%s` to be %s but was %s. body: %s",
							i, cid, name, header[0], rec.Header().Get(name), respBody)
					}
				}
			}
		})
	}

	// Second pass: verify expected log substrings for the recorded call ids.
	for i, test := range testCases {
		if test.expectedLogsSubStr != nil {
			if !checkLogs(t, i, ls, callIds[i], test.expectedLogsSubStr) {
				isFailure = true
			}
		}
	}
}
// TestInvokeRunnerTimeout checks that a function sleeping past its 4s
// timeout yields 504, while a fast call on the same hot container still
// succeeds afterwards.
func TestInvokeRunnerTimeout(t *testing.T) {
	buf := setLogBuffer()
	isFailure := false

	// Log once after we are done, flow of events are important (hot containers, idle timeout, etc.)
	// for figuring out why things failed.
	defer func() {
		if isFailure {
			t.Log(buf.String())
		}
	}()

	app := &models.App{ID: "app_id", Name: "myapp", Config: models.Config{}}
	httpStreamFn := &models.Fn{ID: "http-stream", Name: "http-stream", AppID: app.ID, Image: "fnproject/fn-test-utils", ResourceConfig: models.ResourceConfig{Memory: 128, Timeout: 4, IdleTimeout: 30}}

	ds := datastore.NewMockInit(
		[]*models.App{app},
		[]*models.Fn{httpStreamFn},
	)
	fnl := logs.NewMock()
	rnr, cancelrnr := testRunner(t, ds, fnl)
	defer cancelrnr()

	srv := testServer(ds, &mqs.Mock{}, fnl, rnr, ServerTypeFull)

	for i, test := range []struct {
		path            string
		body            string
		method          string
		expectedCode    int
		expectedHeaders map[string][]string
	}{
		{"/invoke/http-stream", `{"echoContent": "_TRX_ID_", "sleepTime": 5000, "isDebug": true}`, http.MethodPost, http.StatusGatewayTimeout, nil},
		{"/invoke/http-stream", `{"echoContent": "_TRX_ID_", "sleepTime": 0, "isDebug": true}`, http.MethodPost, http.StatusOK, nil},
	} {
		t.Run(fmt.Sprintf("%d_%s", i, strings.Replace(test.path, "/", "_", -1)), func(t *testing.T) {
			trx := fmt.Sprintf("_trx_%d_", i)
			body := strings.NewReader(strings.Replace(test.body, "_TRX_ID_", trx, 1))
			_, rec := routerRequest(t, srv.Router, test.method, test.path, body)
			respBytes, _ := ioutil.ReadAll(rec.Body)
			respBody := string(respBytes)
			maxBody := len(respBody)
			if maxBody > 1024 {
				maxBody = 1024
			}

			cid := rec.Header().Get("Fn-Call-Id")
			if cid == "" {
				isFailure = true
				t.Errorf("Test %d call_id %s: Expected call id to be non-empty but was %s. body: %s",
					i, cid, cid, respBody[:maxBody])
			}

			if rec.Code != test.expectedCode {
				isFailure = true
				t.Errorf("Test %d: Expected status code to be %d but was %d body: %#v",
					i, test.expectedCode, rec.Code, respBody[:maxBody])
			}

			if rec.Code == http.StatusOK && !strings.Contains(respBody, trx) {
				isFailure = true
				t.Errorf("Test %d: Expected response to include %s but got body: %s",
					i, trx, respBody[:maxBody])
			}

			if test.expectedHeaders != nil {
				for name, header := range test.expectedHeaders {
					if header[0] != rec.Header().Get(name) {
						isFailure = true
						t.Errorf("Test %d: Expected header `%s` to be %s but was %s body: %#v",
							i, name, header[0], rec.Header().Get(name), respBody[:maxBody])
					}
				}
			}
		})
	}
}
// Minimal test that checks the possibility of invoking concurrent hot sync functions.
// Four goroutines invoke the same hot function at once; each reports its
// result (or an error) over the errs channel, which the main goroutine drains.
func TestInvokeRunnerMinimalConcurrentHotSync(t *testing.T) {
	buf := setLogBuffer()

	app := &models.App{ID: "app_id", Name: "myapp", Config: models.Config{}}
	fn := &models.Fn{ID: "fn_id", AppID: app.ID, Name: "myfn", Image: "fnproject/fn-test-utils", ResourceConfig: models.ResourceConfig{Memory: 128, Timeout: 30, IdleTimeout: 5}}

	ds := datastore.NewMockInit(
		[]*models.App{app},
		[]*models.Fn{fn},
	)
	fnl := logs.NewMock()
	rnr, cancelrnr := testRunner(t, ds, fnl)
	defer cancelrnr()

	srv := testServer(ds, &mqs.Mock{}, fnl, rnr, ServerTypeFull)

	for i, test := range []struct {
		path            string
		body            string
		method          string
		expectedCode    int
		expectedHeaders map[string][]string
	}{
		{"/invoke/fn_id", `{"sleepTime": 100, "isDebug": true}`, http.MethodPost, http.StatusOK, nil},
	} {
		errs := make(chan error)
		numCalls := 4
		for k := 0; k < numCalls; k++ {
			go func() {
				body := strings.NewReader(test.body)
				_, rec := routerRequest(t, srv.Router, test.method, test.path, body)

				if rec.Code != test.expectedCode {
					t.Log(buf.String())
					errs <- fmt.Errorf("Test %d: Expected status code to be %d but was %d body: %#v",
						i, test.expectedCode, rec.Code, rec.Body.String())
					return
				}

				if test.expectedHeaders == nil {
					errs <- nil
					return
				}
				for name, header := range test.expectedHeaders {
					if header[0] != rec.Header().Get(name) {
						t.Log(buf.String())
						errs <- fmt.Errorf("Test %d: Expected header `%s` to be %s but was %s body: %#v",
							i, name, header[0], rec.Header().Get(name), rec.Body.String())
						return
					}
				}
				errs <- nil
			}()
		}
		for k := 0; k < numCalls; k++ {
			err := <-errs
			if err != nil {
				t.Errorf("%v", err)
			}
		}
	}
}
|
class CarData:
    """In-memory registry of car records, keyed by car name."""

    def __init__(self):
        # Maps car name -> record dict ({"name", "color", "price"}).
        self.cars = {}

    def add_car(self, name, color, price):
        """Register (or overwrite) the car record stored under ``name``."""
        self.cars[name] = {
            "name": name,
            "color": color,
            "price": price,
        }

    def get_car(self, name):
        """Return the record for ``name``, or None if no such car exists."""
        # dict.get replaces the manual membership check + indexing.
        return self.cars.get(name)
/// Minimal Nushell-style object wrapping a single column expression.
#[derive(Debug, PartialEq)]
struct NuObject {
    /// The column expression this object represents.
    column: String,
}
fn convert_to_nu_object(named_column_expression: &str) -> Option<NuObject> {
if named_column_expression.is_empty() {
return None;
}
Some(NuObject {
column: named_column_expression.to_string(),
})
} |
#!/bin/bash
# Copyright 2016 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail

# Resolve the repo root relative to this script; BASH_SOURCE must be indexed
# and quoted so paths containing spaces survive word splitting.
KUBE_ROOT=$(dirname "${BASH_SOURCE[0]}")/..
source "${KUBE_ROOT}/hack/lib/init.sh"
kube::golang::setup_env
# Build a fresh kube-apiserver so the generated spec reflects current sources.
make -C "${KUBE_ROOT}" WHAT=cmd/kube-apiserver
apiserver=$(kube::util::find-binary "kube-apiserver")
SPECROOT="${KUBE_ROOT}/api/openapi-spec"
TMP_SPECROOT="${KUBE_ROOT}/_tmp/openapi-spec"
_tmp="${KUBE_ROOT}/_tmp"
mkdir -p "${_tmp}"
# Preserve the checked-in spec; restore it and clean up on any exit.
# Single quotes defer expansion to trap time, and the paths are quoted
# inside so spaces in KUBE_ROOT cannot split the commands.
cp -a "${SPECROOT}" "${TMP_SPECROOT}"
trap 'cp -a "${TMP_SPECROOT}" "${SPECROOT}/.."; rm -rf "${_tmp}"' EXIT SIGINT
rm "${SPECROOT}"/*
cp "${TMP_SPECROOT}/BUILD" "${SPECROOT}/BUILD"
"${KUBE_ROOT}/hack/update-openapi-spec.sh"
echo "diffing ${SPECROOT} against freshly generated openapi spec"
ret=0
diff -Naupr -I 'Auto generated by' "${SPECROOT}" "${TMP_SPECROOT}" || ret=$?
if [[ $ret -eq 0 ]]
then
  echo "${SPECROOT} up to date."
else
  echo "${SPECROOT} is out of date. Please run hack/update-openapi-spec.sh"
  exit 1
fi

# ex: ts=2 sw=2 et filetype=sh
|
#!/bin/bash
# Demonstrates extracting the "def_test" function with silentbob's --tags option.
echo "This command print \"def_test\" function:"
echo "$ silentbob --tags def_test"
echo
. ./head.sh
# Quote WORKDIR (may contain spaces) and abort instead of running silentbob
# against the wrong directory if the cd fails.
cd "$WORKDIR" || exit 1
silentbob -c
silentbob --tags def_test
|
<reponame>hhuchzh/Bus_Project
import service from '@/utils/request'
// @Tags GpsInfo
// @Summary 创建GpsInfo
// @Security ApiKeyAuth
// @accept application/json
// @Produce application/json
// @Param data body model.GpsInfo true "创建GpsInfo"
// @Success 200 {string} string "{"success":true,"data":{},"msg":"获取成功"}"
// @Router /gpsInfo/createGpsInfo [post]
// Create a GpsInfo record (POST /gpsInfo/createGpsInfo with `data` as the body).
export const createGpsInfo = (data) =>
  service({
    url: '/gpsInfo/createGpsInfo',
    method: 'post',
    data
  })
// @Tags GpsInfo
// @Summary 删除GpsInfo
// @Security ApiKeyAuth
// @accept application/json
// @Produce application/json
// @Param data body model.GpsInfo true "删除GpsInfo"
// @Success 200 {string} string "{"success":true,"data":{},"msg":"删除成功"}"
// @Router /gpsInfo/deleteGpsInfo [delete]
// Delete one GpsInfo record (DELETE /gpsInfo/deleteGpsInfo with `data` as the body).
export const deleteGpsInfo = (data) =>
  service({
    url: '/gpsInfo/deleteGpsInfo',
    method: 'delete',
    data
  })
// @Tags GpsInfo
// @Summary 删除GpsInfo
// @Security ApiKeyAuth
// @accept application/json
// @Produce application/json
// @Param data body request.IdsReq true "批量删除GpsInfo"
// @Success 200 {string} string "{"success":true,"data":{},"msg":"删除成功"}"
// @Router /gpsInfo/deleteGpsInfo [delete]
// Batch-delete GpsInfo records by id list (DELETE /gpsInfo/deleteGpsInfoByIds).
export const deleteGpsInfoByIds = (data) =>
  service({
    url: '/gpsInfo/deleteGpsInfoByIds',
    method: 'delete',
    data
  })
// @Tags GpsInfo
// @Summary 更新GpsInfo
// @Security ApiKeyAuth
// @accept application/json
// @Produce application/json
// @Param data body model.GpsInfo true "更新GpsInfo"
// @Success 200 {string} string "{"success":true,"data":{},"msg":"更新成功"}"
// @Router /gpsInfo/updateGpsInfo [put]
// Update a GpsInfo record (PUT /gpsInfo/updateGpsInfo with `data` as the body).
export const updateGpsInfo = (data) =>
  service({
    url: '/gpsInfo/updateGpsInfo',
    method: 'put',
    data
  })
// @Tags GpsInfo
// @Summary 用id查询GpsInfo
// @Security ApiKeyAuth
// @accept application/json
// @Produce application/json
// @Param data query model.GpsInfo true "用id查询GpsInfo"
// @Success 200 {string} string "{"success":true,"data":{},"msg":"查询成功"}"
// @Router /gpsInfo/findGpsInfo [get]
// Look up a single GpsInfo by id (GET /gpsInfo/findGpsInfo with query params).
export const findGpsInfo = (params) =>
  service({
    url: '/gpsInfo/findGpsInfo',
    method: 'get',
    params
  })
// @Tags GpsInfo
// @Summary 分页获取GpsInfo列表
// @Security ApiKeyAuth
// @accept application/json
// @Produce application/json
// @Param data query request.PageInfo true "分页获取GpsInfo列表"
// @Success 200 {string} string "{"success":true,"data":{},"msg":"获取成功"}"
// @Router /gpsInfo/getGpsInfoList [get]
// Fetch a paginated GpsInfo list (GET /gpsInfo/getGpsInfoList with page params).
export const getGpsInfoList = (params) =>
  service({
    url: '/gpsInfo/getGpsInfoList',
    method: 'get',
    params
  })
// @Tags GpsInfo
// @Summary 分页获取GpsInfo列表
// @Security ApiKeyAuth
// @accept application/json
// @Produce application/json
// @Param data query request.PageInfo true "分页获取GpsInfo列表"
// @Success 200 {string} string "{"success":true,"data":{},"msg":"获取成功"}"
// @Router /gpsInfo/getAvailableGpsInfoList [get]
// Fetch a paginated list of available GpsInfo records
// (GET /gpsInfo/getAvailableGpsInfoList with page params).
export const getAvailableGpsInfoList = (params) =>
  service({
    url: '/gpsInfo/getAvailableGpsInfoList',
    method: 'get',
    params
  })
// Fetch a paginated list of unavailable GpsInfo records
// (GET /gpsInfo/getNotAvailableGpsInfoList with page params).
export const getNotAvailableGpsInfoList = (params) =>
  service({
    url: '/gpsInfo/getNotAvailableGpsInfoList',
    method: 'get',
    params
  })
|
<gh_stars>1-10
package oidc.management.repository.mongo;
import oidc.management.model.ServiceAccount;
import oidc.management.model.mongo.MongoServiceAccount;
import oidc.management.repository.ServiceAccountRepository;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.data.mongo.MongoRepositoriesAutoConfiguration;
import org.springframework.data.mongodb.repository.MongoRepository;
/**
* Mongo implementation of the {@link ServiceAccountRepository} interface.
*
* @author <NAME>
* @since 14-04-2022
*/
@ConditionalOnBean(MongoRepositoriesAutoConfiguration.class)
public interface MongoServiceAccountRepository extends ServiceAccountRepository<MongoServiceAccount>, MongoRepository<MongoServiceAccount, String> {

    /**
     * Supplies the Mongo-specific builder so generic code in
     * {@link ServiceAccountRepository} constructs {@link MongoServiceAccount}
     * instances rather than the base type.
     *
     * @return a builder producing {@link MongoServiceAccount} instances
     */
    @Override
    default ServiceAccount.ServiceAccountBuilder entityBuilder() {
        // 'public' is redundant on interface members and was removed.
        return MongoServiceAccount.builder();
    }
}
|
#!/usr/bin/env bats
load test_helper
# Power on a fresh ttylinux VM and verify vm.ip resolves its address via guest tools.
@test "vm.ip" {
  id=$(new_ttylinux_vm)
  run govc vm.power -on $id
  assert_success
  # vm.ip blocks until the guest reports an address.
  run govc vm.ip $id
  assert_success
}
# The esxcli-based IP lookup must agree with the guest-tools-based lookup.
@test "vm.ip -esxcli" {
  id=$(new_ttylinux_vm)
  run govc vm.power -on $id
  assert_success
  run govc vm.ip -esxcli $id
  assert_success
  ip_esxcli=$output
  run govc vm.ip $id
  assert_success
  ip_tools=$output
  # Both code paths should resolve to the same address.
  assert_equal $ip_esxcli $ip_tools
}
# A freshly created ttylinux VM should have no disk and no cdrom devices.
@test "vm.create" {
  id=$(new_ttylinux_vm)
  run govc vm.power -on $id
  assert_success
  # Bug fix: these checks previously queried "$vm", which is never set in this
  # test (the VM name is in "$id"), so device.ls ran against the wrong target.
  result=$(govc device.ls -vm $id | grep disk- | wc -l)
  [ $result -eq 0 ]
  result=$(govc device.ls -vm $id | grep cdrom- | wc -l)
  [ $result -eq 0 ]
}
# Exercise govc vm.change: guest OS / memory / cpu, extraConfig entries, rename.
@test "vm.change" {
  id=$(new_ttylinux_vm)
  run govc vm.change -g ubuntu64Guest -m 1024 -c 2 -vm $id
  assert_success
  run govc vm.info $id
  assert_success
  assert_line "Guest name: Ubuntu Linux (64-bit)"
  assert_line "Memory: 1024MB"
  assert_line "CPU: 2 vCPU(s)"
  # extraConfig entries must round-trip through vm.info -e.
  run govc vm.change -e "guestinfo.a=1" -e "guestinfo.b=2" -vm $id
  assert_success
  run govc vm.info -e $id
  assert_success
  assert_line "guestinfo.a: 1"
  assert_line "guestinfo.b: 2"
  # After a rename, the old name must no longer resolve; the new one must.
  nid=$(new_id)
  run govc vm.change -name $nid -vm $id
  assert_success
  run govc vm.info $id
  [ ${#lines[@]} -eq 0 ]
  run govc vm.info $nid
  [ ${#lines[@]} -gt 0 ]
}
# Walk the VM through power-state transitions; invalid flag combinations fail.
@test "vm.power" {
  vm=$(new_ttylinux_vm)
  run vm_power_state $vm
  assert_success "poweredOff"
  # No flag, or conflicting -on/-off flags, are usage errors.
  run govc vm.power $vm
  assert_failure
  run govc vm.power -on -off $vm
  assert_failure
  run govc vm.power -on $vm
  assert_success
  run vm_power_state $vm
  assert_success "poweredOn"
  run govc vm.power -suspend $vm
  assert_success
  run vm_power_state $vm
  assert_success "suspended"
  # Resuming from suspend also uses -on.
  run govc vm.power -on $vm
  assert_success
  run vm_power_state $vm
  assert_success "poweredOn"
}
# -disk.controller pvscsi must create a paravirtual SCSI controller, not lsilogic.
@test "vm.create pvscsi" {
  vm=$(new_id)
  govc vm.create -on=false -disk.controller pvscsi $vm
  result=$(govc device.ls -vm $vm | grep pvscsi- | wc -l)
  [ $result -eq 1 ]
  result=$(govc device.ls -vm $vm | grep lsilogic- | wc -l)
  [ $result -eq 0 ]
}
# VM creation against a vcsim cluster, with and without an explicit host.
@test "vm.create in cluster" {
  vcsim_env
  # using GOVC_HOST and its resource pool
  run govc vm.create -on=false $(new_id)
  assert_success
  # using no -host and the default resource pool for DC0
  unset GOVC_HOST
  run govc vm.create -on=false $(new_id)
  assert_success
}
# vm.info: exact-name lookup, wildcard lookup, and extraConfig display.
@test "vm.info" {
  local num=3
  local prefix=$(new_id)
  # Each created VM must be findable by its exact name.
  for x in $(seq $num)
  do
    local id="${prefix}-${x}"
    run govc vm.create -on=false $id
    assert_success
    local found=$(govc vm.info $id | grep Name: | wc -l)
    [ "$found" -eq 1 ]
  done
  # test find slice
  local found=$(govc vm.info ${prefix}-* | grep Name: | wc -l)
  [ "$found" -eq $num ]
  # test extraConfig
  run govc vm.change -e "guestinfo.a=2" -vm $id
  assert_success
  run govc vm.info -e $id
  assert_success
  assert_line "guestinfo.a: 2"
}
# Linked-clone disk attached via an IDE controller.
@test "vm.create linked ide disk" {
  vm=$(new_id)
  run govc vm.create -disk $GOVC_TEST_VMDK -disk.controller ide -on=false $vm
  assert_success
  run govc device.info -vm $vm disk-200-0
  assert_success
  assert_line "Controller: ide-200"
}
# Default (linked) disk: a child vmdk in the VM folder points at the parent image.
@test "vm.create linked scsi disk" {
  vm=$(new_id)
  # A nonexistent datastore path must be rejected up front.
  run govc vm.create -disk enoent -on=false $vm
  assert_failure "Error: datastore file does not exist"
  run govc vm.create -disk $GOVC_TEST_VMDK -on=false $vm
  assert_success
  run govc device.info -vm $vm disk-1000-0
  assert_success
  assert_line "Controller: lsilogic-1000"
  assert_line "Parent: [datastore1] $GOVC_TEST_VMDK"
  assert_line "File: [datastore1] $vm/${vm}.vmdk"
}
# -link=false attaches the vmdk directly: no parent/child relationship.
@test "vm.create scsi disk" {
  vm=$(new_id)
  run govc vm.create -disk enoent -on=false $vm
  assert_failure "Error: datastore file does not exist"
  run govc vm.create -disk $GOVC_TEST_VMDK -on=false -link=false $vm
  assert_success
  run govc device.info -vm $vm disk-1000-0
  assert_success
  assert_line "Controller: lsilogic-1000"
  refute_line "Parent: [datastore1] $GOVC_TEST_VMDK"
  assert_line "File: [datastore1] $GOVC_TEST_VMDK"
}
# Attach an ISO as a cdrom device; a missing datastore path must fail.
@test "vm.create iso" {
  upload_iso
  vm=$(new_id)
  run govc vm.create -iso enoent -on=false $vm
  assert_failure "Error: datastore file does not exist"
  run govc vm.create -iso $GOVC_TEST_ISO -on=false $vm
  assert_success
  run govc device.info -vm $vm cdrom-3000
  assert_success
  assert_line "Controller: ide-200"
  assert_line "Summary: ISO [datastore1] $GOVC_TEST_ISO"
}
# vm.disk.create on a diskless VM: default controller first, then an explicit one.
@test "vm.disk.create empty vm" {
  vm=$(new_empty_vm)
  local name=$(new_id)
  run govc vm.disk.create -vm $vm -name $name -size 1G
  assert_success
  result=$(govc device.ls -vm $vm | grep disk- | wc -l)
  [ $result -eq 1 ]
  name=$(new_id)
  run govc vm.disk.create -vm $vm -name $name -controller lsilogic-1000 -size 2G
  assert_success
  result=$(govc device.ls -vm $vm | grep disk- | wc -l)
  [ $result -eq 2 ]
}
# Add a disk to a VM that already has one; re-creating the same name is a no-op.
@test "vm.disk.create" {
  import_ttylinux_vmdk
  vm=$(new_id)
  govc vm.create -disk $GOVC_TEST_VMDK -on=false $vm
  result=$(govc device.ls -vm $vm | grep disk- | wc -l)
  [ $result -eq 1 ]
  local name=$(new_id)
  run govc vm.disk.create -vm $vm -name $name -size 1G
  assert_success
  result=$(govc device.ls -vm $vm | grep disk- | wc -l)
  [ $result -eq 2 ]
  # Re-using the same disk name does not add a third device.
  run govc vm.disk.create -vm $vm -name $name -size 1G
  assert_success # TODO: should fail?
  result=$(govc device.ls -vm $vm | grep disk- | wc -l)
  [ $result -eq 2 ]
}
# Attach an imported vmdk; bad paths fail differently for direct vs linked attach.
@test "vm.disk.attach" {
  import_ttylinux_vmdk
  vm=$(new_id)
  govc vm.create -disk $GOVC_TEST_VMDK -on=false $vm
  result=$(govc device.ls -vm $vm | grep disk- | wc -l)
  [ $result -eq 1 ]
  run govc import.vmdk $GOVC_TEST_VMDK_SRC $vm
  assert_success
  run govc vm.disk.attach -vm $vm -link=false -disk enoent.vmdk
  assert_failure "Error: File [datastore1] enoent.vmdk was not found"
  run govc vm.disk.attach -vm $vm -disk enoent.vmdk
  assert_failure "Error: Invalid configuration for device '0'."
  run govc vm.disk.attach -vm $vm -disk $vm/$GOVC_TEST_VMDK -controller lsilogic-1000
  assert_success
  result=$(govc device.ls -vm $vm | grep disk- | wc -l)
  [ $result -eq 2 ]
}
|
# Derive all build artifact names from the current directory's basename.
# Inner $(pwd) is quoted so directories with spaces survive word splitting,
# and the stray trailing ';' separators are dropped.
bnc_name="$(basename "$(pwd)")"
lnk_name="$bnc_name.rbc"
prf_name="$bnc_name.ibc"
obj_name="$bnc_name.o"
exe_name="$bnc_name.exe"
# Glob directly instead of parsing `ls` output (filenames with whitespace
# would otherwise be split into multiple array entries).
source_files=(*.c)
CXXFLAGS="-I."
<reponame>hopshadoop/btsync-chef<filename>provider/wait_download.rb
# Waits (by polling btsync's per-recipe log) until a seeded file has finished
# downloading, then drops a marker file so subsequent Chef runs skip the wait.
action :install_ndbd do
  # Recipe name selects which btsync service/log this wait applies to.
  tgt = "#{new_resource.recipe_name}"
  # Chef::Log.info "seeder : #{node[:ndb][:seeder_ip]}"
  # Tail the log (./sync/sync.log) until we see the event that the file has completed downloading ("Finished syncing file").
  # Set a timeout if that event takes too long.
  # NOTE(review): inside the shell script below, COUNT only increments while
  # COUNT -eq 0, so the TIMEOUT branch (DOWNLOADED=2) appears unreachable after
  # the first second — confirm whether the timeout was ever intended to fire.
  # NOTE(review): `[ $FOUND = "0" ]` / `[ $FOUND -eq 1 ]` compare grep's matched
  # text, not a 0/1 flag — verify against the actual log format.
  bash "wait_until_file_downloaded" do
    # user "#{node[:btsync][:user]}"
    user "root"
    code <<-EOF
    DOWNLOADED=0
    TIMEOUT=#{node[:btsync][:timeout_secs]}
    COUNT=0
    ERROR=0
    LOG=#{node[:btsync][:etc]}/sync-#{tgt}.log
    service btsync-#{tgt} stop
    while [ $DOWNLOADED -eq 0 ] ; do
      if [ -e $LOG ] ; then
        FOUND=`grep "Finished syncing file" $LOG`
        if [ $FOUND = "0" ] ; then
          ERROR=`grep -v "Error while adding folder" $LOG | grep "Error"`
        fi
      else
        FOUND=0
      fi
      if [ $FOUND -eq 1 ] ; then
        DOWNLOADED=1;
      elif [ $ERROR -eq 1 ] ; then
        DOWNLOADED=3;
      else
        if [ ${COUNT} -eq 0 ] ; then
          echo -n "Seconds left before timeout: "
          echo -n "`expr ${TIMEOUT} - ${COUNT}` "
          sleep 1
          COUNT=`expr ${COUNT} + 1`
          if [ $COUNT -gt $TIMEOUT ] ; then
            DOWNLOADED=2;
          fi
        fi
      fi
    done
    echo ""
    if [ $DOWNLOADED -ne 1 ] ; then
      echo "Btsync didn't finish on time, downloading using wget"
    else
      touch #{Chef::Config[:file_cache_path]}/.#{tgt}_downloaded
    fi
    EOF
    # Idempotence guard: skip the wait entirely once the marker file exists.
    not_if { ::File.exists?("#{Chef::Config[:file_cache_path]}/.#{tgt}_downloaded") }
  end
end
|
def is_valid_palindrome(s: str) -> bool:
    """Return True if ``s`` reads the same forwards and backwards,
    ignoring case and any non-alphanumeric characters."""
    cleaned = [ch.lower() for ch in s if ch.isalnum()]
    # A sequence is a palindrome iff it equals its own reverse.
    return cleaned == cleaned[::-1]
#!/bin/bash
# This script will generate a Key-Pair for Owner Attestation.
# Print usage and exit when invoked with -h/--help.
if [[ "$1" == "-h" || "$1" == "--help" ]]; then
    cat << EndOfMessage
Usage: ${0##*/} [<encryption-keyType>]
Arguments:
  <encryption-keyType>  The type of encryption to use when generating owner key pair (ecdsa256, ecdsa384, rsa, or all). Will default to all.
Optional Environment Variables:
  COUNTRY_NAME - The country the user resides in. Necessary information for keyCertificate generation.
  STATE_NAME - The state the user resides in. Necessary information for keyCertificate generation.
  CITY_NAME - The city the user resides in. Necessary information for keyCertificate generation.
  ORG_NAME - The organization the user works for. Necessary information for keyCertificate generation.
  COMPANY_NAME - The company the user works for. Necessary information for keyCertificate generation.
  YOUR_NAME - The name of the user. Necessary information for keyCertificate generation.
  EMAIL_NAME - The user's email. Necessary information for keyCertificate generation.
EndOfMessage
    exit 1
fi
# Key type from argv, defaulting to "all"; gencmd is filled in later by genKey.
keyType="${1:-all}"
gencmd=""
#If the argument passed for this script does not equal one of the encryption keyTypes, send error code and exit.
#BY DEFAULT THE keyType WILL BE SET TO all
if [[ -n $keyType ]] && [[ $keyType != "ecdsa256" ]] && [[ $keyType != "ecdsa384" ]] && [[ $keyType != "rsa" ]] && [[ $keyType != "all" ]]; then
    echo "Error: specified encryption keyType '$keyType' is not supported."
    exit 2
fi
#============================FUNCTIONS=================================
# chk <exitCode> <taskDescription> [continue]
# Report a failed task and exit with its code; pass 'continue' as the third
# argument to log the error without exiting. A zero exit code is a no-op.
chk() {
    local rc=$1
    local what=$2
    local onError=$3   # 'continue' => do not exit on error
    [[ $rc == 0 ]] && return
    echo "Error: exit code $rc from: $what"
    [[ $onError == 'continue' ]] || exit $rc
}
# Abort with exit code 2 if the script is being run as root; key material
# should be owned by a normal user.
ensureWeAreUser() {
    [[ $(whoami) != 'root' ]] && return
    echo "Error: must be normal user to run ${0##*/}"
    exit 2
}
# Generate key pairs for every supported type by mutating the global $keyType
# and invoking genKey once per type.
function allKeys() {
    for i in "rsa" "ecdsa256" "ecdsa384"; do
        keyType=$i
        genKey
    done
}
#This function will create a private key that is needed to create a private keystore. Encryption keyType passed will decide which command to run for private key creation
function genKey() {
local privateKey=""${keyType}"private-key.pem"
local keyCert=""${keyType}"Cert.crt"
#Check if the folder is already created for the keyType (In case of multiple runs)
mkdir -p "${keyType}"Key && pushd "${keyType}"Key >/dev/null || return
#Generate a private RSA key.
if [[ $keyType == "rsa" ]]; then
if [ "$(uname)" == "Darwin" ]; then
echo "Using macOS, will generate private key and certificate simultaneously."
gencmd="openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout "$privateKey" -out "$keyCert""
else
echo -e "Generating an "${keyType}" private key."
openssl genrsa -out "${keyType}"private-key.pem 2048 >/dev/null 2>&1
chk $? 'Generating a rsa private key.'
fi
keyCertGenerator
#Generate a private ecdsa (256 or 384) key.
elif [[ $keyType == "ecdsa256" ]] || [[ $keyType == "ecdsa384" ]]; then
echo -e "Generating an "${keyType}" private key."
local var2=$(echo $keyType | cut -f2 -da)
if [ "$(uname)" == "Darwin" ]; then
echo "Using macOS, will generate private key and certificate simultaneously."
else
openssl ecparam -genkey -name secp"${var2}"r1 -out "${keyType}"private-key.pem >/dev/null 2>&1
chk $? 'Generating an ecdsa private key.'
fi
keyCertGenerator
fi
}
# Create the self-signed certificate for the current key type, feeding the
# Distinguished Name fields from environment variables into openssl's prompts.
# Relies on genKey's locals (privateKey, keyCert, var2, keyType) via dynamic
# scoping, and on infoKeyCert having populated the *_NAME variables.
function keyCertGenerator() {
    if [[ -f "${keyType}"private-key.pem ]]; then
        if [ "$(uname)" == "Darwin" ]; then
            # NOTE(review): on macOS a pre-existing key is deleted so the
            # combined req invocation below can regenerate it — confirm intent.
            rm "${keyType}"private-key.pem
        else
            echo -e ""${keyType}" private key creation: Successful"
            gencmd="openssl req -x509 -key "$privateKey" -days 3650 -out "$keyCert""
        fi
    fi
    #Generate a private key and self-signed certificate.
    #You should have these environment variables set. If they aren't you will be prompted to enter values.
    #!/usr/bin/env bash
    if [ "$(uname)" == "Darwin" ]; then
        if [[ $keyType == "ecdsa256" ]] || [[ $keyType == "ecdsa384" ]]; then
            # Pipe the DN answers into openssl in prompt order
            # (country, state, city, org, unit, common name, email).
            (
                echo "$COUNTRY_NAME"
                echo "$STATE_NAME"
                echo "$CITY_NAME"
                echo "$COMPANY_NAME"
                echo "$ORG_NAME"
                echo "$YOUR_NAME"
                echo "$EMAIL_NAME"
            ) | openssl req -x509 -nodes -days 3650 -newkey ec:<(openssl genpkey -genparam -algorithm ec -pkeyopt ec_paramgen_curve:P-"${var2}") -keyout "$privateKey" -out "$keyCert" >/dev/null 2>&1
            chk $? 'generating ec certificate'
            if [[ -f "$privateKey" ]]; then
                openssl ec -in ecdsa"${var2}"private-key.pem -out ecdsa"${var2}"private-key.pem >/dev/null 2>&1
                chk $? 'decrypting ec private key for macOS'
            else
                echo "No EC private key found"
            fi
        fi
    fi
    # Non-Darwin platforms (any key type) and Darwin RSA both run $gencmd here.
    if [[ $keyType == "rsa" ]] || [ "$(uname)" != "Darwin" ]; then
        (
            echo "$COUNTRY_NAME"
            echo "$STATE_NAME"
            echo "$CITY_NAME"
            echo "$COMPANY_NAME"
            echo "$ORG_NAME"
            echo "$YOUR_NAME"
            echo "$EMAIL_NAME"
        ) | $gencmd >/dev/null 2>&1
        chk $? 'generating rsa certificate'
        if [ "$(uname)" == "Darwin" ] && [[ $keyType == "rsa" ]] && [[ -f "$privateKey" ]]; then
            openssl rsa -in "$privateKey" -out "$privateKey" >/dev/null 2>&1
            chk $? 'decrypting rsa private key for macOS'
        fi
    fi
    # Only proceed to the public key step when both artifacts exist.
    if [[ -f $keyCert ]] && [[ -f "$privateKey" ]]; then
        echo -e ""${keyType}" Private Key and "${keyType}"Key Certificate creation: Successful"
        genPublicKey
        popd >/dev/null
    else
        echo ""${keyType}" Private Key and "${keyType}"Key Certificate not found"
        exit 2
    fi
}
function genPublicKey() {
    # This function is ran after the private key and owner certificate has been created. This function will create a public key to correspond with
    # the owner private key/certificate. Generate a public key from the certificate file
    openssl x509 -pubkey -noout -in $keyCert >"${keyType}"pub-key.pem
    chk $? 'Creating public key...'
    echo "Generating "${keyType}" public key..."
    if [[ -f "${keyType}"pub-key.pem ]]; then
        echo -e ""${keyType}" public key creation: Successful"
        # Move the public key up out of the per-type working directory.
        mv "${keyType}"pub-key.pem ..
    else
        echo -e ""${keyType}" public key creation: Unsuccessful"
        exit 2
    fi
}
function combineKeys() {
    #This function will combine all private keys and certificates into one tarball, then will concatenate all public keys into one
    # Only runs when all three per-type public keys exist (i.e. keyType=all).
    if [[ -f "rsapub-key.pem" && -f "ecdsa256pub-key.pem" && -f "ecdsa384pub-key.pem" ]]; then
        #Combine all the public keys into one
        echo "Concatenating Public Key files..."
        cat ecdsa256pub-key.pem rsapub-key.pem ecdsa384pub-key.pem >owner-public-key.pem
        chk $? 'Concatenating Public Key files...'
        rm -- ecdsa*.pem && rm rsapub*
        #Tar all keys and certs
        tar -czf owner-keys.tar.gz ecdsa256Key ecdsa384Key rsaKey
        chk $? 'Saving all key pairs in a tarball...'
        #removing all files/directories except the ones we need
        rm -rf "rsaKey" "ecdsa256Key" "ecdsa384Key"
    fi
}
function infoKeyCert() {
    #If varaibles are not set, prompt this openssl certificate paragraph
    if [[ -z $COUNTRY_NAME ]] || [[ -z $STATE_NAME ]] || [[ -z $CITY_NAME ]] || [[ -z $ORG_NAME ]] || [[ -z $COMPANY_NAME ]] || [[ -z $YOUR_NAME ]] || [[ -z $EMAIL_NAME ]]; then
        printf "You have to enter information in order to generate a custom self signed certificate as a part of your key pair for SDO Owner Attestation. What you are about to enter is what is called a Distinguished Name or a DN. There are quite a few fields but you can leave some blank. For some fields there will be a default value, If you enter '.', the field will be left blank." && echo
    fi
    #while variables are not set, prompt for whichever variable is not set
    # Loops until every DN field has a value; one field is read per iteration.
    while [[ -z $COUNTRY_NAME ]] || [[ -z $STATE_NAME ]] || [[ -z $CITY_NAME ]] || [[ -z $ORG_NAME ]] || [[ -z $COMPANY_NAME ]] || [[ -z $YOUR_NAME ]] || [[ -z $EMAIL_NAME ]]; do
        if [[ -z $COUNTRY_NAME ]]; then
            echo "Country Name (2 letter code) [AU]:"
            read COUNTRY_NAME && echo
        elif [[ -z $STATE_NAME ]]; then
            echo "State or Province Name (full name) [Some-State]:"
            read STATE_NAME && echo
        elif [[ -z $CITY_NAME ]]; then
            echo "Locality Name (eg, city) []:"
            read CITY_NAME && echo
        elif [[ -z $COMPANY_NAME ]]; then
            echo "Organization Name (eg, company) [Internet Widgits Pty Ltd]:"
            read COMPANY_NAME && echo
        elif [[ -z $ORG_NAME ]]; then
            echo "Organizational Unit Name (eg, section) []:"
            read ORG_NAME && echo
        elif [[ -z $YOUR_NAME ]]; then
            echo "Common Name (e.g. server FQDN or YOUR name) []:"
            read YOUR_NAME && echo
        elif [[ -z $EMAIL_NAME ]]; then
            echo "Email Address []:"
            read EMAIL_NAME && echo
        fi
    done
}
#============================MAIN CODE=================================
# Collect DN fields, then generate either every key type (plus the combined
# tarball/public key) or just the single requested type.
infoKeyCert
if [[ -n "$keyType" ]] && [[ "$keyType" == "all" ]]; then
    allKeys
    combineKeys
else
    genKey
fi
echo "Owner Private Key Tarfile and Owner Public Key have been created"
|
# Termux package recipe for SDL2 (sourced as shell by the Termux build system).
TERMUX_PKG_HOMEPAGE=https://www.libsdl.org
TERMUX_PKG_DESCRIPTION="A library for portable low-level access to a video framebuffer, audio output, mouse, and keyboard (version 2)"
TERMUX_PKG_LICENSE="MIT"
TERMUX_PKG_LICENSE_FILE="COPYING.txt"
TERMUX_PKG_MAINTAINER="Leonid Pliushch <leonid.pliushch@gmail.com>"
TERMUX_PKG_VERSION=2.0.10
TERMUX_PKG_REVISION=24
TERMUX_PKG_SRCURL=https://www.libsdl.org/release/SDL2-${TERMUX_PKG_VERSION}.tar.gz
TERMUX_PKG_SHA256=b4656c13a1f0d0023ae2f4a9cf08ec92fffb464e0f24238337784159b8b91d57
TERMUX_PKG_DEPENDS="libandroid-glob, libflac, libogg, libsndfile, libvorbis, libx11, libxau, libxcb, libxcursor, libxdmcp, libxext, libxfixes, libxi, libxinerama, libxrandr, libxrender, libxss, libxxf86vm, pulseaudio"
TERMUX_PKG_CONFLICTS="libsdl2"
TERMUX_PKG_REPLACES="libsdl2"
# configure flags: X11 video/input backends enabled; desktop/GL/audio backends
# not applicable on this target are disabled.
TERMUX_PKG_EXTRA_CONFIGURE_ARGS="
--x-includes=${TERMUX_PREFIX}/include
--x-libraries=${TERMUX_PREFIX}/lib
--disable-assembly
--disable-mmx
--disable-3dnow
--disable-oss
--disable-alsa
--disable-esd
--disable-video-wayland
--disable-video-mir
--disable-video-rpi
--enable-video-x11-xcursor
--enable-video-x11-xinerama
--enable-video-x11-xinput
--enable-video-x11-xrandr
--enable-video-x11-scrnsaver
--enable-video-x11-xshape
--enable-video-x11-vm
--disable-video-vivante
--disable-video-cocoa
--disable-render-metal
--disable-video-opengl
--disable-video-opengles
--disable-video-opengles2
--disable-video-vulkan
--disable-libudev
--disable-dbus
--disable-ime
--disable-ibus
--disable-fcitx
--disable-input-tslib
--enable-pthreads
--disable-pthread-sem
--disable-directx
--disable-sdl-dlopen
--disable-render-d3d
"
|
import * as crypto from 'crypto';
import { Transform, Stream, Writable } from 'stream';
const algorithm = 'aes-256-ctr';
let password: Buffer;
/** Throw unless an encryption password has been configured via setPassword(). */
function checkPassword() {
  if (password) {
    return;
  }
  throw new Error('You should set password first.');
}
/** Generate a random key suitable for setPassword(). */
export function generatePassword() {
  // aes-256 requires a 32-byte key.
  const KEY_LENGTH = 32;
  return crypto.randomBytes(KEY_LENGTH);
}
/**
* set password for encrypting & decryption
* @param p hex string
*/
/**
 * Set the key used for encryption & decryption.
 * @param p a 32-byte Buffer (e.g. from generatePassword())
 */
export function setPassword(p: Buffer) {
  const isValidKey = Buffer.isBuffer(p) && p.length === 32;
  if (!isValidKey) {
    throw new Error('password should be 32 length buffer')
  }
  password = p;
}
/**
 * Pipe `input` through an AES-256-CTR cipher, prepending the random 16-byte IV
 * to the first emitted chunk so the decrypting side can recover it.
 * Uses the module-level `password` set via setPassword(); throws if unset.
 */
export function createEncryptStream(input: Stream): Stream {
  checkPassword();
  // Capture the key now so a later setPassword() call cannot affect this stream.
  let passwordLocal = password;
  const iv = crypto.randomBytes(16);
  const encryptStream = crypto.createCipheriv(algorithm, passwordLocal, iv);
  // Tracks whether the IV has already been prepended.
  let inited: boolean = false;
  return input.pipe(encryptStream).pipe(new Transform({
    transform(chunk, encoding, callback) {
      if (!inited) {
        inited = true;
        // First ciphertext chunk: emit IV + data together.
        this.push(Buffer.concat([iv, chunk]));
      } else {
        this.push(chunk);
      }
      callback();
    }
  }));
}
/**
 * Create a Transform consuming ciphertext produced by createEncryptStream():
 * the first 16 bytes are the IV, the rest is AES-256-CTR data. Plaintext is
 * written to `output`. Uses the module-level `password` set via setPassword();
 * throws if unset.
 *
 * NOTE(review): assumes the first chunk contains the full 16-byte IV — confirm
 * upstream chunking guarantees this.
 */
export function createDecryptStream(output: Writable): Transform {
  checkPassword();
  // Capture the key now so a later setPassword() call cannot affect this stream.
  let passwordLocal = password;
  // Fix: chunk.slice() yields a Buffer; this was mistyped as string.
  let iv: Buffer;
  return new Transform({
    transform(chunk, encoding, callback) {
      if (!iv) {
        iv = chunk.slice(0, 16);
        const decryptStream = crypto.createDecipheriv(algorithm, passwordLocal, iv);
        // Route this transform's readable side (ciphertext minus the IV)
        // through the decipher into the caller-supplied output.
        this.pipe(decryptStream).pipe(output);
        this.push(chunk.slice(16));
      } else {
        this.push(chunk);
      }
      callback();
    }
  })
}
|
<reponame>lgoldstein/communitychest
package com.vmware.spring.workshop.model;
import java.beans.BeanInfo;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.PropertyDescriptor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Map;
import java.util.TreeMap;
/**
* @author lgoldstein
*/
public final class ModelUtils {
private ModelUtils() {
// no instance
}
public static final Map<String,PropertyDescriptor> createPropertiesMap (final Class<?> clazz) throws IntrospectionException {
final BeanInfo beanInfo=Introspector.getBeanInfo(clazz);
final PropertyDescriptor[] props=beanInfo.getPropertyDescriptors();
final Map<String,PropertyDescriptor> result=new TreeMap<String, PropertyDescriptor>(String.CASE_INSENSITIVE_ORDER);
for (final PropertyDescriptor desc : props)
{
final String name=desc.getName();
final Method gMethod=desc.getReadMethod(), sMethod=desc.getWriteMethod();
if ((gMethod == null) || (sMethod == null))
continue; // skip non read/write properties
final int gMods=gMethod.getModifiers(), sMods=sMethod.getModifiers();
if ((!Modifier.isPublic(gMods)) || (!Modifier.isPublic(sMods))
|| Modifier.isStatic(gMods) || Modifier.isStatic(sMods))
continue; // skip static or non public methods
final PropertyDescriptor prev=result.put(name, desc);
if (prev != null)
throw new IntrospectionException("Multiple properties named " + name);
}
return result;
}
}
|
// Return a new n x n matrix equal to `arr` rotated 90 degrees clockwise.
// The input matrix is not modified.
function rotate90(arr) {
  const size = arr.length;
  // Element (row, col) of the rotated matrix comes from (size-col-1, row).
  return Array.from({ length: size }, (_, row) =>
    Array.from({ length: size }, (_, col) => arr[size - col - 1][row])
  );
}
// Demo: rotate a 3x3 matrix clockwise and print the result.
let matrix = [[1, 2, 3], [4, 5, 6], [7, 8, 9]];
let result = rotate90(matrix);
console.log(result);
<filename>service/src/test/java/dk/kvalitetsit/hjemmebehandling/controller/QuestionnaireResponseControllerTest.java
package dk.kvalitetsit.hjemmebehandling.controller;
import dk.kvalitetsit.hjemmebehandling.api.DtoMapper;
import dk.kvalitetsit.hjemmebehandling.api.PartialUpdateQuestionnaireResponseRequest;
import dk.kvalitetsit.hjemmebehandling.api.QuestionnaireResponseDto;
import dk.kvalitetsit.hjemmebehandling.constants.ExaminationStatus;
import dk.kvalitetsit.hjemmebehandling.constants.errors.ErrorDetails;
import dk.kvalitetsit.hjemmebehandling.controller.exception.BadRequestException;
import dk.kvalitetsit.hjemmebehandling.controller.exception.ForbiddenException;
import dk.kvalitetsit.hjemmebehandling.controller.exception.InternalServerErrorException;
import dk.kvalitetsit.hjemmebehandling.model.QuestionnaireResponseModel;
import dk.kvalitetsit.hjemmebehandling.service.AuditLoggingService;
import dk.kvalitetsit.hjemmebehandling.service.QuestionnaireResponseService;
import dk.kvalitetsit.hjemmebehandling.service.exception.AccessValidationException;
import dk.kvalitetsit.hjemmebehandling.service.exception.ErrorKind;
import dk.kvalitetsit.hjemmebehandling.service.exception.ServiceException;
import dk.kvalitetsit.hjemmebehandling.types.PageDetails;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import java.util.List;
import static org.junit.jupiter.api.Assertions.*;
@ExtendWith(MockitoExtension.class)
public class QuestionnaireResponseControllerTest {
@InjectMocks
private QuestionnaireResponseController subject;
@Mock
private QuestionnaireResponseService questionnaireResponseService;
@Mock
private AuditLoggingService auditLoggingService;
@Mock
private DtoMapper dtoMapper;
    // A null carePlanId must be rejected as a bad request before any service call.
    @Test
    public void getQuestionnaireResponsesByCpr_cprParameterMissing_400() {
        // Arrange
        String carePlanId = null;
        List<String> questionnaireIds = List.of("questionnaire-1");
        // Act
        // Assert
        assertThrows(BadRequestException.class, () -> subject.getQuestionnaireResponsesByCarePlanId(carePlanId, questionnaireIds,1,5));
    }
    // A null questionnaireIds list must be rejected as a bad request.
    @Test
    public void getQuestionnaireResponsesByCpr_questionnaireIdsParameterMissing_400() {
        // Arrange
        String carePlanId = "careplan-1";
        List<String> questionnaireIds = null;
        // Act
        // Assert
        assertThrows(BadRequestException.class, () -> subject.getQuestionnaireResponsesByCarePlanId(carePlanId, questionnaireIds,1,5));
    }
    // Service results must be mapped to DTOs and returned with 200 OK.
    @Test
    public void getQuestionnaireResponsesByCpr_responsesPresent_200() throws Exception {
        // Arrange
        String carePlanId = "careplan-1";
        List<String> questionnaireIds = List.of("questionnaire-1");
        QuestionnaireResponseModel responseModel1 = new QuestionnaireResponseModel();
        QuestionnaireResponseModel responseModel2 = new QuestionnaireResponseModel();
        QuestionnaireResponseDto responseDto1 = new QuestionnaireResponseDto();
        QuestionnaireResponseDto responseDto2 = new QuestionnaireResponseDto();
        PageDetails pageDetails = new PageDetails(1,5);
        Mockito.when(questionnaireResponseService.getQuestionnaireResponses(carePlanId, questionnaireIds,pageDetails)).thenReturn(List.of(responseModel1, responseModel2));
        Mockito.when(dtoMapper.mapQuestionnaireResponseModel(responseModel1)).thenReturn(responseDto1);
        Mockito.when(dtoMapper.mapQuestionnaireResponseModel(responseModel2)).thenReturn(responseDto2);
        // Act
        ResponseEntity<List<QuestionnaireResponseDto>> result = subject.getQuestionnaireResponsesByCarePlanId(carePlanId, questionnaireIds,1,5);
        // Assert
        assertEquals(HttpStatus.OK, result.getStatusCode());
        assertEquals(2, result.getBody().size());
        assertTrue(result.getBody().contains(responseDto1));
        assertTrue(result.getBody().contains(responseDto2));
    }
@Test
public void getQuestionnaireResponsesByCpr_responsesMissing_200() throws Exception {
// Arrange
String carePlanId = "careplan-1";
List<String> questionnaireIds = List.of("questionnaire-1");
PageDetails pageDetails = new PageDetails(1,5);
Mockito.when(questionnaireResponseService.getQuestionnaireResponses(carePlanId, questionnaireIds,pageDetails)).thenReturn(List.of());
// Act
ResponseEntity<List<QuestionnaireResponseDto>> result = subject.getQuestionnaireResponsesByCarePlanId(carePlanId, questionnaireIds,1,5);
// Assert
assertEquals(HttpStatus.OK, result.getStatusCode());
assertTrue(result.getBody().isEmpty());
}
@Test
public void getQuestionnaireResponsesByCpr_accessViolation_403() throws Exception {
// Arrange
String carePlanId = "careplan-1";
List<String> questionnaireIds = List.of("questionnaire-1");
PageDetails pageDetails = new PageDetails(1,5);
Mockito.when(questionnaireResponseService.getQuestionnaireResponses(carePlanId, questionnaireIds,pageDetails)).thenThrow(AccessValidationException.class);
// Act
// Assert
assertThrows(ForbiddenException.class, () -> subject.getQuestionnaireResponsesByCarePlanId(carePlanId, questionnaireIds,1,5));
}
@Test
public void getQuestionnaireResponsesByCpr_failureToFetch_500() throws Exception {
// Arrange
String carePlanId = "careplan-1";
List<String> questionnaireIds = List.of("questionnaire-1");
PageDetails pageDetails = new PageDetails(1,5);
Mockito.when(questionnaireResponseService.getQuestionnaireResponses(carePlanId, questionnaireIds,pageDetails)).thenThrow(new ServiceException("error", ErrorKind.INTERNAL_SERVER_ERROR, ErrorDetails.INTERNAL_SERVER_ERROR));
// Act
// Assert
assertThrows(InternalServerErrorException.class, () -> subject.getQuestionnaireResponsesByCarePlanId(carePlanId, questionnaireIds,1,5));
}
@Test
public void getQuestionnaireResponsesByStatus_parameterMissing_400() {
// Arrange
List<ExaminationStatus> statuses = null;
PageDetails pageDetails = null;
// Act
// Assert
assertThrows(BadRequestException.class, () -> subject.getQuestionnaireResponsesByStatus(statuses, 1, 10));
}
@Test
public void getQuestionnaireResponsesByStatus_responsesPresent_200() throws Exception {
// Arrange
List<ExaminationStatus> statuses = List.of(ExaminationStatus.NOT_EXAMINED);
PageDetails pageDetails = new PageDetails(1, 10);
QuestionnaireResponseModel responseModel1 = new QuestionnaireResponseModel();
QuestionnaireResponseModel responseModel2 = new QuestionnaireResponseModel();
QuestionnaireResponseDto responseDto1 = new QuestionnaireResponseDto();
QuestionnaireResponseDto responseDto2 = new QuestionnaireResponseDto();
Mockito.when(questionnaireResponseService.getQuestionnaireResponsesByStatus(statuses, pageDetails)).thenReturn(List.of(responseModel1, responseModel2));
Mockito.when(dtoMapper.mapQuestionnaireResponseModel(responseModel1)).thenReturn(responseDto1);
Mockito.when(dtoMapper.mapQuestionnaireResponseModel(responseModel2)).thenReturn(responseDto2);
// Act
ResponseEntity<List<QuestionnaireResponseDto>> result = subject.getQuestionnaireResponsesByStatus(statuses, pageDetails.getPageNumber(), pageDetails.getPageSize());
// Assert
assertEquals(HttpStatus.OK, result.getStatusCode());
assertEquals(2, result.getBody().size());
assertTrue(result.getBody().contains(responseDto1));
assertTrue(result.getBody().contains(responseDto2));
}
@Test
public void getQuestionnaireResponsesByStatus_responsesMissing_200() throws Exception {
// Arrange
List<ExaminationStatus> statuses = List.of(ExaminationStatus.UNDER_EXAMINATION);
PageDetails pageDetails = new PageDetails(1, 10);
Mockito.when(questionnaireResponseService.getQuestionnaireResponsesByStatus(statuses, pageDetails)).thenReturn(List.of());
// Act
ResponseEntity<List<QuestionnaireResponseDto>> result = subject.getQuestionnaireResponsesByStatus(statuses, pageDetails.getPageNumber(), pageDetails.getPageSize());
// Assert
assertEquals(HttpStatus.OK, result.getStatusCode());
assertTrue(result.getBody().isEmpty());
}
@Test
public void getQuestionnaireResponsesByStatus_failureToFetch_500() throws Exception {
// Arrange
List<ExaminationStatus> statuses = List.of(ExaminationStatus.UNDER_EXAMINATION, ExaminationStatus.EXAMINED);
PageDetails pageDetails = new PageDetails(1, 10);
Mockito.when(questionnaireResponseService.getQuestionnaireResponsesByStatus(statuses, pageDetails)).thenThrow(new ServiceException("error", ErrorKind.INTERNAL_SERVER_ERROR, ErrorDetails.INTERNAL_SERVER_ERROR));
// Act
// Assert
assertThrows(InternalServerErrorException.class, () -> subject.getQuestionnaireResponsesByStatus(statuses, pageDetails.getPageNumber(), pageDetails.getPageSize()));
}
@Test
public void patchQuestionnaireResponse_malformedRequest_400() {
// Arrange
String id = "questionnaireresponse-1";
PartialUpdateQuestionnaireResponseRequest request = new PartialUpdateQuestionnaireResponseRequest();
// Act
// Assert
assertThrows(BadRequestException.class, () -> subject.patchQuestionnaireResponse(id, request));
}
@Test
public void patchQuestionnaireResponse_accessViolation_403() throws Exception {
// Arrange
String id = "questionnaireresponse-1";
PartialUpdateQuestionnaireResponseRequest request = new PartialUpdateQuestionnaireResponseRequest();
request.setExaminationStatus(ExaminationStatus.UNDER_EXAMINATION);
Mockito.doThrow(AccessValidationException.class).when(questionnaireResponseService).updateExaminationStatus(id, request.getExaminationStatus());
// Act
// Assert
assertThrows(ForbiddenException.class, () -> subject.patchQuestionnaireResponse(id, request));
}
@Test
public void patchQuestionnaireResponse_failureToUpdate_500() throws Exception {
// Arrange
String id = "questionnaireresponse-1";
PartialUpdateQuestionnaireResponseRequest request = new PartialUpdateQuestionnaireResponseRequest();
request.setExaminationStatus(ExaminationStatus.UNDER_EXAMINATION);
Mockito.doThrow(new ServiceException("error", ErrorKind.INTERNAL_SERVER_ERROR, ErrorDetails.INTERNAL_SERVER_ERROR)).when(questionnaireResponseService).updateExaminationStatus(id, request.getExaminationStatus());
// Act
// Assert
assertThrows(InternalServerErrorException.class, () -> subject.patchQuestionnaireResponse(id, request));
}
@Test
public void patchQuestionnaireResponse_success_200() throws Exception {
// Arrange
String id = "questionnaireresponse-1";
PartialUpdateQuestionnaireResponseRequest request = new PartialUpdateQuestionnaireResponseRequest();
request.setExaminationStatus(ExaminationStatus.UNDER_EXAMINATION);
//Mockito.doNothing().when(questionnaireResponseService).updateExaminationStatus(id, request.getExaminationStatus());
Mockito.when(questionnaireResponseService.updateExaminationStatus(id, request.getExaminationStatus())).thenReturn(new QuestionnaireResponseModel());
// Act
ResponseEntity<Void> result = subject.patchQuestionnaireResponse(id, request);
// Assert
assertEquals(HttpStatus.OK, result.getStatusCode());
}
} |
-- Second-highest distinct salary; yields NULL when fewer than two
-- distinct salaries exist (MAX over an empty row set is NULL).
SELECT MAX(e.salary) AS second_highest_salary
FROM employee AS e
WHERE e.salary NOT IN (SELECT MAX(salary) FROM employee)
#!/bin/bash
# Install hadoop
# Installation relies on finding JAVA_HOME@/usr/java/latest as a prerequisite
# Overridable install root (defaults to /usr).
INSTALL_DIR=${INSTALL_DIR:-/usr}
USER=`whoami`
# Hadoop version is overridable via HADOOP_VER; defaults to 3.2.2.
HADOOP=hadoop-${HADOOP_VER:-3.2.2}
HADOOP_DIR=${INSTALL_DIR}/$HADOOP
# Generated env file sourced by later steps (JAVA_HOME, PATH, CLASSPATH).
HADOOP_ENV=$HADOOP_DIR/hadoop.env
# Ensure /usr/java/latest points at a usable JDK.
# If JAVA_HOME is set, link it; otherwise install OpenJDK 8 and link that.
install_prereqs() {
# /usr/java/latest is a symlink to a JDK *directory*, so test with -e/-L:
# the original -f only matches regular files and never removed the old link.
if [[ -e /usr/java/latest || -L /usr/java/latest ]]; then
echo "/usr/java/latest found"
sudo rm /usr/java/latest
fi
if [[ ! -z $JAVA_HOME ]]; then
sudo mkdir -p /usr/java
sudo ln -s $JAVA_HOME /usr/java/latest
else
# -y keeps apt non-interactive so unattended/CI runs do not hang on a prompt.
sudo apt install -y openjdk-8-jre-headless
sudo ln -s /usr/lib/jvm/java-1.8.0-openjdk-amd64/ /usr/java/latest
fi
echo "install_prereqs successful"
}
# retry logic from: https://docs.microsoft.com/en-us/azure/hdinsight/hdinsight-hadoop-script-actions-linux
MAXATTEMPTS=3
# Run a command, retrying up to MAXATTEMPTS times with a fixed delay.
# Returns 0 on the first success, 1 once all attempts are exhausted.
retry() {
local -r CMD="$@"
local -i ATTEMPTNUM=1
local -i RETRYINTERVAL=2
while ! $CMD; do
if (( ATTEMPTNUM >= MAXATTEMPTS )); then
echo "Attempt $ATTEMPTNUM failed. no more attempts left."
return 1
fi
echo "Attempt $ATTEMPTNUM failed! Retrying in $RETRYINTERVAL seconds..."
sleep $RETRYINTERVAL
ATTEMPTNUM+=1
done
}
# Fetch the GCS connector jar into Hadoop's common libs, but only for
# gcs-flavoured installs (INSTALL_TYPE=gcs); a no-op otherwise.
download_gcs_connector() {
if [[ $INSTALL_TYPE == gcs ]]; then
retry wget -nv --trust-server-names https://storage.googleapis.com/hadoop-lib/gcs/gcs-connector-hadoop3-latest.jar
mv gcs-connector-hadoop3-latest.jar ${HADOOP_DIR}/share/hadoop/common
echo "download_gcs_connector successful"
fi
}
# Download and unpack the Hadoop tarball into $INSTALL_DIR, then pull in
# the optional GCS connector.
# NOTE(review): www-eu.apache.org/dist only carries current releases;
# older versions move to archive.apache.org - confirm the URL still works.
download_hadoop() {
retry wget -nv --trust-server-names http://www-eu.apache.org/dist/hadoop/common/$HADOOP/$HADOOP.tar.gz
tar -xzf $HADOOP.tar.gz --directory $INSTALL_DIR &&
download_gcs_connector &&
echo "download_hadoop successful"
}
# Set up sshd and a passphrase-less key pair so Hadoop's start scripts can
# ssh to localhost without prompting.
configure_passphraseless_ssh() {
sudo apt update; sudo apt -y install openssh-server
# Minimal sshd config; written to cwd first, then moved into place below.
cat > sshd_config << EOF
SyslogFacility AUTHPRIV
PermitRootLogin yes
AuthorizedKeysFile .ssh/authorized_keys
PasswordAuthentication yes
ChallengeResponseAuthentication no
UsePAM yes
UseDNS no
X11Forwarding no
PrintMotd no
EOF
sudo mv sshd_config /etc/ssh/sshd_config &&
sudo systemctl restart ssh &&
ssh-keygen -t rsa -b 4096 -N '' -f ~/.ssh/id_rsa &&
cat ~/.ssh/id_rsa.pub | tee -a ~/.ssh/authorized_keys &&
chmod 600 ~/.ssh/authorized_keys &&
chmod 700 ~/.ssh &&
sudo chmod -c 0755 ~/ &&
echo "configure_passphraseless_ssh successful"
}
# Format the namenode, start HDFS, then source cloud credentials when the
# install targets GCS or Azure storage backends.
configure_hadoop() {
configure_passphraseless_ssh &&
$HADOOP_DIR/bin/hdfs namenode -format &&
$HADOOP_DIR/sbin/start-dfs.sh &&
if [[ $INSTALL_TYPE == gcs ]]; then
export GOOGLE_APPLICATION_CREDENTIALS=$GITHUB_WORKSPACE/.github/resources/gcs/GCS.json
source $GITHUB_WORKSPACE/.github/resources/gcs/gcs_cred.sh
fi
if [[ $INSTALL_TYPE == azure ]]; then
source $GITHUB_WORKSPACE/.github/resources/azure/azure_cred.sh
fi
echo "configure_hadoop successful"
}
# Persist the environment needed by later steps (JAVA_HOME, PATH,
# LD_LIBRARY_PATH, CLASSPATH including Azure jars) into $HADOOP_ENV.
setup_paths() {
echo "export JAVA_HOME=/usr/java/latest" > $HADOOP_ENV
echo "export PATH=$HADOOP_DIR/bin:$PATH" >> $HADOOP_ENV
echo "export LD_LIBRARY_PATH=$HADOOP_DIR/lib:$LD_LIBRARY_PATH" >> $HADOOP_ENV
HADOOP_CP=$($HADOOP_DIR/bin/hadoop classpath --glob)
# Quote the -name pattern: unquoted, the shell glob-expands *azure*jar
# against the current directory before find ever sees it.
AZURE_JARS=$(find $HADOOP_DIR/share/hadoop/tools/lib -name '*azure*jar' | tr '\n' ':')
echo "export CLASSPATH=$AZURE_JARS$HADOOP_CP" >> $HADOOP_ENV
echo "setup_paths successful"
}
# Top-level driver: install prerequisites, download/configure Hadoop on
# first run (the generated $HADOOP_ENV acts as the "already installed" marker),
# then start and configure the cluster.
install_hadoop() {
install_prereqs
if [[ ! -f $HADOOP_ENV ]]; then
download_hadoop &&
setup_paths &&
cp -fr $GITHUB_WORKSPACE/.github/resources/hadoop/* $HADOOP_DIR/etc/hadoop &&
mkdir -p $HADOOP_DIR/logs &&
export HADOOP_ROOT_LOGGER=ERROR,console
fi
source $HADOOP_ENV &&
configure_hadoop &&
echo "Install Hadoop SUCCESSFUL"
}
# Log effective configuration for CI debugging.
echo "INSTALL_DIR=$INSTALL_DIR"
echo "INSTALL_TYPE=$INSTALL_TYPE"
# resources r.tar file encrypted using "gpg --symmetric --cipher-algo AES256 r.tar"
# $R_TAR holds the symmetric passphrase (CI secret); decrypt, unpack, install.
gpg --quiet --batch --yes --decrypt --passphrase="$R_TAR" --output $INSTALL_DIR/r.tar $GITHUB_WORKSPACE/.github/scripts/r.tar.gpg &&
tar xf $INSTALL_DIR/r.tar -C $GITHUB_WORKSPACE/.github &&
install_hadoop
from typing import List
def process_tokens(tokens: List[str]) -> List[str]:
    """Return ``tokens`` with a 5-bit "don't care" sequence and a 6-bit
    "op-code" sequence appended.

    The input list is left unmodified and a new list is returned.
    (The previous implementation appended via ``tokens += ...``, which
    mutated the caller's list as a side effect.)
    """
    return [*tokens, '00000', '000000']
# Test the function with an example
# Demonstration only: prints the token list with the two padding sequences.
input_tokens = ['10101', '11011']
output_tokens = process_tokens(input_tokens)
print(output_tokens) # Output: ['10101', '11011', '00000', '000000']
/*
* Copyright (c) 2004-2009, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.patient.action.patientchart;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementService;
import org.hisp.dhis.patientchart.PatientChart;
import org.hisp.dhis.patientchart.PatientChartService;
import com.opensymphony.xwork2.Action;
/**
 * Struts/XWork action that updates an existing PatientChart: title, type,
 * size, regression flag and the associated DataElement.
 *
 * @author <NAME>
 * @version $ UpdatePatientChartAction.java Sep 5, 2011 9:13:29 AM $
 *
 */
public class UpdatePatientChartAction
implements Action
{
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
private PatientChartService patientChartService;
public void setPatientChartService( PatientChartService patientChartService )
{
this.patientChartService = patientChartService;
}
private DataElementService dataElementService;
public void setDataElementService( DataElementService dataElementService )
{
this.dataElementService = dataElementService;
}
// -------------------------------------------------------------------------
// Input
// -------------------------------------------------------------------------
// Id of the PatientChart being updated.
private Integer id;
private String title;
private String type;
private String size;
private boolean regression;
// Output: populated in execute() from the chart's program for the redirect.
private Integer programId;
private Integer dataElementId;
// -------------------------------------------------------------------------
// Getters & setters
// -------------------------------------------------------------------------
public void setTitle( String title )
{
this.title = title;
}
public void setType( String type )
{
this.type = type;
}
public void setSize( String size )
{
this.size = size;
}
public void setRegression( boolean regression )
{
this.regression = regression;
}
public Integer getProgramId()
{
return programId;
}
public void setDataElementId( Integer dataElementId )
{
this.dataElementId = dataElementId;
}
public void setId( Integer id )
{
this.id = id;
}
// -------------------------------------------------------------------------
// Implementation Action
// -------------------------------------------------------------------------
@Override
public String execute()
throws Exception
{
// NOTE(review): no null check - a stale/invalid id would NPE here.
PatientChart patientChart = patientChartService.getPatientChart( id );
patientChart.setTitle( title );
patientChart.setType( type );
patientChart.setSize( size );
patientChart.setRegression( regression );
DataElement dataElement = dataElementService.getDataElement( dataElementId );
patientChart.setDataElement( dataElement );
programId = patientChart.getProgram().getId();
patientChartService.updatePatientChart( patientChart );
return SUCCESS;
}
}
|
def isFibonacci(n):
    """Return True if ``n`` is a Fibonacci number (0, 1, 1, 2, 3, 5, ...)."""
    prev, curr = 0, 1
    if n in (prev, curr):
        return True
    # Walk the sequence upward until it reaches or passes n.
    while curr < n:
        prev, curr = curr, prev + curr
    return curr == n
def handle_shell_command(cmd):
    """Mock shell-command dispatcher.

    Returns an ``(exit_code, output)`` tuple: canned responses for the two
    recognised unison commands, or exit code 1 with a diagnostic message
    for anything else.
    """
    if "unison -version" in cmd:
        return 0, "unison version 2.51.3"
    if "export UNISON=" in cmd:
        return 0, "[mock] Successfully ran unison command"
    return 1, f"Missing mock implementation for shell command: '{cmd}'"
# Test cases
# Demonstration only: exercises each branch of the dispatcher.
print(handle_shell_command("unison -version")) # Output: (0, "unison version 2.51.3")
print(handle_shell_command("export UNISON=abc")) # Output: (0, "[mock] Successfully ran unison command")
print(handle_shell_command("ls -l")) # Output: (1, "Missing mock implementation for shell command: 'ls -l'")
#!/bin/bash
# Script for generating self signed certificate (for local development).
KEY_FOLDER=../src/main/resources
# -f: don't error/noise on the first run when the keystore doesn't exist yet.
rm -f ${KEY_FOLDER}/vf-starter-self-signed.p12
echo ==========================================================
echo Generate Self Signed KeyStore
echo ==========================================================
echo
echo " - 1) Generating Self-Signed Certificate + KeyStore (PKCS12 format - better than JKS format)"
# 2048-bit RSA key, valid ~10 years, stored with the default dev password.
keytool -genkey -storetype PKCS12 \
-alias selfsigned_localhost_sslserver \
-keyalg RSA -keysize 2048 -validity 3650 \
-dname "CN=localhost, OU=Engineering, O=Video First, L=Belfast, S=Antrim, C=GB" \
-noprompt -keypass changeit -storepass changeit \
-keystore ${KEY_FOLDER}/vf-starter-self-signed.p12
|
#!/bin/bash
# Run dieharder test 208 (rgb_kstest_test) against generator 3 (mt19937)
# with a fixed seed (-S) for a reproducible run.
dieharder -d 208 -g 3 -S 3559857189
|
# Process tile ids listed in feature_tiles_0.sh, 3 at a time (--max-procs);
# each {} is replaced by the tile id. Skips tiles whose output .ply already
# exists (the && / || pair only selects the log message), then runs the
# laserchicken feature-computation script for the tile.
xargs --arg-file=/home/ubuntu/feature_scripts/feature_tiles_0.sh \
--max-procs=3 \
--replace \
--verbose \
/bin/sh -c "[ -f /data/local/eecolidar/rclone/tmp/ahn3_feature_10m/{}.ply ] && echo 'File {}.ply already exists' || echo 'Creating file {}.ply'; python /home/ubuntu/feature_scripts/computefea_wtargets_cell.py /data/local/eecolidar/modules/python/laserchicken/ /data/local/eecolidar/rclone/tmp/ahn3_256x256_2km_norm/{}.LAZ /data/local/eecolidar_webdav/01_Work/ALS/Netherlands/ahn3_targets_10m/{}_target.laz 5 /data/local/eecolidar/rclone/tmp/ahn3_feature_10m/{}.ply;"
<reponame>CMU-Light-Curtains/ConstraintGraph
#ifndef PY_HPP
#define PY_HPP
#include <pybind11/pybind11.h>
#include <pybind11/stl.h>
#include <pybind11/eigen.h>
#include <v1.h>
#include <v2.h>
namespace py = pybind11;
using namespace planner;
// Python bindings for the light-curtain planner: parameter structs, the
// location type, interpolators, and the V1/V2 planner implementations.
PYBIND11_MODULE(planner_py, m) {
// Camera intrinsics/extrinsics; all fields writable from Python.
py::class_<CameraParameters, std::shared_ptr<CameraParameters>>(m, "CameraParameters")
.def(py::init<>())
.def_readwrite("width", &CameraParameters::width)
.def_readwrite("height", &CameraParameters::height)
.def_readwrite("fps", &CameraParameters::fps)
.def_readwrite("cam_matrix", &CameraParameters::cam_matrix)
.def_readwrite("cam_to_laser", &CameraParameters::cam_to_laser)
;
// Laser beam/actuation limits.
py::class_<LaserParameters, std::shared_ptr<LaserParameters>>(m, "LaserParameters")
.def(py::init<>())
.def_readwrite("fov", &LaserParameters::fov)
.def_readwrite("thickness", &LaserParameters::thickness)
.def_readwrite("divergence", &LaserParameters::divergence)
.def_readwrite("max_omega", &LaserParameters::max_omega)
.def_readwrite("max_alpha", &LaserParameters::max_alpha)
;
// Read-only view of a planner graph node; note theta_las maps to C++ 'theta'.
py::class_<Location, std::shared_ptr<Location>>(m, "Location")
.def_readonly("x", &Location::x)
.def_readonly("z", &Location::z)
.def_readonly("r", &Location::r)
.def_readonly("theta_cam", &Location::theta_cam)
.def_readonly("theta_las", &Location::theta)
.def_readonly("ki", &Location::ki)
.def_readonly("kj", &Location::kj)
;
// Abstract base registered so the concrete interpolators below can be
// passed where an Interpolator& is expected.
py::class_<Interpolator, std::shared_ptr<Interpolator>>(m, "Interpolator");
py::class_<CartesianNNInterpolator, Interpolator, std::shared_ptr<CartesianNNInterpolator>>(m, "CartesianNNInterpolator")
.def(py::init<int, int, float, float, float, float> ())
;
py::class_<PolarIdentityInterpolator, Interpolator, std::shared_ptr<PolarIdentityInterpolator>>(m, "PolarIdentityInterpolator")
.def(py::init<int, int> ())
;
// V1 and V2 planners expose the same Python-facing API.
py::class_<Planner, std::shared_ptr<Planner>>(m, "Planner");
py::class_<v1::PlannerV1, std::shared_ptr<v1::PlannerV1>>(m, "PlannerV1")
.def(py::init<const CameraParameters&,
const LaserParameters&,
const std::vector<float>&,
const Interpolator&,
bool> ())
.def("getThetas", &v1::PlannerV1::getThetas)
.def("getLayoutForVis", &v1::PlannerV1::getVectorizedLayout)
.def("loadLineSurface", &v1::PlannerV1::loadLineSurface)
.def("computeVisibleRanges", &v1::PlannerV1::computeVisibleRanges)
.def("visiblePoints", &v1::PlannerV1::visiblePoints)
.def("computeLayoutIntensities", &v1::PlannerV1::computeLayoutIntensities)
.def("layoutIntensities", &v1::PlannerV1::layoutIntensities)
.def("optGlobalCostDiscrete", &v1::PlannerV1::optGlobalCostDiscrete)
.def("randomCurtainDiscrete", &v1::PlannerV1::randomCurtainDiscrete)
.def("randomCurtainContinuous", &v1::PlannerV1::randomCurtainContinuous)
.def("randomCurtainHitProb", &v1::PlannerV1::randomCurtainHitProb)
;
py::class_<v2::PlannerV2, std::shared_ptr<v2::PlannerV2>>(m, "PlannerV2")
.def(py::init<const CameraParameters&,
const LaserParameters&,
const std::vector<float>&,
const Interpolator&,
bool> ())
.def("getThetas", &v2::PlannerV2::getThetas)
.def("getLayoutForVis", &v2::PlannerV2::getVectorizedLayout)
.def("loadLineSurface", &v2::PlannerV2::loadLineSurface)
.def("computeVisibleRanges", &v2::PlannerV2::computeVisibleRanges)
.def("visiblePoints", &v2::PlannerV2::visiblePoints)
.def("computeLayoutIntensities", &v2::PlannerV2::computeLayoutIntensities)
.def("layoutIntensities", &v2::PlannerV2::layoutIntensities)
.def("optGlobalCostDiscrete", &v2::PlannerV2::optGlobalCostDiscrete)
.def("randomCurtainDiscrete", &v2::PlannerV2::randomCurtainDiscrete)
.def("randomCurtainContinuous", &v2::PlannerV2::randomCurtainContinuous)
.def("randomCurtainHitProb", &v2::PlannerV2::randomCurtainHitProb)
;
// Version string injected by the build system when available.
#ifdef VERSION_INFO
m.attr("__version__") = VERSION_INFO;
#else
m.attr("__version__") = "dev";
#endif
}
#endif
# Function to find roots of a quadratic equation
# Solves a*x^2 + b*x + c = 0 and returns a length-2 vector of roots:
# two distinct reals, a repeated real, or a complex conjugate pair,
# depending on the sign of the discriminant.
quad_roots <- function(a, b, c) {
  disc <- b^2 - 4 * a * c  # Discriminant
  if (disc > 0) {
    # Distinct real roots
    c((-b + sqrt(disc)) / (2 * a),
      (-b - sqrt(disc)) / (2 * a))
  } else if (disc == 0) {
    # Real and equal roots
    rep(-b / (2 * a), 2)
  } else {
    # Complex conjugate roots
    re_part <- -b / (2 * a)
    im_part <- sqrt(-disc) / (2 * a)
    c(complex(real = re_part, imaginary = im_part),
      complex(real = re_part, imaginary = -im_part))
  }
}
# Example
# 2x^2 + 3x + 1 = 0 has roots -0.5 and -1.
quad_roots(2, 3, 1)
package shape;
import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Polygon;
/**
 * A stamp drawn inside the bounding box (x, y)-(x1, y1), where (x, y) comes
 * from the Point superclass. Supported shapes: oval, rectangle, triangle,
 * diamond, right/up arrows, star and heart. Polygon vertices are expressed
 * as affine combinations of the two corners so each shape scales with, and
 * mirrors under, a dragged-out box in any direction.
 */
public class Stamp extends Point {
private Shape shape;
// Second corner of the bounding box (first corner is inherited x, y).
private int x1;
private int y1;
// Pen thickness multiplier; actual stroke width is 3 * stroke (see draw).
private int stroke;
// Constructor used when the shape arrives as its serialized string name.
public Stamp(int x0, int y0, int x1, int y1, int stroke, String shape, Color color){
super(x0, y0, color);
this.x1 = x1;
this.y1 = y1;
this.stroke = stroke;
switch(shape) {
case "OVAL" : this.shape = Shape.OVAL; break;
case "RECTANGLE" : this.shape = Shape.RECTANGLE; break;
case "TRIANGLE" : this.shape = Shape.TRIANGLE; break;
case "DIAMOND" : this.shape = Shape.DIAMOND; break;
case "ARROW_RIGHT" : this.shape = Shape.ARROW_RIGHT; break;
case "ARROW_UP" : this.shape = Shape.ARROW_UP; break;
case "STAR" : this.shape = Shape.STAR; break;
case "HEART" : this.shape = Shape.HEART; break;
// NOTE(review): an unrecognised name leaves this.shape null,
// which would NPE in draw() - confirm inputs are always valid.
default : break;
}
}
public Stamp(int x0, int y0, int x1, int y1, int stroke, Shape shape, Color color){
super(x0, y0, color);
this.x1 = x1;
this.y1 = y1;
this.stroke = stroke;
this.shape = shape;
}
@Override
public void draw(Graphics g) {
Graphics2D g2 = (Graphics2D) g;
g2.setStroke(new BasicStroke(3 * stroke));
draw(g2);
}
private void draw(Graphics2D g2){
if (shape.equals(Shape.OVAL)) {
int width = x1 - x;
int height = y1 - y;
// Rotation when their dimensions are negatives
if (width > 0 && height > 0) g2.drawOval(x, y, width, height);
if (width < 0 && height > 0) g2.drawOval(x1, y1 - height, -width, height);
if (width > 0 && height < 0) g2.drawOval(x, y1, width, - height);
if (width < 0 && height < 0) g2.drawOval(x1, y1, - width, -height);
}
else if (shape.equals(Shape.RECTANGLE)) {
// Four edges drawn individually rather than drawRect, so negative
// width/height boxes render the same as positive ones.
g2.drawLine(x, y, x1, y);
g2.drawLine(x1, y, x1, y1);
g2.drawLine(x, y, x, y1);
g2.drawLine(x, y1, x1, y1);
}
else if (shape.equals(Shape.TRIANGLE)) {
// Apex at the horizontal midpoint, base along y1.
g2.drawLine(x, y1, (x+x1)/2, y);
g2.drawLine((x+x1)/2, y, x1, y1);
g2.drawLine(x, y1, x1, y1);
}
else if (shape.equals(Shape.DIAMOND)) {
// Vertices at the midpoints of the bounding box's sides.
g2.drawLine((x+x1)/2, y, x1, (y+y1)/2);
g2.drawLine(x1, (y+y1)/2, (x+x1)/2, y1);
g2.drawLine((x+x1)/2, y1, x, (y+y1)/2);
g2.drawLine(x, (y+y1)/2, (x+x1)/2, y);
}
else if (shape.equals(Shape.ARROW_RIGHT)) {
// Shaft occupies the middle third vertically; head starts 2/3 across.
int[] xPoints1 = {x, (x+2*x1)/3, (x+2*x1)/3, x1, (x+2*x1)/3, (x+2*x1)/3, x};
int[] yPoints1 = {(2*y+y1)/3, (2*y+y1)/3, y, (y+y1)/2, y1, (y+2*y1)/3, (y+2*y1)/3};
g2.drawPolygon(new Polygon(xPoints1, yPoints1, xPoints1.length));
}
else if (shape.equals(Shape.ARROW_UP)) {
int[] xPoints2 = {(2*x+x1)/3, (2*x+x1)/3, x, (x+x1)/2, x1, (x+2*x1)/3, (x+2*x1)/3};
int[] yPoints2 = {y, (y+2*y1)/3, (y+2*y1)/3, y1, (y+2*y1)/3, (y+2*y1)/3, y};
g2.drawPolygon(new Polygon(xPoints2, yPoints2, xPoints2.length));
}
else if (shape.equals(Shape.STAR)) {
// Five-pointed star; vertex weights were tuned by hand.
int[] xPoints3 = {(15*x+9*x1)/24, (x+x1)/2, (9*x+15*x1)/24, x1, (7*x+17*x1)/24, (x+5*x1)/6, (x+x1)/2, (5*x+x1)/6, (17*x+7*x1)/24, x};
int[] yPoints3 = {(15*y+9*y1)/24, y, (15*y+9*y1)/24, (15*y+9*y1)/24, (5*y+7*y1)/12, y1, (y+3*y1)/4, y1, (5*y+7*y1)/12, (15*y+9*y1)/24};
g2.drawPolygon(new Polygon(xPoints3, yPoints3, xPoints3.length));
}
else if (shape.equals(Shape.HEART)) {
// Two small top arcs plus two large side arcs; one explicit case per
// drag direction since arc origins don't mirror automatically.
if (x1 > x && y1 > y) {
g2.drawArc(x, y, (x1-x)/2, (y1-y)/2, 0, 180);
g2.drawArc((x+x1)/2, y, (x1-x)/2, (y1-y)/2, 0, 180);
g2.drawArc((3*x-x1)/2, (3*y-y1)/2, 3*(x1-x)/2, 3*(y1-y)/2, 0, -71);
g2.drawArc(x, (3*y-y1)/2, 3*(x1-x)/2, 3*(y1-y)/2, -180, 71);
}
else if (x1 < x && y1 > y) {
g2.drawArc(x1, y, (x-x1)/2, (y1-y)/2, 0, 180);
g2.drawArc((x+x1)/2, y, (x-x1)/2, (y1-y)/2, 0, 180);
g2.drawArc(x1, (3*y-y1)/2, 3*(x-x1)/2, 3*(y1-y)/2, -180, 71);
g2.drawArc((3*x1-x)/2, (3*y-y1)/2, 3*(x-x1)/2, 3*(y1-y)/2, -71, 71);
}
else if (x1 > x && y1 < y) {
g2.drawArc(x, (y+y1)/2, (x1-x)/2, (y-y1)/2, 0, -180);
g2.drawArc((x+x1)/2, (y+y1)/2, (x1-x)/2, (y-y1)/2, 0, -180);
g2.drawArc((3*x-x1)/2, y1, 3*(x1-x)/2, 3*(y-y1)/2, 0, 71);
g2.drawArc(x, y1, 3*(x1-x)/2, 3*(y-y1)/2, 110, 71);
}
else if (x1 < x && y1 < y) {
g2.drawArc(x1, (y+y1)/2, (x-x1)/2, (y-y1)/2, 0, -180);
g2.drawArc((x+x1)/2, (y+y1)/2, (x-x1)/2, (y-y1)/2, 0, -180);
g2.drawArc((3*x1-x)/2, y1, 3*(x-x1)/2, 3*(y-y1)/2, 0, 71);
g2.drawArc(x1, y1, 3*(x-x1)/2, 3*(y-y1)/2, 110, 71);
}
}
}
// Serialized form consumed by the String-based constructor above.
@Override
public String toString(){
return "STAMP" + ";"
+ super.toString() + ";"
+ x1 + ";"
+ y1 + ";"
+ stroke + ";"
+ shape.getId();
}
}
|
import React from 'react';
import './NavBar.css'
import HeaderAnchor from './HeaderAnchor'
const NavBar = ()=>{
const styleHome = {
fontSize: 'large',
paddingLeft: 10,
paddingRight: 10,
paddingTop: 10,
paddingBottom: 10
};
const rest ={
paddingTop: 10,
paddingBottom: 10
};
const examples =[
'Example1', 'Example2',
];
return(
<div className="headerContainer">
<div className="header-column">
<HeaderAnchor style={styleHome} flag={true} flagLink={true} value='/'>Home</HeaderAnchor>
</div>
<div className="header-column">
<HeaderAnchor style={rest}>Interests</HeaderAnchor>
</div>
<div className="header-column">
<HeaderAnchor style={rest} list={examples} flagLink={true} value='/Examples'>Examples</HeaderAnchor>
</div>
<div className="header-column">
<HeaderAnchor style={rest} value='/Writing'>Writing</HeaderAnchor>
</div>
<div className="header-column">
<HeaderAnchor style={{border:'none', fontSize: 'large', paddingTop: 10, paddingBottom: 10}} value='/'><i class="fas fa-list-ul"></i></HeaderAnchor>
</div>
</div>
);
}
export default NavBar;
|
<filename>test/js/logger_test.js
// Tests for the Logger utility: verifies the logging surface exists and
// that overrideLogger swaps in a replacement implementation.
var expect = require('chai').expect;
var requireHelper = require('./util/require_helper');
var log = requireHelper('util/Logger');
describe('Logger Tests', function () {
it('should have debug, info and error function', function () {
// NOTE(review): chai's `.a.function` here is property-style and never
// throws on non-functions; `.a('function')` would actually assert.
expect(log.debug).to.be.a.function;
expect(log.info).to.be.a.function;
expect(log.error).to.be.a.function;
});
it('should allow override', function () {
// Each replacement method bumps the counter so we can observe delegation.
var counter = 0;
var newLog = {
debug: function () {
counter++;
},
info: function () {
counter++;
},
error: function () {
counter++;
}
};
log.overrideLogger(newLog);
log.debug();
log.info();
log.error();
// All three calls must have been routed to the override.
expect(counter).to.equal(3);
});
});
|
REM DefaultTest.sql
REM Chapter 9, Oracle9i PL/SQL Programming by <NAME>
REM This script shows different ways of calling a procedure
REM with default parameters.
set serveroutput on
REM Procedure with one default per datatype; prints whatever it receives.
CREATE OR REPLACE PROCEDURE DefaultTest (
p_ParameterA NUMBER DEFAULT 10,
p_ParameterB VARCHAR2 DEFAULT 'abcdef',
p_ParameterC DATE DEFAULT SYSDATE) AS
BEGIN
DBMS_OUTPUT.PUT_LINE(
'A: ' || p_ParameterA ||
' B: ' || p_ParameterB ||
' C: ' || TO_CHAR(p_ParameterC, 'DD-MON-YYYY'));
END DefaultTest;
/
show errors
REM Named notation: skip p_ParameterB so it keeps its default.
BEGIN
DefaultTest(p_ParameterA => 7, p_ParameterC => '30-DEC-95');
END;
/
BEGIN
-- Uses the default value for both p_ParameterB and
-- p_ParameterC.
DefaultTest(7);
END;
/
|
#!/usr/bin/env bash
# Shortcut: launch slack-term with the dotfiles-managed default config.
alias mslack-term='slack-term -config $DOTFILES/slack-term/default.json'
|
<gh_stars>1000+
# test % operation on big integers
# A multi-limb constant; scaled by -5..5 to cover all sign combinations.
delta = 100000000000000000000000000000012345
for i in range(11):
    for j in range(11):
        x = delta * (i - 5)
        y = delta * (j - 5)
        if y != 0:
            print(x % y)
# these check an edge case on 64-bit machines where two mpz limbs
# are used and the most significant one has the MSB set
x = 0x8000000000000000
print((x + 1) % x)
x = 0x86c60128feff5330
print((x + 1) % x)
|
// Define a function to calculate the discount rate and price after discount
// Appends the freshly calculated values to the existing outlet text.
// NOTE(review): force-unwraps the outlets' text - assumes both outlets are
// connected and non-nil at call time; confirm against the view lifecycle.
func updateDiscountFields() {
// Assuming discRate and priceAfterDisc are the calculated values
let discRate = calculateDiscountRate() // Replace with the actual calculation
let priceAfterDisc = calculatePriceAfterDiscount() // Replace with the actual calculation
// Update the text fields with the calculated values
EnteredDiscRateOutlet.text = "\(EnteredDiscRateOutlet.text!)\(discRate)"
PriceAfterDiscOutlet.text = "\(PriceAfterDiscOutlet.text!)\(priceAfterDisc)"
}
// Define functions to perform the actual calculations
/// Returns the discount rate formatted as a percentage string, e.g. "15.00%".
func calculateDiscountRate() -> String {
    // Replace with the actual calculation for discount rate
    let discountRate = 0.15 // Example calculation
    let asPercent = String(format: "%.2f", discountRate * 100)
    return asPercent + "%" // Format as percentage
}
/// Returns the discounted price formatted with two decimals, e.g. "85.00".
func calculatePriceAfterDiscount() -> String {
    // Replace with the actual calculation for price after discount
    let basePrice = 100.0 // Example original price
    let rate = 0.15 // Example discount rate
    let discounted = basePrice * (1 - rate)
    return String(format: "%.2f", discounted) // Format as currency or decimal
}
// Call the function to update the text fields
// (appends the calculated values to both outlets' current text).
updateDiscountFields()
# Represents one monster entry scraped/loaded into the index.
# Every instance self-registers in the @@all class-level collection, which
# the class methods below query for listing and lookup.
class MonsterIndex::Monster
attr_accessor :name, :size_type, :hit_dice, :initiative, :speed, :ac, :attack, :alignment, :url
@@all = []
# Mass-assigns any attributes present in the hash via the setters above.
def initialize(monster_hash = {})
monster_hash.each {|key, value| self.send(("#{key}="), value)}
@@all << self
end
# Builds (and registers) one Monster per hash in the array.
def self.create_from_collection(monster_array)
monster_array.each do |monster_hash|
self.new(monster_hash)
end
end
# Merges additional attributes into an existing monster; returns self.
def add_monster_attributes(attributes_hash)
attributes_hash.each {|key, value| self.send(("#{key}="), value)}
self
end
def self.get_names
names = []
@@all.each do |monster|
names << monster.name
end
names
end
# Prints every monster name, one per line.
def self.list
@@all.each do |monster|
puts "#{monster.name}"
end
puts
end
# Prints only the monsters whose name starts with the given letter
# (case-insensitive).
def self.list_some(letter)
@@all.each do |monster|
if monster.name.downcase.match(/\A[#{letter.downcase}]/)
puts "#{monster.name}"
end
end
puts
end
# Prints the full stat block for the monster with the given name.
# NOTE(review): assumes the name exists; an unknown name would raise
# NoMethodError on nil here.
def self.view_profile(name)
monster = @@all.find {|one| one.name == name}
puts "Size/Type: #{monster.size_type}"
puts "Hit Dice: #{monster.hit_dice}"
puts "Initiative: #{monster.initiative}"
puts "Speed: #{monster.speed}"
puts "AC: #{monster.ac}"
puts "Attack: #{monster.attack}"
puts "Alignment: #{monster.alignment}"
puts "More info at: #{monster.url}"
puts
end
# Returns the monster with the given name, or nil.
def self.get_monster(name)
@@all.find {|one| one.name == name}
end
def self.all
@@all
end
end
import * as functions from 'firebase-functions';
import * as admin from 'firebase-admin';
import { doLogin, doSwissLogin } from './login';
import { doProcessMainActionsChange, doProcessDelegationChange, doProcessBvChange, doProcessActionTitleChange } from './database';
// Initialise the Admin SDK against the project's Realtime Database.
admin.initializeApp({
databaseURL: "https://dune-new-dawn.firebaseio.com"
});
// HTTP endpoints: password-based login flows delegated to login.ts.
export let login = functions.https.onRequest(async (request, response) => {
await doLogin(request.query["password"] as string, response)
})
export let swissLogin = functions.https.onRequest(async (request, response) => {
await doSwissLogin(request.query["password"] as string, response)
})
// Database triggers: each fires on writes to one delegate-round subtree and
// delegates the bookkeeping to database.ts.
export let processMainActionsChange = functions.database.ref("delegateRounds/{delegateId}/{roundId}/mainActions").onWrite(async (change, context) => {
await doProcessMainActionsChange(context.params.delegateId, context.params.roundId, change)
})
export let processDelegationChange = functions.database.ref("delegateRounds/{delegateId}/{roundId}/delegation").onWrite(async (change, context) => {
await doProcessDelegationChange(context.params.delegateId, context.params.roundId, change)
})
export let processBvChange = functions.database.ref("delegateRounds/{delegateId}/{roundId}/bv").onWrite(async (change, context) => {
await doProcessBvChange(context.params.delegateId, context.params.roundId, change)
})
// Fires when an action's title changes (keyed by round + action, not delegate).
export let processActionTitleChange = functions.database.ref("actions/{roundId}/{actionId}/title").onWrite(async (change, context) => {
await doProcessActionTitleChange(context.params.roundId, context.params.actionId, change)
})
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.