text
stringlengths 27
775k
|
|---|
"use strict";
import * as assert from "assert";
import {isMsg, MsgExt} from "msg-interface";
import {createDecoder} from "../";
const msgpack = createDecoder();
const TITLE = __filename.split("/").pop();

describe(TITLE, () => {
  // Each case name doubles as the hex dump fed to the decoder: the first byte
  // is the msgpack ext header (fixext d4-d8, ext8 c7, ext16 c8, ext32 c9),
  // followed by the ext type byte and the payload bytes.
  const cases: { hex: string; type: number; length: number; first: number }[] = [
    { hex: "d4-10-11", type: 0x10, length: 1, first: 0x11 },
    { hex: "d5-20-21-22", type: 0x20, length: 2, first: 0x21 },
    { hex: "d6-30-31-32-33-34", type: 0x30, length: 4, first: 0x31 },
    { hex: "d7-40-41-42-43-44-45-46-47-48", type: 0x40, length: 8, first: 0x41 },
    { hex: "d8-50-51-52-53-55-55-56-57-58-59-5a-5b-5c-5d-5e-5f-60", type: 0x50, length: 16, first: 0x51 },
    { hex: "c7-01-02-03", type: 0x02, length: 1, first: 0x03 },
    { hex: "c8-00-02-03-04-05", type: 0x03, length: 2, first: 0x04 },
    { hex: "c9-00-00-00-03-04-05-06-07", type: 0x04, length: 3, first: 0x05 },
  ];

  for (const c of cases) {
    it(c.hex, () => {
      const ext = msgpack.decode(hexToBinary(c.hex));
      assert(ext instanceof MsgExt, "should be a MsgExt instance");
      assert(isMsg(ext), "should implement Msg interface");
      assert.equal(ext.type, c.type);
      assert.equal(ext.buffer.length, c.length);
      assert.equal(ext.buffer[0], c.first);
    });
  }
});
/** Parse one hexadecimal token (e.g. "d4") into its numeric byte value. */
function parseHex(token: string): number {
  return parseInt(token, 16);
}

/**
 * Convert a human-readable hex dump such as "d4-10-11" into a Buffer,
 * splitting on any run of non-hex characters.
 */
function hexToBinary(str: string): Buffer {
  const bytes: number[] = str.split(/[^0-9a-fA-F]+/).map(parseHex);
  return Buffer.from(bytes);
}
|
-- Seed data for the Marbella groups demo database.

-- Users (ids 1-3 by insertion order); 'NADA' is a placeholder password.
insert into users (email, encrypted_password) values ('danielnagore@marbella.es', 'NADA');
insert into users (email, encrypted_password) values ('raulsierra@marbella.es', 'NADA');
insert into users (email, encrypted_password) values ('azaldivar@marbella.es', 'NADA');

-- Groups (ids 1-4). The parenthesised prefix in each description appears to
-- encode an expected member count -- confirm with the application code.
insert into grupos (descripcion) values ('(3) Cuerpo de ingenieros del Ayto de Marbella');
insert into grupos (descripcion) values ('(-2) Compañero Dani - Administración Electrónica');
insert into grupos (descripcion) values ('(1+1) Compañero Raúl - Informática');
insert into grupos (descripcion) values ('(4) Surferos del mal - Ayto de Marbella');

-- Group membership. propietario=1 marks the owner; user 1 owns every group.
-- NOTE(review): rows below reference user_id 4, but only three users are
-- inserted above -- confirm a fourth user exists elsewhere or these inserts
-- will violate the foreign key.
insert into grupo_user (grupo_id, user_id, propietario) values (1,1,1);
insert into grupo_user (grupo_id, user_id) values (1,2);
insert into grupo_user (grupo_id, user_id) values (1,3);
insert into grupo_user (grupo_id, user_id, propietario) values (2,1,1);
insert into grupo_user (grupo_id, user_id) values (2,2);
insert into grupo_user (grupo_id, user_id, propietario) values (3,1,1);
insert into grupo_user (grupo_id, user_id) values (3,3);
insert into grupo_user (grupo_id, user_id, propietario) values (4,1,1);
insert into grupo_user (grupo_id, user_id) values (4,2);
insert into grupo_user (grupo_id, user_id) values (4,3);
insert into grupo_user (grupo_id, user_id) values (4,4);

-- Events: one row per (group, user) occurrence with its creation date.
insert into eventos (grupo_id, user_id, created_at) values (1, 1, '2021-07-12');
insert into eventos (grupo_id, user_id, created_at) values (1, 3, '2021-07-13');
insert into eventos (grupo_id, user_id, created_at) values (1, 2, '2021-07-14');
insert into eventos (grupo_id, user_id, created_at) values (1, 3, '2021-07-16');
insert into eventos (grupo_id, user_id, created_at) values (1, 1, '2021-07-19');
insert into eventos (grupo_id, user_id, created_at) values (1, 2, '2021-07-20');
insert into eventos (grupo_id, user_id, created_at) values (1, 3, '2021-08-11');
insert into eventos (grupo_id, user_id, created_at) values (1, 1, '2021-08-12');
insert into eventos (grupo_id, user_id, created_at) values (2, 2, '2021-07-15');
insert into eventos (grupo_id, user_id, created_at) values (2, 1, '2021-07-21');
insert into eventos (grupo_id, user_id, created_at) values (2, 2, '2021-07-23');
insert into eventos (grupo_id, user_id, created_at) values (3, 1, '2021-07-27');
insert into eventos (grupo_id, user_id, created_at) values (3, 3, '2021-08-03');
insert into eventos (grupo_id, user_id, created_at) values (3, 1, '2021-08-04');
insert into eventos (grupo_id, user_id, created_at) values (3, 3, '2021-08-05');
insert into eventos (grupo_id, user_id, created_at) values (3, 3, '2021-08-06');
insert into eventos (grupo_id, user_id, created_at) values (4, 4, '2021-06-30');
insert into eventos (grupo_id, user_id, created_at) values (4, 1, '2021-07-05');
insert into eventos (grupo_id, user_id, created_at) values (4, 2, '2021-08-09');
insert into eventos (grupo_id, user_id, created_at) values (4, 3, '2021-08-10');
insert into eventos (grupo_id, user_id, created_at) values (4, 4, '2021-08-13');
|
# frozen_string_literal:true

# Specs for FacetReporterService. Given a batch id and a collection id, the
# service resolves each bag's asset in solr, gathers facet counts via the
# search service, and writes a textual report; these specs stub every
# collaborator and capture the report in an in-memory StringIO.
RSpec.describe Hyrax::Migrator::Services::FacetReporterService do
  # Minimal stand-in for the facet objects a collection exposes; only the two
  # attributes the service reads are modeled.
  class Facet
    attr_accessor :solr_name, :label
  end

  let(:service) { described_class.new('batch123', 'coll123') }
  let(:location_service) { instance_double('Hyrax::Migrator::Services::BagFileLocationService') }
  let(:bag1) { double }
  let(:search_service) { instance_double('Hyrax::Migrator::HyraxCore::SearchService') }
  # Shape of a raw solr facet response: alternating value/count pairs.
  let(:facet_results) do
    { 'facet_counts' =>
      { 'facet_fields' =>
        { 'workType_label_sim' =>
          ['colour photographs', 1,
           'photomicrographs', 1,
           'slides (photographs)', 1,
           'stamps (exchange media)', 1,
           'tax stamps', 1] } } }
  end
  let(:collection) { instance_double('Collection', id: 'ohba') }
  # A single asset document as returned from solr for pid fx719p24f.
  let(:solr_record) do
    { :id => 'fx719p24f',
      'member_of_collection_ids_ssim' => %w[osu-scarc ohba],
      'workflow_state_name_ssim' => 'pending_review',
      'workType_label_sim' => %w[colour_photographs photomicrographs] }
  end
  let(:facets) do
    f = Facet.new
    f.solr_name = 'workType_label_sim'
    f.label = 'Work Type'
    [f]
  end
  # Captures everything the service writes via File.open.
  let(:test_io) { StringIO.new }

  before do
    allow(Hyrax::Migrator::Services::BagFileLocationService).to receive(:new).and_return(location_service)
    allow(location_service).to receive(:bags_to_ingest).and_return({ 'batch123' => ['fx719p24f.zip'] })
    allow(Hyrax::Migrator::HyraxCore::Asset).to receive(:solr_record).and_return(solr_record)
    allow(Hyrax::Migrator::HyraxCore::Asset).to receive(:find).and_return(collection)
    allow(collection).to receive(:available_facets).and_return(facets)
    allow(Hyrax::Migrator::HyraxCore::SearchService).to receive(:new).and_return(search_service)
    allow(search_service).to receive(:search).and_return(facet_results)
    # Redirect all file writes into the in-memory buffer above.
    allow(File).to receive(:open).and_return test_io
  end

  describe '#create_report' do
    it 'writes a file' do
      service.create_report
      expect(test_io.string).to include 'fx719p24f'
    end
    it 'writes the totals for the facets' do
      service.create_report
      expect(test_io.string).to include 'Work Type'
    end
    context 'when there is not a solr record' do
      before do
        # An empty result stands in for "asset not found in solr".
        allow(Hyrax::Migrator::HyraxCore::Asset).to receive(:solr_record).and_return([])
      end
      it 'logs the pid' do
        service.create_report
        expect(test_io.string).to include("assets not found:\nfx719p24f")
      end
    end
  end

  describe '#add_collections' do
    context 'when the solr record has no collection' do
      let(:solr_asset) { { 'member_of_collection_ids_ssim' => [] } }
      it 'stops, does not call find' do
        expect(Hyrax::Migrator::HyraxCore::Asset).not_to receive(:find)
        service.send(:add_collections, solr_asset)
      end
    end
    context 'when there are collections' do
      let(:solr_asset) { { 'member_of_collection_ids_ssim' => %w[mycoll1 mycoll2] } }
      context 'when the collection is already known' do
        let(:colls) { %w[mycoll1 mycoll2 mycoll3] }
        before do
          # Pre-populate the internal cache so lookups are skipped.
          service.instance_variable_set(:@collections, colls)
        end
        it 'skips, does not call find' do
          expect(Hyrax::Migrator::HyraxCore::Asset).not_to receive(:find)
          service.send(:add_collections, solr_asset)
        end
      end
      context 'when the collection is not known' do
        it 'adds the collection' do
          service.send(:add_collections, solr_asset)
          expect(service.instance_variable_get(:@collections)).to include('mycoll1')
        end
      end
    end
  end
end
|
import {Component, OnInit, OnDestroy} from '@angular/core';
import {FormBuilder, FormGroup, Validators} from '@angular/forms';
import {ActivatedRoute, Router} from '@angular/router';
import {AuthenticationService} from '../../service/authentication.service';
import {first} from 'rxjs/operators';
/**
 * Form that lets an authenticated user change their password.
 * On success the user is logged out and redirected to the login page so they
 * must authenticate with the new password.
 */
@Component({
  selector: 'app-reset-password',
  templateUrl: './reset-password.component.html',
  styleUrls: ['./reset-password.component.scss']
})
export class ResetPasswordComponent implements OnInit, OnDestroy {
  // Reactive form holding currentPassword / newPassword / verifyPassword.
  resetPasswordForm: FormGroup;
  // True while the change-password request is in flight.
  loading = false;
  // Set on first submit attempt so the template can show validation errors.
  submitted = false;
  // NOTE(review): assigned in ngOnInit but never read in this class --
  // presumably used by the template; confirm before removing.
  returnUrl: string;
  // Server-side error message surfaced to the template.
  error = '';

  constructor(
    private formBuilder: FormBuilder,
    private route: ActivatedRoute,
    private router: Router,
    private authenticationService: AuthenticationService
  ) {
  }

  ngOnInit() {
    // setup reset password form; minLength(8) applies to the new password only
    this.resetPasswordForm = this.formBuilder.group({
      currentPassword: ['', Validators.required],
      newPassword: ['', [Validators.required, Validators.minLength(8)]],
      verifyPassword: ['', Validators.required]
    }, {validators: this.MustMatch('newPassword', 'verifyPassword')});
    // get return url from route parameters or default to '/'
    this.returnUrl = this.route.snapshot.queryParams['returnUrl'] || '/';
  }

  ngOnDestroy() {
    // Intentionally empty: nothing to tear down; kept to satisfy OnDestroy.
  }

  // convenience getter for easy access to form fields
  get f() {
    return this.resetPasswordForm.controls;
  }

  /** Validate and submit the password change; logs the user out on success. */
  onSubmit() {
    this.submitted = true;
    // stop here if form is invalid
    if (this.resetPasswordForm.invalid) {
      return;
    }
    this.loading = true;
    this.authenticationService.changePassword(this.f.currentPassword.value, this.f.newPassword.value)
      .pipe(first())
      .subscribe(
        data => {
          // Force a fresh login with the new password.
          this.authenticationService.logout();
          this.router.navigate(['/login']);
        },
        error => {
          // Backend is expected to put a human-readable message under
          // error.error['response'] -- TODO confirm against the API.
          this.error = error.error['response'];
          this.loading = false;
        });
  }

  /**
   * Cross-field validator factory: marks `matchingControlName` with a
   * `mustMatch` error whenever its value differs from `controlName`.
   */
  MustMatch(controlName: string, matchingControlName: string) {
    return (formGroup: FormGroup) => {
      const control = formGroup.controls[controlName];
      const matchingControl = formGroup.controls[matchingControlName];
      if (matchingControl.errors && !matchingControl.errors.mustMatch) {
        // return if another validator has already found an error on the matchingControl
        return;
      }
      // set error on matchingControl if validation fails
      if (control.value !== matchingControl.value) {
        matchingControl.setErrors({mustMatch: true});
      } else {
        matchingControl.setErrors(null);
      }
    };
  }

  /** Log out and return to the login screen. */
  logout() {
    this.authenticationService.logout();
    this.router.navigate(['/login']);
  }
}
|
---
authorName: syntonica
canDelete: false
contentTrasformed: false
from: '"syntonica" <syntonica@...>'
headers.inReplyToHeader: .nan
headers.messageIdInHeader: PGR0MTNmMituMmp2QGVHcm91cHMuY29tPg==
headers.referencesHeader: .nan
layout: email
msgId: 675
msgSnippet: Wow! Need some air after diving in so deep. I never realized how many
concepts in English were expressed in Latin compound words and didn t really have
any
nextInTime: 676
nextInTopic: 676
numMessagesInTopic: 2
postDate: '1140068642'
prevInTime: 674
prevInTopic: 0
profile: syntonica
replyTo: LIST
senderId: TVXMzzCE5myeLdq8RQuciAcoZe-INfKmbYeSc42FOzH4BpHteIUQeCKBrdMEnkUrYrOe7skd0cslZrzR6ZEN13tdUXevELxQ
spamInfo.isSpam: false
spamInfo.reason: '12'
systemMessage: false
title: Some Observations
topicId: 675
userId: 88184315
---
Wow! Need some air after diving in so deep. I never realized how many concepts in
English were expressed in Latin compound words and didn't really have any Germanic
synonyms. (de-, con-, ex-, etc.)
1. (centi <> hekto) = trainwreck! Don't use extra words when you can let the math do the
work for you. Let centipedes be centipedes!
100 = "centi"="hekto"=one hundred
1/100="verte-centi"="verte-hekto"=one one-hundredth
Facili, ne?
2. I think I get the "ne" vs "no" distinction. No(n) is the opposite of something. "Ne" just
denies the quality.
Amo love
Non-amo hate
Ne-amo not loved, but not necessarily hated either. (The nice way to let your suitor
down!)
Ne-xeno not a stranger (but not a friend either); acquaintance
An es ne-xeno. He is known to me.
3. I don't think the word "zero" hit western culture until the Pope was speaking Italian.
Hindu/Arabic numerals and the zero (sifr[Arabic]=>cypher, zefiro=>zero) didn't arrive
until the 11th or 12th century!)
4. "Vagona" is definitely from the German, probably arriving 410AD with Alaric and his
armies. I would suggest "harma" [Gr.] or "carrus" [L] instead. Both mean "chariot."
5. The verb structures still seem a little Western-centric. While most moods can be
expressed, I would suggest adding the following verb modifiers:
"dubi" for a dubitative or a hearsay mood:
qo-lo es Fred nu-di? Where is Fred today?
Fred dubi es pato. Fred, (I guess), is sick.
Irene dubi dice mi; ... Irene told me (it's hearsay)...
"ja" for a cohortive/energetic/counterfactual mood:
Na ja dice glosa! Let's speak Glosa!
Mi ja pote dice glosa! (But) I _can_ speak Glosa!
Id ja es boni di! It's a bee-a-utiful day!
Boni sani a pan!
Sintonika
|
<?php if(!defined('BASEPATH')) exit('No direct script access allowed');

/**
 * Data-access model for users (MST_USER) and its lookup tables
 * (M_USER_TYPE, M_DIVISION, M_SPESIALIZATION).
 */
class User_model extends CI_Model
{
    public function __construct(){
        //$this->load->database();
    }

    /**
     * Fetch every user joined with its type, division and subdivision names.
     *
     * NOTE(review): the division joins resolve G.DIVISION_ID / G.SUBDIVISION_ID
     * from the user *type* row (alias G), not from the user row itself --
     * confirm this is intended and not a typo for B.DIVISION_ID.
     *
     * @return array list of user result objects
     */
    function getAllUSer() // name (including the typo) kept so existing callers keep working
    {
        $this->db->select("B.*,G.NAME AS G_NAME,H.NAME AS DIVISION, J.NAME AS SUBDIVISION");
        $this->db->from("MST_USER B");
        $this->db->join("M_USER_TYPE G","G.ID = B.TYPE_ID");
        $this->db->join("M_DIVISION H","G.DIVISION_ID = H.ID");
        $this->db->join("M_DIVISION J","G.SUBDIVISION_ID = J.ID");
        $query = $this->db->get();
        $user = $query->result();
        return $user;
    }

    /**
     * Fetch one user by primary key with the same joined names as getAllUSer().
     *
     * @param int $id MST_USER.ID
     * @return array result objects (empty when the id does not exist)
     */
    function getUserInfo($id)
    {
        $this->db->select("B.*,G.NAME AS G_NAME,H.NAME AS DIVISION, J.NAME AS SUBDIVISION, B.ID AS ID");
        $this->db->from("MST_USER B");
        $this->db->join("M_USER_TYPE G","G.ID = B.TYPE_ID");
        $this->db->join("M_DIVISION H","G.DIVISION_ID = H.ID");
        $this->db->join("M_DIVISION J","G.SUBDIVISION_ID = J.ID");
        $this->db->where("B.ID = ", $id);
        $query = $this->db->get();
        $userInfo = $query->result();
        return $userInfo;
    }

    /**
     * Top-level divisions: rows with SUPERIOR_DIV <= 2.
     * @return array division rows
     */
    function getDivision()
    {
        $this->db->select("*");
        $this->db->from("M_DIVISION");
        $this->db->where("SUPERIOR_DIV <= 2");
        $query = $this->db->get();
        $divs = $query->result();
        return $divs;
    }

    /**
     * Sub-divisions: rows with SUPERIOR_DIV > 2.
     * @return array division rows
     */
    function getSubDivision()
    {
        $this->db->select("*");
        $this->db->from("M_DIVISION");
        $this->db->where("SUPERIOR_DIV > 2");
        $query = $this->db->get();
        $divs = $query->result();
        return $divs;
    }

    /**
     * Non-admin user types only (IS_ADMIN <> 1).
     * @return array user-type rows
     */
    function getUserType()
    {
        $this->db->select("*");
        $this->db->from("M_USER_TYPE");
        $this->db->where("IS_ADMIN <> 1");
        $query = $this->db->get();
        $types = $query->result();
        return $types;
    }

    /**
     * All specialization rows.
     * @return array specialization rows
     */
    function getSpesialization()
    {
        $this->db->select("*");
        $this->db->from("M_SPESIALIZATION");
        $query = $this->db->get();
        $specs = $query->result();
        return $specs;
    }

    /**
     * Insert a user plus one USER_SPESIAL row per specialization, atomically.
     *
     * NOTE(review): the new id is read back via SELECT MAX(ID) inside the
     * transaction; $this->db->insert_id() would be safer against concurrent
     * inserts -- confirm driver support (this schema looks Oracle-style).
     *
     * @param array $userInfo column => value map for MST_USER
     * @param array $specs    list of specialization ids
     * @return int the new user's id
     */
    function addNewUser($userInfo, $specs)
    {
        $this->db->trans_start();
        $this->db->insert('MST_USER', $userInfo);
        $this->db->select_max('ID');
        $query = $this->db->get('MST_USER');
        $insert_id = $query->row();
        foreach($specs as $spec)
        {
            $user_spec = array('USER_ID'=> $insert_id->ID, 'SPESIALIST_ID'=>$spec);
            $this->db->insert('USER_SPESIAL', $user_spec);
        }
        $this->db->trans_complete();
        return $insert_id->ID;
    }

    /**
     * Update a user row by id inside a transaction.
     *
     * Bug fix: the original returned 0 on a failed update WITHOUT calling
     * trans_complete(), leaving the transaction open; the transaction is now
     * always closed before returning.
     *
     * @param array $userInfo column => value map
     * @param int   $id       MST_USER.ID to update
     * @return int 1 on success, 0 on failure
     */
    function editUser($userInfo, $id)
    {
        $this->db->trans_start();
        $this->db->where('ID', $id);
        $updated = $this->db->update('MST_USER', $userInfo);
        $this->db->trans_complete();
        return $updated ? 1 : 0;
    }

    /**
     * Delete record(s) matching $where from $table.
     *
     * @param string $table table name
     * @param array  $where column => value conditions
     * @return mixed driver result of the delete
     */
    public function Delete($table, $where){
        $res = $this->db->delete($table, $where);
        return $res;
    }
}
?>
|
from collections import namedtuple
from tempfile import NamedTemporaryFile
import numpy as np
import pytest
import pandas as pd
from ananse.network import Network
from ananse.commands import network
@pytest.fixture
def binding_fname():
    """Path to a small example binding TSV shared by the enhancer tests."""
    return "tests/example_data/binding2.tsv"
@pytest.fixture
def network_obj():
    """Network instance backed by the bundled hg38 gene bed.

    Passing ``genome=""`` presumably skips genome-dependent setup -- TODO
    confirm against ``Network.__init__``.
    """
    return Network(genome="", gene_bed="ananse/db/hg38.genes.bed")
def test_unique_enhancer(network_obj, binding_fname):
    """The example binding file reduces to 6 unique enhancer regions spanning
    three chromosomes and two distinct start coordinates."""
    enhancers = network_obj.unique_enhancers(binding_fname).as_df()
    assert enhancers.shape[0] == 6
    assert sorted(list(enhancers["Chromosome"].unique())) == ["chr1", "chr10", "chr17"]
    assert sorted(list(enhancers["Start"].unique())) == [7677184, 7687827]
def test_distance_weight(network_obj):
    """Distance weights are 1 inside the full-weight region, decay towards the
    maximum distance, and the promoter region is either fully weighted
    (include_promoter=True) or masked to 0 (include_promoter=False)."""
    shared = dict(
        promoter_region=20,
        full_weight_region=50,
        maximum_distance=100,
        alpha=5,
    )

    weights = network_obj.distance_weight(include_promoter=True, **shared)
    assert list(weights.columns) == ["weight", "dist"]
    weights = weights.set_index("dist")
    for dist in (0, 25, 50):
        assert weights.loc[dist, "weight"] == 1
    assert weights.loc[51, "weight"] < 1
    assert np.isclose(weights.loc[100, "weight"], 0, atol=1e-4)
    assert weights.shape[0] == 101

    weights = network_obj.distance_weight(include_promoter=False, **shared)
    assert list(weights.columns) == ["weight", "dist"]
    weights = weights.set_index("dist")
    assert weights.loc[0, "weight"] == 0
    assert weights.loc[20, "weight"] == 0
    assert weights.loc[21, "weight"] == 1
    assert weights.shape[0] == 101
def test_command():
    """Run the ``ananse network`` command end-to-end on the bundled heart data
    and sanity-check the shape and columns of the resulting network table."""
    with NamedTemporaryFile() as tmp:
        fname = tmp.name
        Args = namedtuple(
            "args",
            "genome annotation include_promoter include_enhancer binding fin_expression outfile ncore",
        )
        args = Args(
            genome="hg38",
            annotation=None,
            include_promoter=True,
            include_enhancer=True,
            binding="tests/data/network/binding.h5",
            fin_expression="tests/data/network/heart_expression.txt",
            outfile=fname,
            ncore=2,
        )
        network(args)
        df = pd.read_table(fname, sep="\t")
        assert df.shape[0] == 30690
        # Bug fix: the original used `assert list(...).__eq__([...])`, which
        # returns NotImplemented (truthy!) when the operand types differ, so
        # the assertion could pass spuriously. Use a plain `==` comparison.
        assert list(df.columns) == ["tf_target", "prob"]
|
/**
 * Build the page header (header > nav > div.nav-wrapper > brand link)
 * and append it to `root`. Always returns true.
 */
export default root => {
  const header = window.document.createElement('header');
  const nav = window.document.createElement('nav');

  const navWrapper = window.document.createElement('div');
  navWrapper.className = 'nav-wrapper container';
  nav.appendChild(navWrapper);

  const brandLink = window.document.createElement('a');
  brandLink.href = '#';
  brandLink.className = 'brand-logo';
  brandLink.innerText = 'Logo';
  navWrapper.appendChild(brandLink);

  header.appendChild(nav);
  root.appendChild(header);
  return true;
};
|
#ifndef _DAM_H_
#define _DAM_H_

// Third-party drivers, ROS messaging, and utility includes.
#include <ACS712.h>
#include <ACS712_AC.h>
#include <AccelStepper.h>
#include <HX711.h>
#include <ros.h>
#include <prismm_msgs/dam_data.h>
#include <prismm_msgs/getBool.h>
#include <MovingAverageFilter.h>
#include <Servo.h>

// Stepper acceleration (steps/s^2) and resolution.
#define STP_Y_ACCEL 2000
#define STP_X_ACCEL 800
#define STEPS_PER_REV 800
// Probe Servo
#define SERVO_ROT_PIN 9
#define SERVO_EXT_PIN 10
// X Steppers
#define STP_X_STEP_PIN 5
#define STP_X_DIR_PIN 4
#define STP_X_HOME_PIN 32
// Probe Stepper
#define STP_PROBE_STEP_PIN 3
#define STP_PROBE_DIR_PIN 2
#define STP_PROBE_HOME_PIN 34
// Drill stepper
#define STP_DRILL_STEP_PIN 7
#define STP_DRILL_DIR_PIN 6
#define STP_DRILL_HOME_PIN 33
#define STP_DRILL_CURRENT_PIN A0
// Switches
// NOTE(review): "LIMMIT" is likely a typo for "LIMIT"; renaming would break
// any other user of this macro, so it is left as-is.
#define PROBE_LIMMIT_PIN 31

// Controller for the drill/probe assembly: owns the X, drill and probe
// steppers, the probe servos, the drill current sensor, and publishes state
// over ROS. Declarations only; behavior lives in the implementation file.
class Dam {
  public:
    Dam(ros::NodeHandle nh);
    // High-level state machine for the assembly; E_STOP halts everything.
    enum DamState{
      E_STOP = -1,
      DEFAULT_STATE = 0,
      HOMED = 1,
      HOMING = 2,
      HOMING_DRILL = 3,
      HOMING_PROBE = 4,
      DRILLING = 5,
      BOWL = 6,
    };
    bool update();// Should be run in loop and do iterative processes
    DamState getState();
    bool startDrilling();// Return false if probe not homed
    bool stopDrilling();
    bool homeProbe();
    bool gotoProbeRot(int angle);
    bool gotoProbeExt(int angle);
    bool setProbeSpeed(int max_speed);
    bool homeX();// Return false if drill and probe not homed
    bool gotoX(int pos); // Return false if distance is out of bounds
    bool setXSpeed(int max_speed);
    bool homeDrill();
    bool gotoDrill(int pos); // Return false if distance is out of bounds
    bool setDrillSpeed(int max_speed);
    //bool startRockwell(double max_pressure);// Press probe down and melt ice (or just heat)
    bool startBowl(float speed = 1.0);// Return false if homed (or we know we aren't near ice)
    bool stopBowl();
    bool gotoProbe(int pos); // Return false if distance is out of bounds
    bool probeNotHomed();
    prismm_msgs::dam_data getData();
    bool eStop();
    bool resume();// Continue drilling or moving with motors after e stop
    bool reset();// Resume processing but reset motor movements and state
  private:
    ros::NodeHandle nh;
    const prismm_msgs::getBoolRequest probe_srv_req;
    prismm_msgs::getBoolResponse probe_srv_resp;
    bool tool_is_drill = true;
    bool e_stopped = false;
    float bowl_speed = 1.0;
    int bowl_direction = 1;
    float rot_pos = 0;
    float ext_pos = 0;
    // Previous state, kept so resume() can restore it after an e-stop.
    DamState last_state = DEFAULT_STATE;
    DamState state = DEFAULT_STATE;
    prismm_msgs::dam_data data_out;
    // Steps-per-millimetre conversions; probe axis is inverted (negative).
    float probe_step_per_mm = -800/2.54;
    float drill_step_per_mm = 800/2.54;
    float x_step_per_mm = 800/100;
    float probe_home_speed = 200.0;//currently in steps per second
    float drill_home_speed = 200.0;
    float x_home_speed = 200.0;
    float probe_max_speed = 800.0;//currently in steps per second
    float drill_max_speed = 800.0;
    float x_max_speed = 800.0;
    ACS712 stp_drill_current_sensor;
    Servo servo_ext;
    Servo servo_rot;
    AccelStepper stp_x;
    AccelStepper stp_drill;
    AccelStepper stp_probe;
    MovingAverageFilter drill_current_avg;
    // Per-loop helpers driven by update().
    void iterateBowl();
    void incrementProbeHome();
    void incrementDrillHome();
    void incrementXHome();
};
#endif /** _Dam_H_ **/
|
# Project
### 꼭 개인 Branch에서 작업해주세요!!
```
// 작업 전 수행
git checkout 브랜치 이름
git pull origin 브랜치 이름
// 작업 후 수행
git add .
git commit -m "커밋 메시지"
git push origin 브랜치 이름
```
|
# `Matlab`向量
向量是数字的一维数组。在`MATLAB`中,允许创建两种类型的向量:
- 行向量
- 列向量
## 1. 行向量
行向量是通过用方括号中的元素集合来创建的,使用`空格`或`逗号`分隔元素。
```matlab
r = [1 18 19 21 41]
```
## 2. 列向量
列向量是通过用方括号中的元素集合来创建的,`分号`用于分隔元素。
```matlab
c = [17; 28; 39; 60; 81]
```
## 3. 引用向量的元素
3.1 可以通过多种方式来引用一个或多个向量的元素。向量`v`的第`i`个分量叫作`v(i)`。 例如:
```matlab
v = [ 1; 2; 3; 4; 5; 6]; % creating a column vector of 6 elements
v(3)
```
3.2 引用带`冒号`的向量(如`v(:)`)时,将列出向量的所有组件。
```matlab
v = [ 1; 2; 3; 4; 5; 6]; % creating a column vector of 6 elements
v(:)
```
这个操作的结果是列向量:`v(:)` 总是将向量的所有元素排成一个列向量返回。
3.3 从向量中选择一系列元素。
这个操作不会改变行向量或列向量。
例如,创建一个`9`个元素的行向量`rv`,然后通过`rv(3:7)`引用第`3`个到第`7`个元素,然后引用来向一个新创建的`sub_rv`向量赋值。如下代码所示:
```matlab
rv = [1 2 3 4 5 6 7 8 9];
sub_rv = rv(3:7)
```
## 4. 向量运算
4.1 向量的加减
可以对两个向量做加减运算。这两个操作的向量必须是相同的类型并且具有相同数量的元素。
4.2 向量的标量乘法
将一个向量乘以一个数字时,这称为标量乘法。标量乘法产生相同类型的新向量,原始向量的每个元素乘以数字。
4.3 转置向量
转置操作是将列向量更改为行向量,反之亦然。 转置操作由单引号`'`表示。
4.4 附加向量
`MATLAB`允许将多个向量附加在一起来创建新的向量。
假设,如果有两个具有`n`和`m`个元素的`行`向量`r1`和`r2`,通过附加这两个向量来创建`n+m`个元素的行向量`r`:
```matlab
r = [r1, r2]
```
然而,要做到这一点,这两个向量应该具有相同数量的元素。
两个具有`n`和`m`个元素的`列`向量`c1`和`c2`,通过附加这些向量可创建`n+m`个元素的列向量`c`:
```matlab
c = [c1; c2]
```
还可以通过附加这两个`列`向量来创建矩阵`c`; 向量`c2`将是矩阵的`第二列`:
```matlab
c = [c1, c2]
```
要做到这一点,这两个向量应该具有相同数量的元素。
```matlab
r1 = [ 1 2 3 4 ];
r2 = [5 6 7 8 ];
r = [r1,r2]
rMat = [r1;r2]
c1 = [ 1; 2; 3; 4 ];
c2 = [5; 6; 7; 8 ];
c = [c1; c2]
cMat = [c1,c2]
```
4.5 向量的幅值大小
具有元素为`[v1,v2,v3,...,vn]`的向量`v`的幅值(大小)由下列公式求出:

计算步骤:
(1) 以向量的乘积为单位,使用数组乘法`(.*)`产生向量`sv`,向量`sv`的元素是向量`v`的元素的平方。即:`sv = v.*v`;
(2) 使用`sum函数`得到向量`v`的元素的平方和,也称为向量v的点积。即:`dp= sum(sv)`;
(3) 使用`sqrt`函数得到和的平方根就是向量`v的幅值`。即:`mag = sqrt(dp)`;
```matlab
v = [1: 2: 20];
sv = v.* v; %the vector with elements
% as square of v's elements
dp = sum(sv); % sum of squares -- the dot product
mag = sqrt(dp); % magnitude
disp('Magnitude:'); disp(mag);
```
4.6 向量点积
两个向量`a = (a1,a2,...,an)`和`b = (b1,b2,...,bn)`的点积由下公式计算给出:
```
a.b = ∑(ai.bi)
```
使用`dot函数`计算两个向量`a`和`b`的点积。
```matlab
dot(a, b);
```
4.7 具有均匀间隔元素的向量
`MATLAB`可创建具有均匀间隔元素的向量。使用起始元素值`s`,结束元素值`e`,步长`n` 来创建一个向量`v`,可以这样书写:
```
v = [s : n : e]
```
|
$ cat groceries
Item Quantity Price
Apples 5 0.50
Cereal 1 3.40
Soda 2 1.10
|
package transmission
import (
"strconv"
"strings"
"time"
"github.com/pkg/errors"
)
// Time is a wrapper around time.Time with custom JSON serialization logic.
type Time struct {
	time.Time
}

// MarshalJSON encodes the time as a bare (unquoted) Unix timestamp in seconds.
// NOTE(review): the pointer receiver means a non-addressable Time value will
// not be marshalled through this method via the json.Marshaler interface --
// confirm callers always marshal *Time.
func (t *Time) MarshalJSON() ([]byte, error) {
	return []byte(strconv.FormatInt(t.Time.Unix(), 10)), nil
}
// UnmarshalJSON parses b as a Unix timestamp in seconds, accepting both a
// bare number and a quoted string.
func (t *Time) UnmarshalJSON(b []byte) error {
	trimmed := strings.Trim(string(b), "\"")
	seconds, err := strconv.ParseInt(trimmed, 10, 64)
	if err != nil {
		return errors.Wrapf(err, "failed to parse int %q", trimmed)
	}
	t.Time = time.Unix(seconds, 0)
	return nil
}
|
package scalatest.guide.ch02styles
// Because it gives absolute freedom (and no guidance) on how specification text
// should be written, FreeSpec is a good choice for teams experienced with BDD
// and able to agree on how to structure the specification text.
import org.scalatest._
// Specification for scala.collection.Set in FreeSpec's nested style:
// "-" clauses group description text, "in" marks an executable example.
class Ex05SetSpec extends freespec.AnyFreeSpec {
  "A Set" - {
    "when empty" - {
      // An empty Set reports size 0.
      "should have size 0" in {
        assert(Set.empty.size == 0)
      }
      // head on an empty Set must throw, per the collection contract.
      "should produce NoSuchElementException when head is invoked" in {
        assertThrows[NoSuchElementException] {
          Set.empty.head
        }
      }
    }
  }
}
|
import "math"
// maximalSquare returns the area of the largest square made entirely of '1'
// bytes in matrix. cache[r][c] holds the AREA (side squared) of the largest
// all-ones square whose bottom-right corner is (r, c).
func maximalSquare(matrix [][]byte) int {
	best := 0
	cache := make([][]int, len(matrix))
	for r := range cache {
		cache[r] = make([]int, len(matrix[0]))
	}
	for r := range matrix {
		for c := range matrix[0] {
			if matrix[r][c] != '1' {
				cache[r][c] = 0
				continue
			}
			cache[r][c] = 1
			if r > 0 && c > 0 && int(cache[r-1][c-1]) > 0 {
				// Side of the square ending at the diagonal neighbour;
				// extend it by checking the runs of ones above and to the left.
				side := int(math.Sqrt(float64(cache[r-1][c-1])))
				grown := getWidth(matrix, r, c, side)
				cache[r][c] = grown * grown
			}
			if cache[r][c] > best {
				best = cache[r][c]
			}
		}
	}
	return best
}
// getWidth returns the side of the largest square ending at (i, j), given
// that the square ending at the diagonal neighbour has side `max`: it scans
// up to `max` cells leftwards and upwards and returns one more than the
// shortest run of ones found (capped at max+1).
func getWidth(matrix [][]byte, i, j, max int) int {
	for d := 0; d < max; d++ {
		if matrix[i][j-d-1] == '0' || matrix[i-d-1][j] == '0' {
			return d + 1
		}
	}
	return max + 1
}
|
$(document).ready(function(){
    // Inclusive RGB channel ranges that random colors are drawn from;
    // updated by the three jQuery UI range sliders below.
    var redStart = 0;
    var redEnd = 255;
    var greenStart = 0;
    var greenEnd = 255;
    var blueStart = 0;
    var blueEnd = 255;

    // Pick one random value per channel within the current ranges.
    function randomRgb() {
        return [
            Math.round(Math.random() * (redEnd - redStart) + redStart),
            Math.round(Math.random() * (greenEnd - greenStart) + greenStart),
            Math.round(Math.random() * (blueEnd - blueStart) + blueStart)
        ];
    }

    // When the checkbox is ticked, schedule periodic background changes for
    // the configured duration; unticking just hides the spinner (already
    // scheduled timeouts still fire, as in the original behavior).
    $( '#automateChangeBackground' ).change(function(){
        var agreed = $(this).is( ':checked' );
        if(agreed === true) {
            var rateOfChanges = $( '#rateOfChanges' ).val(); //miliseconds
            var timeDuration = $( '#timeDuration' ).val(); //seconds
            // NOTE(review): .val() returns '' (never undefined) for present
            // inputs, so this guard is effectively always true; it was likely
            // meant to require BOTH fields. Kept as-is to preserve behavior.
            if (typeof rateOfChanges !== 'undefined' || typeof timeDuration !== 'undefined') {
                $( '.spinner-grow' ).show();
                var counterChanges = (timeDuration * 1000) / rateOfChanges;
                // Bug fix: `i` was an implicit global; declared locally.
                for (var i = 0; i < counterChanges; i++) {
                    doSetTimeout(i, rateOfChanges);
                }
            }
        } else {
            $( '.spinner-grow' ).hide();
        }
    });

    // Schedule one background change `delay * periodicity` ms from now.
    function doSetTimeout(delay, periodicity) {
        setTimeout(function() {
            // Bug fix: `rgb` was an implicit global; declared locally.
            var rgb = randomRgb();
            $(document.body).css('background','rgb('+rgb.join(',')+')');
            console.log('automate background','rgb('+rgb.join(',')+')');
        }, delay * periodicity);
    }

    // Any mouse movement also recolors the background.
    $(document).mousemove(function(event){
        var rgb = randomRgb();
        $(document.body).css('background','rgb('+rgb.join(',')+')');
        console.log('background','rgb('+rgb.join(',')+')');
    });

    // Reset all three channel ranges and the slider handle labels.
    $( '#default-values' ).click(function(event, ui) {
        redStart = 0;
        redEnd = 255;
        greenStart = 0;
        greenEnd = 255;
        blueStart = 0;
        blueEnd = 255;
        $( '#slider-red' ).find( '.ui-slider-handle:first' ).text(redStart);
        $( '#slider-red' ).find( '.ui-slider-handle:last' ).text(redEnd);
        $( '#slider-green' ).find( '.ui-slider-handle:first' ).text(greenStart);
        $( '#slider-green' ).find( '.ui-slider-handle:last' ).text(greenEnd);
        $( '#slider-blue' ).find( '.ui-slider-handle:first' ).text(blueStart);
        $( '#slider-blue' ).find( '.ui-slider-handle:last' ).text(blueEnd);
    });

    // First click anywhere opens the modal and (re)initialises the sliders.
    $(document).click(function(){
        $( '#modalWindow' ).modal();
        $( '#slider-red' ).slider({
            animate:'slow',
            orientation: 'horizontal',
            range:true,
            min: 0,
            max: 255,
            values: [ redStart, redEnd ],
            slide: function( event, ui ) {
                redStart = ( ui.values[ 0 ] );
                redEnd = ( ui.values[ 1 ] );
                $( '#slider-red' ).find( '.ui-slider-handle:first' ).text(redStart);
                $( '#slider-red' ).find( '.ui-slider-handle:last' ).text(redEnd);
                if (redStart === redEnd) {
                    $( 'p:first' ).html( "<span class='warning'>Red: The red color is unchanged</span>" );
                } else if (redStart !== redEnd) {
                    $( 'p:first' ).html( '<span>Red: </span>' );
                }
            }
        });
        $( '#slider-green' ).slider({
            animate:'slow',
            orientation: 'horizontal',
            range:true,
            min: 0,
            max: 255,
            values: [ greenStart, greenEnd ],
            slide: function( event, ui ) {
                greenStart = ( ui.values[ 0 ] );
                greenEnd = ( ui.values[ 1 ] );
                $( '#slider-green' ).find( '.ui-slider-handle:first' ).text(greenStart);
                $( '#slider-green' ).find( '.ui-slider-handle:last' ).text(greenEnd);
                if (greenStart === greenEnd) {
                    $( 'p:eq(1)' ).html( "<span class='warning'>Green: The green color is unchanged</span>" );
                } else if (greenStart !== greenEnd) {
                    $( 'p:eq(1)' ).html( "<span>Green: </span>" );
                }
            }
        });
        $( '#slider-blue' ).slider({
            animate:'slow',
            orientation: 'horizontal',
            range:true,
            min: 0,
            max: 255,
            values: [ blueStart, blueEnd ],
            slide: function( event, ui ) {
                blueStart = ( ui.values[ 0 ] );
                blueEnd = ( ui.values[ 1 ] );
                $( '#slider-blue' ).find( '.ui-slider-handle:first' ).text(blueStart);
                $( '#slider-blue' ).find( '.ui-slider-handle:last' ).text(blueEnd);
                if (blueStart === blueEnd) {
                    $( 'p:last' ).html( "<span class='warning'>Blue: The blue color is unchanged</span>" );
                } else if (blueStart !== blueEnd) {
                    $( 'p:last' ).html( "<span>Blue: </span>" );
                }
            }
        });
    });
});
|
package com.explore.playground.utils
import com.google.firebase.messaging.FirebaseMessagingService
import com.google.firebase.messaging.RemoteMessage
/**
 * FCM entry point. The only callback that does real work is [onNewToken],
 * which caches the refreshed registration token in [LocalValue].
 *
 * The previous overrides of onDeletedMessages, onDestroy, onMessageReceived,
 * onMessageSent and onSendError only delegated to super and have been
 * removed; the inherited behavior is identical.
 */
class FireBaseUtil : FirebaseMessagingService() {
    override fun onNewToken(token: String) {
        super.onNewToken(token)
        // Persist the token so the app can register it with the backend later.
        LocalValue.fcm = token
    }
}
|
# frozen_string_literal: true

module Codebreaker
  # CLI specs: STDIN is stubbed with a scripted sequence of commands and the
  # console output is matched against the localized (I18n) message catalog.
  RSpec.describe Cli do
    subject(:cli) { Codebreaker::Cli.new(game) }

    let(:game) { Game.new }
    let(:code) { '1234' }
    let(:name) { 'Alex' }

    it 'output hello to console' do
      allow(STDIN).to receive(:gets).and_return('exit')
      expect { cli.run }.to output(/#{I18n.t(:welcome)}/).to_stdout
    end
    it 'output choose_option to console' do
      allow(STDIN).to receive(:gets).and_return('exit')
      expect { cli.run }.to output(/#{I18n.t(:choose_option)}/).to_stdout
    end
    it 'output wrong input option' do
      allow(STDIN).to receive(:gets).and_return('start', 'exit')
      allow(game).to receive(:user).and_return(name)
      expect { cli.run }.to output(include(I18n.t(:wrong_input_option))).to_stdout
    end
    it 'output rules to console' do
      allow(STDIN).to receive(:gets).and_return('rules', 'exit')
      expect { cli.run }.to output(include(I18n.t(:rules))).to_stdout
    end
    it 'output stats alert message' do
      allow(STDIN).to receive(:gets).and_return('stats', 'exit')
      expect { cli.run }.to output(/#{I18n.t(:clear_stats)}/).to_stdout
    end
    # 'hint' outside a running game counts as an invalid option.
    it 'output hint message' do
      allow(STDIN).to receive(:gets).and_return('hint', 'exit')
      expect { cli.run }.to output(include(I18n.t(:wrong_input_option))).to_stdout
    end

    describe '#difficult_method_with_diff_values' do
      before do
        # Pin the secret code so entering `code` always wins.
        allow(game).to receive(:secrete_code).and_return(code.chars.map(&:to_i))
      end

      it 'chose hell difficult' do
        allow(STDIN).to receive(:gets).and_return('start', name, 'hell', code, 'exit')
        expect { cli.run }.to output(include(I18n.t(:win))).to_stdout
      end
      it 'chose easy difficult' do
        allow(STDIN).to receive(:gets).and_return('start', name, 'easy', code, 'exit')
        expect { cli.run }.to output(include(I18n.t(:win))).to_stdout
      end
      # NOTE(review): unlike the other cases this script has no trailing
      # 'exit' -- presumably the game terminates after a win; confirm this is
      # intentional and not an oversight.
      it 'chose medium difficult' do
        allow(STDIN).to receive(:gets).and_return('start', name, 'medium', code)
        expect { cli.run }.to output(include(I18n.t(:win))).to_stdout
      end
      it 'chose wrong difficult' do
        allow(STDIN).to receive(:gets).and_return('start', name, 'fff', 'medium', 'exit')
        expect { cli.run }.to output(include(I18n.t(:wrong_input_option))).to_stdout
      end
    end
  end
end
|
# DataStructure-And-Algorithm
> 算法和数据结构学习(Typescript 实现)
|
using LazyProductions.GridManager;
using UnityEngine;
using Grid = LazyProductions.GridManager.Grid;
using Random = System.Random;
namespace Tests.TestScenes.BasicVoxelTest
{
public class MarchingCubesTestRandom : MonoBehaviour
{
    // Fills a cubic grid with random SOLID/VOID points (keeping a one-point
    // VOID border) and renders the marching-cubes mesh built from it.
    public void Start()
    {
        // Renderer with a default Standard-shader material.
        MeshRenderer meshRenderer = gameObject.AddComponent<MeshRenderer>();
        Material material = new Material(Shader.Find("Standard"));
        meshRenderer.sharedMaterial = material;
        MeshFilter meshFilter = gameObject.AddComponent<MeshFilter>();
        Mesh mesh = new Mesh();
        Grid grid = new Grid(10, new Vector3(0, 0, 0));

        // Pre-set: initialise every point to VOID so the outer border stays empty.
        for (int x = 0; x < grid.Size; x++)
        {
            for (int y = 0; y < grid.Size; y++)
            {
                for (int z = 0; z < grid.Size; z++)
                {
                    grid.Point[x, y, z] = new Point(PointType.VOID, 1);
                }
            }
        }

        // Randomise the interior points (50/50 solid/void).
        Random random = new Random();
        for (int x = 1; x < grid.Size - 1; x++)
        {
            for (int y = 1; y < grid.Size - 1; y++)
            {
                for (int z = 1; z < grid.Size - 1; z++)
                {
                    int num = random.Next(2);
                    // BUGFIX: removed the per-voxel Debug.Log(num) — it wrote one
                    // console line per interior point (hundreds per Start),
                    // flooding the log and slowing startup considerably.
                    grid.Point[x, y, z] = new Point(num == 0 ? PointType.SOLID : PointType.VOID, 1);
                }
            }
        }

        MarchingMeshData data = MarchingCubesMeshBuilder.Generate(grid);
        mesh.vertices = data.Vertices;
        mesh.triangles = data.Triangles;
        mesh.RecalculateNormals();
        meshFilter.mesh = mesh;
    }

    // Intentionally empty. Consider deleting entirely: Unity still invokes
    // empty Update() messages every frame, which has a small per-frame cost.
    public void Update()
    {
    }
}
|
package com.oreodroid.codingchallenge.ui
import android.os.Bundle
import android.view.View
import android.widget.ProgressBar
import androidx.appcompat.app.AppCompatActivity
import androidx.fragment.app.Fragment
import androidx.fragment.app.FragmentManager
import androidx.fragment.app.FragmentTransaction
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProviders
import com.oreodroid.codingchallenge.R
import com.oreodroid.codingchallenge.viewmodels.JobViewModel
/**
 * Hosts either the job-list or the "no result" fragment depending on the
 * jobs emitted by [JobViewModel], and toggles a progress bar on each emission.
 */
class MainActivity : AppCompatActivity() {

    private lateinit var jobViewModel: JobViewModel
    private lateinit var currentFragment: Fragment
    private lateinit var progressBar: ProgressBar

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        // Get progress bar
        progressBar = findViewById(R.id.progressBar)

        // Initialize jobViewModel
        jobViewModel = ViewModelProviders.of(this).get(JobViewModel::class.java)

        // Observe the job list ViewModel
        jobViewModel.getJobList().observe(this, Observer { jobs ->
            if (jobs.isEmpty()) {
                // Show NoResult fragment
                supportFragmentManager.inTransaction {
                    currentFragment = NoResultFragment()
                    replace(R.id.fragmentContainer, currentFragment)
                }
            } else {
                // Show ListJob fragment
                supportFragmentManager.inTransaction {
                    currentFragment = ListJobFragment.newInstance(ArrayList(jobs))
                    replace(R.id.fragmentContainer, currentFragment)
                }
            }
            // Show/Hide progress
            toggleProgressBar()
        })
    }

    override fun onDestroy() {
        super.onDestroy()
        jobViewModel.cancelJobs()
    }

    /** Run [func] inside a fragment transaction and commit it. */
    private inline fun FragmentManager.inTransaction(func: FragmentTransaction.() -> Unit) {
        beginTransaction().apply(func).commit()
    }

    /**
     * Flip the progress bar between VISIBLE and INVISIBLE.
     * (Renamed from the original typo `togglePrgressBar`; the method is
     * private, so no external callers are affected.)
     */
    private fun toggleProgressBar() {
        progressBar.visibility =
            if (progressBar.visibility == View.VISIBLE) View.INVISIBLE else View.VISIBLE
    }
}
|
import FriendListItem from "../friends/FriendListItem";
import PropTypes from 'prop-types';
function FriendList({ items }) {
return (
<FriendListItem
avatar={items.avatar}
name={items.name}
status={items.isOnline} />
)}
FriendList.propTypes = {
items: PropTypes.arrayOf(PropTypes.shape({
name: PropTypes.string.isRequired,
}))
}
export default FriendList;
|
; Copyright 2016 David O'Meara
;
; Licensed under the Apache License, Version 2.0 (the "License");
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; http://www.apache.org/licenses/LICENSE-2.0
;
; Unless required by applicable law or agreed to in writing, software
; distributed under the License is distributed on an "AS IS" BASIS,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
(ns ui.data
(:require [ui.debug :as debug]))
(defn overlap?
  "True when the ranges (from1, to1) and (from2, to2) intersect:
  each range must start strictly before the other ends."
  [from1 to1 from2 to2]
  (and (< from2 to1)
       (< from1 to2)))
(defn overlapped-results
  "Ranges (the [from to] keys of `results`) that overlap any range in
  `new-results`. The original emitted nil for every non-overlapping pair
  (the `when` form), relying on `dissoc nil` being a no-op downstream;
  the :when clause filters those out so callers get only real ranges."
  [results new-results]
  (for [[[f t] _] results
        [[nf nt] _] new-results
        :when (overlap? f t nf nt)]
    [f t]))
(defn update-results
  "Merge `new-results` into the opened file's results, first removing any
  existing result whose range overlaps one of the new ranges."
  [state new-results]
  (let [opened (:opened-file state)
        results (:results opened)
        results-to-remove (overlapped-results results new-results)
        ;; results is keyed by [from to]; drop the overlapped keys.
        results-removed (reduce dissoc results results-to-remove)
        results-merged (merge results-removed new-results)]
    (assoc-in state [:opened-file :results] results-merged)))
(defn update-text
  "Store the new text for the opened file and flag it as dirty."
  [state text]
  (update state :opened-file assoc :text text :dirty? true))
(defn update-cursor-selection
  "Record the cursor selection on the opened file."
  [state cursor-selection]
  (update state :opened-file assoc :cursor-selection cursor-selection))
(defn shift-each
  "Rebase result ranges after an edit that replaced (from, to) with
  `inserted` characters: results overlapping the edit are dropped, results
  starting at or after `from` are shifted by the size delta, and results
  before the edit are untouched. Uses `keep` instead of the original
  map-nil-then-filter pipeline (same behavior, idiomatic form)."
  [results [from inserted to]]
  (let [delta (- to from)
        shift (- inserted delta)]
    (->> results
         (keep (fn [[[rf rt] r]]
                 (when-not (overlap? from to rf rt)
                   (if (<= from rf)
                     [[(+ rf shift) (+ rt shift)] r]
                     [[rf rt] r]))))
         (into {}))))
(defn shift-results
  "Apply shift-each to the opened file's results after an edit."
  [state arg]
  (update-in state [:opened-file :results] shift-each arg))
|
# A lexical token: its kind, source text, and position in the input.
class Token
  attr_accessor :type, :text, :position

  def initialize(type, text, position)
    @type, @text, @position = type, text, position
  end
end
|
package DDG::Spice::Maps::Maps;
# ABSTRACT: Map of current location

use strict;
use Text::Trim;
use DDG::Spice;
use Data::Dumper;

# Geocode whatever remains of the query via the Mapbox places API.
spice to => 'http://api.mapbox.com/v4/geocode/mapbox.places/$1.json?access_token={{ENV{DDG_SPICE_MAPBOX_KEY}}}';
spice is_cached => 0;
spice proxy_cache_valid => "418 1d";
spice wrap_jsonp_callback => 1;

# Trigger words accepted at the start or end of the query.
my @startend_triggers = ("map of", "map", "maps", "current location");
my $startend_joined = join "|", @startend_triggers;
my $start_qr = qr/^($startend_joined)/;
my $end_qr = qr/($startend_joined)$/;

# Queries mentioning these terms are about other products/meanings, not maps.
my $skip_words_qr = qr/google|yahoo|bing|mapquest|fallout|time zone|editor|world|star|search/i;

my @all_triggers = @startend_triggers;
push @all_triggers, "directions";

# allows us to handle e.g.
# - "directions to florida" (matches "florida")
# - "driving directions to 10 canal street new york" (matches "10 canal street new york")
# - "directions from leeds to skipton uk" (matches "skipton uk")
my $directions_qr = qr/^(\w+\s)?directions.*\bto\b/;

triggers any => @all_triggers;

# Returns the location string to geocode, or nothing to skip the query.
handle query_lc => sub {
    my $query_lc = $_;
    return if $query_lc =~ $skip_words_qr;

    # handle maps/locations queries
    if ($query_lc =~ $start_qr or $query_lc =~ $end_qr) {
        # replace trigger words
        $query_lc =~ s/$start_qr//g;
        $query_lc =~ s/$end_qr//g;
        $query_lc = trim ($query_lc);
        return $query_lc if $query_lc;
        # if there's no remainder, show the user's location
        my $location = $loc->loc_str;
        return $location if $location;
    }

    # directions queries
    if ($query_lc =~ $directions_qr) {
        $query_lc =~ s/$directions_qr//g;
        $query_lc = trim ($query_lc);
        # there's a lot of queries like "directions from one place to another"
        return if $query_lc eq "another";
        return $query_lc if $query_lc;
    }

    return;
};

1;
|
const Eleventy = require('@11ty/eleventy');
const config = require('../../config');
const eleventy = new Eleventy();
module.exports = async function templates() {
await eleventy.init();
if (config.get('watch')) {
await eleventy.watch();
} else {
await eleventy.write();
}
};
|
package com.arpadfodor.stolenvehicledetector.android.app.view
import android.content.Intent
import android.os.Bundle
import androidx.core.view.GravityCompat
import androidx.drawerlayout.widget.DrawerLayout
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProvider
import com.arpadfodor.stolenvehicledetector.android.app.R
import com.arpadfodor.stolenvehicledetector.android.app.view.utils.AppActivity
import com.arpadfodor.stolenvehicledetector.android.app.view.utils.overshootAppearingAnimation
import com.arpadfodor.stolenvehicledetector.android.app.viewmodel.AccountViewModel
import com.arpadfodor.stolenvehicledetector.android.app.viewmodel.utils.AppViewModel
import com.google.android.material.navigation.NavigationView
import kotlinx.android.synthetic.main.content_account.*
/**
 * Account screen: shows the manage/edit fragments for the signed-in user,
 * or redirects guest accounts to [LoginActivity].
 */
class AccountActivity : AppActivity() {

    override lateinit var viewModel: AccountViewModel

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_account)

        val drawer = findViewById<DrawerLayout>(R.id.accountActivityDrawerLayout)
        val navigation = findViewById<NavigationView>(R.id.account_navigation)
        viewModel = ViewModelProvider(this).get(AccountViewModel::class.java)

        initUi(drawer, navigation)

        // Guests cannot manage an account: send them to the login screen.
        if(viewModel.isCurrentAccountGuest()){
            startActivity(Intent(this, LoginActivity::class.java))
        }
        else{
            viewModel.fragmentTagToShow.postValue(AccountManageFragment.TAG)
        }
    }

    override fun onResume() {
        super.onResume()
        subscribeToViewModel()
        // Both fragments share this activity's view model instance.
        AccountManageFragment.setParams(viewModel)
        AccountEditFragment.setParams(viewModel)
    }

    override fun appearingAnimations() {
        accountLogo.overshootAppearingAnimation(this)
    }

    override fun subscribeToViewModel() {
        // Create the observer which updates the UI in case of value change
        val fragmentTagToShowObserver = Observer<String> { fragmentTag ->
            val fragment = when(fragmentTag){
                AccountManageFragment.TAG -> {
                    AccountManageFragment()
                }
                AccountEditFragment.TAG -> {
                    AccountEditFragment()
                }
                else -> null
            }
            // Swap in the requested fragment; unknown tags are a no-op.
            fragment?.let {
                supportFragmentManager.beginTransaction()
                    .setCustomAnimations(R.anim.nav_default_enter_anim, R.anim.nav_default_exit_anim)
                    .replace(R.id.account_fragment_container, fragment, fragmentTag)
                    .addToBackStack(null)
                    .commit()
            }
        }
        // Observe the LiveData, passing in this viewLifeCycleOwner as the LifecycleOwner and the observer
        viewModel.fragmentTagToShow.observe(this, fragmentTagToShowObserver)
    }

    override fun subscribeListeners() {}

    override fun unsubscribe() {}

    override fun onBackPressed() {
        // NOTE(review): this references `activityDrawerLayout` while onCreate
        // looks up `accountActivityDrawerLayout` — confirm both ids refer to
        // the same drawer (presumably a synthetic property from the layout).
        if(activityDrawerLayout.isDrawerOpen(GravityCompat.START)){
            activityDrawerLayout.closeDrawer(GravityCompat.START)
        }
        else{
            // Back from the edit screen returns to manage; otherwise finish.
            if(viewModel.fragmentTagToShow.value == AccountEditFragment.TAG) {
                viewModel.fragmentTagToShow.postValue(AccountManageFragment.TAG)
            }
            else{
                this.finish()
            }
        }
    }
}
|
#!/bin/bash
# Enumerate subdomains: for every name in <subdomain_file>, resolve
# <name>.<domain> (optionally against <dns_server>) and print the
# resolving hostname/address pairs, followed by a count.

# Print usage (optionally prefixed by an error message) and exit.
function usage() {
    echo "Usage: $0 <domain> <subdomain_file> [<dns_server>]"
    if [ -n "$1" ] ; then
        echo "Error: $1!"
    fi
    exit
}

if [ $# -lt 2 ] || [ $# -gt 3 ] ; then
    usage
fi

domain=$1
file=$2
dnsserver=""
n=0

if [ $# -eq 3 ] ; then
    dnsserver=$3
fi

# Quoted so paths containing spaces are handled correctly.
if ! [ -f "$file" ] ; then
    usage "subdomain file not found"
fi

for sub in $(cat "$file") ; do
    # 'has address' lines look like "<host> has address <ip>";
    # fields 1 and 4 are the hostname and the IP.
    # $dnsserver is deliberately unquoted: when empty it must expand to
    # no argument at all.
    tmp=$(host "$sub.$domain" $dnsserver | grep 'has address' | grep "$domain" | cut -d ' ' -f 1,4)
    if [ -n "$tmp" ] ; then
        echo $tmp
        n=$((n + 1))    # POSIX arithmetic; the original used deprecated $[...]
    fi
done

echo
echo "$n sub domains found."
exit
|
<?php
/**
* Created by PhpStorm.
* User: kpicaza
* Date: 23/08/16
* Time: 21:55
*/
namespace App\Action;
use App\Entity\User;
use Doctrine\ORM\EntityManagerInterface;
use Psr\Http\Message\ResponseInterface;
use Psr\Http\Message\ServerRequestInterface;
use Zend\Diactoros\Response\JsonResponse;
class PostUserAction
{
    /** @var EntityManagerInterface Doctrine entity manager used for persistence. */
    protected $em;

    public function __construct(EntityManagerInterface $em)
    {
        $this->em = $em;
    }

    /**
     * Handle a "create user" request built from the parsed request body.
     *
     * Responds 400 with an empty body when required fields are missing;
     * otherwise persists the user and returns its id, username and email.
     *
     * NOTE(review): persistence failures are rethrown as
     * \InvalidArgumentException with code 400 — confirm upstream middleware
     * converts that into an HTTP response rather than a generic 500.
     */
    public function __invoke(ServerRequestInterface $request, ResponseInterface $response, callable $next = null)
    {
        $data = $request->getParsedBody();

        if (!$this->validate($data)) {
            return new JsonResponse('', 400);
        }

        try {
            $user = new User($data['username'], $data['email']);
            $this->em->persist($user);
            $this->em->flush();
        }catch (\Exception $e) {
            throw new \InvalidArgumentException($e->getMessage(), 400);
        }

        return new JsonResponse([
            'id' => $user->getId(),
            'username' => $user->getUsername(),
            'email' => $user->getEmail()
        ]);
    }

    /**
     * Better use some validation library :-P
     *
     * Only checks key presence: 'username' and 'email' must exist in $data.
     *
     * @param array $data
     * @return bool
     */
    protected function validate(array $data)
    {
        if (
            !array_key_exists('username', $data) ||
            !array_key_exists('email', $data)
        ) {
            return false;
        }
        return true;
    }
}
|
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Mensaje } from 'src/mensaje/Entities/mensaje.entity';
import { Repository, DeleteResult } from 'typeorm';
import { CreateMensajeDto } from 'src/mensaje/dto/create-mensaje-dto';
@Injectable()
export class MensajeService {
constructor(@InjectRepository(Mensaje) private mensajeRepository: Repository<Mensaje>, ){
}
async getAll(): Promise<Mensaje[]>{
return await this.mensajeRepository.find();
}
async createMensaje(mensaje: CreateMensajeDto): Promise<Mensaje>{
const mensajeNuevo = new Mensaje();
mensajeNuevo.nick = mensaje.nick
mensajeNuevo.mensaje = mensaje.mensaje;
return await this.mensajeRepository.save(mensajeNuevo);
}
async updateMensaje(id: number, mensaje: CreateMensajeDto): Promise<Mensaje>{
const mensajeUpdate = await this.mensajeRepository.findOne(id);
mensajeUpdate.nick = mensaje.nick;
mensajeUpdate.mensaje = mensaje.mensaje;
return await this.mensajeRepository.save(mensajeUpdate);
}
async deleteMensaje(id: number) {
return await this.mensajeRepository.delete(id);
}
}
|
# Check if Parenthesis are balanced in an expression:
Given an expression of length n consisting of parentheses ('()', '{}', '[]'), this algorithm checks whether the parentheses are balanced — that is, whether the pairs and order of "{","}","(",")","[","]" are correct.
### Algorithm
- Initialize a stack
- Now traverse the expression
- If the current character is '(' or '{' or '[', then push it.
- If the current character is ')' or '}' or ']', pop from the stack and check that the popped character is the matching opening bracket; if it does not match, return false.
- After the complete traversal, if the stack is not empty then return false; otherwise the expression is balanced.
### Input Format
- The input asks for an expression.
### Output Format
"It's Balanced" : If the parenthesis are balanced.
"It's not Balanced" : If the parenthesis aren't balanced.
### Sample Input
```
()))({}
{()}[]
```
### Sample Output
```
It's not Balanced
It's Balanced
```
### Time Complexity
The time complexity is O(n) where n is the length of given expression and Auxiliary Space is O(n) for stack.
### Implemented in:
- [C](inversion.c)
### Contributed by:
- [Rahul Indra](https://github.com/indrarahul2013)
|
#!/bin/bash
# Point system DNS resolution at the local resolver (127.0.0.1),
# overwriting any existing resolv.conf contents. Requires root.
echo "nameserver 127.0.0.1" > /etc/resolv.conf
#
|
use std::cell::UnsafeCell;
use std::marker::PhantomData;
use std::rc::Rc;
use futures::{Async, Future, Poll};
use error::Error;
use handler::{
AsyncHandler, AsyncResult, AsyncResultItem, FromRequest, Handler, Responder,
RouteHandler, WrapHandler,
};
use http::StatusCode;
use httprequest::HttpRequest;
use httpresponse::HttpResponse;
use middleware::{
Finished as MiddlewareFinished, Middleware, Response as MiddlewareResponse,
Started as MiddlewareStarted,
};
use pred::Predicate;
use with::{ExtractorConfig, With, WithAsync};
/// Resource route definition
///
/// Route uses builder-like pattern for configuration.
/// If handler is not explicitly set, default *404 Not Found* handler is used.
pub struct Route<S> {
    /// Every predicate must accept the request for the route to match.
    preds: Vec<Box<Predicate<S>>>,
    /// Handler invoked once the route matches.
    handler: InnerHandler<S>,
}

impl<S: 'static> Default for Route<S> {
    fn default() -> Route<S> {
        Route {
            preds: Vec::new(),
            // Until a handler is registered, the route answers 404.
            handler: InnerHandler::new(|_| HttpResponse::new(StatusCode::NOT_FOUND)),
        }
    }
}
impl<S: 'static> Route<S> {
    /// True only if every registered predicate accepts the request.
    #[inline]
    pub(crate) fn check(&self, req: &mut HttpRequest<S>) -> bool {
        for pred in &self.preds {
            if !pred.check(req) {
                return false;
            }
        }
        true
    }

    /// Invoke the handler directly, bypassing resource-level middlewares.
    #[inline]
    pub(crate) fn handle(&mut self, req: HttpRequest<S>) -> AsyncResult<HttpResponse> {
        self.handler.handle(req)
    }

    /// Invoke the handler wrapped in the given resource-level middleware chain.
    #[inline]
    pub(crate) fn compose(
        &mut self, req: HttpRequest<S>, mws: Rc<Vec<Box<Middleware<S>>>>,
    ) -> AsyncResult<HttpResponse> {
        AsyncResult::async(Box::new(Compose::new(req, mws, self.handler.clone())))
    }

    /// Add match predicate to route.
    ///
    /// ```rust
    /// # extern crate actix_web;
    /// # use actix_web::*;
    /// # fn main() {
    /// App::new().resource("/path", |r| {
    ///     r.route()
    ///         .filter(pred::Get())
    ///         .filter(pred::Header("content-type", "text/plain"))
    ///         .f(|req| HttpResponse::Ok())
    /// })
    /// #     .finish();
    /// # }
    /// ```
    pub fn filter<T: Predicate<S> + 'static>(&mut self, p: T) -> &mut Self {
        self.preds.push(Box::new(p));
        self
    }

    /// Set handler object. Usually call to this method is last call
    /// during route configuration, so it does not return reference to self.
    pub fn h<H: Handler<S>>(&mut self, handler: H) {
        self.handler = InnerHandler::new(handler);
    }

    /// Set handler function. Usually call to this method is last call
    /// during route configuration, so it does not return reference to self.
    pub fn f<F, R>(&mut self, handler: F)
    where
        F: Fn(HttpRequest<S>) -> R + 'static,
        R: Responder + 'static,
    {
        self.handler = InnerHandler::new(handler);
    }

    /// Set async handler function.
    pub fn a<H, R, F, E>(&mut self, handler: H)
    where
        H: Fn(HttpRequest<S>) -> F + 'static,
        F: Future<Item = R, Error = E> + 'static,
        R: Responder + 'static,
        E: Into<Error> + 'static,
    {
        self.handler = InnerHandler::async(handler);
    }

    /// Set handler function, use request extractor for parameters.
    ///
    /// ```rust
    /// # extern crate bytes;
    /// # extern crate actix_web;
    /// # extern crate futures;
    /// #[macro_use] extern crate serde_derive;
    /// use actix_web::{http, App, Path, Result};
    ///
    /// #[derive(Deserialize)]
    /// struct Info {
    ///     username: String,
    /// }
    ///
    /// /// extract path info using serde
    /// fn index(info: Path<Info>) -> Result<String> {
    ///     Ok(format!("Welcome {}!", info.username))
    /// }
    ///
    /// fn main() {
    ///     let app = App::new().resource(
    ///         "/{username}/index.html", // <- define path parameters
    ///         |r| r.method(http::Method::GET).with(index),
    ///     ); // <- use `with` extractor
    /// }
    /// ```
    ///
    /// It is possible to use tuples for specifing multiple extractors for one
    /// handler function.
    ///
    /// ```rust
    /// # extern crate bytes;
    /// # extern crate actix_web;
    /// # extern crate futures;
    /// #[macro_use] extern crate serde_derive;
    /// # use std::collections::HashMap;
    /// use actix_web::{http, App, Json, Path, Query, Result};
    ///
    /// #[derive(Deserialize)]
    /// struct Info {
    ///     username: String,
    /// }
    ///
    /// /// extract path info using serde
    /// fn index(
    ///     info: (Path<Info>, Query<HashMap<String, String>>, Json<Info>),
    /// ) -> Result<String> {
    ///     Ok(format!("Welcome {}!", info.0.username))
    /// }
    ///
    /// fn main() {
    ///     let app = App::new().resource(
    ///         "/{username}/index.html", // <- define path parameters
    ///         |r| r.method(http::Method::GET).with(index),
    ///     ); // <- use `with` extractor
    /// }
    /// ```
    pub fn with<T, F, R>(&mut self, handler: F) -> ExtractorConfig<S, T>
    where
        F: Fn(T) -> R + 'static,
        R: Responder + 'static,
        T: FromRequest<S> + 'static,
    {
        // The returned config is shared with the `With` wrapper, so the
        // extractor can still be configured after registration.
        let cfg = ExtractorConfig::default();
        self.h(With::new(handler, Clone::clone(&cfg)));
        cfg
    }

    /// Set async handler function, use request extractor for parameters.
    /// Also this method needs to be used if your handler function returns
    /// `impl Future<>`
    ///
    /// ```rust
    /// # extern crate bytes;
    /// # extern crate actix_web;
    /// # extern crate futures;
    /// #[macro_use] extern crate serde_derive;
    /// use actix_web::{http, App, Error, Path};
    /// use futures::Future;
    ///
    /// #[derive(Deserialize)]
    /// struct Info {
    ///     username: String,
    /// }
    ///
    /// /// extract path info using serde
    /// fn index(info: Path<Info>) -> Box<Future<Item = &'static str, Error = Error>> {
    ///     unimplemented!()
    /// }
    ///
    /// fn main() {
    ///     let app = App::new().resource(
    ///         "/{username}/index.html", // <- define path parameters
    ///         |r| r.method(http::Method::GET).with_async(index),
    ///     ); // <- use `with` extractor
    /// }
    /// ```
    pub fn with_async<T, F, R, I, E>(&mut self, handler: F) -> ExtractorConfig<S, T>
    where
        F: Fn(T) -> R + 'static,
        R: Future<Item = I, Error = E> + 'static,
        I: Responder + 'static,
        E: Into<Error> + 'static,
        T: FromRequest<S> + 'static,
    {
        let cfg = ExtractorConfig::default();
        self.h(WithAsync::new(handler, Clone::clone(&cfg)));
        cfg
    }
}
/// `RouteHandler` wrapper. This struct is required because it needs to be
/// shared for resource level middlewares (hence the `Rc`); the `UnsafeCell`
/// provides the interior mutability `handle` needs.
struct InnerHandler<S>(Rc<UnsafeCell<Box<RouteHandler<S>>>>);

impl<S: 'static> InnerHandler<S> {
    /// Wrap a synchronous handler.
    #[inline]
    fn new<H: Handler<S>>(h: H) -> Self {
        InnerHandler(Rc::new(UnsafeCell::new(Box::new(WrapHandler::new(h)))))
    }

    /// Wrap an async (future-returning) handler.
    #[inline]
    fn async<H, R, F, E>(h: H) -> Self
    where
        H: Fn(HttpRequest<S>) -> F + 'static,
        F: Future<Item = R, Error = E> + 'static,
        R: Responder + 'static,
        E: Into<Error> + 'static,
    {
        InnerHandler(Rc::new(UnsafeCell::new(Box::new(AsyncHandler::new(h)))))
    }

    #[inline]
    pub fn handle(&self, req: HttpRequest<S>) -> AsyncResult<HttpResponse> {
        // SAFETY (original author's stated reason): handler is unique per
        // thread and only called from async code, so no aliasing &mut exists.
        let h = unsafe { &mut *self.0.as_ref().get() };
        h.handle(req)
    }
}

impl<S> Clone for InnerHandler<S> {
    #[inline]
    fn clone(&self) -> Self {
        // Cheap clone: shares the same boxed handler via the Rc.
        InnerHandler(Rc::clone(&self.0))
    }
}
/// Compose resource level middlewares with route handler.
struct Compose<S: 'static> {
    info: ComposeInfo<S>,
    state: ComposeState<S>,
}

/// Data shared by every state of the compose state machine.
struct ComposeInfo<S: 'static> {
    /// Index of the middleware currently being started (and, later,
    /// how many middlewares still need their `finish()` called).
    count: usize,
    req: HttpRequest<S>,
    mws: Rc<Vec<Box<Middleware<S>>>>,
    handler: InnerHandler<S>,
}

/// Phases of middleware/handler execution, in the order they normally run.
enum ComposeState<S: 'static> {
    Starting(StartMiddlewares<S>),
    Handler(WaitingResponse<S>),
    RunMiddlewares(RunMiddlewares<S>),
    Finishing(FinishingMiddlewares<S>),
    Completed(Response<S>),
}
impl<S: 'static> ComposeState<S> {
    /// Poll the current phase. `Some(next)` requests a state transition;
    /// `None` means "not ready" (or already completed).
    fn poll(&mut self, info: &mut ComposeInfo<S>) -> Option<ComposeState<S>> {
        match *self {
            ComposeState::Starting(ref mut state) => state.poll(info),
            ComposeState::Handler(ref mut state) => state.poll(info),
            ComposeState::RunMiddlewares(ref mut state) => state.poll(info),
            ComposeState::Finishing(ref mut state) => state.poll(info),
            ComposeState::Completed(_) => None,
        }
    }
}
impl<S: 'static> Compose<S> {
    fn new(
        req: HttpRequest<S>, mws: Rc<Vec<Box<Middleware<S>>>>, handler: InnerHandler<S>,
    ) -> Self {
        let mut info = ComposeInfo {
            count: 0,
            req,
            mws,
            handler,
        };
        // Starting the middlewares may complete synchronously, so the
        // initial state can already be any of the later phases.
        let state = StartMiddlewares::init(&mut info);
        Compose { state, info }
    }
}
impl<S> Future for Compose<S> {
    type Item = HttpResponse;
    type Error = Error;

    fn poll(&mut self) -> Poll<Self::Item, Self::Error> {
        loop {
            // Terminal state: hand out the finished response exactly once.
            if let ComposeState::Completed(ref mut resp) = self.state {
                let resp = resp.resp.take().unwrap();
                return Ok(Async::Ready(resp));
            }
            // Advance the state machine until a phase reports NotReady.
            if let Some(state) = self.state.poll(&mut self.info) {
                self.state = state;
            } else {
                return Ok(Async::NotReady);
            }
        }
    }
}
/// Middlewares start executor
struct StartMiddlewares<S> {
    /// In-flight `start()` future of the middleware at `info.count`.
    fut: Option<Fut>,
    _s: PhantomData<S>,
}

type Fut = Box<Future<Item = Option<HttpResponse>, Error = Error>>;

impl<S: 'static> StartMiddlewares<S> {
    /// Run `start()` on each middleware in order. Completes synchronously
    /// unless a middleware returns a future; a middleware may short-circuit
    /// the handler with an early response.
    fn init(info: &mut ComposeInfo<S>) -> ComposeState<S> {
        let len = info.mws.len();
        loop {
            if info.count == len {
                // All middlewares started: invoke the route handler.
                let reply = info.handler.handle(info.req.clone());
                return WaitingResponse::init(info, reply);
            } else {
                match info.mws[info.count].start(&mut info.req) {
                    Ok(MiddlewareStarted::Done) => info.count += 1,
                    Ok(MiddlewareStarted::Response(resp)) => {
                        return RunMiddlewares::init(info, resp)
                    }
                    Ok(MiddlewareStarted::Future(fut)) => {
                        return ComposeState::Starting(StartMiddlewares {
                            fut: Some(fut),
                            _s: PhantomData,
                        })
                    }
                    Err(err) => return FinishingMiddlewares::init(info, err.into()),
                }
            }
        }
    }

    fn poll(&mut self, info: &mut ComposeInfo<S>) -> Option<ComposeState<S>> {
        let len = info.mws.len();
        'outer: loop {
            match self.fut.as_mut().unwrap().poll() {
                Ok(Async::NotReady) => return None,
                Ok(Async::Ready(resp)) => {
                    info.count += 1;
                    // A middleware future may itself yield an early response.
                    if let Some(resp) = resp {
                        return Some(RunMiddlewares::init(info, resp));
                    }
                    // Continue starting the remaining middlewares synchronously.
                    loop {
                        if info.count == len {
                            let reply = info.handler.handle(info.req.clone());
                            return Some(WaitingResponse::init(info, reply));
                        } else {
                            match info.mws[info.count].start(&mut info.req) {
                                Ok(MiddlewareStarted::Done) => info.count += 1,
                                Ok(MiddlewareStarted::Response(resp)) => {
                                    return Some(RunMiddlewares::init(info, resp));
                                }
                                Ok(MiddlewareStarted::Future(fut)) => {
                                    self.fut = Some(fut);
                                    continue 'outer;
                                }
                                Err(err) => {
                                    return Some(FinishingMiddlewares::init(
                                        info,
                                        err.into(),
                                    ))
                                }
                            }
                        }
                    }
                }
                Err(err) => return Some(FinishingMiddlewares::init(info, err.into())),
            }
        }
    }
}
// Waiting for the route handler's response future to resolve.
struct WaitingResponse<S> {
    fut: Box<Future<Item = HttpResponse, Error = Error>>,
    _s: PhantomData<S>,
}

impl<S: 'static> WaitingResponse<S> {
    /// Inspect the handler's reply: immediate results (ok or error) go
    /// straight to the response middlewares; futures park in this state.
    #[inline]
    fn init(
        info: &mut ComposeInfo<S>, reply: AsyncResult<HttpResponse>,
    ) -> ComposeState<S> {
        match reply.into() {
            AsyncResultItem::Err(err) => RunMiddlewares::init(info, err.into()),
            AsyncResultItem::Ok(resp) => RunMiddlewares::init(info, resp),
            AsyncResultItem::Future(fut) => ComposeState::Handler(WaitingResponse {
                fut,
                _s: PhantomData,
            }),
        }
    }

    fn poll(&mut self, info: &mut ComposeInfo<S>) -> Option<ComposeState<S>> {
        match self.fut.poll() {
            Ok(Async::NotReady) => None,
            Ok(Async::Ready(response)) => Some(RunMiddlewares::init(info, response)),
            // Handler errors become error responses and still pass through
            // the response middlewares.
            Err(err) => Some(RunMiddlewares::init(info, err.into())),
        }
    }
}
/// Middlewares response executor
struct RunMiddlewares<S> {
    /// Index of the middleware whose `response()` hook runs next.
    curr: usize,
    fut: Option<Box<Future<Item = HttpResponse, Error = Error>>>,
    _s: PhantomData<S>,
}

impl<S: 'static> RunMiddlewares<S> {
    /// Thread the response through each middleware's `response()` hook in
    /// order, parking in this state whenever a hook returns a future.
    fn init(info: &mut ComposeInfo<S>, mut resp: HttpResponse) -> ComposeState<S> {
        let mut curr = 0;
        let len = info.mws.len();
        loop {
            resp = match info.mws[curr].response(&mut info.req, resp) {
                Err(err) => {
                    // Record how many middlewares still need finishing.
                    info.count = curr + 1;
                    return FinishingMiddlewares::init(info, err.into());
                }
                Ok(MiddlewareResponse::Done(r)) => {
                    curr += 1;
                    if curr == len {
                        return FinishingMiddlewares::init(info, r);
                    } else {
                        r
                    }
                }
                Ok(MiddlewareResponse::Future(fut)) => {
                    return ComposeState::RunMiddlewares(RunMiddlewares {
                        curr,
                        fut: Some(fut),
                        _s: PhantomData,
                    })
                }
            };
        }
    }

    fn poll(&mut self, info: &mut ComposeInfo<S>) -> Option<ComposeState<S>> {
        let len = info.mws.len();
        loop {
            // poll latest fut
            let mut resp = match self.fut.as_mut().unwrap().poll() {
                Ok(Async::NotReady) => return None,
                Ok(Async::Ready(resp)) => {
                    self.curr += 1;
                    resp
                }
                Err(err) => return Some(FinishingMiddlewares::init(info, err.into())),
            };
            // Continue through the remaining middlewares synchronously.
            loop {
                if self.curr == len {
                    return Some(FinishingMiddlewares::init(info, resp));
                } else {
                    match info.mws[self.curr].response(&mut info.req, resp) {
                        Err(err) => {
                            return Some(FinishingMiddlewares::init(info, err.into()))
                        }
                        Ok(MiddlewareResponse::Done(r)) => {
                            self.curr += 1;
                            resp = r
                        }
                        Ok(MiddlewareResponse::Future(fut)) => {
                            self.fut = Some(fut);
                            break;
                        }
                    }
                }
            }
        }
    }
}
/// Middlewares finish executor: runs the `finish()` hooks of the started
/// middlewares in reverse order. (The original comment said "start
/// executor" — a copy-paste slip.)
struct FinishingMiddlewares<S> {
    resp: Option<HttpResponse>,
    fut: Option<Box<Future<Item = (), Error = Error>>>,
    _s: PhantomData<S>,
}

impl<S: 'static> FinishingMiddlewares<S> {
    fn init(info: &mut ComposeInfo<S>, resp: HttpResponse) -> ComposeState<S> {
        if info.count == 0 {
            // No middleware was started: nothing to finish.
            Response::init(resp)
        } else {
            let mut state = FinishingMiddlewares {
                resp: Some(resp),
                fut: None,
                _s: PhantomData,
            };
            if let Some(st) = state.poll(info) {
                st
            } else {
                ComposeState::Finishing(state)
            }
        }
    }

    fn poll(&mut self, info: &mut ComposeInfo<S>) -> Option<ComposeState<S>> {
        loop {
            // poll latest fut
            let not_ready = if let Some(ref mut fut) = self.fut {
                match fut.poll() {
                    Ok(Async::NotReady) => true,
                    Ok(Async::Ready(())) => false,
                    Err(err) => {
                        // Finish errors are logged but never fail the response.
                        error!("Middleware finish error: {}", err);
                        false
                    }
                }
            } else {
                false
            };
            if not_ready {
                return None;
            }
            self.fut = None;
            if info.count == 0 {
                return Some(Response::init(self.resp.take().unwrap()));
            }
            // Walk backwards through the started middlewares.
            info.count -= 1;
            match info.mws[info.count as usize]
                .finish(&mut info.req, self.resp.as_ref().unwrap())
            {
                MiddlewareFinished::Done => {
                    if info.count == 0 {
                        return Some(Response::init(self.resp.take().unwrap()));
                    }
                }
                MiddlewareFinished::Future(fut) => {
                    self.fut = Some(fut);
                }
            }
        }
    }
}
/// Terminal state: holds the finished response until the compose future
/// hands it out.
struct Response<S> {
    resp: Option<HttpResponse>,
    _s: PhantomData<S>,
}

impl<S: 'static> Response<S> {
    /// Wrap a finished response in the terminal compose state.
    fn init(resp: HttpResponse) -> ComposeState<S> {
        let resp = Some(resp);
        ComposeState::Completed(Response { resp, _s: PhantomData })
    }
}
|
package org.nakedobjects.examples.orders.fixture;
import java.util.ArrayList;
import java.util.List;
import org.nakedobjects.applib.fixtures.CompositeFixture;
/**
 * Aggregates the demo fixtures so the whole data set installs in one go.
 * NOTE(review): the order below is preserved from the original —
 * presumably later fixtures depend on earlier ones; confirm before reordering.
 */
public class AllFixtures implements CompositeFixture {
    public List<Object> getFixtures() {
        final List<Object> fixtures = new ArrayList<Object>();
        fixtures.add(new JoeBloggsFixture());
        fixtures.add(new ProductsFixture());
        fixtures.add(new CustomersFixture());
        fixtures.add(new CustomerOrdersFixture());
        return fixtures;
    }
}
|
{-# LANGUAGE TemplateHaskell #-}
module Core.Move
(Location(..), X(..), Y(..), Direction(..), run, Shipper(..), Id(..))
where
import Control.Lens
-- Identifier wrapper for shippers.
newtype Id = Id {_id :: String}

-- x-axis coordinate (East increases it, see 'forward').
newtype X = X {_x :: Int} deriving (Show, Eq)
makeLenses ''X

-- y-axis coordinate (North increases it, see 'forward').
newtype Y = Y {_y :: Int} deriving (Show, Eq)
makeLenses ''Y

-- Compass heading.
data Direction = North | South | West | East deriving (Show, Eq)

-- A grid position plus the direction currently faced.
data Location = Location { _absc :: X, _orde :: Y, _dir :: Direction }
makeLenses ''Location
instance Eq Location where
  -- BUGFIX: the original also defined (/=) as the conjunction (&&) of the
  -- component inequalities, so two locations differing in only one
  -- component were neither (==) nor (/=), violating the Eq laws.
  -- Omitting (/=) uses the default definition, i.e. the negation of (==).
  (==) l1 l2 = _absc l1 == _absc l2 && _orde l1 == _orde l2 && _dir l1 == _dir l2
-- Rendered as "(x, y) Direction", e.g. "(0, 1) North".
instance Show Location where
  show (Location (X x) (Y y) direction) = "("++ show x ++ ", " ++ show y ++") " ++ show direction

-- A shipper and its associated locations.
data Shipper = Shipper { id :: Id, locations :: [Location] }
-- | Rotate the heading 90 degrees clockwise; position is unchanged.
turnRight :: Location -> Location
turnRight l = l & dir .~ clockwise (_dir l)
  where
    clockwise North = East
    clockwise East  = South
    clockwise South = West
    clockwise West  = North
-- | Rotate the heading 90 degrees counter-clockwise; position is unchanged.
turnLeft :: Location -> Location
turnLeft l = l & dir .~ counterClockwise (_dir l)
  where
    counterClockwise North = West
    counterClockwise West  = South
    counterClockwise South = East
    counterClockwise East  = North
-- | Step one unit in the current heading: North/South move along y,
-- East/West along x.
forward :: Location -> Location
forward l = case _dir l of
  North -> l & orde . y +~ 1
  South -> l & orde . y -~ 1
  East  -> l & absc . x +~ 1
  West  -> l & absc . x -~ 1
-- | Interpret one command character: 'A' advances, 'D' turns right,
-- 'I' turns left; any other character leaves the location unchanged.
move :: Char -> Location -> Location
move c l = case c of
  'A' -> forward l
  'D' -> turnRight l
  'I' -> turnLeft l
  _   -> l
-- | Apply a sequence of command characters to a starting location,
-- left to right.
run :: [Char] -> Location -> Location
run commands start = foldl (flip move) start commands
|
import { Meteor } from 'meteor/meteor';
import { Tiles } from '../lib/collections';
// Publish collections for consuming client-side.
// BUGFIX: the publication must be a regular `function`, not an arrow —
// Meteor binds the publication context to `this`, and the arrow version
// made `this.ready()` reference the module scope instead.
Meteor.publish( 'tiles', function () {
  const selector = {};
  const sort = { sort: { relTimestamp: 1 } };
  const entries = Tiles.find( selector, sort );
  if ( entries ) {
    return entries;
  }
  return this.ready();
} );
|
require 'rails_helper'
# Validation specs for User. Relies on a :user factory whose defaults are
# valid for every attribute.
RSpec.describe User, type: :model do
  it "nickname,email,avatar,password,password_confirmation,avatarが存在すれば登録出来ること" do
    user = build(:user)
    expect(user).to be_valid
  end
  # --- nickname: presence, length boundary (max 15), uniqueness ---
  it "nicknameがない場合は登録出来ないこと" do
    user = build(:user,nickname: nil)
    user.valid?
    expect(user.errors[:nickname]).to include("can't be blank")
  end
  it "nicknameが16文字以上の場合は登録出来ないこと" do
    user = build(:user,nickname: "aaaaaaaaaaaaaaaa")
    user.valid?
    expect(user.errors[:nickname]).to include("is too long (maximum is 15 characters)")
  end
  it "nicknameが15文字以下の場合は登録出来ること" do
    user = build(:user,nickname: "aaaaaaaaaaaaaaa")
    user.valid?
    expect(user).to be_valid
  end
  it "重複したnicknameなら無効であること" do
    create(:user, nickname: "nori")
    another_user = build(:user, nickname: "nori")
    another_user.valid?
    expect(another_user.errors[:nickname]).to include("has already been taken")
  end
  # --- email: presence and uniqueness ---
  it "emailがない場合は登録出来ないこと" do
    user = build(:user,email: nil)
    user.valid?
    expect(user.errors[:email]).to include("can't be blank")
  end
  it "重複したemailなら無効であること" do
    create(:user, email: "nori@gmail.com")
    another_user = build(:user, email: "nori@gmail.com")
    another_user.valid?
    expect(another_user.errors[:email]).to include("has already been taken")
  end
  # --- password: presence, confirmation, length boundary (min 6) ---
  it "passwordがない場合は登録出来ないこと" do
    user = build(:user,password: nil)
    user.valid?
    expect(user.errors[:password]).to include("can't be blank")
  end
  it "passwordが存在してもpassword_confirmationがない場合は登録出来ないこと" do
    user = build(:user,password_confirmation: "")
    user.valid?
    expect(user.errors[:password_confirmation]).to include("doesn't match Password")
  end
  it "passwordが6文字以上であれば登録出来ること" do
    user = build(:user,password: "aaaaaa",password_confirmation: "aaaaaa")
    user.valid?
    expect(user).to be_valid
  end
  it "passwordが5文字以下であれば登録出来ないこと" do
    user = build(:user,password: "aaaaa",password_confirmation: "aaaaa")
    user.valid?
    expect(user.errors[:password]).to include("is too short (minimum is 6 characters)")
  end
  # --- avatar: presence ---
  it "avatarがない場合は登録出来ないこと" do
    user = build(:user,avatar: nil)
    user.valid?
    expect(user.errors[:avatar]).to include("can't be blank")
  end
end
|
<?php
/*
 * User registration endpoint.
 *
 * SECURITY NOTE(review): credentials — including the password — arrive via
 * $_GET, so they end up in URLs, browser history and server logs, and the
 * values are forwarded unvalidated to the helpers. Confirm that new_user()
 * escapes/parameterizes its inputs before exposing this publicly, and
 * consider switching to POST.
 */
require 'config.php';
require 'utils.php';
$conn = connect_db($dbhost, $dbuser, $dbpass, $dbname);
// Reject duplicates first (username is checked together with type).
if (exist_user($conn, $_GET["username"], $_GET["type"]))
    die(encode_result(1, "Registration failed: username already exists."));
if (new_user($conn, $_GET["username"], $_GET["password"], $_GET["type"], $_GET["gender"], $_GET["realname"]))
    echo encode_result(0, "You have registered succussfully!");
else
    echo encode_result(1, "Registration failed: invalid syntax.");
close_db($conn);
?>
|
import os
from typing import Tuple
import numpy as np
from numpy.core.fromnumeric import var
MIN_REGION = -50
MAX_REGION = 50
REGION_SIZE = MAX_REGION - MIN_REGION + 1
COORD_OFFSET = -1 * MIN_REGION
cur_dir = os.path.dirname(os.path.abspath(__file__))
reactor = np.zeros((REGION_SIZE, REGION_SIZE, REGION_SIZE), dtype=int)
def parse_extents(ext: str) -> Tuple[int, int]:
    """Parse one axis extent such as ``"x=10..12"`` into ``(10, 12)``.

    The text before ``=`` (the axis name) is ignored; both bounds may be
    negative.
    """
    span = ext.split("=")[1]
    return tuple(int(bound) for bound in span.split(".."))
def parse_region(line: str) -> Tuple[int, int, int, int, int, int]:
    """Parse ``"x=a..b,y=c..d,z=e..f"`` into ``(a, b, c, d, e, f)``."""
    parts = line.split(",")
    # Concatenating the three (min, max) pairs yields the same flat 6-tuple
    # as unpacking each pair into named variables.
    return parse_extents(parts[0]) + parse_extents(parts[1]) + parse_extents(parts[2])
def parse_command(line: str) -> Tuple[str, int, int, int, int, int, int]:
    """Parse an input line like ``"on x=1..2,y=3..4,z=5..6"``.

    Returns the command word followed by the six region bounds.
    """
    tokens = line.strip().split()
    region = parse_region(tokens[1])
    return (tokens[0],) + tuple(region)
def shift_and_normalize_extents(var_min, var_max) -> Tuple[int, int]:
    """Translate one extent into array coordinates and clamp it to the region.

    Both bounds are shifted by COORD_OFFSET so MIN_REGION maps to index 0.
    Clamping is deliberately asymmetric: a range lying entirely outside the
    region produces min > max, which later yields an empty numpy slice.
    """
    low = MIN_REGION + COORD_OFFSET    # lowest valid array index (0)
    high = MAX_REGION + COORD_OFFSET   # highest valid array index
    var_min += COORD_OFFSET
    var_max += COORD_OFFSET
    # Clamp the lower bound into [low, high + 1].
    if var_min < low:
        var_min = low
    elif var_min > high:
        var_min = high + 1
    # Clamp the upper bound into [low - 1, high].
    if var_max < low:
        var_max = low - 1
    elif var_max > high:
        var_max = high
    return var_min, var_max
# Apply every on/off cuboid instruction to the reactor grid, restricted to
# the -50..50 region, then report how many cells are lit.
with open(f"{cur_dir}/input") as f:
    for line in f:
        command, xmin, xmax, ymin, ymax, zmin, zmax = parse_command(line)
        xmin, xmax = shift_and_normalize_extents(xmin, xmax)
        ymin, ymax = shift_and_normalize_extents(ymin, ymax)
        zmin, zmax = shift_and_normalize_extents(zmin, zmax)
        print(f"{xmin}, {xmax}, {ymin}, {ymax}, {zmin}, {zmax}")
        # Out-of-region cuboids were clamped to empty extents, so these
        # slice assignments are safe no-ops in that case.
        if command == "on":
            reactor[xmin : xmax + 1, ymin : ymax + 1, zmin : zmax + 1] = 1
        elif command == "off":
            reactor[xmin : xmax + 1, ymin : ymax + 1, zmin : zmax + 1] = 0
        else:
            raise Exception("Unknown command")
# Bug fix: the final sum was printed twice; report it once.
print(np.sum(reactor))
|
package u_time
import (
"fmt"
"math"
"time"
)
// DurationMillis returns the absolute difference between two millisecond
// timestamps as a time.Duration; argument order does not matter.
//
// NOTE(review): the delta round-trips through float64 for math.Abs, which is
// exact only while |to-from| < 2^53 ms (~285,000 years) — fine in practice,
// but an integer abs would avoid the conversion entirely.
func DurationMillis(from int64, to int64) time.Duration {
	delta := int64(math.Abs(float64(to - from)))
	return time.Duration(delta * int64(time.Millisecond))
}
// DurationToNow returns the distance between the given millisecond timestamp
// and CurrentMillis(); always non-negative because DurationMillis takes the
// absolute difference.
func DurationToNow(millis int64) time.Duration {
	return DurationMillis(millis, CurrentMillis())
}
// DurationFromNow returns the distance between CurrentMillis() and the given
// millisecond timestamp; like DurationToNow it is always non-negative.
func DurationFromNow(millis int64) time.Duration {
	return DurationMillis(CurrentMillis(), millis)
}
func FormatDuration(d time.Duration, layout string) string {
d = d.Round(time.Second)
h := d / time.Hour
d -= h * time.Hour
m := d / time.Minute
d -= m * time.Minute
s := d / time.Second
return fmt.Sprintf(layout, h, m, s)
}
|
//
// CMLChameleonCacheItem.h
//
// Created by Chameleon-Team on 2018/6/6.
//
#import "JSONModel.h"
/// Classification of how a Chameleon cache entry was produced
/// (names suggest prefetch-time vs. runtime population — confirm in callers).
typedef NS_ENUM(NSUInteger, CMLChameleonCacheKind) {
    CMLChameleonCacheUnknown = 0,
    CMLChameleonCachePrefetch,
    CMLChameleonCacheRuntime,
};
/// Metadata record for a single cached item, (de)serialisable via JSONModel.
/// NOTE(review): the file is named CMLChameleonCacheItem.h but declares
/// CMLCacheItem — confirm the class name is intentional.
@interface CMLCacheItem : JSONModel
/// Identifier of the cache entry (presumably the cache key — confirm).
@property (nonatomic, strong) NSString *identifier;
/// Cache kind as a boxed number; presumably maps to CMLChameleonCacheKind — confirm.
@property (nonatomic, strong) NSNumber *cacheType;
/// Size of the cached file (boxed; unit presumably bytes — confirm).
@property (nonatomic, strong) NSNumber *fileSize;
/// Path of the backing file on disk.
- (NSString *)filePath;
/// Deletes the backing file; returns whether the deletion succeeded.
- (BOOL)deleteItemFile;
@end
|
/** *******************************************************
* ロードデータ一覧.
******************************************************* */
#include "LoadDataList.h"
/**
 * Constructor.
 *
 * Starts in the error state with no data loaded. Two fixes over the old
 * body: `_dataName = {0}` assigned a one-character string containing NUL
 * (almost certainly unintended) and `_header` was never initialised even
 * though finalize() tests it against nullptr.
 */
LoadDataList::LoadDataList() {
    _dataName = "";
    _header = nullptr;
    _error = true;
}
/**
 * Destructor.
 *
 * NOTE(review): intentionally empty — it does not call finalize(), so the
 * caller appears responsible for teardown; confirm against usage.
 */
LoadDataList::~LoadDataList() {
}
/**
 * Initialisation hook (currently a no-op).
 */
void LoadDataList::initialize() {
}
/**
 * Teardown: releases the header columns and every data row.
 *
 * NOTE(review): _header is cleared and nulled but never deleted — this
 * assumes the pointer is owned elsewhere (it arrives via setHeader());
 * confirm ownership with the callers, otherwise this leaks.
 */
void LoadDataList::finalize() {
    // Discard the header row's column storage and drop our reference.
    if(_header != nullptr) {
        _header->column.clear();
        _header->column.shrink_to_fit();
        _header = nullptr;
    }
    // Finalize each data row, then release the row container's capacity.
    size_t num = rowData.size();
    for(size_t i = 0; i < num; i++) {
        rowData[i].finalize();
    }
    rowData.clear();
    rowData.shrink_to_fit();
}
/**
 * Set the error flag.
 */
void LoadDataList::setError(bool error) { _error = error; }
/**
 * Get the error flag (true while the list is in the error state).
 */
bool LoadDataList::getError() { return _error; }
/**
 * Set the data name.
 *
 * @param val source C string to copy; nullptr now safely yields an empty
 *            name (constructing std::string from nullptr, as the old
 *            temporary-copy version allowed, is undefined behaviour).
 */
void LoadDataList::setDataName(const char *val) {
    _dataName = (val != nullptr) ? val : "";
}
/**
 * Get the data name (returned by value, i.e. as a copy).
 */
std::string LoadDataList::getDataName() {
    return _dataName;
}
/**
 * Set the header row.
 * NOTE(review): stores the raw pointer without taking a copy; lifetime and
 * ownership stay with the caller — confirm (finalize() nulls but never deletes it).
 */
void LoadDataList::setHeader(RowData *header) {
    _header = header;
}
/**
 * Get the header row (may be nullptr after finalize()).
 */
RowData *LoadDataList::getHeader() {
    return _header;
}
|
# Regression check: solve problem hs033 (loaded from the AMPL .nl file next
# to this test) with MadNLP and compare against pinned reference results.
hs33 = AmplModel(joinpath(@__DIR__, "hs033.nl"))
result = madnlp(hs33; print_level=MadNLP.ERROR)  # suppress solver output
@test result.status == :first_order              # solver reports first-order optimality
@test solcmp(result.solution, [0.0,1.4142135570650927,1.4142135585382265])
@test solcmp(result.multipliers, [0.17677669589725922,-0.17677669527079812])
@test solcmp(result.multipliers_L, [11.000000117266442,1.7719330023793877e-9,1.7753439380861844e-9])
@test solcmp(result.multipliers_U, [0.,0.,0.])
@test solcmp([result.objective], [-4.585786548956206])
finalize(hs33)  # release the AMPL model handle
|
# How To
To generate `OpenApi.json` and `Swagger.json`:
1. Run the server on `localhost:8080`.
2. From this directory, run the script `generate-openapi-and-swagger.sh`.
|
package com.mina_mikhail.base_mvvm.data.general.repository
import com.mina_mikhail.base_mvvm.data.general.data_source.remote.GeneralRemoteDataSource
import com.mina_mikhail.base_mvvm.data.local.preferences.AppPreferences
import com.mina_mikhail.base_mvvm.domain.general.repository.GeneralRepository
import javax.inject.Inject
/**
 * Concrete [GeneralRepository] wired together by dependency injection.
 *
 * NOTE(review): the class body is empty here — [remoteDataSource] and
 * [appPreferences] are held but not yet used in this file; confirm the
 * interface supplies defaults or that implementations are forthcoming.
 */
class GeneralRepositoryImpl @Inject constructor(
    private val remoteDataSource: GeneralRemoteDataSource,
    private val appPreferences: AppPreferences
) : GeneralRepository
|
/*
* <<
* Davinci
* ==
* Copyright (C) 2016 - 2017 EDP
* ==
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* >>
*/
import * as React from 'react'
const Icon = require('antd/lib/icon')
const Col = require('antd/lib/col')
const styles = require('../Dashboard.less')
let ReactQuill
interface ITextChartProps {
data: any
loading: boolean
editing: boolean
className: string
chartParams: any
onTextEditorChange: (content: any) => void
}
interface ITextChartStates {
editorLoaded: boolean
popClass: string
sizeWhiteList: any[]
}
/**
 * Rich-text "chart": renders (and optionally edits) HTML content with
 * react-quill, substituting 〖@dv_<field>_dv@〗 placeholders with column data.
 */
export class TextChart extends React.PureComponent<ITextChartProps, ITextChartStates> {
  constructor (props) {
    super(props)
    this.state = {
      editorLoaded: false,
      popClass: styles.popHide,
      sizeWhiteList: []
    }
    // Lazy-load react-quill so the bundle is only fetched when a TextChart
    // mounts, then register the custom font-size whitelist.
    // NOTE(review): calling this.setState() from a constructor-started async
    // chain can fire before mount and trigger a React warning — confirm.
    import('react-quill').then((rq) => {
      ReactQuill = rq
      const size = ReactQuill.Quill.import('attributors/style/size')
      size.whitelist = ['10px', '11px', '12px', '13px', '14px', '16px', '18px', '20px', '24px', '28px']
      ReactQuill.Quill.register(size, true)
      this.setState({
        sizeWhiteList: size.whitelist,
        editorLoaded: true
      })
    })
  }

  public static defaultProps = {
    chartParams: {}
  }

  // Invoked when a field is picked from the insert-title dropdown.
  // NOTE(review): static, so it is shared by ALL TextChart instances — the
  // editor whose toolbar handler ran last wins. Confirm single-instance use.
  public static spliterSelectCallback: (item: string) => void = null

  // Quill toolbar config; insertTitle installs the callback that inserts a
  // 〖@dv_..._dv@〗 placeholder at the cursor (`this` is the toolbar module).
  private modules = {
    toolbar: {
      container: '#toolbar',
      handlers: {
        insertTitle () {
          TextChart.spliterSelectCallback = (item) => {
            const cursorPosition = this.quill.getSelection().index
            this.quill.insertText(cursorPosition, `〖@dv_${item}_dv@〗`)
            this.quill.setSelection(cursorPosition + 1)
          }
        }
      }
    }
  }

  // Formats the editor is allowed to keep/emit.
  private formats = [
    'header', 'font', 'size', 'align',
    'bold', 'italic', 'underline', 'strike',
    'color', 'link', 'clean'
  ]

  // Toggle the field-picker popup attached to the insert-title button.
  private showSelectDiv = () => {
    this.setState({
      popClass: this.state.popClass === styles.popShow ? styles.popHide : styles.popShow
    })
  }

  // Hide the popup and forward the chosen field to the insert callback.
  private onSelectItem = (c) => () => {
    this.setState({
      popClass: styles.popHide
    })
    TextChart.spliterSelectCallback(c)
  }

  // Persist the pooled synthetic event so it survives async access.
  private selHeaderChange = (e) => e.persist()

  public render () {
    const {
      data,
      loading,
      editing,
      className,
      onTextEditorChange,
      chartParams
    } = this.props
    const {
      editorLoaded,
      popClass,
      sizeWhiteList
    } = this.state
    let content = chartParams.richTextEdited
    // Replace each 〖@dv_<key>_dv@〗 placeholder with that column's values.
    // NOTE(review): the outer replace()'s return value is deliberately
    // discarded — the callback reassigns `content` itself. It works, but is
    // convoluted; consider returning the replacement from the callback.
    if (content) {
      const keys = data.keys || []
      const dataValue = {}
      keys.forEach((key) => {
        dataValue[key] = data.dataSource.map((item) => item[key])
      })
      content.replace(/〖@dv_(.+?)_dv@〗/g, function (match, p1) {
        if (dataValue.hasOwnProperty(p1)) {
          content = content.replace(match, dataValue[p1])
        }
      })
    }
    // Editor with its custom toolbar; only rendered once react-quill loaded.
    const editorContent = editorLoaded
      ? (
        <div className={`text-editor ${styles.textEditor}`}>
          <div id="toolbar">
            <select className="ql-header" onChange={this.selHeaderChange} />
            <select className="ql-font" />
            <select className={`ql-size ${styles.size}`} defaultValue="13px">
              {sizeWhiteList.map((i) => <option value={i} key={i}>{i}</option>)}
            </select>
            <select className="ql-align" />
            <button className="ql-bold" />
            <button className="ql-italic" />
            <button className="ql-underline" />
            <button className="ql-strike" />
            <select className="ql-color" />
            <button className="ql-link" />
            <button className="ql-clean" />
            <button
              className={`ql-insertTitle ${styles.selectBtn}`}
              onClick={this.showSelectDiv}
            >
              <Icon type="select" className={styles.selectIcon} />
              <div className={popClass}>
                {
                  data.keys
                    ? data.keys.map((c) => (
                      <Col key={c}>
                        <ul onClick={this.onSelectItem(c)}>
                          <li key={c} className={styles.selectItem}>{c}</li>
                        </ul>
                      </Col>))
                    : ''
                }
              </div>
            </button>
          </div>
          <ReactQuill
            value={chartParams.richTextEdited}
            onChange={onTextEditorChange}
            className={styles.editor}
            modules={this.modules}
            formats={this.formats}
            theme={'snow'}
          />
        </div>
      )
      : (
        <p>编辑器加载中……</p>
      )
    // Loading spinner, otherwise editor (editing) or read-only rendered HTML.
    const chartContent = loading
      ? (
        <div className={styles.scorecard}>
          <div className={styles.scorecardContainer}>
            <Icon type="loading" />
          </div>
        </div>
      )
      : (
        <div className={styles.textEditorContainer}>
          {
            editing
              ? editorContent
              : (<div className="ql-editor" dangerouslySetInnerHTML={{__html: content}} />)
          }
        </div>
      )
    return (
      <div className={className}>
        {chartContent}
      </div>
    )
  }
}
export default TextChart
|
//
// Hippo
// (C) 2021 Brave Monday
//
extern crate proc_macro;
mod codegen;
mod meta;
use proc_macro as pm1;
use proc_macro2 as pm2;
use proc_macro_error::{abort_call_site, proc_macro_error};
use hippo_shared::{Configuration, OutputFormat};
use codegen::{emit, Container};
use meta::Meta;
/// Preprocess and embed an asset in conformance to the [`Preprocessed`](hippo::Preprocessed)
/// trait.
///
/// Pipeline: parse the derive input, read the `#[hippo(...)]` attributes
/// (preprocessor name + inputs), locate and load the Hippo configuration,
/// look up the named preprocessor, execute it, and emit the generated code.
/// Every failure path aborts with a compile error at the call site.
#[proc_macro_derive(Preprocess, attributes(hippo))]
#[proc_macro_error]
pub fn preprocess(ast: pm1::TokenStream) -> pm1::TokenStream {
    let ast = syn::parse_macro_input!(ast as syn::DeriveInput);
    // Attribute metadata: which preprocessor to run and with which inputs.
    let meta = Meta::new(&ast);
    let name = &meta.preprocessor;
    let inputs = &meta.inputs;
    let cfg_path = Configuration::locate().unwrap_or_else(
        || abort_call_site!("Hippo configuration file could not be located")
    );
    let cfg = Configuration::load(&cfg_path).unwrap_or_else(
        |e| abort_call_site!("Hippo configuration contains one or more errors: {}", e)
    );
    let pp = cfg.preprocessors.get(name).unwrap_or_else(
        || abort_call_site!("preprocessor `{}` not found in configuration", name)
    );
    // Rewrite the argument list (presumably placeholder substitution defined
    // by the preprocessor — confirm in `rewrite_arguments`).
    let inputs = pp.rewrite_arguments(inputs);
    let out = pp.execute(&inputs).unwrap_or_else(
        |e| abort_call_site!("preprocessor command resulted in an error: {}", e)
    );
    // Wrap the command's stdout according to the configured output format.
    let output: pm2::TokenStream = {
        match pp.format {
            OutputFormat::Bytes => emit(
                ast,
                &inputs,
                Container::Bytes(out.stdout.into())
            ),
            OutputFormat::Utf8 => emit(
                ast,
                &inputs,
                Container::Utf8(String::from_utf8_lossy(&out.stdout).to_string())
            )
        }
    };
    pm1::TokenStream::from(output)
}
|
import winston from 'winston';
import { name, version } from '../../package.json';
import { LOG_LEVEL } from '../config';
// Application-wide structured (JSON) logger. Every record carries the
// package name/version as default metadata; log level comes from config.
const loggerOptions = {
  level: LOG_LEVEL,
  format: winston.format.json(),
  transports: [new winston.transports.Console()],
  defaultMeta: {
    package: { name, version },
  },
};

export const logger = winston.createLogger(loggerOptions);
|
import Link from 'next/link'
import styles from '@styles/table.module.css'
import { PAGE_LINKS } from '@utils/settings'
import { useRouter } from 'next/router'
// Table of site pages from PAGE_LINKS: a source-code link, the page link
// (rendered as plain text when it is the current page), and a description.
const LinksTable = () => {
  const { pathname } = useRouter()
  return (
    <table className={styles.table}>
      <thead>
        <tr className={styles.headerRow}>
          <th className={styles.cell}>Code</th>
          <th className={styles.cell}>Page</th>
          <th className={styles.cell}>Description</th>
        </tr>
      </thead>
      <tbody>
        {PAGE_LINKS.map(({ name, url, code, description }) => (
          <tr key={url} className={styles.bodyRow}>
            <td className={styles.cell}>
              <Link href={code}>
                <a>source</a>
              </Link>
            </td>
            <td className={styles.cell}>
              {/* The current page is shown as text, not a self-link. */}
              {pathname === url ? (
                name
              ) : (
                <Link href={url}>
                  <a>{name}</a>
                </Link>
              )}
            </td>
            {/* NOTE(review): description is injected as raw HTML; this is
                only safe because PAGE_LINKS is local static config — never
                route user-supplied content through here. */}
            <td
              className={styles.cell}
              dangerouslySetInnerHTML={{ __html: description }}
            />
          </tr>
        ))}
      </tbody>
    </table>
  )
}
export { LinksTable }
|
package tftp
import (
"bytes"
"io"
"io/ioutil"
"net"
"testing"
"time"
)
// TestServer round-trips a payload through the TFTP server: it sends a read
// request, ACKs every data block, reassembles the payload from the data
// packets, and verifies it matches what the server was configured to serve.
func TestServer(t *testing.T) {
	t.Parallel()
	// Expected payload the server will serve.
	p1, err := ioutil.ReadFile("./tftp/payload.svg")
	if err != nil {
		t.Fatal(err)
	}
	// Server socket on an OS-assigned loopback port.
	conn, err := net.ListenPacket("udp", "127.0.0.1:")
	if err != nil {
		t.Fatal(err)
	}
	done := make(chan struct{})
	s := Server{Payload: p1}
	// Serve in the background; `done` closes when Serve returns after the
	// connection is closed at the end of the test.
	go func() {
		_ = s.Serve(conn)
		close(done)
	}()
	rrq := ReadReq{Filename: "test"}
	// Client socket, also on an OS-assigned port.
	client, err := net.ListenPacket("udp", "127.0.0.1:")
	if err != nil {
		t.Fatal(err)
	}
	b, err := rrq.MarshalBinary()
	if err != nil {
		t.Fatal(err)
	}
	// Send the read request and check it went out whole.
	n, err := client.WriteTo(b, conn.LocalAddr())
	if err != nil {
		t.Fatal(err)
	}
	if n != len(b) {
		t.Fatalf("expected %d bytes; wrote %d bytes", len(b), n)
	}
	// Receive loop: accumulate data blocks into p2, ACKing each one.
	p2 := new(bytes.Buffer)
	for {
		// Per-read deadline so a stalled server fails the test instead of hanging.
		_ = client.SetReadDeadline(time.Now().Add(time.Second))
		buf := make([]byte, DatagramSize)
		n, addr, err := client.ReadFrom(buf)
		if err != nil {
			t.Fatal(err)
		}
		var data Data
		err = data.UnmarshalBinary(buf[:n])
		if err != nil {
			t.Fatal(err)
		}
		_, err = io.Copy(p2, data.Payload)
		if err != nil {
			t.Fatal(err)
		}
		// Acknowledge the block so the server sends the next one.
		ack := Ack(data.Block)
		b, err = ack.MarshalBinary()
		if err != nil {
			t.Fatal(err)
		}
		_, err = client.WriteTo(b, addr)
		if err != nil {
			t.Fatal(err)
		}
		// A datagram shorter than the maximum size marks the final block.
		if n < DatagramSize {
			break
		}
	}
	_ = client.Close()
	_ = conn.Close()
	<-done
	if !bytes.Equal(p1, p2.Bytes()) {
		t.Fatal("sent payload not equal to received payload")
	}
}
|
#![cfg(test)]
use log::trace;
//fn bytes_from_hex_string(hex_str: &str) -> &[u8] {
/// Parse a whitespace-separated string of hex byte values (e.g. "01 0a")
/// into a byte vector, panicking with the offending input on parse failure.
pub fn bytes_from_hex_string(hex_str: &str) -> Vec<u8> {
    trace!(target: "util::bytes_from_hex_string", "hex_str: {}", hex_str);
    let bytes = hex_str
        .split_whitespace()
        .map(|hex_digit| u8::from_str_radix(hex_digit, 16))
        .collect::<Result<Vec<u8>, _>>()
        .unwrap_or_else(|e| panic!("error {} parsing: {}", e, hex_str));
    trace!(target: "util", "{:02X?}", bytes);
    bytes
}
#[cfg(test)]
mod tests {
    use super::*;

    // Single byte.
    #[test]
    fn test_one() {
        assert_eq!(bytes_from_hex_string("01"), [0x1])
    }

    // Two whitespace-separated bytes.
    #[test]
    fn test_two() {
        assert_eq!(bytes_from_hex_string("01 0a"), [0x1, 0xa])
    }
} // mod tests
|
#!/bin/bash
# This small shell script initializes the I2C bus on a Raspberry Pi
# and activates an installed real time clock module.
# Afterwards the system time is synced to the hardware clock.
# Requires root: it writes to sysfs and invokes hwclock.
# Register a DS1307-compatible RTC at I2C address 0x68 on bus i2c-1.
echo ds1307 0x68 > /sys/class/i2c-adapter/i2c-1/new_device
# Set the system clock from the hardware clock (-s = hctosys).
hwclock -s
|
using System;
namespace KnowledgeMinutes.Models
{
/// <summary>
/// Base type for all animals. Subclasses supply weight and classification.
/// </summary>
public abstract class Animal
{
    /// <summary>Returns the concrete type name (e.g. "Dog").</summary>
    public override string ToString()
    {
        return this.GetType().Name;
    }

    // Body weight; unit not specified here — presumably kilograms (confirm).
    public abstract float Weight { get; set; }

    // Broad biological classification of the animal.
    public abstract AnimalType Type { get; set; }

    /// <summary>Positional deconstruction yielding the animal's weight.</summary>
    public void Deconstruct(out float weight)
    {
        weight = this.Weight;
    }
}
/// <summary>A dog: a mammal with a default weight of 20.</summary>
public class Dog : Animal
{
    public override float Weight { get; set; } = 20;

    public override AnimalType Type { get; set; } = AnimalType.Mammal;

    /// <summary>Writes this animal's greeting ("wuff") to the console.</summary>
    public virtual void SayHello() => Console.WriteLine("wuff");
}
/// <summary>A cat: a mammal with a default weight of 5.</summary>
public class Cat : Animal
{
    public override float Weight { get; set; } = 5;

    public override AnimalType Type { get; set; } = AnimalType.Mammal;

    /// <summary>Writes this animal's greeting ("meow") to the console.</summary>
    public virtual void SayHello() => Console.WriteLine("meow");
}
/// <summary>A bat: a (flying) mammal with a default weight of 0.5.</summary>
public class Bat : Animal
{
    public override float Weight { get; set; } = 0.5f;

    public override AnimalType Type { get; set; } = AnimalType.Mammal;

    /// <summary>Writes this animal's greeting to the console.</summary>
    public virtual void SayHello() => Console.WriteLine("piep in high frequency");
}
/// <summary>Coarse biological classification used by <see cref="Animal.Type"/>.</summary>
public enum AnimalType
{
    Reptile = 0,
    Mammal = 1
}
}
|
package main
import (
"bufio"
"fmt"
"os"
"os/signal"
"strings"
"rdfs/cryp"
"rdfs/geth"
"rdfs/ipfs"
"rdfs/jrpc"
"rdfs/util"
"github.com/ethereum/go-ethereum/accounts/keystore"
"github.com/ipfs/go-ipfs-api"
)
var (
geth_pid int = -1
ipfs_pid int = -1
geth_client *geth.GethRPC
geth_keys []*keystore.Key
ipfs_shell *shell.Shell
)
// purchase downloads an encrypted file by IPFS hash (args[0]) into an
// optional target directory (args[1], default util.RDFS_DOWN_DIR) and runs
// the purchase handshake against the owner's JSON-RPC server.
// NOTE(review): steps 2–3 (contract interaction) only print progress — the
// on-chain logic is not implemented yet, and ownerIP is hard-coded.
func purchase(args ...string) bool {
	/*
		As following the File Purchase Scenario,
		1. EF = ipfs.Get(EFH)
		2. Send token, EF to contract
		- (Contract) Check token amount and transfer token to File Owner
		- (Contract) Return true to buyer
		3. If accepted, receive true from owner with his info
		4. Send buyer's pk(publicKey) to owner's json-rpc(?) server
		- Check the transaction
		- If valid, edk = E(dk, pk) and send edk to buyer's json-rpc server
		5. Receive edk and decrypt EF
		- dk = D(edk, sk) *sk = privateKey
		- F = D(EF, dk)
	*/
	var dir_path string = util.RDFS_DOWN_DIR
	if len(args) == 2 {
		dir_path = args[1]
	}
	// 1. EF = ipfs.Get(EFH)
	if !ipfs.Get(ipfs_shell, args[0], dir_path) {
		return false
	}
	// 2. Send token, EF to contract
	fmt.Printf("[+] Send token, EF to Ethereum through GETH\n")
	// 3. If accepted, receive true from owner with his info
	fmt.Printf("[+] Successfully made transaction with the owner\n")
	ownerIP := "127.0.0.1" // placeholder: should come from the owner info above
	// 4. Send buyer's pk(publicKey) to owner's json-rpc(?) server
	var edk jrpc.EDK
	if !jrpc.RequestE(ownerIP, jrpc.Key("BUYER KEY"), &edk) {
		fmt.Printf("[-] Couldn't receive EDK from owner\n")
		return false
	}
	// 5. Receive edk and decrypt EF
	fmt.Printf("[+] RPC Client: Successfully received EDK from the owner\n"+
		":%s\n", edk.Raw)
	return true
}
// store publishes the file at args[0] to IPFS and is intended to record its
// ownership on the contract.
// NOTE(review): only step 3 (the IPFS upload) is implemented; key
// derivation, encryption and the contract write are still TODO, and the
// resulting hash is currently discarded.
func store(args ...string) bool {
	/*
		As following the File Storage Scenario,
		1. ek, dk = hash(pk, sk, F)
		2. EF = E(F, ek)
		3. EFH = ipfs.Set(EF)
		4. Write ownership(EFH, nodeID) and information(EF size, Owner IP) on contract
		- Data Structure
		type Node struct {
			ID string
			IP string
		}
		type EF struct {
			Hash string
			Size int
			Owner Node
		}
	*/
	// 1. ek, dk = hash(pk, sk, F)
	// 2. EF = E(F, ek)
	// 3. EFH = ipfs.Set(EF)
	var hash string
	if !ipfs.Set(ipfs_shell, args[0], &hash) {
		return false
	}
	// 4. Write ownership(EFH, nodeID) and information(EF size, Owner IP) on contract
	return true
}
// help prints the version banner and one "name : description" row per entry
// in util.CMD (each entry is a colon-separated "name:description" string).
func help() {
	fmt.Printf("\nRDFS, version %s. Type 'help' to see this list.\n\n", util.RDFS_VER)
	for _, entry := range util.CMD {
		fields := strings.Split(entry, ":")
		fmt.Printf(" - %-30s : %s\n", fields[0], fields[1])
	}
	fmt.Println()
}
// prompt runs the interactive RDFS shell: it reads one line at a time from
// stdin, parses it into a command plus arguments, and dispatches it until
// the user exits.
//
// Bug fix: the test/list/balance commands indexed cmd_args without checking
// its length, so entering the bare command panicked the shell; each of
// those cases now guards its argument count first.
func prompt() {
	in := bufio.NewReader(os.Stdin)
	for {
		print(">> ")
		input, _ := in.ReadString('\n') // a read error yields an empty command and re-prompts
		cmd, cmd_args := util.Shell(input)
		switch cmd {
		case util.CMD_EXIT:
			rdfsClose()
			os.Exit(0)
		case util.CMD_STORE:
			store(cmd_args...)
		case util.CMD_PURCHASE:
			purchase(cmd_args...)
		case util.CMD_TEST:
			if len(cmd_args) == 0 {
				println("Missing option for test command")
				break
			}
			if strings.Compare(cmd_args[0], "-k") == 0 {
				/*
					if privKey == nil || pubKey == nil {
						privKey, pubKey = cryp.GenerateKeySet()
						fmt.Printf("[+] Successfully generated key(private, public) set\n")
						cryp.SavePrivKeyPEMTo(util.RDFS_CONFIG_DIR+"private.pem", privKey)
						cryp.SavePubKeyPEMTo(util.RDFS_CONFIG_DIR+"public.pem", pubKey)
						cryp.Hash(privKey, pubKey, "/pss/tmp/test.txt")
					}
					cryp.Hash(privKey, pubKey, "/pss/tmp/t.py")
				*/
			} else if strings.Compare(cmd_args[0], "-j") == 0 {
				if len(cmd_args) != 4 {
					break
				}
				jrpc.TestB(cmd_args[1], cmd_args[2], cmd_args[3])
			} else if strings.Compare(cmd_args[0], "-g") == 0 {
				// geth.Test()
			}
		case util.CMD_HELP:
			help()
		case util.CMD_LIST:
			// Both list forms need an option flag and a target argument.
			if len(cmd_args) < 2 {
				println("Missing arguments for list command")
				break
			}
			if strings.Compare(cmd_args[0], "-h") == 0 {
				ipfs.List(ipfs_shell, cmd_args[1], ".")
			} else if strings.Compare(cmd_args[0], "-f") == 0 {
				util.List(cmd_args[1])
			}
		case util.CMD_NETVERSION:
			// NOTE: on RPC error the zero value is still printed below.
			netVersion, err := geth_client.NetVersion()
			if err != nil {
				fmt.Println(err)
			}
			fmt.Println(netVersion)
		case util.CMD_COINBASE:
			address, err := geth_client.EthCoinBase()
			if err != nil {
				fmt.Println(err)
			}
			fmt.Println(address)
		case util.CMD_ISMINING:
			mining, err := geth_client.EthMining()
			if err != nil {
				fmt.Println(err)
			}
			fmt.Println(mining)
		case util.CMD_BLOCKNUMBER:
			num, err := geth_client.EthBlockNumber()
			if err != nil {
				fmt.Println(err)
			}
			fmt.Println(num)
		case util.CMD_ACCOUNTS:
			accounts, err := geth_client.EthAccounts()
			if err != nil {
				fmt.Println(err)
			}
			fmt.Println(accounts)
		case util.CMD_BALANCE:
			if len(cmd_args) < 1 {
				println("Missing account address")
				break
			}
			balance := geth_client.EthGetBalance(cmd_args[0])
			fmt.Println(balance)
		default:
			println("Unsupported Command")
		}
	}
}
// rdfsInit boots the RDFS node: it loads the Ethereum keystore keys
// (exiting if none are found), starts the IPFS daemon plus an API shell on
// the hard-coded localhost:5001 endpoint, starts geth, and serves the
// JSON-RPC endpoint in the background.
func rdfsInit() {
	fmt.Println("[+] Initializing RDFS")
	/*
		Need to implement pre-processing
		for IPFS, GETH to get initialized
	*/
	geth_keys = cryp.GetKey()
	if len(geth_keys) == 0 {
		fmt.Printf("[-] Couldn't initialize RDFS\n")
		os.Exit(1)
	}
	for _, key := range geth_keys {
		fmt.Printf("[+] Processed the key set of address: %x\n", key.Address.Bytes())
	}
	ipfs_pid = ipfs.Open()
	ipfs_shell = shell.NewShell("localhost:5001") // IPFS HTTP API endpoint (hard-coded)
	geth_pid, geth_client = geth.Open()
	go jrpc.InitServer()
}
// rdfsClose stops the child IPFS and geth processes started by rdfsInit.
func rdfsClose() {
	fmt.Println("\n[+] Closing RDFS")
	ipfs.Close(ipfs_pid)
	geth.Close(geth_pid)
}
// main installs a Ctrl-C handler that shuts the node down cleanly, boots
// the node, and hands control to the interactive prompt.
func main() {
	c := make(chan os.Signal, 1)
	signal.Notify(c, os.Interrupt)
	rdfsInit()
	// On SIGINT, close child processes before exiting.
	go func() {
		<-c
		rdfsClose()
		os.Exit(0)
	}()
	prompt()
}
|
/**
* Copyright 2020 HCL Technologies Ltd.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import Promise from 'bluebird';
import iampkg from '@domino/node-iam-client';
import jwtpkg from 'jsonwebtoken';
import iam from './iam.js';
import config from './config.js';
const { IAMClient } = iampkg;
const { decode } = jwtpkg;
// Remove all IAM dance state (context, token, redirect URL, user) from the
// session, then persist it. Returns a promise for the session save.
const cleanSession = (session) => {
  delete session.iamCtx; // eslint-disable-line no-param-reassign
  delete session.iamToken; // eslint-disable-line no-param-reassign
  delete session.authorizationUrl; // eslint-disable-line no-param-reassign
  delete session.user; // eslint-disable-line no-param-reassign
  return Promise.promisify(session.save, { context: session })();
};
/**
 * Convert an LDAP DN into Domino's slash-separated form: unescaped
 * component separators (",") become "/", and escaped commas ("\,") are
 * unescaped to plain commas.
 *
 * Bug fix: the unescape replacement previously lacked the /g flag, so only
 * the FIRST escaped comma was unescaped when a DN contained several.
 */
const ldapToDomino = (ldapdn) => {
  return ldapdn.replace(/([^\\]),/g, '$1/').replace(/\\,/g, ',');
};
/**
 * Express middleware driving the OIDC authentication/authorization dance
 * with the Domino IAM service.
 *
 * Flow: handle the IAM callback, resume an in-flight dance, reset stale
 * state, start the authentication request for anonymous users, start the
 * full-scope authorization request for authenticated users, drop expired
 * tokens, and otherwise fall through to the application.
 */
export default async (req, res, next) => {
  const { session } = req;
  // 1. IAM callback: exchange the authorization response for tokens.
  if (req.path === config.get('webFrontend:callbackPath')) {
    const { iamCtx } = session;
    await cleanSession(session);
    if (req.query.error) {
      // IAM reported an error — surface the raw query as plain text.
      res.type('text/plain').send(`${JSON.stringify(req.query, undefined, ' ')}`);
    } else {
      try {
        const client = await IAMClient.createInstance(iam.clientOptions);
        client.client.CLOCK_TOLERANCE = 1000; // tolerate clock skew vs. IAM
        const iamToken = await client.getToken(req, iamCtx);
        const user = await decode(iamToken.id_token);
        session.user = user;
        // session.user holds the same object, so this rewrite is visible there too.
        user.sub = ldapToDomino(user.sub);
        // this is only our authentication request
        // we need to let the authorization for proton happen
        if (iamToken.scope !== 'openid') {
          // for demo only, store this more securely.
          session.iamToken = iamToken;
        }
        res.redirect('/');
        return;
      } catch (e) {
        res.type('text/plain').send(e.stack);
      }
    }
    next();
    return;
  }
  try {
    // 2. A dance is in flight — send the user back to the authorization URL.
    if (session.iamCtx && session.authorizationUrl) {
      res.redirect(session.authorizationUrl);
      return;
    }
    // 3. Stale context without a URL — reset and start over.
    if (session.iamCtx) {
      await cleanSession(session);
      res.redirect('/');
      return;
    }
    // 4. Not authenticated yet — start the 'openid' authentication request.
    if (!session.user) {
      const client = await IAMClient.createInstance(iam.clientOptions);
      client.client.CLOCK_TOLERANCE = 1000;
      const {
        authorizationUrl,
        secureCtx,
      } = client.createAuthorizationCtx(iam.basicContext);
      session.iamCtx = secureCtx;
      session.authorizationUrl = authorizationUrl;
      res.redirect(session.authorizationUrl);
      return;
    }
    // 5. Authenticated but not yet authorized for proton — request full scopes.
    if (!session.iamToken) {
      const client = await IAMClient.createInstance(iam.clientOptions);
      client.client.CLOCK_TOLERANCE = 1000;
      const {
        authorizationUrl,
        secureCtx,
      } = client.createAuthorizationCtx(iam.fullContext);
      session.iamCtx = secureCtx;
      session.authorizationUrl = authorizationUrl;
      res.redirect(session.authorizationUrl);
      return;
    }
    // (A second `if (!session.iamToken)` cleanup block previously sat here;
    // it was unreachable because the branch above already returns whenever
    // the token is missing, so it has been removed.)
    // 6. Token present — drop it and restart the dance if it has expired.
    const { expires_at: expires } = session.iamToken; // seconds
    if (expires < new Date().getTime() / 1000) { // ms -> seconds for comparison
      delete session.iamToken;
      await Promise.promisify(session.save, { context: session })();
      res.redirect('/');
      return;
    }
    next();
  } catch (e) {
    console.error(e); // eslint-disable-line no-console
    await Promise.promisify(session.destroy, { context: session })();
    res.redirect('/');
  }
};
|
-- Create the employee_data database (no-op when it already exists).
CREATE DATABASE IF NOT EXISTS `employee_data`;
USE `employee_data`;

-- Employees. `department_id` refers to department.id, but no foreign-key
-- constraint is declared (default 0 = no department).
CREATE TABLE IF NOT EXISTS `employee` (
    `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
    `email` VARCHAR(100) NOT NULL DEFAULT '',
    `name` VARCHAR(30) NOT NULL DEFAULT '',
    `department_id` INT UNSIGNED NOT NULL DEFAULT 0,
    PRIMARY KEY (`id`)
);

-- Departments.
CREATE TABLE IF NOT EXISTS `department` (
    `id` INT UNSIGNED NOT NULL AUTO_INCREMENT,
    `name` VARCHAR(30) NOT NULL DEFAULT '',
    PRIMARY KEY (`id`)
);

-- Seed the department table.
INSERT INTO `department` (`name`) VALUES ('HR'), ('ENG');
|
import { renderStack } from '../../../render/const';
import { activeHu } from '../const';
import injectionPrivateToInstance from '../util/injectionPrivateToInstance';
// Resolve $root/$parent for a new instance and register it with its parent.
// For custom elements, walk the render stack from the innermost element
// outward; the first element with an active Hu instance becomes the parent.
export default (isCustomElement, target, root, targetProxy) => {
  let $root = targetProxy;
  let $parent;

  if (isCustomElement) {
    for (let index = renderStack.length - 1; index >= 0; index--) {
      const candidate = activeHu.get(renderStack[index]);

      if (candidate) {
        $parent = candidate;
        $root = candidate.$root;
        candidate.$children.push(targetProxy);
        break;
      }
    }
  }

  injectionPrivateToInstance(isCustomElement, target, root, {
    $root,
    $parent,
    $children: []
  });
};
|
@* Form for adding a new order to the current vendor; posts to /vendors/{id}/orders. *@
<h1>Add new order for @Model.VendorName</h1><br><br>
<form action="/vendors/@Model.Id/orders" method="post">
    <input id="vendorId" name="vendorId" type="hidden" value="@Model.Id"><br>
    <input id="orderName" name="orderName" type="text" placeholder="Name of the order" required><br>
    <input id="breadCount" name="breadCount" type="number" placeholder="How many loaves of bread?" min="0" required><br>
    <input id="pastryCount" name="pastryCount" type="number" placeholder="How many pastries?" min="0" required><br>
    @* NOTE(review): the leading "$" appears to be a currency label for the price field — confirm it is intentional. *@
    $<input id="price" name="price" type="number" placeholder="Total order price" min="0" required><br>
    <input id="orderDate" name="orderDate" type="date" placeholder="date of the order" required><br>
    <input id="orderDescription" name="orderDescription" type="text" placeholder="Description" required><br>
    <button type="submit">Add Order!</button>
</form>
<br><br>
<a href="/vendors/@Model.Id">Cancel and return to vendor</a><br>
<a href="/">Cancel and return to home screen</a>
|
<?php
namespace Enkatsu\PhpOscParser\Tests;
use Enkatsu\PhpOscParser\Reader;
/**
 * Unit tests for Reader's primitive OSC type parsers.
 *
 * Each test builds a collection of two-character hex-digit pairs (one pair
 * per byte), since that is the buffer shape Reader consumes, then checks
 * both the parsed value and that the read position advanced to the end.
 */
class ReaderTest extends \PHPUnit_Framework_TestCase
{
    public function testStringRead()
    {
        $hello = 'Hello';
        // Bug fix: 'dechex' and 'ord' must be passed as string callables;
        // the bare constants used previously are undefined (and fatal on
        // modern PHP).
        $hex = \join(\array_map('dechex', \array_map('ord', \str_split($hello))));
        $buf = \collect(\array_chunk(\str_split($hex), 2));
        $pos = 0;
        $result = Reader::parseString($buf, $pos);
        $this->assertEquals($hello, $result);
        $this->assertEquals($pos, $buf->count());
    }

    public function testIntRead()
    {
        $num = 2147483647; // INT32_MAX -> exactly 8 hex digits / 4 bytes
        $hex = \dechex($num);
        $buf = \collect(\array_chunk(str_split($hex), 2));
        $pos = 0;
        $result = Reader::parseInt($buf, $pos);
        $this->assertEquals($num, $result);
        $this->assertEquals($pos, $buf->count());
    }

    public function testFloatRead()
    {
        $hex = '40200000'; // IEEE-754 single-precision 2.5 (big-endian)
        // strrev + 'h*' (low-nibble-first) reconstructs the little-endian
        // byte order for unpack('f').
        // NOTE(review): unpack() returns an array keyed from 1; assertEquals
        // is loose enough to pass, but comparing $num[1] would be stricter.
        $num = unpack('f', pack('h*', strrev($hex)));
        $buf = \collect(\array_chunk(str_split($hex), 2));
        $pos = 0;
        $result = Reader::parseFloat($buf, $pos);
        $this->assertEquals($num, $result);
        $this->assertEquals($pos, $buf->count());
    }
}
|
# Cloud Foundry Java Buildpack
# Copyright 2013-2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require 'application_helper'
require 'fileutils'
require 'java_buildpack/component/application'
describe JavaBuildpack::Component::Application do
  include_context 'with application help'

  it 'returns a parsed version of VCAP_APPLICATION as details' do
    expect(application.details).to eq(vcap_application)
  end

  it 'removes VCAP_APPLICATION and VCAP_SERVICES from environment' do
    expect(application.environment).to include('test-key')
    expect(application.environment).not_to include('VCAP_APPLICATION')
    expect(application.environment).not_to include('VCAP_SERVICES')
  end

  it 'returns a child path if it does not exist' do
    expect(application.root + 'test-file').not_to be_nil
  end

  # Fixed description: the previous text ("does not return a child path that
  # does not exist if it exists but is not in the initial contents")
  # contradicted itself; the body checks a file created AFTER the snapshot.
  it 'does not return a child path that exists but is not in the initial contents' do
    FileUtils.touch(app_dir + 'test-file')
    expect(application.root + 'test-file').not_to exist
  end

  it 'returns a child path if it exists and is in the initial contents',
     app_fixture: 'application' do
    expect(application.root + 'test-file').not_to be_nil
  end

  it 'only lists children that exist initially',
     app_fixture: 'application' do
    # Entries created after the application snapshot must not be listed.
    FileUtils.mkdir_p(app_dir + '.ignore-directory')
    FileUtils.mkdir_p(app_dir + 'ignore-directory')
    FileUtils.touch(app_dir + '.ignore-file')
    FileUtils.touch(app_dir + 'ignore-file')
    children = application.root.children
    expect(children.size).to eq(4)
    expect(children).to include(app_dir + '.test-directory')
    expect(children).to include(app_dir + 'test-directory')
    expect(children).to include(app_dir + '.test-file')
    expect(children).to include(app_dir + 'test-file')
    expect(children).not_to include(app_dir + '.ignore-directory')
    expect(children).not_to include(app_dir + 'ignore-directory')
    expect(children).not_to include(app_dir + '.ignore-file')
    expect(children).not_to include(app_dir + 'ignore-file')
  end

  it 'returns a parsed version of VCAP_SERVICES as services' do
    expect(application.services.find_service(/test-service/)).to be
  end
end
|
#!/usr/bin/env bash
# Build the crank-agent image for the host CPU architecture.
# Fixes: fail fast on errors and quote the expansion so an unexpected
# multi-word `uname -p` result cannot word-split the docker invocation.
set -euo pipefail

cpuname=$(uname -p)
docker build -t crank-agent --build-arg CPUNAME="$cpuname" -f Dockerfile ../../
|
# Example: request a strategy quote for a covered call on MSFT via the
# Questrade API wrapper.
import iqtrade.api as iq

# Credentials/refresh token are read from the local secrets file.
qt = iq.QuestradeIQ("secrets.json")
# Resolve the underlying and the call option to their symbol ids.
msft = qt.get_tickers("MSFT")
msft_call = qt.get_tickers("MSFT15Oct21C300.00")
# One covered-call variant: long 200 shares, short 2 call contracts.
variants = [
    iq.StrategyVariantRequest(
        1,
        iq.StrategyType.CoveredCall,
        legs=[
            iq.StrategyLeg(msft[0].symbol_id, iq.OrderAction.Buy, 200),
            iq.StrategyLeg(msft_call[0].symbol_id, iq.OrderAction.Sell, 2),
        ],
    )
]
quotes = qt.get_strategy_quotes(variants)
print(quotes[0])
|
# Pester tests for New-AWSVaultAlias.
# NOTE(review): $TestState is shared between the It blocks below, so the
# blocks are order-dependent — earlier blocks populate what later ones use.
Describe "New-AWSVaultAlias" {
    Import-Module -Force .\posh-awsvault.psd1

    # Each case maps a description to the argument list for New-AWSVaultAlias.
    $TestCases = @{
        "When called with only an alias name" = @( "somecommand" )
        "When called with an alias name and a command name" = @( "somealias", "somecommand" )
    }
    foreach($Case in $TestCases.Keys) {
        Context $Case {
            $TestState = @{
                "Alias" = $null
                "Function" = $null
            }
            $TestCaseArguments = $TestCases[$Case]
            $TestAliasName = $TestCaseArguments[0]
            # With a single argument the alias name doubles as the command name.
            $TestCommandName = $TestAliasName
            if($TestCaseArguments.Count -gt 1) {
                $TestCommandName = $TestCaseArguments[1]
            }
            New-AWSVaultAlias @TestCaseArguments
            It "Creates an alias named $TestAliasName" {
                $TestState["Alias"] = Get-Alias $TestAliasName
                $TestState["Alias"] | Should -Not -BeNull
                Write-Debug ($TestState["Alias"] | Out-String)
            }
            It "Points to a function" {
                $TestState["Function"] = Get-Item Function:\$($TestState["Alias"].Definition)
                $TestState["Function"] | Should -Not -BeNull
                Write-Debug ($TestState["Function"] | Out-String)
            }
            # Stub the exec wrapper so invoking the generated function has no
            # side effects; calls are verified via Assert-MockCalled below.
            Mock Invoke-WithAWSVaultExec -ModuleName $TestState["Function"].Module {
                Write-Debug "Invoke-WithAWSVaultExec $($args[0]) $($args[1])"
            }
            It "Calls Invoke-WithAWSVaultExec for command $TestCommandName" {
                &($TestState["Function"].Name)
                Assert-MockCalled Invoke-WithAWSVaultExec -ModuleName $TestState["Function"].Module -ParameterFilter {
                    $CommandName -eq $TestCommandName
                }
            }
            # Clean up the dynamic module created for this alias.
            Remove-Module $TestState["Function"].Module -ErrorAction SilentlyContinue
        }
    }
    Remove-Module posh-awsvault
}
|
package cromwell.engine.workflow.lifecycle.execution.callcaching
import cats.data.Validated._
import cats.syntax.apply._
import cats.syntax.validated._
import cromwell.core.WorkflowId
import cromwell.engine.workflow.lifecycle.execution.callcaching.CallCacheDiffQueryParameter.CallCacheDiffQueryCall
import common.validation.ErrorOr.{ErrorOr, ShortCircuitingFlatMap}
import scala.util.{Failure, Success, Try}
object CallCacheDiffQueryParameter {
  // One side of a call-cache diff request: the workflow, the call's
  // fully-qualified name, and an optional shard index.
  case class CallCacheDiffQueryCall(workflowId: WorkflowId, callFqn: String, jobIndex: Option[Int])

  private def missingWorkflowError(attribute: String) = s"missing $attribute query parameter".invalidNel

  /**
    * Builds a validated diff query out of raw HTTP query parameters.
    * The six fields (workflowA/B, callA/B, indexA/B) are validated
    * independently; mapN accumulates every error instead of failing fast.
    */
  def fromParameters(parameters: Seq[(String, String)]): ErrorOr[CallCacheDiffQueryParameter] = {
    // An absent index parameter is valid (None); a present but non-integer
    // one becomes a validation error carrying the parse failure message.
    def extractIndex(parameter: String): ErrorOr[Option[Int]] = {
      parameters.find(_._1 == parameter) match {
        case Some((_, value)) => Try(value.trim.toInt) match {
          case Success(index) => Option(index).validNel
          case Failure(f) => f.getMessage.invalidNel
        }
        case None => None.validNel
      }
    }

    // A required textual parameter: absence yields a "missing ..." error.
    def extractAttribute(parameter: String): ErrorOr[String] = {
      parameters.find(_._1 == parameter) match {
        case Some((_, value)) => value.validNel
        case None => missingWorkflowError(parameter)
      }
    }

    // Required parameter that must additionally parse as a WorkflowId.
    def validateWorkflowId(parameter: String): ErrorOr[WorkflowId] = for {
      workflowIdString <- extractAttribute(parameter)
      workflowId <- fromTry(Try(WorkflowId.fromString(workflowIdString.trim)))
        .leftMap(_.getMessage)
        .toValidatedNel[String, WorkflowId]
    } yield workflowId

    val workflowAValidation = validateWorkflowId("workflowA")
    val workflowBValidation = validateWorkflowId("workflowB")

    val callAValidation: ErrorOr[String] = extractAttribute("callA").map(_.trim)
    val callBValidation: ErrorOr[String] = extractAttribute("callB").map(_.trim)

    val indexAValidation: ErrorOr[Option[Int]] = extractIndex("indexA")
    val indexBValidation: ErrorOr[Option[Int]] = extractIndex("indexB")

    // Applicative combination: all six validations run, errors accumulate.
    (workflowAValidation,
      callAValidation,
      indexAValidation,
      workflowBValidation,
      callBValidation,
      indexBValidation) mapN { (workflowA, callA, indexA, workflowB, callB, indexB) =>
      CallCacheDiffQueryParameter(
        CallCacheDiffQueryCall(workflowA, callA, indexA),
        CallCacheDiffQueryCall(workflowB, callB, indexB)
      )
    }
  }
}

// The full diff query: the two calls being compared.
case class CallCacheDiffQueryParameter(callA: CallCacheDiffQueryCall, callB: CallCacheDiffQueryCall)
|
#include <bits/stdc++.h>
using namespace std;
// Reads a decimal integer from stdin one character at a time, enforcing a
// strict format: optional single leading '-', no leading zeros, no "-0",
// magnitude bounded to fit in long long, terminated by exactly `endd`.
// Asserts that the final value lies within [l, r].
long long readInt(long long l,long long r,char endd){
    long long x=0;
    int cnt=0;          // digits consumed so far
    int fi=-1;          // first digit seen, or -1 before any digit
    bool is_neg=false;
    while(true){
        char g=getchar();
        if(g=='-'){
            // '-' is only legal before any digit has been read.
            assert(fi==-1);
            is_neg=true;
            continue;
        }
        if('0'<=g && g<='9'){
            x*=10;
            x+=g-'0';
            if(cnt==0){
                fi=g-'0';
            }
            cnt++;
            assert(fi!=0 || cnt==1);        // no leading zeros
            assert(fi!=0 || is_neg==false); // "-0" is rejected
            // keep the digit count within long long's 19-20 digit range
            assert(!(cnt>19 || ( cnt==19 && fi>1) ));
        } else if(g==endd){
            assert(cnt>0);
            if(is_neg){
                x= -x;
            }
            assert(l<=x && x<=r);
            return x;
        } else {
            // any other character (including EOF) is a format violation
            assert(false);
        }
    }
}
// Reads characters up to (and consuming) the delimiter `endd`, asserting
// that the resulting length lies within [l, r].
// Fixed: getchar() returns int; storing it straight into a char made the
// `!= -1` EOF check unreliable on platforms where plain char is unsigned
// (EOF became 255 and the loop could spin appending bogus bytes). Keep the
// result as int, compare against EOF, and narrow only after the check.
string readString(int l,int r,char endd){
    string ret="";
    int cnt=0;
    while(true){
        int g=getchar();
        assert(g!=EOF);
        if(g==endd){
            break;
        }
        cnt++;
        ret+=static_cast<char>(g);
    }
    assert(l<=cnt && cnt<=r);
    return ret;
}
// Convenience wrappers: read a value terminated by a space / a newline.
long long readIntSp(long long l,long long r){
    return readInt(l,r,' ');
}
long long readIntLn(long long l,long long r){
    return readInt(l,r,'\n');
}
string readStringLn(int l,int r){
    return readString(l,r,'\n');
}
string readStringSp(int l,int r){
    return readString(l,r,' ');
}
// mp counts occurrences of each first name within the current test case.
map<string,int> mp;
// a[i]/b[i]: first and last name of the i-th person (1-based; n <= 100,
// so the fixed size of 1000 is ample).
string a[1000],b[1000];
int main()
{
    // For each person, print the first name alone when it is unique within
    // the test case, otherwise disambiguate with the last name.
    int t = readIntLn(1,100);
    while(t--)
    {
        mp.clear();
        int n = readIntLn(1,100);
        for (int i=1;i<=n;i++) {
            a[i]=readStringSp(1,10);
            b[i]=readStringLn(1,10);
            mp[a[i]]++;
        }
        for (int i=1;i<=n;i++)
            if (mp[a[i]]==1)
                cout<<a[i]<<endl;
            else
                cout<<a[i]<<' '<<b[i]<<endl;
    }
    // The input must be fully consumed: expect EOF here.
    assert(getchar()==-1);
}
|
#' QR
#'
#' QR factorization.
#'
#' @param x
#' An fmlmat matrix.
#' @param qr
#' A compact QR factorization, specifically of class \code{qr_fml} - the return
#' of the \code{qr()} function on \code{fmlmat} data.
#' @param complete,Dvec,...
#' Ignored.
#'
#' @aliases qr.Q qr.R
#'
#' @name qr
#' @rdname qr
NULL
# Register the S3 class "qr_fml" so it can appear in S4 method signatures.
setOldClass("qr_fml")

#' @rdname qr
#' @export
qr.fmlmat = function(x, ...)
{
  # Work on a duplicate so the caller's matrix is not overwritten in place.
  x_cp = DATA(x)$dupe()
  qraux = skeleton_vec(x_cp)
  # Compact QR: after this call x_cp holds the packed factors and qraux the
  # auxiliary scalars, mirroring base::qr()'s $qr/$qraux layout.
  linalg_qr(x_cp, qraux)

  # rank/pivot are not computed by this backend; NA keeps the list shape
  # compatible with base::qr() results.
  ret = list(
    qr = wrapfml(x_cp),
    rank = NA,
    qraux = wrapfml(qraux),
    pivot = NA
  )
  class(ret) = "qr_fml"
  ret
}
# Promote base::qr.Q to an S4 generic so a qr_fml method can be attached.
setGeneric(name="qr.Q", useAsDefault=base::qr.Q, package="craze")

#' @rdname qr
#' @export
setMethod("qr.Q", signature(qr="qr_fml"),
  function(qr, complete=FALSE, Dvec)
  {
    # Rebuild the explicit Q factor from the compact factorization.
    # complete/Dvec are accepted for signature compatibility but ignored
    # (see the roxygen block above).
    Q = skeleton_mat(qr$qr)
    work = skeleton_vec(qr$qr)
    linalg_qr_Q(DATA(qr$qr), DATA(qr$qraux), Q, work)
    wrapfml(Q)
  }
)
# Promote base::qr.R to an S4 generic so a qr_fml method can be attached.
setGeneric(name="qr.R", useAsDefault=base::qr.R, package="craze")

#' @rdname qr
#' @export
setMethod("qr.R", signature(qr="qr_fml"),
  function(qr, complete=FALSE)
  {
    # Extract the R factor from the compact QR; `complete` is ignored.
    R = skeleton_mat(qr$qr)
    linalg_qr_R(DATA(qr$qr), R)
    wrapfml(R)
  }
)
|
export default class Campus {
constructor(campus) {
Object.keys(campus).forEach((prop) => {
this[prop] = campus[prop];
});
}
}
|
package main
import (
"encoding/json"
"flag"
"fmt"
"io"
"log"
"os"
"regexp"
"sort"
"strings"
"github.com/santhosh-tekuri/jsonschema/v5"
)
// punctuationRegexp matches every character that is not a word character,
// hyphen, or space; such characters are stripped when building anchors.
var punctuationRegexp = regexp.MustCompile(`[^\w\- ]`)

// gfmHeaderAnchor converts a Markdown header text into a GitHub-style
// "#fragment" anchor: lowercased, punctuation removed, spaces as hyphens.
// ref: https://github.com/gjtorikian/html-pipeline/blob/main/lib/html/pipeline/toc_filter.rb
func gfmHeaderAnchor(header string) string {
	lowered := strings.ToLower(header)
	stripped := punctuationRegexp.ReplaceAllString(lowered, "")
	return "#" + strings.ReplaceAll(stripped, " ", "-")
}
func fprintf(w io.Writer, f string, args ...interface{}) {
_, err := fmt.Fprintf(w, f, args...)
if err != nil {
log.Fatal(err)
}
}
func toJSON(v interface{}) string {
bytes, err := json.Marshal(v)
if err != nil {
log.Fatal(err)
}
return string(bytes)
}
// schemaItems returns the single-schema "items" definition, preferring the
// draft 2020-12 field and falling back to the older Items field when it
// holds exactly one schema (a tuple []*Schema yields nil here).
func schemaItems(schema *jsonschema.Schema) *jsonschema.Schema {
	if schema.Items2020 != nil {
		return schema.Items2020
	}
	if items, ok := schema.Items.(*jsonschema.Schema); ok {
		return items
	}
	return nil
}
// converter renders one compiled JSON schema as Markdown.
type converter struct {
	multiSchema  bool                          // several root schemas share one document
	w            io.Writer                     // destination for the generated Markdown
	rootLocation string                        // location prefix of the schema being rendered
	defs         map[string]*jsonschema.Schema // referenced subschemas, keyed by location
}

// printf writes formatted Markdown to the converter's output.
func (c *converter) printf(f string, args ...interface{}) {
	fprintf(c.w, f, args...)
}
// inlineDef reports whether the schema is simple enough — no docs, no
// composition keywords, no object/array structure, no string facets — to
// be rendered inline where it is referenced instead of as its own section.
func (c *converter) inlineDef(schema *jsonschema.Schema) bool {
	return schema.Description == "" &&
		schema.Title == "" &&
		schema.Format == "" &&
		len(schema.Properties) == 0 &&
		len(schema.AllOf) == 0 &&
		len(schema.AnyOf) == 0 &&
		len(schema.OneOf) == 0 &&
		schema.If == nil &&
		schema.PropertyNames == nil &&
		len(schema.PatternProperties) == 0 &&
		schema.Items == nil &&
		schema.AdditionalItems == nil &&
		len(schema.PrefixItems) == 0 &&
		schema.Items2020 == nil &&
		schema.Contains == nil &&
		schema.Pattern == nil
}
// recordDef registers a referenced subschema that lives under the root
// schema's location, then walks it for further references. Already-seen
// locations are skipped, which also terminates reference cycles.
func (c *converter) recordDef(schema *jsonschema.Schema) {
	if schema != nil && strings.HasPrefix(schema.Location, c.rootLocation) {
		if _, has := c.defs[schema.Location]; !has {
			c.defs[schema.Location] = schema
			c.collectDefs(schema)
		}
	}
}

// recordDefs applies recordDef to each schema in the slice.
func (c *converter) recordDefs(schemas []*jsonschema.Schema) {
	for _, schema := range schemas {
		c.recordDef(schema)
	}
}
// collectDefs walks every keyword of the schema that can hold subschemas
// and records the ones that should become standalone sections. Note the
// asymmetry: schemas owned directly by Properties/PatternProperties are
// walked (collectDefs) rather than recorded, because they are rendered
// inline under their parent's heading.
func (c *converter) collectDefs(schema *jsonschema.Schema) {
	c.recordDef(schema.Ref)
	c.recordDef(schema.RecursiveRef)
	c.recordDef(schema.DynamicRef)
	c.recordDef(schema.Not)
	c.recordDefs(schema.AllOf)
	c.recordDefs(schema.AnyOf)
	c.recordDefs(schema.OneOf)
	c.recordDef(schema.If)
	c.recordDef(schema.Then)
	c.recordDef(schema.Else)
	for _, schema := range schema.Properties {
		c.collectDefs(schema)
	}
	c.recordDef(schema.PropertyNames)
	for _, schema := range schema.PatternProperties {
		c.collectDefs(schema)
	}
	if child, ok := schema.AdditionalProperties.(*jsonschema.Schema); ok {
		c.recordDef(child)
	}
	// Dependencies entries may be either a schema or a property-name list;
	// only the schema form is recorded.
	for _, dep := range schema.Dependencies {
		if schema, ok := dep.(*jsonschema.Schema); ok {
			c.recordDef(schema)
		}
	}
	for _, schema := range schema.DependentSchemas {
		c.recordDef(schema)
	}
	c.recordDef(schema.UnevaluatedProperties)
	// Items may be a single schema or a tuple of schemas, depending on draft.
	switch items := schema.Items.(type) {
	case *jsonschema.Schema:
		c.recordDef(items)
	case []*jsonschema.Schema:
		c.recordDefs(items)
	}
	if child, ok := schema.AdditionalItems.(*jsonschema.Schema); ok {
		c.recordDef(child)
	}
	c.recordDefs(schema.PrefixItems)
	c.recordDef(schema.Items2020)
	c.recordDef(schema.Contains)
	c.recordDef(schema.UnevaluatedItems)
}
// schemaTitle returns the schema's title, falling back to its location in
// backticks when no title is set.
func (c *converter) schemaTitle(schema *jsonschema.Schema) string {
	if schema.Title != "" {
		return schema.Title
	}
	return "`" + schema.Location + "`"
}

// refLink renders a Markdown link to the referenced schema: an in-document
// anchor when the target lives under the root schema, otherwise a link to
// its raw location.
func (c *converter) refLink(ref *jsonschema.Schema) string {
	dest := ref.Location
	if strings.HasPrefix(ref.Location, c.rootLocation) {
		dest = gfmHeaderAnchor(c.schemaTitle(ref))
	}
	return fmt.Sprintf("[%v](%v)", c.schemaTitle(ref), dest)
}

// ref renders a reference either as a link (for schemas that get their own
// section) or inline — following $ref, or showing its constant, enum, or
// type — when the target is trivial enough to inline.
func (c *converter) ref(ref *jsonschema.Schema) string {
	if !c.inlineDef(ref) {
		return c.refLink(ref)
	}
	if ref.Ref != nil {
		return c.ref(ref.Ref)
	}
	if len(ref.Constant) != 0 {
		return c.schemaConstant(ref)
	}
	if len(ref.Enum) != 0 {
		return c.schemaEnum(ref)
	}
	return c.schemaTypes(ref)
}
// schemaTypes renders the schema's "type" values as inline code, joining
// multiple types with " | ". (The previous single-type fast path produced
// exactly what the loop produces for one element, so it was dropped.)
func (c *converter) schemaTypes(schema *jsonschema.Schema) string {
	var sb strings.Builder
	for i, t := range schema.Types {
		if i != 0 {
			fprintf(&sb, " | ")
		}
		fprintf(&sb, "`%v`", t)
	}
	return sb.String()
}

// convertSchemaTypes emits the schema's type list as its own paragraph.
// The previous switch had byte-identical bodies for the one-type and
// many-type cases; a single emptiness check expresses the same behavior.
func (c *converter) convertSchemaTypes(schema *jsonschema.Schema) {
	if len(schema.Types) != 0 {
		c.printf("\n%v\n", c.schemaTypes(schema))
	}
}
// convertSchemaStringValidators emits string-facet constraints (format and
// pattern), when present.
func (c *converter) convertSchemaStringValidators(schema *jsonschema.Schema) {
	if schema.Format != "" {
		c.printf("\nFormat: `%v`\n", schema.Format)
	}
	if schema.Pattern != nil {
		c.printf("\nPattern: `%v`\n", schema.Pattern)
	}
}

// convertSchemaRef emits a link to the schema's $ref target, if any.
func (c *converter) convertSchemaRef(schema *jsonschema.Schema) {
	if schema.Ref != nil {
		c.printf("\n%v\n", c.refLink(schema.Ref))
	}
}

// schemaConstant renders the schema's "const" value as inline JSON.
// Callers must check len(schema.Constant) != 0 first.
func (c *converter) schemaConstant(schema *jsonschema.Schema) string {
	return fmt.Sprintf("`%s`", toJSON(schema.Constant[0]))
}

// convertSchemaConstant emits the "const" value, if any.
func (c *converter) convertSchemaConstant(schema *jsonschema.Schema) {
	if len(schema.Constant) != 0 {
		c.printf("\nConstant: %v\n", c.schemaConstant(schema))
	}
}

// schemaEnum renders the enum values as " | "-separated inline JSON.
func (c *converter) schemaEnum(schema *jsonschema.Schema) string {
	var sb strings.Builder
	for i, v := range schema.Enum {
		if i != 0 {
			sb.WriteString(" | ")
		}
		fprintf(&sb, "`%s`", toJSON(v))
	}
	return sb.String()
}

// convertSchemaEnum emits the enum list, if any.
func (c *converter) convertSchemaEnum(schema *jsonschema.Schema) {
	if len(schema.Enum) != 0 {
		c.printf("\nEnum: %v\n", c.schemaEnum(schema))
	}
}
// convertSchemaLogic emits the schema's boolean-composition keywords
// (allOf/anyOf/oneOf) and its if/then/else clause, when present.
func (c *converter) convertSchemaLogic(schema *jsonschema.Schema) {
	if len(schema.AllOf) != 0 {
		c.printf("\nAll of:\n")
		for _, ref := range schema.AllOf {
			c.printf("- %v\n", c.ref(ref))
		}
	}
	if len(schema.AnyOf) != 0 {
		c.printf("\nAny of:\n")
		// Fixed copy-paste bug: this loop previously iterated schema.AllOf,
		// listing the wrong subschemas under the "Any of" heading.
		for _, ref := range schema.AnyOf {
			c.printf("- %v\n", c.ref(ref))
		}
	}
	if len(schema.OneOf) != 0 {
		c.printf("\nOne of:\n")
		// Fixed copy-paste bug: this loop previously iterated schema.AllOf.
		for _, ref := range schema.OneOf {
			c.printf("- %v\n", c.ref(ref))
		}
	}
	if schema.If != nil {
		c.printf("\nIf %v", c.ref(schema.If))
		if schema.Then != nil {
			c.printf(", then %v", c.ref(schema.Then))
		}
		if schema.Else != nil {
			c.printf(", else %v", c.ref(schema.Else))
		}
		c.printf("\n")
	}
}
// convertSchemaObject emits object-specific constraints: propertyNames,
// additionalProperties, and a sorted "Properties" section with one heading
// per property, marking required ones. Properties whose schema is the
// always-false schema are omitted.
func (c *converter) convertSchemaObject(schema *jsonschema.Schema, level int) {
	if schema.PropertyNames != nil {
		c.printf("\nProperty names: %v\n", c.ref(schema.PropertyNames))
	}
	if additionalProperties, ok := schema.AdditionalProperties.(*jsonschema.Schema); ok {
		c.printf("\nAdditional properties: %v\n", c.ref(additionalProperties))
	}
	// Fast lookup of required property names.
	required := map[string]bool{}
	for _, name := range schema.Required {
		required[name] = true
	}
	properties := make([]string, 0, len(schema.Properties))
	for name, schema := range schema.Properties {
		// Always == false means the property is explicitly disallowed.
		if schema.Always != nil && !*schema.Always {
			continue
		}
		properties = append(properties, name)
	}
	// Sort for deterministic output regardless of map iteration order.
	sort.Strings(properties)
	if len(properties) != 0 {
		c.printf("\n%v Properties\n", strings.Repeat("#", level+1))
		c.printf("\n---\n")
		for _, name := range properties {
			c.printf("\n%s `%s`", strings.Repeat("#", level+2), name)
			if required[name] {
				c.printf(" (_required_)")
			}
			c.printf("\n")
			c.convertSchema(schema.Properties[name], level+2)
			c.printf("\n---\n")
		}
	}
}

// convertSchemaArray emits the single-schema "items" constraint, if any.
func (c *converter) convertSchemaArray(schema *jsonschema.Schema) {
	if items := schemaItems(schema); items != nil {
		c.printf("\nItems: %v\n", c.ref(items))
	}
}
// convertSchema renders one schema at the given heading level, emitting its
// sections in a fixed order: description, types, const, enum, string
// facets, $ref, composition logic, array items, then object properties.
func (c *converter) convertSchema(schema *jsonschema.Schema, level int) {
	if schema.Description != "" {
		c.printf("\n%s\n", schema.Description)
	}
	c.convertSchemaTypes(schema)
	c.convertSchemaConstant(schema)
	c.convertSchemaEnum(schema)
	c.convertSchemaStringValidators(schema)
	c.convertSchemaRef(schema)
	c.convertSchemaLogic(schema)
	c.convertSchemaArray(schema)
	c.convertSchemaObject(schema, level)
}

// convertRootSchema renders a root schema and then every referenced
// subschema that deserves its own section. When several root schemas share
// the document, everything is demoted one heading level. Definitions are
// sorted by title for deterministic output; trivial (inlineable) ones are
// skipped because they were already rendered where referenced.
func (c *converter) convertRootSchema(schema *jsonschema.Schema) {
	c.collectDefs(schema)
	level := 1
	if c.multiSchema {
		level = 2
	}
	c.printf("%s %s\n", strings.Repeat("#", level), c.schemaTitle(schema))
	c.convertSchema(schema, level)
	defs := make([]*jsonschema.Schema, 0, len(c.defs))
	for _, def := range c.defs {
		defs = append(defs, def)
	}
	sort.Slice(defs, func(i, j int) bool {
		return c.schemaTitle(defs[i]) < c.schemaTitle(defs[j])
	})
	for _, def := range defs {
		if !c.inlineDef(def) {
			c.printf("\n%s %s\n", strings.Repeat("#", level+1), c.schemaTitle(def))
			c.convertSchema(def, level+1)
		}
	}
}
// main converts one or more JSON schemas to Markdown on stdout.
// -ids maps schema IDs to local files ("id=path", comma-separated; path "-"
// means stdin); unmapped IDs fall back to the library's default loader.
func main() {
	title := flag.String("title", "", "the top-level title for the output, if any")
	idString := flag.String("ids", "", "a comma-separated list of 'id=path' mappings")
	flag.Parse()

	// By default the single schema is read from stdin under a synthetic ID.
	const rootID = "blob://stdin"
	ids := map[string]string{
		rootID: "-",
	}
	if *idString != "" {
		for _, idm := range strings.Split(*idString, ",") {
			eq := strings.IndexByte(idm, '=')
			if eq == -1 {
				log.Fatalf("invalid 'id=path' mapping '%v'", idm)
			}
			id, path := idm[:eq], idm[eq+1:]
			if id == "" || path == "" {
				log.Fatalf("invalid 'id=path' mapping '%v'", idm)
			}
			ids[id] = path
			// An explicit mapping to "-" takes over stdin, replacing the
			// synthetic default ID.
			if path == "-" {
				delete(ids, rootID)
			}
		}
		if len(ids) > 1 && *title == "" {
			log.Fatal("-title is required if more than one ID is mapped")
		}
	}

	compiler := jsonschema.NewCompiler()
	compiler.ExtractAnnotations = true
	// Resolve mapped IDs from local files (or stdin); delegate everything
	// else to the library's default URL loader.
	compiler.LoadURL = func(s string) (io.ReadCloser, error) {
		if path, ok := ids[s]; ok {
			if path == "-" {
				return os.Stdin, nil
			}
			return os.Open(path)
		}
		return jsonschema.LoadURL(s)
	}

	schemas := make([]*jsonschema.Schema, 0, len(ids))
	for id := range ids {
		schema, err := compiler.Compile(id)
		if err != nil {
			log.Fatal(err)
		}
		schemas = append(schemas, schema)
	}
	// Sort for deterministic output regardless of map iteration order.
	sort.Slice(schemas, func(i, j int) bool { return schemas[i].Location < schemas[j].Location })

	if *title != "" {
		fprintf(os.Stdout, "# %v\n", *title)
	}
	for _, schema := range schemas {
		fprintf(os.Stdout, "\n")
		converter := converter{
			multiSchema:  len(ids) > 1,
			w:            os.Stdout,
			rootLocation: schema.Location,
			defs:         map[string]*jsonschema.Schema{},
		}
		converter.convertRootSchema(schema)
	}
}
|
<?php
// User listing page: requires an authenticated session.
// NOTE(review): UNAME appears to be a constant defined by the surrounding
// app bootstrap (presumably the logged-in username) — confirm.
if (!UNAME) {
    // Fixed header format: the HTTP redirect header is "Location: <url>"
    // (capitalized, with a space); the previous 'location:index.php'
    // relied on lenient clients/servers.
    header('Location: index.php');
    exit();
}
$view->header();
$users = $db->getUsers();
$view->listUsers($users);
$view->footer();
|
// Mocks generated by Mockito 5.1.0 from annotations
// in friends_secrets/test/app/modules/login/domain/usecases/get_logged_user_test.dart.
// Do not manually edit this file.
import 'dart:async' as _i5;
import 'package:dartz/dartz.dart' as _i3;
import 'package:friends_secrets/app/modules/login/domain/entities/logged_user_info.dart'
as _i7;
import 'package:friends_secrets/app/modules/login/domain/errors/errors.dart'
as _i6;
import 'package:friends_secrets/app/modules/login/infra/datasource/login_data_source.dart'
as _i2;
import 'package:friends_secrets/app/modules/login/infra/repositories/login_repository.dart'
as _i4;
import 'package:mockito/mockito.dart' as _i1;
// ignore_for_file: type=lint
// ignore_for_file: avoid_redundant_argument_values
// ignore_for_file: avoid_setters_without_getters
// ignore_for_file: comment_references
// ignore_for_file: implementation_imports
// ignore_for_file: invalid_use_of_visible_for_testing_member
// ignore_for_file: prefer_const_constructors
// ignore_for_file: unnecessary_parenthesis
// ignore_for_file: camel_case_types
// Default stand-in values handed back by stubbed members; Fake subtypes
// satisfy the return types without providing real behavior.
class _FakeLoginDataSource_0 extends _i1.Fake implements _i2.LoginDataSource {}

class _FakeEither_1<L, R> extends _i1.Fake implements _i3.Either<L, R> {}

/// A class which mocks [LoginRepositoryImpl].
///
/// See the documentation for Mockito's code generation for more information.
/// This generated variant throws when an un-stubbed member is called
/// (throwOnMissingStub).
class MockLoginRepositoryImpl extends _i1.Mock
    implements _i4.LoginRepositoryImpl {
  MockLoginRepositoryImpl() {
    _i1.throwOnMissingStub(this);
  }

  @override
  _i2.LoginDataSource get dataSource =>
      (super.noSuchMethod(Invocation.getter(#dataSource),
          returnValue: _FakeLoginDataSource_0()) as _i2.LoginDataSource);

  @override
  _i5.Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>> loggedUser() =>
      (super.noSuchMethod(Invocation.method(#loggedUser, []),
              returnValue:
                  Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>>.value(
                      _FakeEither_1<_i6.Failure, _i7.LoggedUserInfo>()))
          as _i5.Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>>);

  @override
  _i5.Future<_i3.Either<_i6.Failure, _i3.Unit>> logout() =>
      (super.noSuchMethod(Invocation.method(#logout, []),
              returnValue: Future<_i3.Either<_i6.Failure, _i3.Unit>>.value(
                  _FakeEither_1<_i6.Failure, _i3.Unit>()))
          as _i5.Future<_i3.Either<_i6.Failure, _i3.Unit>>);

  @override
  _i5.Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>> login() =>
      (super.noSuchMethod(Invocation.method(#login, []),
              returnValue:
                  Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>>.value(
                      _FakeEither_1<_i6.Failure, _i7.LoggedUserInfo>()))
          as _i5.Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>>);
}
/// A class which mocks [LoginRepositoryImpl].
///
/// See the documentation for Mockito's code generation for more information.
/// Unlike [MockLoginRepositoryImpl], this variant returns fake default
/// values for un-stubbed members instead of throwing.
class LoginRepositoryMock extends _i1.Mock implements _i4.LoginRepositoryImpl {
  @override
  _i2.LoginDataSource get dataSource =>
      (super.noSuchMethod(Invocation.getter(#dataSource),
          returnValue: _FakeLoginDataSource_0()) as _i2.LoginDataSource);

  @override
  _i5.Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>> loggedUser() =>
      (super.noSuchMethod(Invocation.method(#loggedUser, []),
              returnValue:
                  Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>>.value(
                      _FakeEither_1<_i6.Failure, _i7.LoggedUserInfo>()))
          as _i5.Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>>);

  @override
  _i5.Future<_i3.Either<_i6.Failure, _i3.Unit>> logout() =>
      (super.noSuchMethod(Invocation.method(#logout, []),
              returnValue: Future<_i3.Either<_i6.Failure, _i3.Unit>>.value(
                  _FakeEither_1<_i6.Failure, _i3.Unit>()))
          as _i5.Future<_i3.Either<_i6.Failure, _i3.Unit>>);

  @override
  _i5.Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>> login() =>
      (super.noSuchMethod(Invocation.method(#login, []),
              returnValue:
                  Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>>.value(
                      _FakeEither_1<_i6.Failure, _i7.LoggedUserInfo>()))
          as _i5.Future<_i3.Either<_i6.Failure, _i7.LoggedUserInfo>>);
}
|
package notifier
import (
"fmt"
"github.com/PagerDuty/go-pagerduty"
"github.com/elsevier-core-engineering/replicator/logging"
)
// PagerDutyProvider contains the required configuration to send PagerDuty
// notifications.
type PagerDutyProvider struct {
	// config holds provider settings; "PagerDutyServiceKey" is the only
	// key read by this provider.
	config map[string]string
}

// Name returns the name of the notification endpoint in a lowercase, human
// readable format.
func (p *PagerDutyProvider) Name() string {
	return "pagerduty"
}

// NewPagerDutyProvider creates the PagerDuty notification provider.
// The error result is always nil; it exists to match the common
// constructor shape for Notifier providers.
func NewPagerDutyProvider(c map[string]string) (Notifier, error) {
	p := &PagerDutyProvider{
		config: c,
	}
	return p, nil
}
// SendNotification will send a notification to PagerDuty using the Event
// library call to create a new incident. Errors are logged, not returned.
func (p *PagerDutyProvider) SendNotification(message FailureMessage) {
	// Format the incident description from the failure's identifying fields.
	d := fmt.Sprintf("%s %s_%s_%s",
		message.AlertUID, message.ClusterIdentifier, message.Reason,
		message.ResourceID)

	// Setup the PagerDuty event structure which will then be used to trigger
	// the event call.
	event := pagerduty.Event{
		ServiceKey:  p.config["PagerDutyServiceKey"],
		Type:        "trigger",
		Description: d,
		Details:     message,
	}

	resp, err := pagerduty.CreateEvent(event)
	if err != nil {
		logging.Error("notifier/pagerduty: an error occurred creating the PagerDuty event: %v", err)
		return
	}

	// Fixed typo in the log message ("triggerd" -> "triggered").
	logging.Info("notifier/pagerduty: incident %s has been triggered", resp.IncidentKey)
}
|
package testitemrendering.items;
import testitemrendering.TestItemRenderingMod;
import net.minecraft.client.renderer.texture.IIconRegister;
import net.minecraft.creativetab.CreativeTabs;
import net.minecraft.item.Item;
// The Lampshade demonstrates how an Item can be rendered in 3D using a custom renderer implementing IItemRenderer
public class ItemLampshade extends Item {
    public ItemLampshade() {
        setMaxStackSize(64);
        setCreativeTab(CreativeTabs.tabMisc);
        setUnlocalizedName("lampShade");
    }

    // this icon is never used because the custom ItemLampshadeRenderer is invoked instead
    // (the ":Error" texture name suggests a fallback would be visibly broken in-game;
    // NOTE(review): that rationale is inferred from the name -- confirm)
    @Override
    public void registerIcons(IIconRegister iconRegister)
    {
        itemIcon = iconRegister.registerIcon(TestItemRenderingMod.MODID+":Error");
    }

    // Lampshade uses the Block texture sheet for rendering instead of the Item texture sheet
    @Override
    public int getSpriteNumber() {
        return 0;
    }
}
|
#!/bin/bash
# Dump fatcat entities for buckets 1..660, keep only lines starting with
# 'f', and append them all to ./files.
# Fixed shebang: {1..660} is a bash brace expansion; under a strict POSIX
# /bin/sh the loop would run once with the literal string "{1..660}".
for i in {1..660}
do
    fatcat dump -L "$i" 2>/dev/null | grep '^f' >> files
done
|
;;
;; Licensed to the Apache Software Foundation (ASF) under one or more
;; contributor license agreements. See the NOTICE file distributed with
;; this work for additional information regarding copyright ownership.
;; The ASF licenses this file to You under the Apache License, Version 2.0
;; (the "License"); you may not use this file except in compliance with
;; the License. You may obtain a copy of the License at
;;
;; http://www.apache.org/licenses/LICENSE-2.0
;;
;; Unless required by applicable law or agreed to in writing, software
;; distributed under the License is distributed on an "AS IS" BASIS,
;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;; See the License for the specific language governing permissions and
;; limitations under the License.
;;
(ns bert.util
(:require [clojure.java.io :as io]
[clojure.string :as string]
[cheshire.core :as json]))
(defn break-out-punctuation
  "Splits token `s` on the punctuation character `str-match`, keeping the
  punctuation as its own token. A `<punc>` sentinel is appended before the
  split so trailing punctuation survives, then each sentinel is rewritten
  back to the punctuation character."
  [s str-match]
  (->> (string/split (str s "<punc>") (re-pattern (str "\\" str-match)))
       (map #(string/replace % "<punc>" str-match))))

(defn break-out-punctuations
  "Splits `s` on the first punctuation mark found in it ([.,?!]);
  returns [s] unchanged when it contains none."
  [s]
  (if-let [target-char (first (re-seq #"[.,?!]" s))]
    (break-out-punctuation s target-char)
    [s]))

(defn tokenize
  "Whitespace-tokenizes `s`, separating punctuation into its own tokens."
  [s]
  (->> (string/split s #"\s+")
       (mapcat break-out-punctuations)
       (into [])))

(defn pad
  "Right-pads the vector `tokens` with `pad-item` up to length `num`;
  returns it unchanged when already long enough."
  [tokens pad-item num]
  (if (>= (count tokens) num)
    tokens
    (into tokens (repeat (- num (count tokens)) pad-item))))
(defn get-vocab
  "Loads the vocabulary from data/vocab.json as a map with
  :idx->token and :token->idx entries."
  []
  (let [vocab (json/parse-stream (io/reader "data/vocab.json"))]
    {:idx->token (get vocab "idx_to_token")
     :token->idx (get vocab "token_to_idx")}))

(defn tokens->idxs
  "Maps tokens to vocabulary indices, substituting the [UNK] index for
  unknown tokens."
  [token->idx tokens]
  (let [unk-idx (get token->idx "[UNK]")]
    (mapv #(get token->idx % unk-idx) tokens)))

(defn idxs->tokens
  "Maps vocabulary indices back to tokens (nil for unknown indices)."
  [idx->token idxs]
  (mapv #(get idx->token %) idxs))
|
package me.starchaser.SQLMamager;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.logging.Level;
import me.starchaser.KPManager;
import me.starchaser.karenprotect;
/**
 * Base class for the plugin's SQL storage backends. Subclasses provide the
 * concrete connection and schema loading.
 */
public abstract class Database {
    karenprotect plugin;
    Connection connection;
    KPManager kpManager;
    public int tokens = 0;

    public Database(karenprotect instance, KPManager kpManager){
        plugin = instance;
        this.kpManager = kpManager;
    }

    /** Opens (or returns) the backend-specific SQL connection. */
    public abstract Connection getSQLConnection();

    /** Creates/loads the backend-specific schema. */
    public abstract void load();

    /**
     * Opens the connection and probes both plugin tables so a broken
     * database surfaces immediately at startup.
     */
    public void initialize(){
        connection = getSQLConnection();
        try{
            // Fixed resource leak: the KPBlocks probe previously discarded
            // its PreparedStatement/ResultSet without closing them.
            PreparedStatement blocksPs = connection.prepareStatement("SELECT * FROM `KPBlocks`");
            ResultSet blocksRs = blocksPs.executeQuery();
            close(blocksPs, blocksRs);
            // NOTE(review): table name `KPPLayers` (double P) looks like a
            // typo for `KPPlayers`, but it must match the name used by the
            // table-creation code -- confirm before renaming.
            PreparedStatement ps = connection.prepareStatement("SELECT * FROM `KPPLayers`");
            ResultSet rs = ps.executeQuery();
            close(ps,rs);
        } catch (SQLException ex) {
            // Fixed typo in the log message ("retreive" -> "retrieve").
            plugin.getLogger().log(Level.SEVERE, "KarenProtect²: Unable to retrieve connection", ex);
        }
    }

    /**
     * Closes the given statement and result set, logging (but not
     * propagating) any failure.
     */
    public void close(PreparedStatement ps,ResultSet rs){
        try {
            if (ps != null)
                ps.close();
            if (rs != null)
                rs.close();
        } catch (SQLException ex) {
            ex.printStackTrace();
            kpManager.Log(false,true,"Error on close sql connection!");
        }
    }
}
|
package leveldb
import (
"io/ioutil"
"os"
"path/filepath"
"github.com/motif-foundation/lachesis-base/kvdb"
)
// Producer creates and enumerates LevelDB databases stored under a single
// data directory (one subdirectory per database).
type Producer struct {
	datadir  string
	getCache func(string) int // cache size to use for a database, by name
}

// NewProducer of level db.
func NewProducer(datadir string, getCache func(string) int) kvdb.IterableDBProducer {
	return &Producer{
		datadir:  datadir,
		getCache: getCache,
	}
}
// Names of existing databases. Every immediate subdirectory of the data
// directory counts as one database; panics if the directory is unreadable.
func (p *Producer) Names() []string {
	var names []string
	files, err := ioutil.ReadDir(p.datadir)
	if err != nil {
		panic(err)
	}
	for _, f := range files {
		// Plain files in the data directory are not databases.
		if !f.IsDir() {
			continue
		}
		names = append(names, f.Name())
	}
	return names
}
// OpenDB or create db with name. The database directory is created with
// owner-only permissions; dropping the returned store deletes it from disk.
func (p *Producer) OpenDB(name string) (kvdb.DropableStore, error) {
	path := p.resolvePath(name)
	err := os.MkdirAll(path, 0700)
	if err != nil {
		return nil, err
	}
	// Invoked by the store on drop: wipe the on-disk files.
	onDrop := func() {
		_ = os.RemoveAll(path)
	}
	db, err := New(path, p.getCache(name), 0, nil, onDrop)
	if err != nil {
		return nil, err
	}
	return db, nil
}

// resolvePath returns the on-disk directory for the named database.
func (p *Producer) resolvePath(name string) string {
	return filepath.Join(p.datadir, name)
}
|
// <copyright file="ICommunicationExceptionHandler.cs" company="Dark Bond, Inc.">
// Copyright © 2016-2017 - Dark Bond, Inc. All Rights Reserved.
// </copyright>
// <author>Donald Roy Airey</author>
namespace DarkBond
{
    using System;

    /// <summary>
    /// Provides a handler for communication exceptions.
    /// </summary>
    public interface ICommunicationExceptionHandler
    {
        /// <summary>
        /// Handler for communication exceptions.
        /// </summary>
        /// <param name="exception">The exception that occurred during the operation.</param>
        /// <param name="operation">The operation where the exception occurred.</param>
        /// <returns>True indicates the operation should not be retried, false indicates it should.</returns>
        /// <remarks>
        /// The return value acts as a stop flag: <c>true</c> stops retrying,
        /// <c>false</c> requests another attempt of the named operation.
        /// </remarks>
        bool HandleException(Exception exception, string operation);
    }
}
|
#pragma once
namespace cgb
{
	/** @brief Shader source information and shader loading options
	 *
	 *	Bit flags describing where a shader's source comes from and how it is
	 *	loaded; values are combined with the bitwise operators defined below.
	 *	This information is important especially for shader hot reloading.
	 */
	enum struct shader_source_info : uint32_t
	{
		nothing = 0x0000,
		/** Shader source is loaded from a file */
		from_file = 0x0001,
		/** Shader source is loaded from memory (a string most likely) */
		from_memory = 0x0002,
		/** Load the shader and append a new-line to the source */
		append_newline = 0x0004,
	};

	/** Bitwise OR of two flag sets. */
	inline shader_source_info operator| (shader_source_info a, shader_source_info b)
	{
		using U = std::underlying_type<shader_source_info>::type;
		return static_cast<shader_source_info>(static_cast<U>(a) | static_cast<U>(b));
	}

	/** Bitwise AND of two flag sets. */
	inline shader_source_info operator& (shader_source_info a, shader_source_info b)
	{
		using U = std::underlying_type<shader_source_info>::type;
		return static_cast<shader_source_info>(static_cast<U>(a) & static_cast<U>(b));
	}

	/** In-place OR. */
	inline shader_source_info& operator |= (shader_source_info& a, shader_source_info b)
	{
		a = a | b;
		return a;
	}

	/** In-place AND. */
	inline shader_source_info& operator &= (shader_source_info& a, shader_source_info b)
	{
		a = a & b;
		return a;
	}
}
|
-- | Regression test module for GHC ticket #13417.
module T13417 where

-- Amazingly this crashed GHC 8.0.2

-- | Single-constructor datatype with a phantom type parameter @a@.
data T a = E7

-- | Matching on the only constructor; the phantom index changes from @a@ to @b@.
cons7 :: T a -> T b
cons7 E7 = E7
|
#:title: Divine deployment: primer-ssh
#:author: Grove Pyree
#:email: grayarea@protonmail.ch
#:revdate: 2020.03.16
#:revremark: Update SSH primer description
#:created_at: 2019.06.30

# Deployment metadata consumed by the Divine framework
D_DPL_NAME='primer-ssh'
D_DPL_DESC='[Install-only] Collection of SSH tasks for initial set-up'
D_DPL_PRIORITY=5000
# NOTE(review): '!' flag semantics are framework-defined — confirm meaning
D_DPL_FLAGS=!
D_DPL_WARNING=
D_DPL_OS=( any )

# Storage variables (defined here merely as table of content)
# Path of this deployment's config file inside its asset directory
D_PRIMER_CFG_FILEPATH="$D__DPL_ASSET_DIR/$D_DPL_NAME.cfg.sh"
# Names of detected GnuPG/SSH executables; populated by d_dpl_check
D_GPG_CMD=
D_SSH_CMD=
# Pre-flight check: detects gpg/ssh executables, sources the deployment's
# config file, and validates the configured can-file and SSH-directory paths.
# Returns 0 when the deployment is applicable, 3 on any irrecoverable problem.
d_dpl_check()
{
  # Detect gnupg executable (preference order: gpg2, gpg, gpg1)
  if gpg2 --version &>/dev/null; then D_GPG_CMD=gpg2
  elif gpg --version &>/dev/null; then D_GPG_CMD=gpg
  elif gpg1 --version &>/dev/null; then D_GPG_CMD=gpg1
  else
    d__notify -ls -- 'GnuPG executable not found (used for can file encryption)'
    return 3
  fi
  # Detect ssh executable
  if ssh -V &>/dev/null; then D_SSH_CMD=ssh
  else
    d__notify -ls -- 'SSH executable not found'
    return 3
  fi
  # Check if cfg file is available
  [ -r "$D_PRIMER_CFG_FILEPATH" -a -f "$D_PRIMER_CFG_FILEPATH" ] || {
    d__notify -ls -- 'Configuration file is not readable at:' \
      -i- "$D_PRIMER_CFG_FILEPATH"
    return 3
  }
  # Source cfg file (expected to define D_SSH_CAN_LOCATION and D_SSH_DIR)
  source "$D_PRIMER_CFG_FILEPATH"
  # Check if can path is provided
  [ -n "$D_SSH_CAN_LOCATION" ] || {
    d__notify -ls -- 'Can path not provided ($D_SSH_CAN_LOCATION)'
    return 3
  }
  # Check if can path is for some reason a directory (not acceptable)
  [ -d "$D_SSH_CAN_LOCATION" ] && {
    d__notify -ls -- 'Path to can file is a directory:' \
      -i- "$D_SSH_CAN_LOCATION"
    return 3
  }
  # Check if SSH directory is provided
  [ -n "$D_SSH_DIR" ] || {
    d__notify -ls -- 'Path to SSH working directory not provided ($D_SSH_DIR)'
    return 3
  }
  # SSH directory checks (only relevant when the directory already exists)
  [ -d "$D_SSH_DIR" ] && {
    # Resolve the physical path; the command substitution's exit status
    # doubles as the accessibility check
    local ssh_dir_true_path
    ssh_dir_true_path="$( cd -- "$D_SSH_DIR" && pwd -P || exit $?)"
    # Check if trick worked
    [ $? -eq 0 ] || {
      d__notify -ls -- 'SSH working directory is inaccessible'
      return 3
    }
    # Refuse to operate on the filesystem root
    [[ $ssh_dir_true_path == '/' ]] && {
      d__notify -ls -- 'SSH working directory is root (not acceptable)'
      return 3
    }
  }
  # Check if SSH path exists but is not a directory (not acceptable)
  [ -e "$D_SSH_DIR" -a ! -d "$D_SSH_DIR" ] && {
    d__notify -ls -- 'Path to SSH working directory is not a directory:' \
      -i- "$D_SSH_DIR"
    return 3
  }
  # Check if parent path of SSH directory is a writable directory
  local ssh_dir_parent
  ssh_dir_parent="$( dirname -- "$D_SSH_DIR" )"
  [ -d "$ssh_dir_parent" -a -w "$ssh_dir_parent" ] || {
    d__notify -ls -- 'SSH directory is located in non-writable directory:' \
      -i "$ssh_dir_parent"
    return 3
  }
  # Make no assumptions about installation status
  return 0
}
# Interactive installer: shows detected tool versions, assembles the list of
# routines applicable to the current state of the SSH dir / can file, prompts
# the user for a choice, runs it, and maps the routine's exit status onto the
# framework's return codes (0 ok / 1 failed / 2 skipped).
d_dpl_install()
{
  # Array of available routines
  local routines_available=()
  # Print status and assemble available routines along the way
  # (fixed: format previously was '\n%s %s %s\n' with only two arguments,
  # leaving an empty conversion and a trailing space)
  printf >&2 '\n%s %s\n' \
    "${BOLD}${YELLOW}==>${NORMAL}" \
    'Working with:'
  printf >&2 '%s: %s (%s)\n' \
    'GnuPG version ' \
    "${BOLD}$( $D_GPG_CMD --version | head -1 )${NORMAL}" \
    "$( command -v $D_GPG_CMD )"
  printf >&2 '%s: %s (%s)\n' \
    'SSH version ' \
    "${BOLD}$( $D_SSH_CMD -V 2>&1 | head -1 )${NORMAL}" \
    "$( command -v $D_SSH_CMD )"
  # Checks on SSH directory
  printf >&2 '%s: %s\n' \
    'SSH dir ' \
    "${BOLD}${D_SSH_DIR}${NORMAL}"
  # Check if ssh directory exists and is not empty
  if [ -n "$( ls -A "$D_SSH_DIR" 2>/dev/null )" ]; then
    # SSH directory exists and is not empty: offer packing and erasing
    printf >&2 '%s: %s\n' 'SSH dir status ' 'exists and contains files'
    routines_available+=( d_task__pack_into_can )
    routines_available+=( d_task__erase_ssh_dir )
  else
    # SSH directory either does not exist or is empty
    [ -d "$D_SSH_DIR" ] \
      && printf >&2 '%s: %s\n' 'SSH dir status ' 'empty' \
      || printf >&2 '%s: %s\n' 'SSH dir status ' \
        "does ${BOLD}not${NORMAL} yet exist"
  fi
  # Checks on can file
  printf >&2 '%s: %s\n' \
    'Can file ' \
    "${BOLD}${D_SSH_CAN_LOCATION}${NORMAL}"
  # Check if can file exists
  if [ -r "$D_SSH_CAN_LOCATION" ]; then
    # Can file exists: offer extraction
    printf >&2 '%s: %s\n\n' 'Can status ' 'exists'
    routines_available+=( d_task__extract_from_can )
  else
    # Can file does not exist
    printf >&2 '%s: %s\n\n' 'Can status ' \
      "does ${BOLD}not${NORMAL} yet exist"
  fi
  # Offer to do nothing
  routines_available+=( d_task__do_nothing )
  # List available tasks (same printf format fix as above)
  printf >&2 '%s %s\n' \
    "${BOLD}${YELLOW}==>${NORMAL}" \
    'Choose routine to perform:'
  local i
  for (( i=0; i<${#routines_available[@]}; i++ )); do
    # Print 1-based menu number with routine title, then its description
    printf '%s %s\n' \
      "${BOLD}($(( i+1 )))${NORMAL}" \
      "${BOLD}$( ${routines_available[$i]} --title )${NORMAL}"
    ${routines_available[$i]} --desc
    # Print separating newline
    printf '\n'
  done
  # Prompt for a single digit until it is a valid 1-based menu number,
  # then convert it to a zero-based array index
  local input
  printf '%s %s ' 'Your choice' "[1-${#routines_available[@]}]:"
  while true; do
    read -rsn1 input
    if [[ $input =~ ^[0-9]$ ]]; then
      (( input>=1 && input<=${#routines_available[@]} )) \
        && { printf '%s' "$input"; (( input-- )); break; }
    fi
  done
  printf '\n'
  # Store task title for status reporting
  local task_title
  task_title="$( ${routines_available[$input]} --title )"
  # Announce selected routine
  printf >&2 '\n%s %s\n' "${BOLD}${GREEN}==>${NORMAL}" \
    'Commencing task:'
  printf >&2 ' %s\n' "$task_title"
  # Execute routine
  ${routines_available[$input]}
  # Map the routine status: 0 success, 1 failure, 2 declined, 3 aborted
  case $? in
    0)
      printf >&2 '\n%s %s\n' "${BOLD}${GREEN}==>${NORMAL}" \
        'Task completed successfully:'
      printf >&2 ' %s\n\n' "$task_title"
      return 0
      ;;
    1)
      printf >&2 '\n%s %s\n' "${BOLD}${RED}==>${NORMAL}" \
        'Task failed:'
      printf >&2 ' %s\n\n' "$task_title"
      return 1
      ;;
    2)
      printf >&2 '\n%s %s\n\n' "${BOLD}${WHITE}==>${NORMAL}" \
        'Come back when you grow a pair'
      return 2
      ;;
    3)
      printf >&2 '\n%s %s\n' "${BOLD}${WHITE}==>${NORMAL}" \
        'Task aborted:'
      printf >&2 ' %s\n\n' "$task_title"
      return 2
      ;;
  esac
}
# Removal is intentionally unsupported; always reports 'skipped' (status 2).
d_dpl_remove()
{
  d__notify -ls -- 'This deployment is install-only'
  return 2
}
# Menu routine: wipe the SSH directory, then decrypt the can file into it.
# With '--title' or '--desc' it only prints menu metadata and returns.
d_task__extract_from_can()
{
  # Special mode: print title and return
  [ "$1" = --title ] && {
    printf '%s\n' 'Extract all SSH data from can file'
    return 0
  }
  # Special mode: print description and return
  [ "$1" = --desc ] && {
    cat <<EOF
Pre-erase directory '$D_SSH_DIR';
Decrypt can file and put all its content in '$D_SSH_DIR'.
EOF
    return 0
  }
  # Perform sub-tasks in sequence; bail out on first failure
  d_subtask__erase_ssh_dir \
    && d_subtask__extract_from_can
}
# Menu routine: encrypt the SSH directory's content into the can file.
# The '--title' / '--desc' hooks only emit menu metadata.
d_task__pack_into_can()
{
  if [ "$1" = --title ]; then
    printf '%s\n' 'Package all SSH data into can file'
    return 0
  fi
  if [ "$1" = --desc ]; then
    cat <<EOF
Encrypt content of '$D_SSH_DIR' into can file;
Overwrite existing can file.
EOF
    return 0
  fi
  # Delegate the actual work to the sub-task implementation
  d_subtask__pack_into_can
}
# Menu routine: erase the SSH working directory without backup.
# With '--title' or '--desc' it only prints menu metadata and returns.
d_task__erase_ssh_dir()
{
  # Special mode: print title and return
  [ "$1" = --title ] && {
    printf '%s\n' \
      'Erase SSH directory'
    return 0
  }
  # Special mode: print description and return
  [ "$1" = --desc ] && {
    cat <<EOF
Erase directory '$D_SSH_DIR' ${BOLD}WITHOUT BACKING UP${NORMAL}.
EOF
    return 0
  }
  # Perform sub-task
  d_subtask__erase_ssh_dir
}
# Menu routine: no-op escape hatch; reports 'skipped' (status 2) to the
# installer. '--title' / '--desc' only print menu metadata.
d_task__do_nothing()
{
  # Special mode: print title and return
  [ "$1" = --title ] && {
    printf '%s\n' \
      'Do nothing'
    return 0
  }
  # Special mode: print description and return
  [ "$1" = --desc ] && {
    cat <<EOF
Pussy option.
EOF
    return 0
  }
  # Return status of the main routine (skip status)
  return 2
}
# Removes the SSH working directory entirely (no backup).
# $1 (optional): overrides the text shown in the next_up confirmation prompt.
# Returns 0 on success (or when there is nothing to erase), 1 on rm failure,
# 3 when the user aborts at the next_up prompt.
d_subtask__erase_ssh_dir()
{
  # Check if there is anything to erase
  if [ ! -e "$D_SSH_DIR" ]; then
    # All good
    return 0
  fi
  # Report start
  printf >&2 '\n%s %s\n' \
    "${BOLD}${YELLOW}==>${NORMAL}" \
    'Erasing SSH directory at:'
  printf >&2 ' %s\n' "$D_SSH_DIR"
  # Warn user of next destructive step
  # (Optionally take description from caller; `shift` is a harmless no-op
  # failure when the function was called without arguments)
  local prompt_desc="$1"; shift
  [ -n "$prompt_desc" ] || prompt_desc='erasing of SSH dir'
  next_up "$prompt_desc" || return $?
  rm -rf -- "$D_SSH_DIR" || {
    # Report failure and return
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'Failed to erase SSH directory at'
    printf >&2 ' %s\n' "$D_SSH_DIR"
    return 1
  }
  # Report success
  printf >&2 '\n%s %s\n' \
    "${BOLD}${GREEN}==>${NORMAL}" \
    'Successfully erased SSH directory at:'
  printf >&2 ' %s\n' "$D_SSH_DIR"
}
# Decrypts the can file with gpg and untars its content into a freshly
# created SSH directory (mode 0700).
# $1 (optional): overrides the text shown in the next_up passphrase warning.
# Returns 0 on success, 1 on any failure, 3 when the user aborts.
d_subtask__extract_from_can()
{
  # Report start
  printf >&2 '\n%s %s\n' \
    "${BOLD}${YELLOW}==>${NORMAL}" \
    'Unpacking can file at:'
  printf >&2 ' %s\n' "$D_SSH_CAN_LOCATION"
  printf >&2 '%s\n' 'to SSH directory at:'
  printf >&2 ' %s\n' "$D_SSH_DIR"
  # Ensure can file is readable
  [ -r "$D_SSH_CAN_LOCATION" ] || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'Can file is missing/unreadable'
    return 1
  }
  # Ensure tar is available
  tar --version &>/dev/null || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'tar executable not found'
    return 1
  }
  # Create SSH directory
  mkdir -p -- "$D_SSH_DIR" || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'Failed to create fresh SSH directory'
    return 1
  }
  # Restrict SSH directory to the owner (required by ssh)
  chmod 0700 "$D_SSH_DIR" || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'Failed to change permissions of SSH directory'
    return 1
  }
  # Storage variables
  local gpg_opts=() can_name
  # Extract can name
  can_name="$( basename -- "$D_SSH_CAN_LOCATION" )"
  # Populate gpg options
  # Feature-detect --no-symkey-cache by probing --version with the flag;
  # don't cache passphrase (relevant for gpg2 only)
  $D_GPG_CMD --no-symkey-cache --version &>/dev/null \
    && gpg_opts+=(--no-symkey-cache)
  # Other options
  gpg_opts+=( \
    --quiet \
    --cipher-algo AES256 \
    --decrypt "$D_SSH_CAN_LOCATION" \
  )
  # Warn user of next passphrase prompt
  # (Optionally take description from caller)
  local prompt_desc="$1"; shift
  [ -n "$prompt_desc" ] || prompt_desc='can file passphrase (current)'
  next_up "$prompt_desc" || return $?
  # Carry out command
  $D_GPG_CMD "${gpg_opts[@]}" 2>/dev/null \
    | tar -C "$D_SSH_DIR" -xf -
  # Check status
  # NOTE(review): without pipefail, $? reflects only tar (the last command in
  # the pipeline); a gpg failure surfaces only indirectly when tar chokes on
  # the empty/truncated stream — confirm whether pipefail is set elsewhere
  [ $? -eq 0 ] || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      "Failed to decrypt and untar can file '$can_name'"
    return 1
  }
  # Report success
  printf >&2 '\n%s %s\n' \
    "${BOLD}${GREEN}==>${NORMAL}" \
    'Successfully unpacked can file at:'
  printf >&2 ' %s\n' "$D_SSH_CAN_LOCATION"
  printf >&2 '%s\n' 'to SSH directory at:'
  printf >&2 ' %s\n' "$D_SSH_DIR"
}
# Tars the content of the SSH directory and symmetrically encrypts it into
# the can file, prompting before overwriting an existing can.
# $1 (optional): overrides the text shown in the next_up passphrase warning.
# Returns 0 on success, 1 on any failure, 3 when the user aborts.
d_subtask__pack_into_can()
{
  # Report start
  printf >&2 '\n%s %s\n' \
    "${BOLD}${YELLOW}==>${NORMAL}" \
    'Packing content of SSH directory at:'
  printf >&2 ' %s\n' "$D_SSH_DIR"
  printf >&2 '%s\n' 'into can file at:'
  printf >&2 ' %s\n' "$D_SSH_CAN_LOCATION"
  # Ensure required tools are available
  # (fixed: these checks previously printed an error but fell through and
  # kept going; they now abort the sub-task, matching the equivalent checks
  # in d_subtask__extract_from_can)
  tar --version &>/dev/null || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'tar executable not found'
    return 1
  }
  command -v sed &>/dev/null || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'sed executable not found'
    return 1
  }
  command -v find &>/dev/null || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'find executable not found'
    return 1
  }
  command -v grep &>/dev/null || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'grep executable not found'
    return 1
  }
  # Check that SSH directory exists and is not empty
  [ -d "$D_SSH_DIR" -a -n "$( ls -A "$D_SSH_DIR" )" ] || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      'Missing or empty SSH directory at:'
    printf >&2 ' %s\n' "$D_SSH_DIR"
    return 1
  }
  # Check if any of the files in SSH directory contain '|' in their path
  # (This is a no-go for the '|'-delimited sed expression used below)
  grep -q '|' < <( find "$D_SSH_DIR" -mindepth 1 ) && {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      "Some files in SSH directory contain '|' (vertical bar) in their path"
    printf >&2 '%s\n' 'Unfortunately, that is not supported'
    return 1
  }
  # Check if can file already exists
  if [ -e "$D_SSH_CAN_LOCATION" ]; then
    # User approval required before overwriting
    printf >&2 '\n%s %s\n %s\n' \
      "${BOLD}${YELLOW}==>${NORMAL}" \
      "Can file already exists at:" \
      "$D_SSH_CAN_LOCATION"
    # Prompt user
    dprompt 'Overwrite?' || {
      printf >&2 '\n%s %s\n' \
        "${BOLD}${WHITE}==>${NORMAL}" \
        "Aborting task"
      return 3
    }
  else
    # Can file does not exist, parent directory might not either
    local can_parent_dir
    can_parent_dir=$( dirname -- "$D_SSH_CAN_LOCATION" )
    # Check if parent path is a readable directory
    [ -d "$can_parent_dir" -a -r "$can_parent_dir" ] || {
      # Attempt to create parent path, or kill routine
      mkdir -p -- "$can_parent_dir" || {
        printf >&2 '\n%s %s\n %s\n' \
          "${BOLD}${RED}==>${NORMAL}" \
          "Failed to create can file parent directory at:" \
          "$can_parent_dir"
        return 1
      }
    }
  fi
  # Storage variables
  local gpg_opts=() can_name
  # Extract can name
  can_name="$( basename -- "$D_SSH_CAN_LOCATION" )"
  # Populate gpg options, feature-detecting each optional flag by probing
  # --version with the flag present
  # Don't cache passphrase (relevant for gpg2 only)
  $D_GPG_CMD --no-symkey-cache --version &>/dev/null \
    && gpg_opts+=(--no-symkey-cache)
  # Don't add comments/version banner to output
  $D_GPG_CMD --no-comments --version &>/dev/null \
    && gpg_opts+=(--no-comments)
  $D_GPG_CMD --no-emit-version --version &>/dev/null \
    && gpg_opts+=(--no-emit-version)
  # Other options
  gpg_opts+=( \
    --quiet \
    --armor \
    --yes \
    --cipher-algo AES256 \
    --output "$D_SSH_CAN_LOCATION" \
    --symmetric \
  )
  # Warn user of next passphrase prompt
  # (Optionally take description from caller)
  local prompt_desc="$1"; shift
  [ -n "$prompt_desc" ] || prompt_desc='can file passphrase (new)'
  next_up "$prompt_desc" || return $?
  # Find, clean, tar, and encrypt, all in one pipeline
  find "$D_SSH_DIR" -mindepth 1 \
    | sed "s|^$D_SSH_DIR/||" \
    | xargs tar -C "$D_SSH_DIR" -cf - \
    | $D_GPG_CMD "${gpg_opts[@]}" 2>/dev/null
  # Check status
  # NOTE(review): without pipefail, $? reflects only gpg (the last command);
  # find/sed/tar failures are detected only if they starve gpg's input
  [ $? -eq 0 ] || {
    printf >&2 '\n%s %s\n' \
      "${BOLD}${RED}==>${NORMAL}" \
      "Failed to find, clean, tar, and encrypt can file '$can_name'"
    return 1
  }
  # Report success
  printf >&2 '\n%s %s\n' \
    "${BOLD}${GREEN}==>${NORMAL}" \
    'Successfully packed content of SSH directory at:'
  printf >&2 ' %s\n' "$D_SSH_DIR"
  printf >&2 '%s\n' 'into can file at:'
  printf >&2 ' %s\n' "$D_SSH_CAN_LOCATION"
}
# Warns the user about the next prompt/destructive step and waits for a
# single keypress. Returns 0 to proceed, 3 when the user typed 'q',
# 1 when called without a message.
next_up()
{
  # Join all arguments into a single message
  local msg="$*"
  # Early exit for empty message
  [ -n "$msg" ] || return 1
  # Warn user of next event
  printf >&2 '\n%s %s %s\n' \
    "${BOLD}${YELLOW}==>${NORMAL}" \
    "${BOLD}NEXT UP:${NORMAL}" \
    "${BOLD}${REVERSE} ${msg} ${NORMAL}"
  # Read a single silent keypress (note: this prompt goes to stdout, unlike
  # the rest of this script's status output which goes to stderr)
  local input
  printf '%s' "${YELLOW}Press any key to continue (or 'q' to quit)${NORMAL}"
  read -rsn1 input && printf >&2 '\n'
  # 'q' aborts with status 3; any other key proceeds
  if [ "$input" = q ]; then
    printf >&2 '\n%s %s\n' \
      "${BOLD}${WHITE}==>${NORMAL}" \
      "Aborting task"
    return 3
  else
    return 0
  fi
}
|
/*
* Copyright (c) 2016. Sunghyouk Bae <sunghyouk.bae@gmail.com>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
@file:JvmName("IOStreamx")
package debop4k.core.io
import debop4k.core.collections.*
import debop4k.core.loggerOf
import debop4k.core.utils.EMPTY_STRING
import org.eclipse.collections.impl.list.mutable.FastList
import org.springframework.util.FastByteArrayOutputStream
import java.io.*
import java.nio.ByteBuffer
import java.nio.charset.Charset
// Module-level logger for the IOStreamx helpers
private val log = loggerOf("IOStreamx")

// Default block size (8 KiB) for FastByteArrayOutputStream allocations
const val DEFAULT_BLOCK_SIZE = 1024 * 8

/** Empty [InputStream] (freshly created on each access) */
val emptyInputStream: InputStream
    get() = ByteArrayInputStream(emptyByteArray)

/** Empty [OutputStream] (freshly created on each access) */
val emptyOutputStream: FastByteArrayOutputStream
    get() = FastByteArrayOutputStream()

/** Creates a [FastByteArrayOutputStream] using the given [blockSize]. */
@JvmOverloads
fun fastByteArrayOutputStreamOf(blockSize: Int = DEFAULT_BLOCK_SIZE): FastByteArrayOutputStream
    = FastByteArrayOutputStream(blockSize)
/**
 * Reads everything from this [InputStream] and writes it to [output].
 *
 * @param output stream receiving the bytes
 * @param bufferSize size of the intermediate copy buffer
 * @return total number of bytes copied
 */
@JvmOverloads
fun InputStream.copy(output: OutputStream, bufferSize: Int = DEFAULT_BUFFER_SIZE): Long {
    val chunk = ByteArray(bufferSize)
    var total = 0L
    while (true) {
        val n = this.read(chunk, 0, bufferSize)
        if (n <= 0) break
        output.write(chunk, 0, n)
        total += n
    }
    // Deliberately not flushing [output] here (matches original behavior)
    return total
}
/** Decodes this [InputStream] with [cs] and copies it into [writer]; returns chars copied. */
@JvmOverloads
fun InputStream.copy(writer: Writer, cs: Charset = Charsets.UTF_8): Long {
    return this.reader(cs).copy(writer)
}
/**
 * Copy [Reader] data to [Writer].
 *
 * @param writer destination of the copied characters
 * @return total number of characters copied
 */
fun Reader.copy(writer: Writer): Long {
    val buffer = CharArray(DEFAULT_BUFFER_SIZE)
    var readChars = 0L
    while (true) {
        val chars = this.read(buffer, 0, DEFAULT_BUFFER_SIZE)
        // read() returns -1 at end of stream; guard BEFORE writing.
        // (The original passed -1 to write(), which throws
        // IndexOutOfBoundsException, and also added -1 to the count.)
        if (chars <= 0) break
        writer.write(buffer, 0, chars)
        readChars += chars
    }
    return readChars
}
/**
 * Encodes this [Reader]'s characters with [cs] and writes them to [output].
 *
 * @return total number of characters copied
 */
@JvmOverloads
fun Reader.copy(output: OutputStream, cs: Charset = Charsets.UTF_8): Long {
    val writer = OutputStreamWriter(output, cs)
    val count = copy(writer)
    // Flush the writer, not just the underlying stream: OutputStreamWriter
    // buffers characters in its charset encoder, and the original
    // output.flush() left those buffered characters unwritten.
    // (Writer.flush() also flushes the underlying stream.)
    writer.flush()
    return count
}
/**
 * Wraps this [ByteArray] in a buffered [InputStream].
 * Returns the shared empty stream when the array is null or empty.
 */
fun ByteArray?.toInputStream(): InputStream {
    // isNullOrEmpty is a project extension (debop4k.core.collections);
    // presumably null-or-zero-length — `this!!` below relies on that. Confirm.
    if (this.isNullOrEmpty)
        return emptyInputStream
    return BufferedInputStream(ByteArrayInputStream(this!!))
}
/**
 * Builds an [InputStream] over this string encoded with [cs].
 * Null AND blank (whitespace-only) strings both yield the empty stream.
 */
@JvmOverloads
fun String?.toInputStream(cs: Charset = Charsets.UTF_8): InputStream {
    if (this.isNullOrBlank())
        return emptyInputStream
    return this!!.toByteArray(cs).toInputStream()
}
/** Reads this [InputStream] fully into a new [FastByteArrayOutputStream]. */
@JvmOverloads
fun InputStream?.toOutputStream(blockSize: Int = DEFAULT_BLOCK_SIZE): FastByteArrayOutputStream {
    val source = this ?: return emptyOutputStream
    val sink = fastByteArrayOutputStreamOf(blockSize)
    source.copy(sink)
    return sink
}
/** Builds a [FastByteArrayOutputStream] holding this byte array's content. */
@JvmOverloads
fun ByteArray?.toOutputStream(blockSize: Int = DEFAULT_BLOCK_SIZE): FastByteArrayOutputStream {
    // `use` closes the temporary input stream once the copy completes
    return this?.toInputStream()?.use { it.toOutputStream(blockSize) } ?: emptyOutputStream
}
/** Builds a [FastByteArrayOutputStream] holding this string encoded with [cs]. */
@JvmOverloads
fun String?.toOutputStream(blockSize: Int = DEFAULT_BLOCK_SIZE,
                           cs: Charset = Charsets.UTF_8): FastByteArrayOutputStream {
    return this?.toByteArray(cs)?.toOutputStream(blockSize) ?: emptyOutputStream
}
/**
 * Reads the bytes currently reported by [InputStream.available].
 * Returns an empty array for a null receiver.
 *
 * NOTE: available() is only an estimate of what can be read without blocking.
 */
fun InputStream?.availableBytes(): ByteArray {
    if (this == null)
        return emptyByteArray
    val size = available()
    val result = ByteArray(size)
    // A single read() is not guaranteed to fill the buffer (the original
    // issued one read and ignored its result, so it could silently return a
    // partially-filled, zero-padded array). Loop until `size` bytes were
    // read or the stream ends, and trim on a short read.
    var offset = 0
    while (offset < size) {
        val n = read(result, offset, size - offset)
        if (n <= 0) break
        offset += n
    }
    return if (offset == size) result else result.copyOf(offset)
}
/** Reads this [InputStream] fully into a byte array (empty array when null). */
fun InputStream?.toByteArray(): ByteArray {
    // toByteArrayUnsafe presumably returns the internal buffer without an
    // extra copy (Spring's FastByteArrayOutputStream) — safe here because the
    // stream is discarded immediately; confirm against the Spring docs.
    return this?.toOutputStream()?.use { it.toByteArrayUnsafe() } ?: emptyByteArray
}
/** Reads this [InputStream] fully and wraps the bytes in a [ByteBuffer]. */
fun InputStream.toByteBuffer(): ByteBuffer = ByteBuffer.wrap(this.toByteArray())
/** Decodes this [InputStream]'s full content as a string in charset [cs] (empty string when null). */
@JvmOverloads
fun InputStream?.toString(cs: Charset = Charsets.UTF_8): String {
    return this?.toByteArray()?.toString(cs) ?: EMPTY_STRING
}
/**
 * Lazily yields this stream's lines decoded with [cs].
 *
 * NOTE: prefer [toStringList]; the reader created here is never closed, so
 * the caller is responsible for the underlying stream's lifetime.
 */
@JvmOverloads
fun InputStream?.toLineSequence(cs: Charset = Charsets.UTF_8): Sequence<String> {
    return this?.bufferedReader(cs)?.lineSequence() ?: emptySequence()
}
/** Reads all lines of this [InputStream] into a [FastList]; useLines closes the reader. */
@JvmOverloads
fun InputStream?.toStringList(cs: Charset = Charsets.UTF_8): FastList<String> {
    return this?.bufferedReader(cs)?.useLines { it.toFastList() } ?: fastListOf()
}
/**
 * Lazily yields this byte array's lines decoded with [cs].
 *
 * NOTE: prefer [toStringList]; the temporary stream created here is never
 * explicitly closed (harmless for an in-memory stream).
 */
@JvmOverloads
fun ByteArray?.toLineSequence(cs: Charset = Charsets.UTF_8): Sequence<String> {
    return this?.toInputStream()?.toLineSequence(cs) ?: emptySequence()
}
/**
 * Reads this byte array's lines into a string collection.
 * Prefer this over [toLineSequence]: the temporary stream is closed via `use`.
 */
@JvmOverloads
fun ByteArray?.toStringList(cs: Charset = Charsets.UTF_8): FastList<String> {
    return this?.toInputStream()?.use { it.toStringList(cs) } ?: fastListOf()
}
|
-- f: returns the square of its integer argument.
create or replace FUNCTION f (n INTEGER)
RETURN INTEGER
IS
BEGIN
  RETURN n * n;
END;
|
#ifndef NMOS_SCHEMAS_API_H
#define NMOS_SCHEMAS_API_H

#include "cpprest/api_router.h"

// Forward declaration so this header doesn't need the full slog headers
namespace slog
{
    class base_gate;
}

// This is an experimental extension to expose the embedded JSON Schemas for the NMOS specifications
namespace nmos
{
    namespace experimental
    {
        // Builds the router serving the embedded JSON Schemas; 'gate' is the logging gate
        web::http::experimental::listener::api_router make_schemas_api(slog::base_gate& gate);
    }
}

#endif
|
/**
* @param {{property: string}} object
* @param {string} objectProperty property of the object
* @returns {[string]} an array with the values from the requested property
*/
export function getProperty(object, objectProperty) {
let array = []
if (!object) {
throw new Error(`object is ${object}`)
} else if (object.constructor !== Object) {
throw new Error(
`expected ${
object.constructor === Array ? `[${object}]` : `${object}`
} to have a constructor of type Object but is of type ${
object.constructor.name
}`
)
} else if (Object.entries(object).length < 1) {
throw new Error("the object is empty")
} else if (objectProperty === undefined || objectProperty === null) {
throw new Error(`objectProperty is ${objectProperty}`)
} else if (objectProperty.constructor !== String) {
throw new Error(
`expected ${
objectProperty.constructor === Array
? `[${objectProperty}]`
: `${objectProperty}`
} to be a string but is of type ${objectProperty.constructor.name}`
)
} else if (objectProperty.trim() === "") {
throw new Error("argument objectProperty is empty")
} else {
for (const property in object) {
if (typeof object[property] === "string") {
if (property === objectProperty) {
array = [...array, object[property]]
}
} else {
throw new Error(
`expected value ${object[property]}` +
` of property ${property} to be a string` +
` but is of type ${typeof object[property]}`
)
}
}
}
return array
}
/**
 * @param {string} selector CSS selector, or the literal "documentElement"
 * @param {string} property one of "top" | "left" | "right" | "bottom"
 *
 * @returns position of that edge of the element relative to the viewport
 */
export function getElementPosition(selector, property) {
  const element =
    selector === "documentElement"
      ? document.documentElement
      : document.querySelector(selector)
  // Only the four rect edges are supported; anything else is an error
  if (!["top", "left", "right", "bottom"].includes(property)) {
    throw new Error(
      property
        ? `${property} is not a valid property for getBoundingClientRect`
        : `argument for the property of getBoundingClientRect was not given`
    )
  }
  return element.getBoundingClientRect()[property]
}
/**
 * Reads the `width`/`height` property of the first element matching `selector`.
 *
 * NOTE(review): `element.width` / `element.height` exist only on replaced
 * elements such as <img>, <canvas> or <video>; for generic elements they are
 * undefined — confirm whether getBoundingClientRect() was intended instead.
 *
 * @param {string} selector passed to document.querySelector
 * @param {string} property "width" or "height"
 * @returns the requested dimension
 */
export function getElementDimensions(selector, property) {
  let length = 0
  const element = document.querySelector(selector)
  switch (property) {
    case "width":
      length = element.width
      break
    case "height":
      length = element.height
      break
    default:
      throw new Error(
        property
          ? `${property} is not a valid property of the ${HTMLElement.name} interface`
          : `argument for the property of "${selector}" was not given`
      )
  }
  // console.log(length)
  return length
}
/**
 * @param {string} backgroundColor must be in hex format
 * @param {[string]} colors array of color values (must be in hex format) to compare with the background color
 *
 * @returns foreground color with highest contrast ratio to background color
 * ("#000" when no candidate beats a ratio of 0)
 */
export function getForegroundColor(backgroundColor, colors) {
  let best = "#000"
  let bestRatio = 0
  for (const candidate of colors) {
    const ratio = checkContrastRatio(backgroundColor, candidate)
    if (ratio > bestRatio) {
      bestRatio = ratio
      best = candidate
    }
  }
  return best
}
/**
 * @param {string} backgroundColor must be in hex format
 * @param {[string]} colors array of color values (must be in hex format) to compare with the background color
 *
 * @returns {string} a random color among those with an acceptable contrast
 * ratio to the background color; undefined when no candidate qualifies
 */
export function getRandomForegroundColor(backgroundColor, colors) {
  let foregroundColors = []
  colors.forEach(color => {
    // checkContrastRatio is defined elsewhere in this module; a truthy
    // return marks the candidate as acceptable — confirm its threshold
    const ratio = checkContrastRatio(backgroundColor, color)
    if (ratio) {
      foregroundColors = [...foregroundColors, color]
    }
  })
  return foregroundColors[getRandomNumber(foregroundColors.length)]
}
|
<?php
namespace Jetimob\Juno\Lib\Http\Document;
use Jetimob\Juno\Lib\Http\Response;
/**
 * Class DocumentFileUploadResponse
 *
 * Typed response payload for Juno's document upload endpoint.
 *
 * @package Jetimob\Juno\Lib\Http\Document
 * @see https://dev.juno.com.br/api/v2#operation/uploadDocument
 */
class DocumentFileUploadResponse extends Response
{
    /** @var string $id <ObjectId> */
    public string $id;

    /** @var string $type */
    public string $type;

    /** @var string $description */
    public string $description;

    /**
     * @var string $approvalStatus
     * Possible values:
     *  - AWAITING
     *  - VERIFYING
     *  - APPROVED
     *  - REJECTED
     */
    public string $approvalStatus;

    /** @var string|null $rejectionReason present only for rejected documents — confirm with API docs */
    public ?string $rejectionReason;

    /** @var string|null $details */
    public ?string $details;
}
|
# Updating routes? Please update the readme as well.
Rails.application.routes.draw do
  # Keyword syntax used throughout; the one hash-rocket (`:as =>`) was
  # converted for consistency with the rest of this file.
  get 'static_pages/home', as: 'about'

  resources :categories, only: [:show]

  resources :items, except: [:destroy] do
    resources :item_comments
    member do
      post :toggle
      put "like", to: "items#upvote"
      put "dislike", to: "items#downvote"
      # post :vote, to: 'user_item_votes#create'
      # delete :vote, to: 'user_item_votes#destroy'
    end
  end

  root to: 'items#index'

  resources :users, except: [:index]
  resources :user_sessions, only: [:new, :create, :destroy]
  get 'login' => 'user_sessions#new', as: :login
  match 'logout' => 'user_sessions#destroy', as: :logout, via: [:get, :post]

  namespace :admin do
    root to: 'items#index'
    resources :items
  end
end
|
import { axiosInstance } from './api'
import { Health } from '../types/api'
// Wire the "ping" button: GET /ping and render the returned count.
document.getElementById('ping')!.addEventListener('click', () => {
  axiosInstance.get<Health>('/ping').then(response => {
    const countElement = document.getElementById('count')!
    countElement.innerHTML = `${response.data.count}`
  })
})

// Enable HMR provided by webpack-hot-middleware
if (module.hot) {
  module.hot.accept()
}
|
// <copyright file="TemplateDirectiveTest.cs" company="Oleg Sych">
// Copyright © Oleg Sych. All Rights Reserved.
// </copyright>
namespace T4Toolbox.TemplateAnalysis
{
using System;
using System.ComponentModel;
using System.Linq;
using Microsoft.VisualStudio.Text;
using NSubstitute;
using Xunit;
public static class TemplateDirectiveTest
{
        // Structural contract: TemplateDirective must derive from Directive.
        [Fact]
        public static void TemplateDirectiveIsSubclassOfDirective()
        {
            Assert.True(typeof(TemplateDirective).IsSubclassOf(typeof(Directive)));
        }
        // TemplateDirective is sealed — not intended for further derivation.
        [Fact]
        public static void TemplateDirectiveIsSealed()
        {
            Assert.True(typeof(TemplateDirective).IsSealed);
        }
        // Accept must double-dispatch to SyntaxNodeVisitor.VisitTemplateDirective.
        [Fact]
        public static void AcceptCallsVisitTemplateDirectiveMethodOfSyntaxNodeVisitor()
        {
            var visitor = Substitute.For<SyntaxNodeVisitor>();
            var directive = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
            directive.Accept(visitor);
            visitor.Received().VisitTemplateDirective(directive);
        }
        // CompilerOptions surfaces the compilerOptions attribute's value verbatim.
        [Fact]
        public static void CompilerOptionsReturnsValueOfCompilerOptionsAttribute()
        {
            var directive = new TemplateDirective(
                new DirectiveBlockStart(0),
                new DirectiveName(4, "template"),
                new[] { new Attribute(new AttributeName(14, "compilerOptions"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "optimizer+"), new DoubleQuote(22)) },
                new BlockEnd(24));
            Assert.Equal("optimizer+", directive.CompilerOptions);
        }
        // Absent compilerOptions attribute falls back to string.Empty, not null.
        [Fact]
        public static void CompilerOptionsReturnsEmptyStringWhenCompilerOptionsAttributeIsNotSpecified()
        {
            var directive = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
            Assert.Equal(string.Empty, directive.CompilerOptions);
        }
        // Culture surfaces the culture attribute's value verbatim.
        [Fact]
        public static void CultureReturnsValueOfCultureAttribute()
        {
            var directive = new TemplateDirective(
                new DirectiveBlockStart(0),
                new DirectiveName(4, "template"),
                new[] { new Attribute(new AttributeName(14, "culture"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "en-US"), new DoubleQuote(22)) },
                new BlockEnd(24));
            Assert.Equal("en-US", directive.Culture);
        }
        // Absent culture attribute falls back to string.Empty, not null.
        [Fact]
        public static void CultureReturnsEmptyStringWhenCultureAttributeIsNotSpecified()
        {
            var directive = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
            Assert.Equal(string.Empty, directive.Culture);
        }
        // The Culture property's TypeDescriptor metadata must list well-known
        // culture names with human-readable descriptions (spot-check en-US).
        [Fact]
        public static void CultureProvidesMetadataAboutWellKnownValues()
        {
            var attributeDescriptor = new AttributeDescriptor(TypeDescriptor.GetProperties(typeof(TemplateDirective))["Culture"]);
            Assert.NotEqual(0, attributeDescriptor.Values.Count);
            ValueDescriptor valueDescriptor = attributeDescriptor.Values["en-US"];
            Assert.Equal("English (United States)", valueDescriptor.Description);
        }
        // Debug surfaces the debug attribute's value verbatim (as a string).
        [Fact]
        public static void DebugReturnsValueOfDebugAttribute()
        {
            var directive = new TemplateDirective(
                new DirectiveBlockStart(0),
                new DirectiveName(4, "template"),
                new[] { new Attribute(new AttributeName(14, "debug"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "true"), new DoubleQuote(22)) },
                new BlockEnd(24));
            Assert.Equal("true", directive.Debug);
        }
        // Absent debug attribute falls back to string.Empty, not null.
        [Fact]
        public static void DebugReturnsEmptyStringWhenDebugAttributeIsNotSpecified()
        {
            var directive = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
            Assert.Equal(string.Empty, directive.Debug);
        }
        // Debug metadata must offer exactly the boolean literals.
        // VerifyAttributeValueDescriptor is a private helper defined elsewhere
        // in this class (outside this excerpt).
        [Fact]
        public static void DebugAttributeProvidesMetadataAboutWellKnownValues()
        {
            var attributeDescriptor = new AttributeDescriptor(TypeDescriptor.GetProperties(typeof(TemplateDirective))["Debug"]);
            Assert.Equal(2, attributeDescriptor.Values.Count);
            VerifyAttributeValueDescriptor(attributeDescriptor, "false");
            VerifyAttributeValueDescriptor(attributeDescriptor, "true");
        }
        #region GetDescription

        // Position 4 falls on the directive name, so TryGetDescription should
        // describe the template directive itself.
        [Fact]
        public static void GetDescriptionReturnsDescriptionOfDirective()
        {
            var directive = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
            string description;
            Span applicableTo;
            Assert.True(directive.TryGetDescription(4, out description, out applicableTo));
            Assert.Contains("Specifies how the template should be processed", description, StringComparison.OrdinalIgnoreCase);
        }
        // Position 14 falls on the attribute name, so TryGetDescription should
        // describe the compilerOptions attribute.
        [Fact]
        public static void GetDescriptionReturnsDescriptionOfCompilerOptionsAttribute()
        {
            var directive = new TemplateDirective(
                new DirectiveBlockStart(0),
                new DirectiveName(4, "template"),
                new[] { new Attribute(new AttributeName(14, "compilerOptions"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "optimizer+"), new DoubleQuote(22)) },
                new BlockEnd(24));
            string description;
            Span applicableTo;
            Assert.True(directive.TryGetDescription(14, out description, out applicableTo));
            Assert.Contains("compiler options", description, StringComparison.OrdinalIgnoreCase);
        }
        // Attribute-name position should yield the culture attribute's description.
        [Fact]
        public static void GetDescriptionReturnsDescriptionOfCultureAttribute()
        {
            var directive = new TemplateDirective(
                new DirectiveBlockStart(0),
                new DirectiveName(4, "template"),
                new[] { new Attribute(new AttributeName(14, "culture"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "en-US"), new DoubleQuote(22)) },
                new BlockEnd(24));
            string description;
            Span applicableTo;
            Assert.True(directive.TryGetDescription(14, out description, out applicableTo));
            Assert.Contains("Culture", description, StringComparison.OrdinalIgnoreCase);
        }
        // Attribute-name position should yield the debug attribute's description.
        [Fact]
        public static void GetDescriptionReturnsDescriptionOfDebugAttribute()
        {
            var directive = new TemplateDirective(
                new DirectiveBlockStart(0),
                new DirectiveName(4, "template"),
                new[] { new Attribute(new AttributeName(14, "debug"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "true"), new DoubleQuote(22)) },
                new BlockEnd(24));
            string description;
            Span applicableTo;
            Assert.True(directive.TryGetDescription(14, out description, out applicableTo));
            Assert.Contains("debugging", description, StringComparison.OrdinalIgnoreCase);
        }
        // Attribute-name position should yield the hostspecific attribute's description.
        [Fact]
        public static void GetDescriptionReturnsDescriptionOfHostSpecificAttribute()
        {
            var directive = new TemplateDirective(
                new DirectiveBlockStart(0),
                new DirectiveName(4, "template"),
                new[] { new Attribute(new AttributeName(14, "hostspecific"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "true"), new DoubleQuote(22)) },
                new BlockEnd(24));
            string description;
            Span applicableTo;
            Assert.True(directive.TryGetDescription(14, out description, out applicableTo));
            Assert.Contains("host", description, StringComparison.OrdinalIgnoreCase);
        }
        // Attribute-name position should yield the inherits attribute's description.
        [Fact]
        public static void GetDescriptionReturnsDescriptionOfInheritsAttribute()
        {
            var directive = new TemplateDirective(
                new DirectiveBlockStart(0),
                new DirectiveName(4, "template"),
                new[] { new Attribute(new AttributeName(14, "inherits"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "TextTransformation"), new DoubleQuote(22)) },
                new BlockEnd(24));
            string description;
            Span applicableTo;
            Assert.True(directive.TryGetDescription(14, out description, out applicableTo));
            Assert.Contains("base class", description, StringComparison.OrdinalIgnoreCase);
        }
[Fact]
public static void GetDescriptionReturnsDescriptionOfLanguageAttribute()
{
    // Arrange: a template directive whose "language" attribute starts at position 13.
    var attribute = new Attribute(new AttributeName(13, "language"), new Equals(21), new DoubleQuote(22), new AttributeValue(23, "C#"), new DoubleQuote(22));
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new[] { attribute }, new BlockEnd(24));

    // Act: ask for the description at the attribute's position.
    string description;
    Span applicableTo;
    bool found = sut.TryGetDescription(13, out description, out applicableTo);

    // Assert: a description mentioning the language is returned.
    Assert.True(found);
    Assert.Contains("language", description, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public static void GetDescriptionReturnsDescriptionOfLinePragmasAttribute()
{
    // Arrange: a template directive whose "linePragmas" attribute starts at position 13.
    var attribute = new Attribute(new AttributeName(13, "linePragmas"), new Equals(21), new DoubleQuote(22), new AttributeValue(23, "true"), new DoubleQuote(22));
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new[] { attribute }, new BlockEnd(24));

    // Act: ask for the description at the attribute's position.
    string description;
    Span applicableTo;
    bool found = sut.TryGetDescription(13, out description, out applicableTo);

    // Assert: a description mentioning line numbers is returned.
    Assert.True(found);
    Assert.Contains("line numbers", description, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public static void GetDescriptionReturnsDescriptionOfVisibilityAttribute()
{
    // Arrange: the "visibility" attribute starts at position 13, using the same
    // token layout as the sibling language/linePragmas description tests.
    var directive = new TemplateDirective(
        new DirectiveBlockStart(0),
        new DirectiveName(4, "template"),
        new[] { new Attribute(new AttributeName(13, "visibility"), new Equals(21), new DoubleQuote(22), new AttributeValue(23, "internal"), new DoubleQuote(22)) },
        new BlockEnd(24));
    string description;
    Span applicableTo;
    // Query at position 13 — where the attribute name actually starts — instead
    // of the off-by-one position 14 the test previously used; all sibling tests
    // query at the attribute's declared start position.
    Assert.True(directive.TryGetDescription(13, out description, out applicableTo));
    Assert.Contains("visibility", description, StringComparison.OrdinalIgnoreCase);
}
#endregion
[Fact]
public static void InheritsAttributeReturnsValueOfInheritsAttribute()
{
    // Arrange: a directive declaring inherits="TextTransformation".
    var attribute = new Attribute(new AttributeName(14, "inherits"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "TextTransformation"), new DoubleQuote(22));
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new[] { attribute }, new BlockEnd(24));

    // The Inherits property surfaces the raw attribute value.
    Assert.Equal("TextTransformation", sut.Inherits);
}
[Fact]
public static void InheritsReturnsEmptyStringWhenInheritsAttributeIsNotSpecified()
{
    // With no attributes at all, Inherits falls back to an empty string.
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
    Assert.Equal(string.Empty, sut.Inherits);
}
[Fact]
public static void HostSpecificReturnsValueOfHostSpecificAttribute()
{
    // Arrange: a directive declaring hostspecific="true".
    var attribute = new Attribute(new AttributeName(14, "hostspecific"), new Equals(18), new DoubleQuote(19), new AttributeValue(20, "true"), new DoubleQuote(22));
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new[] { attribute }, new BlockEnd(24));

    // The HostSpecific property surfaces the raw attribute value.
    Assert.Equal("true", sut.HostSpecific);
}
[Fact]
public static void HostSpecificReturnsEmptyStringWhenHostSpecificAttributeIsNotSpecified()
{
    // With no attributes at all, HostSpecific falls back to an empty string.
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
    Assert.Equal(string.Empty, sut.HostSpecific);
}
[Fact]
public static void HostSpecificProvidesMetadataAboutWellKnownValues()
{
    // The HostSpecific property advertises its three well-known values.
    var property = TypeDescriptor.GetProperties(typeof(TemplateDirective))["HostSpecific"];
    var attributeDescriptor = new AttributeDescriptor(property);
    Assert.Equal(3, attributeDescriptor.Values.Count);
    VerifyAttributeValueDescriptor(attributeDescriptor, "false");
    VerifyAttributeValueDescriptor(attributeDescriptor, "true");
    VerifyAttributeValueDescriptor(attributeDescriptor, "trueFromBase");
}
[Fact]
public static void LanguageReturnsValueOfLanguageAttribute()
{
    // Arrange: a directive declaring language="C#".
    var attribute = new Attribute(new AttributeName(13, "language"), new Equals(21), new DoubleQuote(22), new AttributeValue(23, "C#"), new DoubleQuote(22));
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new[] { attribute }, new BlockEnd(24));

    // The Language property surfaces the raw attribute value.
    Assert.Equal("C#", sut.Language);
}
[Fact]
public static void LanguageReturnsEmptyStringWhenLanguageAttributeIsNotSpecified()
{
    // With no attributes at all, Language falls back to an empty string.
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
    Assert.Equal(string.Empty, sut.Language);
}
[Fact]
public static void LanguageProvidesMetadataAboutWellKnownValues()
{
    // The Language property advertises its two well-known values.
    var property = TypeDescriptor.GetProperties(typeof(TemplateDirective))["Language"];
    var attributeDescriptor = new AttributeDescriptor(property);
    Assert.Equal(2, attributeDescriptor.Values.Count);
    VerifyAttributeValueDescriptor(attributeDescriptor, "C#");
    VerifyAttributeValueDescriptor(attributeDescriptor, "VB");
}
[Fact]
public static void LinePragmasReturnsValueOfLinePragmasAttribute()
{
    // Arrange: a directive declaring linePragmas="true".
    var attribute = new Attribute(new AttributeName(13, "linePragmas"), new Equals(21), new DoubleQuote(22), new AttributeValue(23, "true"), new DoubleQuote(22));
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new[] { attribute }, new BlockEnd(24));

    // The LinePragmas property surfaces the raw attribute value.
    Assert.Equal("true", sut.LinePragmas);
}
[Fact]
public static void LinePragmasReturnsEmptyStringWhenLinePragmasAttributeIsNotSpecified()
{
    // With no attributes at all, LinePragmas falls back to an empty string.
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
    Assert.Equal(string.Empty, sut.LinePragmas);
}
[Fact]
public static void LinePragmasProvidesMetadataAboutWellKnownValues()
{
    // The LinePragmas property advertises its two well-known values.
    var property = TypeDescriptor.GetProperties(typeof(TemplateDirective))["LinePragmas"];
    var attributeDescriptor = new AttributeDescriptor(property);
    Assert.Equal(2, attributeDescriptor.Values.Count);
    VerifyAttributeValueDescriptor(attributeDescriptor, "false");
    VerifyAttributeValueDescriptor(attributeDescriptor, "true");
}
[Fact]
public static void VisibilityReturnsValueOfVisibilityAttribute()
{
    // Arrange: a directive declaring visibility="internal".
    var attribute = new Attribute(new AttributeName(13, "visibility"), new Equals(21), new DoubleQuote(22), new AttributeValue(23, "internal"), new DoubleQuote(22));
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new[] { attribute }, new BlockEnd(24));

    // The Visibility property surfaces the raw attribute value.
    Assert.Equal("internal", sut.Visibility);
}
[Fact]
public static void VisibilityReturnsEmptyStringWhenVisibilityAttributeIsNotSpecified()
{
    // With no attributes at all, Visibility falls back to an empty string.
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
    Assert.Equal(string.Empty, sut.Visibility);
}
[Fact]
public static void VisibilityProvidesMetadataAboutWellKnownValues()
{
    // The Visibility property advertises its two well-known values.
    var property = TypeDescriptor.GetProperties(typeof(TemplateDirective))["Visibility"];
    var attributeDescriptor = new AttributeDescriptor(property);
    Assert.Equal(2, attributeDescriptor.Values.Count);
    VerifyAttributeValueDescriptor(attributeDescriptor, "public");
    VerifyAttributeValueDescriptor(attributeDescriptor, "internal");
}
[Fact]
public static void ValidateDoesNotReturnErrorsWhenNoAttributesAreSpecified()
{
    // A bare template directive is valid; Validate yields no errors.
    var sut = new TemplateDirective(new DirectiveBlockStart(0), new DirectiveName(4, "template"), new Attribute[0], new BlockEnd(24));
    Assert.False(sut.Validate().Any());
}
/// <summary>
/// Asserts that the named well-known value exists in the attribute descriptor's
/// Values collection and carries a non-empty description.
/// </summary>
private static void VerifyAttributeValueDescriptor(AttributeDescriptor attribute, string valueName)
{
ValueDescriptor value = attribute.Values[valueName];
Assert.False(string.IsNullOrWhiteSpace(value.Description), valueName + " attribute value does not have a description");
}
}
}
|
<?php
declare(strict_types=1);
namespace TheImp\Exception\Http;
use Exception as PhpException;
use Throwable;
/**
 * 302 Found (previously "Moved temporarily").
 *
 * Thrown to signal an HTTP redirect to the given URL.
 */
class FoundException extends PhpException
{
    /**
     * HTTP status code for "Found".
     *
     * @var int
     */
    protected $code = 302;

    /** @var string Target URL of the redirect. */
    protected string $url;

    /**
     * @param string         $url      Redirect target.
     * @param string         $message  Optional exception message.
     * @param Throwable|null $previous Previous exception for chaining.
     */
    public function __construct(string $url, string $message = "", ?Throwable $previous = null)
    {
        $this->url = $url;
        // Bug fix: the parent was previously constructed with code 301 (Moved
        // Permanently), contradicting the 302 declared in $code and the class doc.
        parent::__construct($message, 302, $previous);
    }

    /** @return string Redirect target URL. */
    public function getUrl(): string
    {
        return $this->url;
    }
}
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Gandalan.Client.Contracts.UIServices;
namespace Gandalan.Client.Contracts.AppServices
{
/// <summary>
/// Checks for and applies application updates.
/// </summary>
public interface IApplicationUpdateService
{
/// <summary>
/// Update progress in percent (presumably 0-100 — confirm with implementations)
/// </summary>
int Progress { get; }
/// <summary>
/// Human-readable status text shown to the user
/// </summary>
string StatusText { get; }
/// <summary>
/// Whether or not there are updates available. Should return
/// as quickly as possible and not display progress
/// </summary>
/// <returns>true if updates are available, otherwise false</returns>
Task<bool> HasUpdates();
/// <summary>
/// Performs the actual update (with progress indicator)
/// </summary>
/// <returns>task completing when the update has finished</returns>
Task UpdateApplication();
}
}
|
#pragma once
#include <stdint.h>
#include <sys/time.h>
#include <sys/event.h>
/* Fallback definition: mask selecting the mapping-type bits of mmap flags
 * (presumably mirrors the BSD <sys/mman.h> MAP_TYPE — confirm against SDK). */
#ifndef MAP_TYPE
#define MAP_TYPE 0x0f
#endif
/* One loadable segment of a module: base address, byte size, and protection
 * flags (presumably PROT_*-style bits — confirm against the SDK headers). */
typedef struct SceKernelModuleSegmentInfo
{
void *address;
uint32_t size;
int32_t prot;
}
SceKernelModuleSegmentInfo;
/* Metadata describing a loaded kernel module. */
typedef struct SceKernelModuleInfo
{
size_t size;                                 /* size of this struct in bytes — presumably set by the caller before querying; confirm */
char name[256];                              /* NUL-terminated module name */
SceKernelModuleSegmentInfo segmentInfo[4];   /* up to 4 segments; see segmentCount */
uint32_t segmentCount;                       /* number of valid entries in segmentInfo */
uint8_t fingerprint[20];                     /* 20-byte fingerprint (SHA-1-sized — TODO confirm the hash used) */
}
SceKernelModuleInfo;
/* Convenience aliases mapping Sce* names onto standard POSIX/BSD types. */
typedef struct timeval SceKernelTimeval;
typedef unsigned int SceKernelUseconds;
/* pthread handles are opaque pointers in this ABI */
typedef void *ScePthread;
typedef void *ScePthreadAttr;
typedef void *ScePthreadMutex;
typedef void *ScePthreadMutexattr;
typedef uint32_t SceKernelModule;    /* module handle/id */
typedef uint64_t SceKernelEqueue;    /* event queue handle */
typedef struct kevent SceKernelEvent;
|
#!/bin/bash
##
## =============================================================================================
## IBM Confidential
## © Copyright IBM Corp. 2021-
## The source code for this program is not published or otherwise divested of its trade secrets,
## irrespective of what has been deposited with the U.S. Copyright Office.
## =============================================================================================
##
#VERSION="20210811_1628"
################################################################################
# This is a simple script that collects logs and html files from the server(s)
# indicated on the arguments, then it pulls it into the current dir.
################################################################################
ARGUMENTS="" # (never referenced again in this script)
PREPATH="logs-ocs-ci" #Holds the initial part of the path to follow.
COMMON=0 #When the value is "1" ignore the use of month_day directories.
DAY="" # contains the day part of the date used to gather files.
FINDARG="" # used to indicate the argument to find.
FOUNDARG="" # auxiliary: indicates if an argument has been found.
GSADIR="/tucgsa-h2/08/cpratt/OCSReports/OCS48/" #for now let's use this dir.
GETDIR="" # used through the script as a generic get directory/path name.
HTMLF="" # used only to identify HTML files.
LOGPATH="" # Path to the logs from where we will select the logs to transfer.
RHOST="" # has the IP or name of the remote host.
MDY="" # holds a date on the form month/Day/year.
MDYF="" # holds a date to be used on the name of a dir.
MONTH="" # holds the name of the month to be used for the selection of files.
MONTHN="" # Holds the name of a month abbreviated to three letters.
MONTHS=(Pad Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec) #month names table (index 0 is padding so 1=Jan).
NEXTVAR="" # used to find the next value after identifying the argument.
TARGET="$(pwd)" #Holds the target directory for the files to be stored.
TRSFRGSA="1" # if this variable is 0 then do not transfer anything to GSA
USER="" # the user name. NOTE(review): shadows the login $USER env var for the rest of the script.
VPATH="4.8" #Holds the version path it defaults to 4.8 but it can use 4.6 to 4.9.
VRBS=0 #for debug: 1 enables the verbose trace output below.
################################################################################
# findarg: classify a single command-line flag.
# Sets FOUNDARG=1 when $1 is a recognized flag (0 otherwise). For flags that
# take a value a tag is stored in NEXTVAR so the main loop knows which global
# to fill from the next argument. "-c" flips COMMON directly; "-h" prints the
# help text and exits.
################################################################################
function findarg(){
    if [[ VRBS -eq 1 ]]; then
        echo -e "in findarg"
        echo -e "Argument(s) to find $1"
    fi
    FOUNDARG=1
    case $1 in
        "-c")
            [[ VRBS -eq 1 ]] && echo -e "found $1"
            COMMON=1
            ;;
        "-d")
            [[ VRBS -eq 1 ]] && echo -e "found $1"
            NEXTVAR="mnthdy"
            ;;
        "-h")
            [[ VRBS -eq 1 ]] && echo -e "found $1"
            help
            exit 0
            ;;
        "-p")
            [[ VRBS -eq 1 ]] && echo -e "found $1"
            NEXTVAR="gtdr"
            ;;
        "-s")
            [[ VRBS -eq 1 ]] && echo -e "found $1"
            NEXTVAR="hst"
            ;;
        "-t")
            [[ VRBS -eq 1 ]] && echo -e "found $1"
            NEXTVAR="trgt"
            ;;
        "-u")
            [[ VRBS -eq 1 ]] && echo -e "found $1"
            NEXTVAR="rd"
            ;;
        "-v")
            [[ VRBS -eq 1 ]] && echo -e "found $1"
            NEXTVAR="vrsn"
            ;;
        *)
            [[ VRBS -eq 1 ]] && echo -e "Not found $1"
            NEXTVAR=""
            FOUNDARG=0
            ;;
    esac
}
################################################################################
# help: clear the screen and print usage information.
################################################################################
function help(){
    clear
    # Quoted delimiter: the text is printed verbatim, nothing is expanded.
    cat << 'ENDHELP'
This script helps to copy files from a remote server/host to
your local machine. The files copied are log-files (.log) and
report files (.html)
The GSAID and SSHPASS environment variables need to be set to
the authorized user and password. If they are not present the
files will not be transferred to GSA using sftp.

you can change the behavior of the script using the next
arguments:
 -c Uses the same directory for all found files. The files are
 not distributed in subdirectories representing the month
 and day they were created.
 -d The argument following must be a date on the form m/d/yyyy
 or mm/dd/yyyy. This is the date of the files to extract.
 Sometimes files from the previous day may appear in this
 directory.
 -h Displays this help.
 -p The following argument is the directory in the remote host
 that contains the subdirectory with the running version.
 -s Following this modifier must be the name or the IP address
 of the remote host/server from where the data will be
 extracted.
 -t The argument following is the target directory in the
 local machine. If none is given, then the current directory
 is used.
 -u The argument following this modifier must be the username to
 be used. It is assumed that this user has a key that allows
 a server/host passwordless connection. If this is not the
 case, a password will be prompted multiple times.
 -v The following argument must contain the OCS version of
 interest. Not providing it will default to version 4.8.
===============================================================
ENDHELP
}
################################################################################
# verifytarget: finalize TARGET and make sure the directory exists.
# With COMMON=1 TARGET is used as-is; otherwise a Mon_D subdirectory is
# appended (the space padding added to single-digit days in main is stripped
# here so the directory name has no embedded blank).
################################################################################
function verifytarget(){
SPCHAR="_"
if [ $COMMON == 1 ]; then
if [ $VRBS -eq 1 ]; then
echo -e "verifytarget: TARGET=$TARGET"
fi
else
# Drop the leading space that main added to single-digit days.
case "$DAY" in
" 1" | " 2" | " 3" | " 4" | " 5" | " 6" | " 7" | " 8" | " 9")
DAY=${DAY:1}
;;
*)
;;
esac
if [ $VRBS -eq 1 ]; then
echo -e "verifytarget: TARGET=$TARGET"
echo -e "verifytarget: dir for date = $MONTHN$SPCHAR$DAY"
fi
TARGET="$TARGET/$MONTHN$SPCHAR$DAY"
fi
# Verify the existence of the target directory; if it doesn't exist,
# create the directory where it is supposed to be.
if [ ! -d $TARGET ]; then
mkdir -p $TARGET
fi
if [ $VRBS -eq 1 ]; then
echo -e "\nverifytarget: TARGET verified is shown below:"
echo -e "verifytarget: path => $TARGET"
fi
return 0;
}
################################################################################
# Above this line I have the functions and all the stuff needed for the script
# to run. I do this because if the script grows it can be hard to find where
# things start.
# Below this comment is the entry and main code.
################################################################################
################################################################################
# MAIN
################################################################################
###
# check to see if we have arguments.
###
# No arguments at all: show the usage text and leave.
if [ $# -eq 0 ]; then
help
exit 0
fi
###
# We got arguments, start processing them.
###
while (("$#")); do
if [ $VRBS -eq 1 ]; then
echo -e "$1"
echo -e "$#"
fi
# Classify the flag; findarg sets NEXTVAR when the flag takes a value.
findarg $1
#After finding the argument needed, we look, if applicable for its value
case $NEXTVAR in
"gtdr")
# -p: prefix both the report path and the cron-log path with the given dir.
shift
PREPATH="$1/$PREPATH"
LOGPATH="$1/logs-cron"
if [ $VRBS -eq 1 ]; then
echo -e "main: PREPATH=$PREPATH."
echo -e "main: LOGPATH=$LOGPATH."
fi
;;
"mnthdy")
# -d: split m/d/yyyy into MONTH and DAY; single-digit days get a leading
# space so the later grep matches the date column of `ls -l` output.
shift
MDY=$1
MONTH=$(echo $1 | awk 'BEGIN { FS = "/" } ; { print $1 }')
DAY=$(echo $1 | awk 'BEGIN { FS = "/" } ; { print $2 }')
if [ "$DAY" -lt "10" ]; then
DAY=" "$DAY
fi
MONTHN=${MONTHS[$MONTH]}
if [ $VRBS -eq 1 ]; then
echo -e "main: MONTH= $MONTH ($MONTHN)"
echo -e "main: DAY= $DAY"
fi
;;
"rd")
# -u: remote user name.
shift
USER="$1"
if [ $VRBS -eq 1 ]; then
echo -e "main: USER=$USER."
fi
;;
"trgt")
# -t: local target directory.
shift
TARGET=$1
if [ $VRBS -eq 1 ]; then
echo -e "main: TARGET=$TARGET."
fi
;;
"vrsn")
# -v: OCS version subdirectory.
shift
VPATH="$1"
if [ $VRBS -eq 1 ]; then
echo -e "main: VPATH=$VPATH."
fi
;;
"hst")
# -s: remote host name or IP.
shift
RHOST="$1"
if [ $VRBS -eq 1 ]; then
echo -e "main: RHOST=$RHOST."
fi
;;
*)
if [ $VRBS -eq 1 ]; then
echo -e "main: no double shift needed."
fi
;;
esac
NEXTVAR=""
shift
done
if [ $VRBS -eq 1 ]; then
echo
echo -e "the files path are:"
echo -e "~/$PREPATH/$VPATH for HTML reports"
echo -e "~/$LOGPATH for log files"
echo -e "full ID: $USER@$RHOST"
fi
echo -e "extracting reports from $USER@$RHOST:/home/test/$PREPATH/$VPATH"
echo -e "extracting log files from $USER@$RHOST:/home/test/$LOGPATH"
echo -e "Looking for files generated on: $MONTHN / $DAY"
echo -e "Target directory/folder: $TARGET\n"
#start looking for the candidate files in the server-path indicated.
# List remote HTML reports whose ls -l date column matches "Mon DD"; the awk
# splits the concatenated listing at each "/home" so every remote path becomes
# one record (minus its /home prefix).
HTMLF=$(echo $(ssh -q -t $USER@$RHOST <<ENDSSH
ls -ltr ~/$PREPATH/$VPATH/*.html | grep "$MONTHN $DAY"
ENDSSH
) | awk -F/home '{
i=2
while (i <= NF) {
print $i
i=i+1 }
}')
# Same extraction for the cron log files.
LOGF=$(echo $(ssh -q -t $USER@$RHOST <<ENDSSH
ls -ltr ~/$LOGPATH/test*.log | grep "$MONTHN $DAY"
ENDSSH
) | awk -F/home '{
i=2
while (i <= NF) {
print $i
i=i+1 }
}')
echo -e "\nSelecting Files:"
# Keep only the fields that look like paths (contain "est/", i.e. the remote
# home directory of user "test" — TODO confirm this assumption holds for
# other remote users).
SFILE=$(echo -e "$HTMLF" | awk -F" " 'BEGIN {ORS="\n"} {
i=1
while (i <= NF) {
if (index($i, "est/")!=0)
print $i
i=i+1 }
}')
echo -e $SFILE
SLFILE=$(echo -e "$LOGF" | awk -F" " 'BEGIN {ORS="\n"} {
i=1
while (i <= NF) {
if (index($i, "est/")!=0)
print $i
i=i+1 }
}')
echo -e $SLFILE
#verify that the correct target directory is used
verifytarget
echo -e "\n"
####
#Make sure we can use sftp to move the files to the common area.
#Send to screen what will happen.
####
if [ -z "$GSAID" ] || [ -z "$SSHPASS" ]; then
echo -e "Not transfering reports to GSA"
echo -e "GSAID or SSHPASS environment variables is missing\n"
TRSFRGSA="0"
else
echo -e "Moving reports and logs to the next GSA area:"
echo -e "$GSAID:$GSADIR"
fi
#####
# If possible start transferring all the selected files.
# Start with the html reports.
# NOTE(review): when TRSFRGSA=0 the local scp copies below are skipped too,
# although the message above only announces skipping the GSA upload — confirm
# whether that is intended.
#####
#set -x
if [ "$TRSFRGSA" == "1" ]; then
if [ "$SFILE" == "" ]; then
echo -e "There are no reports to transfer yet, please try later"
else
echo -e "starting report(s) transfer"
for i in $SFILE
do
echo -e "Tranfering report:"
echo -e $i
echo -e "to: $TARGET"
scp -p $USER@$RHOST":/home"$i $TARGET
echo put $TARGET"/"${i##*/} | sshpass -e sftp $GSAID":"$GSADIR
done
fi
#####
# Transfer logs
#####
if [ "$SLFILE" == "" ]; then
echo -e "there are no logs files to transfer yet, please try later"
else
echo -e "starting log(s) transfer"
for i in $SLFILE
do
echo -e "Tranfering log:"
echo -e $i
echo -e "to: $TARGET"
scp -p $USER@$RHOST":/home"$i $TARGET
echo put $TARGET"/"${i##*/} | sshpass -e sftp $GSAID":"$GSADIR
done
fi
fi
#set +x
echo -e "\n\n*********DONE*********"
exit 0
|
# Live integration tests for RethinkDB table writes (insert, update, replace,
# delete). Needs a reachable server, so it is gated behind TEST_ONLINE.
use Test::More;
plan skip_all => 'set TEST_ONLINE to enable this test'
unless $ENV{TEST_ONLINE};
use Rethinkdb;
# setup: fresh 'test' database with a 'marvel' table keyed by superhero name.
r->connect->repl;
r->db('test')->drop->run;
r->db('test')->create->run;
r->db('test')->table('marvel')->create( primary_key => 'superhero' )->run;
# get an empty set
my $res = r->db('test')->table('marvel')->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->type, 2, 'Correct status code';
is scalar @{ $res->response }, 0, 'Correctly shows table empty';
# insert one entry
$res = r->table('marvel')
->insert( { superhero => 'Iron Man', superpower => 'Arc Reactor' } )->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->type, 1, 'Correct status code';
isa_ok $res->response, 'HASH', 'Response has correct type';
is $res->response->{inserted}, 1, 'Correct number of inserted';
# these are only set if the object we inserted did not have a primary_key value:
# is scalar @{$res->response->{generated_keys}}, 1, 'Response has correct number of keys';
# list table entries just to double-check
$res = r->db('test')->table('marvel')->run;
is scalar @{ $res->response }, 1, 'Table contains correct number of entries';
is $res->response->[0]->{superhero}, 'Iron Man',
'Table contains correct first entry';
# insert multiple entries
$res = r->table('marvel')->insert(
[
{ superhero => 'Wolverine', superpower => 'Adamantium' },
{ superhero => 'Spider-Man', superpower => 'Spidy Sense' }
]
)->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->type, 1, 'Correct status code';
isa_ok $res->response, 'HASH', 'Response has correct type';
is $res->response->{inserted}, 2, 'Correct number of inserted';
# these are only set if the object we inserted did not have a primary_key value:
# is scalar @{$res->response->{generated_keys}}, 2, 'Correct number of generated keys';
# list table entries just to double-check
$res = r->db('test')->table('marvel')->run;
is scalar @{ $res->response }, 3, 'Table contains correct number of entries';
# should we check all the names?
# insert an entry with an existing primary_key should fail
$res = r->table('marvel')
->insert( { superhero => 'Iron Man', superpower => 'Arc Reactor' } )->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->response->{errors}, 1, 'Correct number of errors';
is $res->response->{inserted}, 0, 'Correct number of inserts';
# duplicate keys make every document in the batch fail
$res = r->table('marvel')->insert(
[
{ superhero => 'Iron Man', superpower => 'Arc Reactor' },
{ superhero => 'Wolverine', superpower => 'Adamantium' },
{ superhero => 'Spider-Man', superpower => 'Spidy Sense' }
]
)->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->response->{errors}, 3, 'Correct number of errors';
is $res->response->{inserted}, 0, 'Correct number of inserts';
# forcing an insert should work tho
$res
= r->table('marvel')
->insert( { superhero => 'Iron Man', superpower => 'Mach 5' },
{ conflict => 'replace' } )->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->response->{errors}, 0, 'Correct number of errors';
is $res->response->{inserted}, 0, 'Correct number of inserts';
is $res->response->{replaced}, 1, 'Correct number replaced';
# forcing an insert should work with "true" value too
$res
= r->table('marvel')
->insert( { superhero => 'Iron Man', superpower => 'Arc Reactor' },
{ conflict => 'replace' } )->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->response->{replaced}, 1, 'Correct number replaced';
# Update
$res = r->table('marvel')->get('Iron Man')->update( { age => 30 } )->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->response->{replaced}, 1, 'Correct number of updates';
# TODO:
# $res = r->table('marvel')->update({ age => r->row('age')->add(1) })->run;
# Replace / Modify
$res = r->table('marvel')->get('Iron Man')
->replace( { superhero => 'Iron Man', age => 30 } )->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->response->{replaced}, 1, 'Correct number of modified documents';
# Delete one document
$res = r->table('marvel')->get('Iron Man')->delete->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->response->{deleted}, 1, 'Correct number of deleted documents';
# Delete all the documents
$res = r->table('marvel')->delete->run;
isa_ok $res, 'Rethinkdb::Response', 'Correct class';
is $res->response->{deleted}, 2, 'Correct number of deleted documents';
# clean up
r->db('test')->drop->run;
done_testing();
|
#!/bin/bash
# Load the environment for the project given as $1, then run the tmux
# prototype container in the foreground; X access control is restored on exit.
# NOTE(review): `xhost +` disables X access control for ALL hosts while the
# container runs — consider `xhost +local:` instead.
xhost +
source env_for_project.sh $1
docker-compose run li3ds-prototype_tmux
# docker-compose up
xhost -
|
/***************************************************************************
cmdlineoptions.h - description
-------------------
begin : Sun Oct 13 2007
copyright : (C) 2007 by Andre Simon
email : andre.simon1@gmx.de
***************************************************************************/
/*
This file is part of ANSIFilter.
ANSIFilter is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ANSIFilter is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with ANSIFilter. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef CMDLINEOPTIONS_H
#define CMDLINEOPTIONS_H
#include <string>
#include <map>
#include <cstdlib>
#include <iostream>
#include <fstream>
#include <vector>
#include "enums.h"
using namespace std;
/// handle command line options
class CmdLineOptions
{
public:
/**Constructor
\param argc Argument count
\param argv Argument strings
*/
CmdLineOptions( const int argc, const char *argv[] );
~CmdLineOptions();
/** \return Single output file name*/
string getSingleOutFilename();
/** \return Single input file name*/
string getSingleInFilename() const;
/** \return Output directory*/
string getOutDirectory() ;
/** \return Char set*/
string getEncoding() const;
/** \return Font name*/
string getFont() const;
/** \return Font size*/
string getFontSize() const;
/** \return True if version information should be printed*/
bool printVersion() const;
/** \return True if help information should be printed*/
bool printHelp() const;
/** \return True if output should be fragmented*/
bool fragmentOutput() const;
/** \return output file suffix */
string getOutFileSuffix() const;
/** \return Output file format */
ansifilter::OutputType getOutputType() const;
/** \return True if encoding specification should be omitted in output*/
bool omitEncoding() const;
/** \return True if formatting information should not be outputted */
bool plainOutput() const;
/** \return True if input files should be read after EOF occurred */
bool ignoreInputEOF() const;
/** \return Document title */
string getDocumentTitle() const ;
/** \return Style sheet path */
string getStyleSheetPath() const ;
/** \return List of input file names*/
const vector <string> & getInputFileNames() const;
/** \return Maximum line length before wrapping */
int getWrapLineLength() const;
private:
ansifilter::OutputType outputType;
bool opt_help;
bool opt_version ;
bool opt_fragment;
bool opt_plain;
bool opt_ignoreEOF;
// name of single output file
string outFilename;
string docTitle;
string encodingName;
string outDirectory;
string font;
string fontSize;
string styleSheetPath;
int wrapLineLen;
/** list of all input file names */
vector <string> inputFileNames;
/** \return Valid path name */
string validateDirPath(const string & path);
/** \return directory name of path */
string getDirName( const string & path);
};
#endif
|
package org.odk.collect.projects
import androidx.test.ext.junit.runners.AndroidJUnit4
import com.google.gson.Gson
import org.junit.runner.RunWith
import org.odk.collect.shared.Settings
import org.odk.collect.shared.UUIDGenerator
/**
 * Runs the shared [ProjectsRepositoryTest] contract against a
 * [SharedPreferencesProjectsRepository] backed by an in-memory Settings stub.
 */
@RunWith(AndroidJUnit4::class)
class SharedPreferencesProjectsRepositoryTest : ProjectsRepositoryTest() {
override fun buildSubject(): ProjectsRepository {
return SharedPreferencesProjectsRepository(UUIDGenerator(), Gson(), InMemSettings(), "test")
}
}
/**
 * Minimal in-memory [Settings] stub: only save/remove/getString are
 * implemented (presumably all the repository under test needs — confirm);
 * every other member throws NotImplementedError via Kotlin's TODO().
 */
private class InMemSettings : Settings {
// Backing store for the implemented operations.
val map = mutableMapOf<String, Any?>()
override fun save(key: String, value: Any?) {
map[key] = value
}
override fun remove(key: String) {
map.remove(key)
}
override fun getString(key: String): String? {
return map[key] as String?
}
override fun setDefaultForAllSettingsWithoutValues() {
TODO("Not yet implemented")
}
override fun saveAll(prefs: Map<String, Any?>) {
TODO("Not yet implemented")
}
override fun reset(key: String) {
TODO("Not yet implemented")
}
override fun clear() {
TODO("Not yet implemented")
}
override fun contains(key: String): Boolean {
TODO("Not yet implemented")
}
override fun getAll(): Map<String, *> {
TODO("Not yet implemented")
}
override fun getBoolean(key: String): Boolean {
TODO("Not yet implemented")
}
override fun getLong(key: String): Long {
TODO("Not yet implemented")
}
override fun getInt(key: String): Int {
TODO("Not yet implemented")
}
override fun getFloat(key: String): Float {
TODO("Not yet implemented")
}
override fun getStringSet(key: String): Set<String>? {
TODO("Not yet implemented")
}
override fun registerOnSettingChangeListener(listener: Settings.OnSettingChangeListener) {
TODO("Not yet implemented")
}
override fun unregisterOnSettingChangeListener(listener: Settings.OnSettingChangeListener) {
TODO("Not yet implemented")
}
}
|
package filter
import (
"testing"
"github.com/bio-routing/bio-rd/net"
"github.com/stretchr/testify/assert"
)
// TestInRange exercises the prefix-length range matcher (begin-end inclusive).
func TestInRange(t *testing.T) {
	tests := []struct {
		name     string
		prefix   net.Prefix
		pattern  net.Prefix
		begin    uint8
		end      uint8
		expected bool
	}{
		{
			name:     "matches and in range (22-24)",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 2, 1, 0), 23),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 22),
			begin:    22,
			end:      24,
			expected: true,
		},
		{
			name:     "matches begin of range (22-24)",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 22),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 22),
			begin:    22,
			end:      24,
			expected: true,
		},
		{
			name:     "matches end of range (22-24)",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 22),
			begin:    22,
			end:      24,
			expected: true,
		},
		{
			name:     "matches begin and end of range (24-24)",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 24),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 24),
			begin:    24,
			end:      24,
			expected: true,
		},
		{
			name:     "smaller (22-24)",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 16),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 2, 4, 0), 22),
			begin:    22,
			end:      24,
			expected: false,
		},
		{
			name:     "longer (22-24)",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 2, 0, 128), 25),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 22),
			begin:    22,
			end:      24,
			expected: false,
		},
		{
			name:     "does not match",
			prefix:   net.NewPfx(net.IPv4FromOctets(2, 0, 0, 0), 23),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 2, 0, 0), 22),
			expected: false,
		},
	}

	for _, tc := range tests {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			filter := NewRouteFilter(tc.pattern, NewInRangeMatcher(tc.begin, tc.end))
			assert.Equal(t, tc.expected, filter.Matches(tc.prefix))
		})
	}
}
// TestExact exercises the exact-match route filter: only an identical
// prefix (address and length) matches.
func TestExact(t *testing.T) {
	tests := []struct {
		name     string
		prefix   net.Prefix
		pattern  net.Prefix
		expected bool
	}{
		{
			name:     "matches (0.0.0.0/0)",
			prefix:   net.NewPfx(net.IPv4(0), 0),
			pattern:  net.NewPfx(net.IPv4(0), 0),
			expected: true,
		},
		{
			name:     "matches (192.168.0.0)",
			prefix:   net.NewPfx(net.IPv4FromOctets(192, 168, 1, 1), 24),
			pattern:  net.NewPfx(net.IPv4FromOctets(192, 168, 1, 1), 24),
			expected: true,
		},
		{
			name:     "does not match",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 0, 0, 0), 8),
			pattern:  net.NewPfx(net.IPv4FromOctets(0, 0, 0, 0), 0),
			expected: false,
		},
		{
			name:     "longer",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 0, 0, 0), 8),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 0, 0, 0), 7),
			expected: false,
		},
		{
			name:     "lesser",
			prefix:   net.NewPfx(net.IPv4FromOctets(1, 0, 0, 0), 7),
			pattern:  net.NewPfx(net.IPv4FromOctets(1, 0, 0, 0), 8),
			expected: false,
		},
	}

	for _, tc := range tests {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			filter := NewRouteFilter(tc.pattern, NewExactMatcher())
			assert.Equal(t, tc.expected, filter.Matches(tc.prefix))
		})
	}
}
// TestOrLonger verifies the or-longer matcher: a prefix matches when it is
// equal to the pattern or more specific than it, but not less specific.
func TestOrLonger(t *testing.T) {
	cases := []struct {
		name    string
		prefix  net.Prefix
		pattern net.Prefix
		want    bool
	}{
		{
			name:    "longer",
			prefix:  net.NewPfx(net.IPv4FromOctets(1, 2, 3, 128), 25),
			pattern: net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			want:    true,
		},
		{
			name:    "exact",
			prefix:  net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			pattern: net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			want:    true,
		},
		{
			name:    "lesser",
			prefix:  net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 23),
			pattern: net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			want:    false,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(tt *testing.T) {
			filter := NewRouteFilter(tc.pattern, NewOrLongerMatcher())
			assert.Equal(tt, tc.want, filter.Matches(tc.prefix))
		})
	}
}
// TestLonger verifies the strictly-longer matcher: only prefixes MORE
// specific than the pattern match — equal or less specific ones do not.
func TestLonger(t *testing.T) {
	cases := []struct {
		name    string
		prefix  net.Prefix
		pattern net.Prefix
		want    bool
	}{
		{
			name:    "longer",
			prefix:  net.NewPfx(net.IPv4FromOctets(1, 2, 3, 128), 25),
			pattern: net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			want:    true,
		},
		{
			name:    "exact",
			prefix:  net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			pattern: net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			want:    false,
		},
		{
			name:    "lesser",
			prefix:  net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 23),
			pattern: net.NewPfx(net.IPv4FromOctets(1, 2, 3, 0), 24),
			want:    false,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(tt *testing.T) {
			filter := NewRouteFilter(tc.pattern, NewLongerMatcher())
			assert.Equal(tt, tc.want, filter.Matches(tc.prefix))
		})
	}
}
|
import { run } from 'testring';

// E2E flow for cookie handling on cookie.html:
// read an existing cookie, delete it (API + page button), set a new one,
// enumerate all cookies, then wipe everything and confirm the jar is empty.
run(async (api) => {
    const app = api.application;
    const root = app.root;

    await app.url('http://localhost:8080/cookie.html');

    // The page seeds a 'test' cookie; confirm its value, then remove it.
    const seededValue = await app.getCookie('test');
    await app.assert.equal(seededValue, 'TestData');
    await app.deleteCookie('test');

    // Clearing via the page button should leave the "found" text empty.
    await app.click(root.cookie_clear_button);
    const foundText = await app.getText(root.cookie_found_text);
    await app.assert.equal(foundText, '');

    // Deleted cookie must no longer be readable.
    const deletedValue = await app.getCookie('test');
    await app.assert.equal(deletedValue, undefined);

    // Set a fresh cookie and read it back.
    await app.setCookie({ 'name': 'foo', 'value': '1111' });
    const fooValue = await app.getCookie('foo');
    await app.assert.equal(fooValue, '1111');

    // getCookie() with no name returns the full jar.
    const jar = await app.getCookie();
    const expected = [{
        'domain':'localhost',
        'httpOnly':false,
        'name':'foo',
        'path':'/',
        'secure':false,
        'value':'1111',
    }];
    await app.assert.deepEqual(jar, expected);

    // deleteCookie() with no name clears everything.
    await app.deleteCookie();
    const jarAfterWipe = await app.getCookie();
    await app.assert.deepEqual(jarAfterWipe, []);
});
|
package drewcarlson.coingecko.models.global
import kotlinx.serialization.Serializable
/**
 * Envelope for a global-stats API response.
 *
 * Serializable wrapper holding a single [data] payload of type [GlobalData];
 * presumably mirrors a JSON body of the shape `{ "data": { ... } }` from the
 * CoinGecko `/global` endpoint — confirm against the API docs.
 */
@Serializable
data class Global(
    // The actual global market statistics; see GlobalData for the fields.
    val data: GlobalData
)
|
import { Injectable } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { v4 as uuid } from 'uuid';
import { Repository } from 'typeorm';
import { Student } from './student.entity';
import { CreateStudentInput } from './student.input';
@Injectable()
/**
 * CRUD-style service over the Student entity, backed by a TypeORM repository.
 */
export class StudentService {
  constructor(
    // Injected repository is never reassigned — mark it readonly.
    @InjectRepository(Student) private readonly studentRepo: Repository<Student>,
  ) {}

  /**
   * Persists a new student built from the input plus a server-generated
   * UUID primary key.
   *
   * @param studentInput validated creation payload
   * @returns the saved entity as returned by the repository
   */
  async createStudent(studentInput: CreateStudentInput): Promise<Student> {
    const student = this.studentRepo.create({
      ...studentInput,
      id: uuid(),
    });
    return this.studentRepo.save(student);
  }

  /**
   * Looks up a single student by id.
   *
   * @param id UUID primary key
   * @returns the student, or null when no row matches
   */
  async studentById(id: string): Promise<Student | null> {
    // NOTE(review): conditions-object form is the TypeORM 0.2.x API;
    // 0.3+ requires findOne({ where: { id } }) — confirm the installed version.
    const student = await this.studentRepo.findOne({ id });
    // findOne resolves to undefined on a miss; normalize to null per the signature.
    return student ?? null;
  }

  /**
   * Returns all students. Repository.find always resolves to an array
   * (empty when the table is empty), never null/undefined — the previous
   * falsy check here was dead code. The nullable return type is kept for
   * backward compatibility with existing callers.
   */
  async getStudents(): Promise<Student[] | null> {
    return this.studentRepo.find();
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.