text
stringlengths 27
775k
|
|---|
/**
* @file login
* @author mars
*/
|
import Grid from '@material-ui/core/Grid';
import Button from '@material-ui/core/Button';
import makeStyles from '@material-ui/core/styles/makeStyles';
import PureSearch from './puresearch';
// Styles for the search row's "Watch" button: tall, square-cornered,
// brand-red, and responsive (stacks below the field on small screens).
const useStyle = makeStyles(theme => ({
    button: {
        height: "100%",            // fill the row so it lines up with the input
        borderRadius: 0,           // square corners
        fontSize: '1.5rem',
        lineHeight: '25px',
        fontWeight: 550,
        backgroundColor: '#b42d1d', // brand red
        [theme.breakpoints.down("sm")]: {
            marginTop: 10,         // gap when the button wraps under the field
        },
        [theme.breakpoints.down('xs')]: {
            fontSize: '1rem',      // smaller label on phones
        }
    },
}));
const LocationSearch = (props) => {
const classes = useStyle();
return (
<Grid container item xs={12}>
<Grid item xs={12} md={10} container>
<PureSearch
searchValue={props.searchValue}
items={props.items}
handleLocation={props.handleLocation}
resetState={props.resetState}
submit={props.setSearch}
/>
</Grid>
<Grid item xs={12} md={2}>
<Button
fullWidth
className={classes.button}
onClick={props.setSearch}
variant="contained"
color="primary">
Watch
</Button>
</Grid>
</Grid>
);
};
export default LocationSearch;
|
# config valid only for current version of Capistrano
lock '3.9.1'
## Base
set :application, 'grape-skeleton'
set :repo_url, 'https://github.com/jbox-web/grape-skeleton.git'
set :deploy_to, '/data/www/grape-skeleton'
## SSH
set :ssh_options, {
keys: [File.join(Dir.home, '.ssh', 'id_rsa')],
forward_agent: true,
auth_methods: %w[publickey]
}
## RVM
set :rvm_ruby_version, '2.4.2'
## Bundler
set :bundle_flags, '--deployment'
## Rails
append :linked_dirs, 'log', 'tmp'
## Foreman
set :foreman_roles, :app
set :foreman_init_system, 'systemd'
set :foreman_services, %w[web]
set :foreman_export_path, "#{deploy_to}/.config/systemd/user"
set :foreman_options, {
template: "#{deploy_to}/.foreman/templates/systemd",
root: current_path,
timeout: 30,
}
## Deployment steps
namespace :deploy do
after 'deploy:check:linked_files', 'config:install'
after 'deploy:check:linked_files', 'foreman:install'
after 'deploy:published', 'bundler:clean'
after 'deploy:finished', 'foreman:export'
after 'deploy:finished', 'foreman:restart'
end
|
import React from "react";
import { Meta, Story } from "@storybook/react";
import { GreenLabel, GreenLabelProps } from "./greenLabel";
// Storybook registration: GreenLabel appears under Component/Text.
export default {
  title: "Component/Text/Green Label",
  component: GreenLabel,
} as Meta;

// Generic template forwarding every Storybook control arg to the component.
const Template: Story<GreenLabelProps> = (args) => <GreenLabel {...args} />;

// Default story: label with sample text.
export const Normal = Template.bind({});
Normal.args = {
  children: "Selected Path",
};
|
namespace Xamarin.Forms.PlatformConfiguration.TizenSpecific
{
	using FormsElement = Forms.Entry;

	/// <summary>
	/// Tizen platform-specific configuration for <see cref="Forms.Entry"/>:
	/// exposes an attached FontWeight property plus fluent accessors.
	/// </summary>
	public static class Entry
	{
		/// <summary>Attached bindable property holding the font weight name.</summary>
		public static readonly BindableProperty FontWeightProperty =
			BindableProperty.Create("FontWeight", typeof(string), typeof(FormsElement), FontWeight.None);

		/// <summary>Reads the font weight from the given element.</summary>
		public static string GetFontWeight(BindableObject element)
			=> (string)element.GetValue(FontWeightProperty);

		/// <summary>Writes the font weight on the given element.</summary>
		public static void SetFontWeight(BindableObject element, string weight)
			=> element.SetValue(FontWeightProperty, weight);

		/// <summary>Fluent getter for the platform configuration.</summary>
		public static string GetFontWeight(this IPlatformElementConfiguration<Tizen, FormsElement> config)
			=> GetFontWeight(config.Element);

		/// <summary>Fluent setter; returns the configuration for chaining.</summary>
		public static IPlatformElementConfiguration<Tizen, FormsElement> SetFontWeight(this IPlatformElementConfiguration<Tizen, FormsElement> config, string weight)
		{
			SetFontWeight(config.Element, weight);
			return config;
		}
	}
}
|
package gomvc
import (
"encoding/json"
"fmt"
"log"
"net/http"
"strconv"
"strings"
"github.com/getkin/kin-openapi/openapi3"
"github.com/jinzhu/inflection"
)
// NewCRUDActions builds the five standard REST actions (Index, Create,
// Show, Update, Delete) for the named resource.
//
// The resource name may be given with or without a leading slash; paths
// are always lowercase and slash-prefixed. Collection actions (Index,
// Create) live at "/<name>", detail actions (Show, Update, Delete) at
// "/<name>/:id".
func NewCRUDActions(name string) []Action {
	title := strings.Title(name)
	singular := inflection.Singular(title)

	// Normalize the base path once so collection and detail routes agree.
	// The original code only stripped/added the leading "/" for the
	// collection routes, producing "//name/:id" for slash-prefixed input.
	basePath := strings.ToLower(name)
	if !strings.HasPrefix(basePath, "/") {
		basePath = "/" + basePath
	}

	actions := []Action{}
	for _, action := range []Action{
		{Resource: title, SingularResource: singular, Name: "Index", Method: http.MethodGet},
		{Resource: title, SingularResource: singular, Name: "Create", Method: http.MethodPost},
	} {
		action.Path = basePath
		action.Handler = strings.Title(action.Name)
		actions = append(actions, action)
	}
	for _, detailAction := range []Action{
		{Resource: title, SingularResource: singular, Name: "Show", Method: http.MethodGet},
		{Resource: title, SingularResource: singular, Name: "Update", Method: http.MethodPut},
		{Resource: title, SingularResource: singular, Name: "Delete", Method: http.MethodDelete},
	} {
		detailAction.Path = fmt.Sprintf("%s/:id", basePath)
		detailAction.Handler = strings.Title(detailAction.Name)
		actions = append(actions, detailAction)
	}
	return actions
}
// Action describes one generated route: the resource it belongs to,
// the HTTP verb, the URL path, and the handler name to bind.
type Action struct {
	// SingularResource is the singular form of Resource (e.g. "User"
	// for "Users"), as computed by the inflection package.
	SingularResource string
	// Resource is loosely related with what the Controller is and has many actions
	Resource string
	// Name is the function name bound to the Controller
	Name string
	// Method is the HTTP verb
	Method string `json:"method,omitempty"`
	// Path is the associated url path
	Path string `json:"path,omitempty"`
	// Handler is the generic resource action name e.g. Index, Create
	Handler string `json:"handler,omitempty"`
}
// Response is a simplified view of an OpenAPI 3 response: a derived
// name, the numeric HTTP status code, and the original component ref.
type Response struct {
	Name string // derived component/schema name
	Code int    // HTTP status code (0 when non-numeric, e.g. "default")
	Ref  string // original "$ref" string, empty for inline responses
}
// NewResponses creates a list of responses from an OA3 response ref
func NewResponses(specResponses map[string]*openapi3.ResponseRef) []Response {
var responses []Response
responseSet := map[string]bool{}
for statusCode, resRef := range specResponses {
r := NewResponse(statusCode, resRef)
if _, ok := responseSet[r.Name]; !ok {
responseSet[r.Name] = true
responses = append(responses, r)
}
}
return responses
}
// NewResponse is a constructor for the custom Response object
func NewResponse(statusCode string, resRef *openapi3.ResponseRef) Response {
code, _ := strconv.Atoi(statusCode)
return Response{
Code: code,
Ref: resRef.Ref,
Name: resolveResponseName(resRef),
}
}
// resolveResponseName derives a display name for a response. Named
// responses resolve from their ref; inline responses fall back to the
// name of the first schema found in the content map. Note that Go map
// iteration order is random, so with multiple content types the chosen
// entry can vary between runs (see TODO below).
func resolveResponseName(resRef *openapi3.ResponseRef) string {
	if resRef.Ref == "" {
		for _, obj := range resRef.Value.Content {
			name := resolveSchemaName(obj.Schema)
			// TODO: handle multiple
			return name
		}
	}
	// Named response -- or an inline response with an empty content map,
	// in which case this is getComponentName("").
	return getComponentName(resRef.Ref)
}
// resolveSchemaName derives a component name for a schema reference.
// Named schemas resolve from their own ref; inline array schemas fall
// back to the ref of their item type.
func resolveSchemaName(schema *openapi3.SchemaRef) string {
	if schema.Ref == "" {
		// Inline schema: only array schemas carry Items. Guard against
		// nil (e.g. an inline object schema) instead of panicking.
		if schema.Value == nil || schema.Value.Items == nil {
			return ""
		}
		return getComponentName(schema.Value.Items.Ref)
	}
	return getComponentName(schema.Ref)
}
// PrintJSON pretty-prints any value as tab-indented JSON to the
// standard logger; intended for debugging. Marshal failures (e.g.
// unsupported types such as channels) are logged instead of being
// silently dropped as before.
func PrintJSON(v interface{}) {
	b, err := json.MarshalIndent(v, "", "\t")
	if err != nil {
		log.Printf("PrintJSON: %v", err)
		return
	}
	log.Println(string(b))
}
|
var userRoutes = require('./userRoutes');
var techologieRoutes = require('./technologieRoutes');
module.exports = function(router) {
return {
userRoutes: userRoutes(router),
techologieRoutes: techologieRoutes(router)
};
};
|
Change log
==========
This document lists all notable changes in the GraphVite library.
v0.2.2 - 2020-03-11
-------------------
- New model QuatE and its benchmarks on 5 knowledge graph datasets.
- Add an option to skip `faiss` in compilation.
- Fix instructions for conda installation.
v0.2.1 - 2019-11-12
-------------------
- New dataset `Wikidata5m` and its benchmarks,
including TransE, DistMult, ComplEx, SimplE and RotatE.
- Add interface for loading pretrained models and save hyperparameters.
- Add weight clip in asynchronous self-adversarial negative sampling.
v0.2.0 - 2019-10-11
-------------------
- Add scalable multi-GPU prediction for node embedding and knowledge graph embedding.
Evaluation on link prediction is 4.6x faster than v0.1.0.
- New demo dataset `math` and entity prediction evaluation for knowledge graph.
- Support Kepler and Turing GPU architectures.
- Automatically choose the best episode size with regard to the RAM limit.
- Add template config files for applications.
- Change the update of global embeddings from average to accumulation. Fix a serious
numeric problem in the update.
- Move file format settings from graph to application. Now one can customize formats
and use comments in evaluation files. Add document for data format.
- Separate GPU implementation into training routines and models. Routines are in
`include/instance/gpu/*` and models are in `include/instance/model/*`.
v0.1.0 - 2019-08-05
-------------------
- Multi-GPU training of large-scale graph embedding
- 3 applications: node embedding, knowledge graph embedding and graph &
high-dimensional data visualization
- Node embedding
- Model: DeepWalk, LINE, node2vec
- Evaluation: node classification, link prediction
- Knowledge graph embedding
- Model: TransE, DistMult, ComplEx, SimplE, RotatE
- Evaluation: link prediction
- Graph & High-dimensional data visualization
- Model: LargeVis
- Evaluation: visualization(2D / 3D), animation(3D), hierarchy(2D)
|
import pytest, random, math, numpy, time, functools
import irr
def run_many(case):
    """Decorator: repeat a randomized test case 1000 times.

    ``case`` must return a ``(cashflows, expected_rate)`` pair; every
    iteration checks ``irr.irr`` against the expected rate.
    """
    @functools.wraps(case)
    def wrapped():
        for _ in range(1000):
            cashflows, expected = case()
            assert irr.irr(cashflows) == pytest.approx(expected)
    return wrapped
@run_many
def test_simple_bond():
    """Two-flow bond: pay x/(1+r) now, receive x back one period later."""
    rate = math.exp(random.gauss(0, 1)) - 1
    amount = random.gauss(0, 1)
    return [amount / (1 + rate), -amount], rate
@run_many
def test_slightly_longer_bond(n=10):
    """n-period zero-coupon bond: one inflow now, one outflow at maturity."""
    rate = math.exp(random.gauss(0, 1)) - 1
    amount = random.gauss(0, 1)
    cashflows = [amount] + [0.0] * (n - 2) + [-amount * (1 + rate) ** (n - 1)]
    return cashflows, rate
@run_many
def test_more_nonzero(n=10):
    """Random positive flows plus a final payment that balances at rate r."""
    rate = math.exp(random.gauss(0, 1)) - 1
    flows = [random.random() for _ in range(n - 1)]
    balance = -sum(f * (1 + rate) ** (n - i - 1) for i, f in enumerate(flows))
    flows.append(balance)
    return flows, rate
def test_performance():
    """Benchmark irr.irr against numpy.irr on zero-coupon bonds of
    increasing length and plot average timings; plotting is skipped
    silently when matplotlib/seaborn are unavailable.
    """
    us_times = []
    np_times = []
    ns = [10, 20, 50, 100]
    for n in ns:
        k = 100
        # Accumulated wall time per implementation (ours, numpy).
        # Renamed from `sums` / loop var `sum`, which shadowed the builtin.
        totals = [0.0, 0.0]
        for _ in range(k):
            r = math.exp(random.gauss(0, 1.0 / n)) - 1
            x = random.gauss(0, 1)
            d = [x] + [0.0] * (n - 2) + [-x * (1 + r) ** (n - 1)]
            results = []
            for i, f in enumerate([irr.irr, numpy.irr]):
                t0 = time.time()
                results.append(f(d))
                totals[i] += time.time() - t0
            # numpy.irr can fail to converge (NaN); only compare when valid.
            if not numpy.isnan(results[1]):
                assert results[0] == pytest.approx(results[1])
        for times, total in zip([us_times, np_times], totals):
            times.append(total / k)
    try:
        from matplotlib import pyplot
        import seaborn
    except ImportError:
        return
    pyplot.plot(ns, us_times, label='Our library')
    pyplot.plot(ns, np_times, label='Numpy')
    pyplot.xlabel('n')
    pyplot.ylabel('time(s)')
    pyplot.yscale('log')
    pyplot.savefig('plot.png')
|
function createLoggingWritable (writableOrig) {
const proto = Object.getPrototypeOf(writableOrig);
function LoggingWritable(writableOrig) {
this.writableOrig = writableOrig;
}
LoggingWritable.prototype.write = function(chunk, encoding, callback) {
if (!callback && typeof encoding === 'function') {
callback = encoding;
encoding = undefined;
}
console.log('Writing ', chunk);
return this.writableOrig.write(chunk, encoding, function() {
console.log('Finished writing ', chunk);
callback && callback();
});
};
LoggingWritable.prototype.on = function() {
return this.writableOrig.on.apply(this.writableOrig, arguments);
};
LoggingWritable.prototype.end = function() {
return this.writableOrig.end.apply(this.writableOrig, arguments);
};
return new LoggingWritable(writableOrig);
}
// Demo: writes routed through the proxy are logged...
const fs = require('fs');
const writable = fs.createWriteStream('test.txt');
const writableProxy = createLoggingWritable(writable);
writableProxy.write('First chunk');
writableProxy.write('Second chunk');
// ...while writes on the original stream bypass the logging entirely.
writable.write('This is not logged');
writableProxy.end();
|
// Created on: 2016-04-07
// Copyright (c) 2016 OPEN CASCADE SAS
// Created by: Oleg AGASHIN
//
// This file is part of Open CASCADE Technology software library.
//
// This library is free software; you can redistribute it and/or modify it under
// the terms of the GNU Lesser General Public License version 2.1 as published
// by the Free Software Foundation, with special exception defined in the file
// OCCT_LGPL_EXCEPTION.txt. Consult the file LICENSE_LGPL_21.txt included in OCCT
// distribution for complete text of the license and disclaimer of any warranty.
//
// Alternatively, this file may be used under the terms of Open CASCADE
// commercial license or contractual agreement.
#ifndef _IMeshTools_Parameters_HeaderFile
#define _IMeshTools_Parameters_HeaderFile
#include <IMeshTools_MeshAlgoType.hxx>
#include <Precision.hxx>
//! Structure storing meshing parameters
struct IMeshTools_Parameters {

  //! Default constructor.
  //! Note: -1.0 for AngleInterior / DeflectionInterior / MinSize appears
  //! to be a "derive from the boundary values" sentinel -- confirm in
  //! the consuming algorithms before relying on it.
  IMeshTools_Parameters ()
    :
    MeshAlgo (IMeshTools_MeshAlgoType_DEFAULT),
    Angle(0.5),
    Deflection(0.001),
    AngleInterior(-1.0),
    DeflectionInterior(-1.0),
    MinSize (-1.0),
    InParallel (Standard_False),
    Relative (Standard_False),
    InternalVerticesMode (Standard_True),
    ControlSurfaceDeflection (Standard_True),
    CleanModel (Standard_True),
    AdjustMinSize (Standard_False),
    ForceFaceDeflection (Standard_False),
    AllowQualityDecrease (Standard_False)
  {
  }

  //! Returns factor used to compute default value of MinSize
  //! (minimum mesh edge length) from deflection
  static Standard_Real RelMinSize()
  {
    return 0.1;
  }

  //! 2D Delaunay triangulation algorithm factory to use
  IMeshTools_MeshAlgoType MeshAlgo;

  //! Angular deflection used to tessellate the boundary edges
  Standard_Real                                    Angle;

  //! Linear deflection used to tessellate the boundary edges
  Standard_Real                                    Deflection;

  //! Angular deflection used to tessellate the face interior
  Standard_Real                                    AngleInterior;

  //! Linear deflection used to tessellate the face interior
  Standard_Real                                    DeflectionInterior;

  //! Minimum size parameter limiting size of triangle's edges to prevent
  //! sinking into amplification in case of distorted curves and surfaces.
  Standard_Real                                    MinSize;

  //! Switches on/off multi-thread computation
  Standard_Boolean                                 InParallel;

  //! Switches on/off relative computation of edge tolerance<br>
  //! If true, deflection used for the polygonalisation of each edge will be
  //! <defle> * Size of Edge. The deflection used for the faces will be the
  //! maximum deflection of their edges.
  Standard_Boolean                                 Relative;

  //! Mode to take or not to take internal face vertices into account
  //! in triangulation process
  Standard_Boolean                                 InternalVerticesMode;

  //! Parameter to check the deviation of triangulation and interior of
  //! the face
  Standard_Boolean                                 ControlSurfaceDeflection;

  //! Cleans temporary data model when algorithm is finished.
  Standard_Boolean                                 CleanModel;

  //! Enables/disables local adjustment of min size depending on edge size.
  //! Disabled by default.
  Standard_Boolean                                 AdjustMinSize;

  //! Enables/disables usage of shape tolerances for computing face deflection.
  //! Disabled by default.
  Standard_Boolean                                 ForceFaceDeflection;

  //! Allows/forbids the decrease of the quality of the generated mesh
  //! over the existing one.
  Standard_Boolean                                 AllowQualityDecrease;
};
#endif
|
package gorequires
import (
"os"
"testing"
"github.com/dk1027/gotesttools"
)
// TestEnv checks that Env returns the value of a variable that is set.
func TestEnv(t *testing.T) {
	const key, want = "myvar", "123"
	os.Setenv(key, want)
	got := Env(key)
	gotesttools.AssertTrue(t, got == want)
}
// TestEnv2 expects Env to panic for an unset variable ("myvar2" is
// never set anywhere in this file). The deferred AssertPanic validates
// the panic; the final assertion is unreachable when the panic fires
// as expected.
func TestEnv2(t *testing.T) {
	doAssert := gotesttools.AssertPanic(t)
	defer doAssert()
	val := Env("myvar2")
	gotesttools.AssertTrue(t, val == "123")
}
// TestFile checks that File resolves a file in the current directory
// and reports its base name.
func TestFile(t *testing.T) {
	const filename = "gorequires_test.go"
	info := File(filename)
	gotesttools.AssertTrue(t, filename == info.Name())
}
// TestFile2 checks that File accepts a relative path and that Name()
// returns only the base name, not the full path.
func TestFile2(t *testing.T) {
	path := "../gorequires/gorequires_test.go"
	info := File(path)
	gotesttools.AssertTrue(t, "gorequires_test.go" == info.Name())
}
// TestFile3 expects File to panic for a path that does not exist; the
// deferred AssertPanic validates the panic and the final assertion is
// unreachable when it fires as expected.
func TestFile3(t *testing.T) {
	doAssert := gotesttools.AssertPanic(t)
	defer doAssert()
	filename := "does-not-exist"
	fi := File(filename)
	gotesttools.AssertTrue(t, filename == fi.Name())
}
|
from numpy import repeat
from numpy import where
from numpy import zeros
from gwlfe.BMPs.Stream.FilterEff import FilterEff
from gwlfe.BMPs.Stream.FilterEff import FilterEff_f
from gwlfe.Input.LandUse.NLU import NLU
from gwlfe.Input.WaterBudget.Water import Water
from gwlfe.Input.WaterBudget.Water import Water_f
from gwlfe.Memoization import memoize
from gwlfe.MultiUse_Fxns.Discharge.AdjUrbanQTotal import AdjUrbanQTotal
from gwlfe.MultiUse_Fxns.Discharge.AdjUrbanQTotal import AdjUrbanQTotal_f
from gwlfe.MultiUse_Fxns.Runoff.RetentionEff import RetentionEff
from gwlfe.MultiUse_Fxns.Runoff.RetentionEff import RetentionEff_f
from gwlfe.Output.Loading.SurfaceLoad import SurfaceLoad
from gwlfe.Output.Loading.SurfaceLoad import SurfaceLoad_f
@memoize
def DisSurfLoad(NYrs, DaysMonth, InitSnow_0, Temp, Prec, Nqual, NRur, NUrb, Area, CNI_0, AntMoist_0, Grow_0, CNP_0,
                Imper, ISRR, ISRA, Qretention, PctAreaInfil, LoadRateImp, LoadRatePerv, Storm,
                UrbBMPRed, DisFract, FilterWidth, PctStrmBuf):
    """Daily dissolved surface load per urban land use and quality type.

    Scales the surface load by the dissolved fraction, then reduces it by
    retention efficiency and stream-buffer filtering. A cell is only
    populated on days that are above freezing, have water available
    (> 0.01) and non-trivial adjusted urban discharge (> 0.001).
    Returns an array of shape (NYrs, 12, 31, 16, Nqual).
    """
    # NOTE(review): the land-use axis is hard-coded to 16, but the loop
    # below indexes l in range(NRur, nlu) directly -- this silently
    # assumes nlu <= 16. The vectorized DisSurfLoad_f sizes this axis as
    # (nlu - NRur) instead; confirm which shape callers rely on.
    result = zeros((NYrs, 12, 31, 16, Nqual))
    water = Water(NYrs, DaysMonth, InitSnow_0, Temp, Prec)
    nlu = NLU(NRur, NUrb)
    adjurbanqtotal = AdjUrbanQTotal(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0,
                                    Grow_0, CNP_0,
                                    Imper, ISRR, ISRA, Qretention, PctAreaInfil)
    surfaceload = SurfaceLoad(NYrs, DaysMonth, InitSnow_0, Temp, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0, Grow_0,
                              CNP_0, Imper, ISRR, ISRA, Qretention, PctAreaInfil, Nqual, LoadRateImp, LoadRatePerv,
                              Storm, UrbBMPRed)
    retentioneff = RetentionEff(NYrs, DaysMonth, InitSnow_0, Temp, Prec, Qretention, NRur, NUrb, Area, CNI_0,
                                AntMoist_0, Grow_0, CNP_0, Imper, ISRR, ISRA, PctAreaInfil)
    filtereff = FilterEff(FilterWidth)
    for Y in range(NYrs):
        for i in range(12):
            for j in range(DaysMonth[Y][i]):
                # Above freezing and enough water to generate runoff.
                if Temp[Y][i][j] > 0 and water[Y][i][j] > 0.01:
                    if adjurbanqtotal[Y][i][j] > 0.001:
                        # Urban land uses only (indices NRur..nlu-1).
                        for l in range(NRur, nlu):
                            for q in range(Nqual):
                                # Dissolved share of the surface load...
                                result[Y][i][j][l][q] = DisFract[l][q] * surfaceload[Y][i][j][l][q]
                                # ...reduced by retention and buffer filtering.
                                result[Y][i][j][l][q] *= (1 - retentioneff[Y][i][j]) * (1 - (filtereff * PctStrmBuf))
                    else:
                        pass
                else:
                    pass
    return result
@memoize
def DisSurfLoad_f(NYrs, DaysMonth, InitSnow_0, Temp, Prec, Nqual, NRur, NUrb, Area, CNI_0, AntMoist_0, Grow_0, CNP_0,
                  Imper, ISRR, ISRA, Qretention, PctAreaInfil, LoadRateImp, LoadRatePerv,
                  Storm, UrbBMPRed, DisFract, FilterWidth, PctStrmBuf):
    """Vectorized variant of DisSurfLoad.

    Same computation as the loop version, expressed with numpy masking.
    NOTE(review): the land-use axis here has length (nlu - NRur), whereas
    DisSurfLoad hard-codes 16 and indexes land uses at their absolute
    position -- the two outputs are NOT shape-compatible; confirm intent.
    """
    nlu = NLU(NRur, NUrb)
    result = zeros((NYrs, 12, 31, nlu - NRur, Nqual))
    water = Water_f(NYrs, DaysMonth, InitSnow_0, Temp, Prec)
    adjurbanqtotal = AdjUrbanQTotal_f(NYrs, DaysMonth, Temp, InitSnow_0, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0,
                                      Grow_0, CNP_0,
                                      Imper, ISRR, ISRA, Qretention, PctAreaInfil)
    surfaceload = SurfaceLoad_f(NYrs, DaysMonth, InitSnow_0, Temp, Prec, NRur, NUrb, Area, CNI_0, AntMoist_0, Grow_0,
                                CNP_0,
                                Imper, ISRR, ISRA, Qretention, PctAreaInfil, Nqual, LoadRateImp, LoadRatePerv, Storm,
                                UrbBMPRed)
    # Broadcast retention efficiency over the land-use and quality axes.
    retentioneff = RetentionEff_f(NYrs, DaysMonth, InitSnow_0, Temp, Prec, Qretention, NRur, NUrb, Area, CNI_0,
                                  AntMoist_0, Grow_0, CNP_0, Imper, ISRR, ISRA, PctAreaInfil)[:, :, :, None, None]
    retentioneff = repeat(repeat(retentioneff, nlu - NRur, axis=3), Nqual, axis=4)
    filtereff = FilterEff_f(FilterWidth)
    # Same eligibility mask as the loop version: above freezing, water
    # available, and non-trivial adjusted urban discharge.
    nonzero = where((Temp > 0) & (water > 0.01) & (adjurbanqtotal > 0.001))
    result[nonzero] = surfaceload[nonzero] * DisFract[NRur:] * (1 - retentioneff[nonzero]) * (
        1 - (filtereff * PctStrmBuf))
    return result
|
#ifndef GUI_H
#define GUI_H
#include "GLES2/gl2.h"
#include "game.h"
#include "util/text.h"
struct Rect;
class Element;
using std::vector;
//! A GUI layout: owns a set of elements, manages their vertex data and
//! GL program handles, and routes touch events to them.
class Layout
{
private:
    GLuint program;            // shader program handle
    GLuint u_MvpMatrixHandle;  // uniform location: model-view-projection matrix
    GLuint a_PositionHandle;   // attribute location: vertex position
    GLuint a_ColorHandle;      // attribute location: vertex color
    vector<unique_ptr<Element>> elements;  // owned UI elements
    vector<GLfloat> vertices;  // CPU-side vertex data
    textUnit name;             // layout title text
    int touchIndex = -1;       // presumably the active touch/element index; -1 = none (confirm in impl)
    float px, py;              // presumably the last touch position (confirm in impl)
    int token;
    void init ();
public:
    Layout();
    //! Draws the layout.
    void draw ();
    //! Adds a button with the given label, geometry, colors and click handler.
    void addButton(const string &label, Rect rect, vec4 bgColor, vec4 fgColor, function<void()> f = []{});
    void setName (const string layoutName);
    /* -- Touch event routing -- */
    void touchDown(float x, float y);
    void touchMove(float x, float y);
    void touchUp(float x, float y);
    //! Adds a list of string entries with the given geometry and colors.
    void addList(const string name, const vector<string> listElements,
                 Rect rect, vec4 bgColor, vec4 fgColor, function<void()> f = []{});
    //! Looks up an element by its id.
    Element& getById(const string id);
};
#endif // GUI_H
|
use differential_dataflow::{difference::Semigroup, AsCollection, Collection};
use timely::{
dataflow::{
channels::pact::Pipeline, operators::generic::builder_rc::OperatorBuilder, Scope, Stream,
},
Data,
};
/// Splits a stream/collection into two outputs in a single pass.
///
/// `logic` maps every input record to a pair of iterators; items from
/// the left iterator feed the first output, items from the right feed
/// the second.
pub trait FlatSplit<D, Left, Right> {
    type LeftStream;
    type RightStream;

    /// Splits using the default operator name "FlatSplit".
    fn flat_split<L, LeftIter, RightIter>(&self, logic: L) -> (Self::LeftStream, Self::RightStream)
    where
        L: FnMut(D) -> (LeftIter, RightIter) + 'static,
        LeftIter: IntoIterator<Item = Left>,
        RightIter: IntoIterator<Item = Right>,
    {
        self.flat_split_named("FlatSplit", logic)
    }

    /// Splits using a caller-supplied operator name (useful when
    /// inspecting the dataflow graph).
    fn flat_split_named<L, LeftIter, RightIter>(
        &self,
        name: &str,
        logic: L,
    ) -> (Self::LeftStream, Self::RightStream)
    where
        L: FnMut(D) -> (LeftIter, RightIter) + 'static,
        LeftIter: IntoIterator<Item = Left>,
        RightIter: IntoIterator<Item = Right>;
}
impl<S, D, Left, Right> FlatSplit<D, Left, Right> for Stream<S, D>
where
    S: Scope,
    D: Data,
    Left: Data,
    Right: Data,
{
    type LeftStream = Stream<S, Left>;
    type RightStream = Stream<S, Right>;

    fn flat_split_named<L, LeftIter, RightIter>(
        &self,
        name: &str,
        mut logic: L,
    ) -> (Self::LeftStream, Self::RightStream)
    where
        L: FnMut(D) -> (LeftIter, RightIter) + 'static,
        LeftIter: IntoIterator<Item = Left>,
        RightIter: IntoIterator<Item = Right>,
    {
        // Reusable buffer for draining each incoming batch.
        let mut buffer = Vec::new();
        // Hand-built one-input/two-output operator.
        let mut builder = OperatorBuilder::new(name.to_owned(), self.scope());
        // Purely data-driven: no frontier notifications required.
        builder.set_notify(false);
        let mut input = builder.new_input(self, Pipeline);
        let (mut left_out, left_stream) = builder.new_output();
        let (mut right_out, right_stream) = builder.new_output();
        builder.build(move |_capabilities| {
            move |_frontiers| {
                let (mut left_out, mut right_out) = (left_out.activate(), right_out.activate());
                input.for_each(|capability, data| {
                    // Take ownership of the batch without copying.
                    data.swap(&mut buffer);
                    let (mut left_session, mut right_session) = (
                        left_out.session(&capability),
                        right_out.session(&capability),
                    );
                    // Route each record's two halves to their outputs,
                    // both at the input's capability (no timestamp change).
                    for data in buffer.drain(..) {
                        let (left, right) = logic(data);
                        left_session.give_iterator(left.into_iter());
                        right_session.give_iterator(right.into_iter());
                    }
                });
            }
        });
        (left_stream, right_stream)
    }
}
impl<S, D, R, Left, Right> FlatSplit<D, Left, Right> for Collection<S, D, R>
where
    S: Scope,
    S::Timestamp: Clone,
    D: Data,
    R: Semigroup + Clone,
    Left: Data,
    Right: Data,
{
    type LeftStream = Collection<S, Left, R>;
    type RightStream = Collection<S, Right, R>;

    fn flat_split_named<L, LeftIter, RightIter>(
        &self,
        name: &str,
        mut logic: L,
    ) -> (Self::LeftStream, Self::RightStream)
    where
        L: FnMut(D) -> (LeftIter, RightIter) + 'static,
        LeftIter: IntoIterator<Item = Left>,
        RightIter: IntoIterator<Item = Right>,
    {
        // Delegate to the Stream impl on the inner (data, time, diff)
        // triples, re-attaching the record's timestamp and difference to
        // every produced item on both sides.
        let (left, right) = self
            .inner
            .flat_split_named(name, move |(data, time, diff)| {
                let (left, right) = logic(data);
                // Clone time/diff for the left closure; the right closure
                // takes ownership of the originals.
                let (left_time, left_diff) = (time.clone(), diff.clone());
                let left = left
                    .into_iter()
                    .map(move |left| (left, left_time.clone(), left_diff.clone()));
                let right = right
                    .into_iter()
                    .map(move |right| (right, time.clone(), diff.clone()));
                (left, right)
            });
        // Wrap the raw streams back up as collections.
        (left.as_collection(), right.as_collection())
    }
}
|
package entities
import custommath.Complex
import physics.Polygon
/**
* A Bullet of the owner's team that passes through a BulletAmplifier sees its radius double, dealing more damage.
*
 * transformedBullets is the list of ids of the bullets that this BulletAmplifier has already amplified. It is
 * stored in descending order of ids. The idea is that, more often than not, a newly arriving bullet will have a
 * larger id, so it is much quicker to check whether a bullet has already been amplified, and also quicker to add
 * it to the list.
*/
class BulletAmplifier(
    val id: Long,
    val creationTime: Long,
    val ownerId: Long,
    val xPos: Double,
    val yPos: Double,
    val shape: Polygon,
    val rotation: Double,
    transformedBullets: List[Long]
) extends Body {

  /** Returns whether `bulletId` has already been amplified.
   *
   * `idsList` is sorted in descending order, so as soon as the list is
   * empty or its head is smaller than `bulletId`, the id cannot occur
   * further on and we can answer `false` immediately. Otherwise the id
   * either matches the head or may still be in the tail.
   */
  def isBulletAmplified(bulletId: Long, idsList: List[Long] = transformedBullets): Boolean =
    (!(idsList.isEmpty || idsList.head < bulletId)) && (
      idsList.head == bulletId || isBulletAmplified(bulletId, idsList.tail)
    )

  /** Registers `bulletId` as amplified, preserving the descending order
   * of the ids list.
   *
   * Bug fix: when no element is smaller than `bulletId` (the new id is
   * the smallest so far), `indexWhere` returns -1 and the original
   * `splitAt(-1)` prepended the id at the head, breaking the descending
   * invariant that `isBulletAmplified` relies on for early termination.
   * The id is now appended at the end in that case.
   */
  def addBulletAmplified(bulletId: Long): List[Long] = {
    val splitIdx = transformedBullets.indexWhere(_ < bulletId)
    if (splitIdx == -1) transformedBullets :+ bulletId // smallest id so far (or empty list)
    else {
      val (bigger, smaller) = transformedBullets.splitAt(splitIdx)
      bigger ++ (bulletId +: smaller)
    }
  }
}
object BulletAmplifier {
  // Factor applied to a bullet's radius when it passes through the amplifier.
  val amplifyingCoefficient: Double = 2.0

  // Dimensions of the rectangular amplifier shape, in game units.
  private val length: Double = 100
  private val width: Double = 20

  // Axis-aligned rectangle centred on the origin.
  private val vertices: Vector[Complex] = Vector(
    Complex(width / 2, -length / 2),
    Complex(width / 2, length / 2),
    Complex(-width / 2, length / 2),
    Complex(-width / 2, -length / 2)
  )
  val bulletAmplifierShape: Polygon = Polygon(vertices, convex = true)

  // Lifetime of an amplifier -- presumably milliseconds, matching the
  // Long creationTime field; confirm against the game loop.
  val lifeTime: Long = 10000
}
|
# JRCLUST
## Note
**JRCLUST is no longer being actively maintained.**
If you still want to use JRCLUST, we recommend using the [latest release](https://github.com/JaneliaSciComp/JRCLUST/releases/tag/v4.1.0) or cloning from `main`, rather than `master`.
JRCLUST is a scalable and customizable package for spike sorting on [high-density silicon probes](https://www.nature.com/articles/nature24636).
It is written in MATLAB and CUDA.
JRCLUST was originally developed by [James Jun](https://www.simonsfoundation.org/team/james-jun/).
## Installing JRCLUST
If you'd like to
test the latest development code, you can [clone the
repository](https://help.github.com/articles/cloning-a-repository/) to
your computer. If you want to stay on a release, head to the [releases
page](https://github.com/JaneliaSciComp/JRCLUST/releases) and download
the latest release.
Run the following command in MATLAB (you may want to add it to your [startup
script](https://www.mathworks.com/help/matlab/ref/startup.html)):
```matlab
addpath('/path/to/JRCLUST');
```
You may also need to recompile the CUDA code if you're not on Windows.
Do this with
```matlab
jrclust.CUDA.compileCUDA();
```
## Questions?
Read the documentation [here](https://jrclust.readthedocs.io/en/latest/index.html) and the original bioRxiv paper [here](https://www.biorxiv.org/content/early/2017/01/30/101030).
Still have questions?
Visit our [Gitter community](https://gitter.im/JRCLUST/community) and ask!
|
# Copy the query result directories from HDFS on the cluster master down
# to the container's local /myapp/ directory.
docker exec mycluster-master hadoop fs -fs hdfs://mycluster-master:9000 -copyToLocal /time_query1/ /myapp/
docker exec mycluster-master hadoop fs -fs hdfs://mycluster-master:9000 -copyToLocal /time_query2/ /myapp/
|
ok 1 - use Math::MySum;
ok 2 - Math::MySum->can('my_sum')
ok 3 - Sum of 1 and 3 is 4
ok 4 - String has mel in it
1..4
|
/* This file is part of Jellyfish.
Jellyfish is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Jellyfish is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Jellyfish. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef __JELLYFISH_ALIGNED_VALUE_HPP__
#define __JELLYFISH_ALIGNED_VALUE_HPP__
#include <jellyfish/invertible_hash_array.hpp>
#include <jellyfish/direct_indexing_array.hpp>
namespace jellyfish {
namespace aligned_values {
    //! Hash array of aligned key/value pairs: keys live in an invertible
    //! hash array, values in a parallel direct-indexing array addressed
    //! by the key's slot id.
    template<typename _key_t, typename _val_t, typename atomic, typename mem_block_t>
    class array : public storage_t {
    public:
      typedef _key_t key_t;
      typedef _val_t val_t;

    private:
      typedef typename ::jellyfish::invertible_hash::array<key_t, atomic, mem_block_t> key_ary_t;
      typedef typename ::jellyfish::direct_indexing::array<size_t, val_t, atomic, mem_block_t> val_ary_t;

      key_ary_t keys;  // hashed key storage
      val_ary_t vals;  // value storage, indexed by key slot id

    public:
      //! Allocates a fresh array; the value array is sized from the key
      //! array's lsize.
      array(size_t _size, uint_t _key_len, uint_t _reprobe_limit,
            size_t *_reprobes) :
        keys(_size, _key_len, 0, _reprobe_limit, _reprobes),
        vals(keys.get_lsize())
      { }

      //! Wraps existing raw memory (e.g. a file mapping) for keys/values.
      array(char *keys_map, char *vals_map,
            size_t _size, uint_t _key_len, uint_t _reprobe_limit,
            size_t *_reprobes, SquareBinaryMatrix &hash_matrix,
            SquareBinaryMatrix &hash_inv_matrix) :
        keys(keys_map, _size, _key_len, 0, _reprobe_limit, _reprobes,
             hash_matrix, hash_inv_matrix),
        vals(vals_map, keys.get_lsize())
      { }

      void set_matrix(SquareBinaryMatrix &m) {
        keys.set_matrix(m);
      }

      /* -- Geometry accessors, forwarded to the key array -- */
      size_t get_size() const { return keys.get_size(); }
      uint_t get_key_len() const { return keys.get_key_len(); }
      uint_t get_val_len() const { return keys.get_val_len(); }
      uint_t get_max_reprobe() const { return keys.get_max_reprobe(); }
      size_t get_max_reprobe_offset() const {
        return keys.get_max_reprobe_offset();
      }
      uint_t get_block_len() const { return keys.get_block_len(); }
      //! Block word length extended with the per-entry value payload.
      uint_t get_block_word_len() const {
        return keys.get_block_word_len() + keys.get_block_len() * sizeof(val_t);
      }
      size_t floor_block(size_t entries, size_t &blocks) const {
        return keys.floor_block(entries, blocks);
      }

      /* -- Zeroing and serialization, each applied to its own store -- */
      void zero_keys(const size_t start, const size_t length) {
        keys.zero_blocks(start, length);
      }
      void zero_values(const size_t start, const size_t length) {
        vals.zero(start, length);
      }
      void write_keys_blocks(std::ostream *out, size_t start, size_t length) const {
        keys.write_blocks(out, start, length);
      }
      void write_values(std::ostream *out, size_t start, size_t length) const {
        vals.write(out, start, length);
      }
      void write_matrices(std::ostream *out) {
        keys.write_ary_header(out);
      }

      //! Inserts (or finds) the key, then adds val at the resulting slot.
      //! Returns false when the key could not be placed (e.g. array full).
      template<typename add_t>
      bool add(key_t key, const add_t &val, val_t *oval = 0) {
        bool is_new;
        size_t id;
        if(!keys.set(key, &is_new, &id))
          return false;
        vals.add(id, val, oval);
        return true;
      }

      //! Looks up the value for key; returns false when the key is absent.
      bool get_val(key_t key, val_t &val, bool full = true) const {
        key_t v_ignore;
        size_t key_id;
        if(!keys.get_val(key, key_id, v_ignore, false))
          return false;
        vals.get_val(key_id, val);
        return true;
      }

      //! Iterator over key/value pairs: wraps the key iterator and reads
      //! the matching value by slot id.
      class iterator {
        typename key_ary_t::iterator key_it;
        const val_ary_t *const vals;
      public:
        iterator(typename key_ary_t::iterator _key_it, const val_ary_t *_vals) :
          key_it(_key_it), vals(_vals) {}
        uint64_t get_hash() const { return key_it.get_hash(); }
        uint64_t get_pos() const { return key_it.get_pos(); }
        uint64_t get_start() const { return key_it.get_start(); }
        uint64_t get_end() const { return key_it.get_end(); }
        key_t get_key() const { return key_it.get_key(); }
        val_t get_val() const { return (*vals)[get_id()]; }
        size_t get_id() const { return key_it.get_id(); }
        char *get_dna_str() { return key_it.get_dna_str(); }
        bool next() { return key_it.next(); }
      };

      iterator iterator_all() const {
        return iterator(keys.iterator_all(), &vals);
      }
      //! Iterator restricted to one of number_of_slice slices (for
      //! parallel traversal).
      iterator iterator_slice(size_t slice_number, size_t number_of_slice) const {
        return iterator(keys.iterator_slice(slice_number, number_of_slice),
                        &vals);
      }
    };
}
}
#endif
|
# Run the server
The first time, get all the dependencies loaded via
```
npm install
```
Then, run the server via
```
npm run site-dev-server
```
Then open http://localhost:8080/.
Anytime you change the contents, just refresh the page and it's going to be updated.
# Publish the website
Just run the publish script, this will setup your environment if it's not already then it'll automatically build a static version of the site and publish it to gh-pages.
```
npm run site-publish
```
|
/*
* Global atomic header.
*
* @author Michel Megens
* @email dev@bietje.net
*/
#pragma once
#include <stdint.h>
#include <lwiot.h>
#ifdef HAVE_SYNC_FETCH
#include <lwiot/detail/atomic_sync.h>
#else
#include <lwiot/detail/atomic_crit.h>
#endif
namespace lwiot
{
	/* Convenience aliases for atomically-updated integer types. */
	typedef lwiot::Atomic<uint8_t>  atomic_uint8_t;
	typedef lwiot::Atomic<uint16_t> atomic_uint16_t;
	typedef lwiot::Atomic<uint32_t> atomic_uint32_t;
	typedef lwiot::Atomic<uint64_t> atomic_uint64_t;

	typedef lwiot::Atomic<short> atomic_short_t;
	typedef lwiot::Atomic<int> atomic_int_t;
	typedef lwiot::Atomic<long> atomic_long_t;
	typedef lwiot::Atomic<long long> atomic_long_long_t;

	//! Atomic boolean built on the 8-bit atomic primitive: the bool is
	//! stored as 0 or 1 in a uint8_t.
	class AtomicBool : public lwiot::detail::Atomic<uint8_t> {
	public:
		explicit AtomicBool(bool value = false) : detail::Atomic<uint8_t>((uint8_t)value)
		{
		}

		// Atomically store a plain bool.
		AtomicBool& operator=(bool v) noexcept
		{
			this->store(uint8_t(v));
			return *this;
		}

		// Copy-assign via load-then-store; note this is two separate
		// atomic operations, not one atomic transfer.
		AtomicBool& operator=(const AtomicBool& ab)
		{
			bool v = ab.load() != 0;
			*this = v;
			return *this;
		}

		// Implicit conversion back to bool.
		operator bool() noexcept
		{
			bool value = this->load();
			return value != 0;
		}
	};

	typedef AtomicBool atomic_bool_t;
}
|
package goolog2
import (
"os"
"sync"
"sync/atomic"
)
type simpleFile struct {
writer FileWriter
mutex sync.Mutex
sync bool
refcount int32
}
// Create new simple file holder
//
// Parameters:
// filepath: name of the logging file
// sync: if true, the stream is flushed after every message
// Returns:
// the new file holder
// Note: the reference counter is set to 1. You have to invoke Unref()
// to clean up the holder.
func NewSimpleFile(
	filepath string,
	sync bool,
) FileHolder {
	// I ignore the error here - if the file cannot be opened, the logging
	// just simply doesn't work. However, if I cannot open the logging
	// file, I cannot report the error.
	file, _ := os.OpenFile(
		filepath, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0644)
	// The holder owns the file (second argument true) and starts with one reference.
	return &simpleFile{
		writer:   newSimpleFileWriter(file, true),
		sync:     sync,
		refcount: 1,
	}
}
// Create new simple file holder working over an opened file handle
//
// Parameters:
// file: the opened file
// sync: if true, the stream is flushed after every message
// Returns:
// the new file holder
// Note: the reference counter is set to 1. You have to invoke Unref()
// to clean up the holder.
func NewSimpleFileHandle(
	file *os.File,
	sync bool,
) FileHolder {
	// The caller keeps ownership of `file` (second argument false); the
	// holder starts with a single reference.
	holder := &simpleFile{
		writer:   newSimpleFileWriter(file, false),
		sync:     sync,
		refcount: 1,
	}
	return holder
}
func (this *simpleFile) AccessWriter(
	functor func(writer FileWriter),
) {
	if this.writer == nil {
		return
	}
	/* -- The lock avoids inter-mixing of logging lines */
	this.mutex.Lock()
	defer this.mutex.Unlock()
	functor(this.writer)
	if !this.sync {
		return
	}
	this.writer.Sync()
}
// Ref atomically increments the holder's reference counter and returns the
// holder itself, so it can be passed on to a new owner.
func (this *simpleFile) Ref() FileHolder {
	atomic.AddInt32(&this.refcount, 1)
	return this
}
// Unref atomically decrements the reference counter. When the last reference
// is dropped, the underlying writer is closed and cleared.
func (this *simpleFile) Unref() {
	refcount := atomic.AddInt32(&this.refcount, -1)
	if refcount > 0 {
		return
	}
	// BUGFIX: guard the writer teardown with the same mutex AccessWriter
	// uses — previously the writer could be closed and nilled while another
	// goroutine was logging through it.
	this.mutex.Lock()
	defer this.mutex.Unlock()
	if this.writer != nil {
		this.writer.Close()
		this.writer = nil
	}
}
|
/*
* GNU GENERAL PUBLIC LICENSE
* Version 2, June 1991
*
* Copyright (C) 2016 @arlenebatada
*
* Copyright (C) 1989, 1991 Free Software Foundation, Inc., <http://fsf.org/>
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
* Everyone is permitted to copy and distribute verbatim copies
* of this license document, but changing it is not allowed.
*
* MODIFIED: Muhammad Haseeb, 2020
*
*/
#pragma once
#include "common.hpp"
#include <limits>
#include <iostream>
/* Defines */
/* Error sentinels returned (cast to T) by minHeap member functions. */
#define ERROR_SIZE_LESS_THAN_1 -2
#define ERROR_SIZE_INCREASED -3
// NOTE: this macro expands to the member variable `size`, so the "error"
// value for an out-of-range position is the current heap size; it is only
// meaningful inside minHeap member functions.
#define ERROR_POSITION_GREATER_THAN_HEAP_SIZE size
#define ERROR_SIZE_DECREASED -4
#define ERROR_POSITION_LESS_THAN_0 -1
// Likewise expands to the member variable `capacity`.
#define ERROR_HEAP_FULL capacity
/* Main class */
/*
 * Fixed-capacity binary min-heap over elements of type T.
 * T must be default-constructible, copy-assignable and comparable with < and >.
 * NOTE(review): the class owns a raw array but defines no copy constructor or
 * copy assignment (rule of three) — copying a minHeap would double-delete.
 */
template<class T>
class minHeap //the main min heap class
{
private:
int size;      // number of elements currently stored
int capacity;  // allocated slots in `array`
T* array;      // heap storage, element 0 is the minimum
int heapify(int element_position);  // sift-down from the given position
void swap(T&, T&);                  // swap two values
void swap(int, int);                // swap two slots by index
public:
// Default-construct an empty, unallocated heap; call init() before use.
minHeap()
{
this->capacity = 0;
this->size = 0;
this->array = NULL;
}
// Construct a heap with storage for `capacity` elements.
minHeap(int capacity)
{
this->capacity = capacity;
this->size = 0;
this->array = new T[this->capacity];
}
~minHeap()
{
this->capacity = 0;
this->size = 0;
if (this->array != NULL)
{
delete[] this->array;
this->array = NULL;
}
}
int init(int capacity);
int reset();
int insert(T &element);
int get_capacity();
int get_size();
T extract_min();
int decrease_key(int element_position, T new_value);
int increase_key(int element_position, T new_value);
int heap_sort(T *output_array);
T show_element(int element_position);
T getMax();
};
/*
 * (Re)initialize the heap with storage for `capacity` elements.
 * Returns 0 on success.
 */
template<class T>
int minHeap<T>::init(int capacity)
{
    // BUGFIX: release any previously allocated storage before overwriting the
    // pointer — calling init() twice used to leak the old array.
    // (delete[] on NULL is a no-op, so this is safe for a fresh heap.)
    delete[] this->array;
    this->capacity = capacity;
    this->size = 0;
    this->array = new T[this->capacity];
    return 0;
}
/*
 * Empty the heap and overwrite every slot with a freshly constructed T.
 * Returns 0.
 * NOTE(review): `T fresh;` default-initializes — for built-in types the value
 * is indeterminate; only class types with a default constructor are truly
 * "reset" here. Confirm T is always a class type at the call sites.
 */
template<class T>
int minHeap<T>::reset()
{
this->size = 0;
for (int i = 0; i < this->capacity; i++)
{
T fresh;
array[i] = fresh;
}
return 0;
}
/*
 * Insert `element` into the heap. When the heap is full, the element replaces
 * the current minimum iff it is not smaller (top-k behaviour, delegated to
 * increase_key). Returns 0 on success, ERROR_SIZE_DECREASED if a full heap
 * rejected a too-small element.
 */
template<class T>
int minHeap<T>::insert(T &element)
{
    if (size == capacity)
    {
        return increase_key(0, element);
    }
    // Append at the end and sift the new element up towards the root.
    // PERF: the previous code ran a full Floyd build-heap pass over the first
    // half of the array on every insert (O(n)); a sift-up of the single new
    // element restores the heap property in O(log n).
    array[size++] = element;
    int child = size - 1;
    int parent = (child - 1) / 2;
    while (child > 0 && array[child] < array[parent])
    {
        swap(array[child], array[parent]);
        child = parent;
        parent = (child - 1) / 2;
    }
    return 0;
}
// Maximum number of elements the heap can hold.
template<class T>
int minHeap<T>::get_capacity() { return capacity; }
// Number of elements currently stored.
template<class T>
int minHeap<T>::get_size() { return size; }
/* Exchange two values through a temporary copy. */
template<class T>
void minHeap<T>::swap(T& first, T& second)
{
    T held = first;
    first = second;
    second = held;
}
/* Exchange the elements stored at slots p1 and p2. */
template<class T>
void minHeap<T>::swap(int p1, int p2)
{
    // BUGFIX: the previous `T&& temp = array[p1];` tried to bind an rvalue
    // reference to an lvalue, which is ill-formed; use a plain temporary copy.
    T temp = array[p1];
    array[p1] = array[p2];
    array[p2] = temp;
}
/*
 * Sift the element at `element_position` down until both children are no
 * smaller than it, restoring the min-heap property for that subtree.
 * Returns 0.
 */
template<class T>
int minHeap<T>::heapify(int element_position)
{
    int pos = element_position;
    for (;;)
    {
        int left = pos * 2 + 1;
        int right = left + 1;
        int best = pos;
        if (left < size && array[left] < array[best])
        {
            best = left;
        }
        if (right < size && array[right] < array[best])
        {
            best = right;
        }
        if (best == pos)
        {
            break;  // heap property holds here
        }
        swap(array[best], array[pos]);
        pos = best;  // continue sifting down the moved element
    }
    return 0;
}
/*
 * Remove and return the minimum element (the root).
 * Returns ERROR_SIZE_LESS_THAN_1 (cast to T) when the heap is empty.
 */
template<class T>
T minHeap<T>::extract_min()
{
    if (size < 1)
    {
        return ERROR_SIZE_LESS_THAN_1;
    }
    T root = array[0];
    size--;
    // Move the last element to the root and sift it back down.
    swap(array[0], array[size]);
    heapify(0);
    return root;
}
/*
 * Lower the value stored at `element_position` to `new_value` and sift it up.
 * Returns 0 on success, ERROR_SIZE_INCREASED if new_value would increase it.
 */
template<class T>
int minHeap<T>::decrease_key(int element_position, T new_value)
{
    if (new_value > array[element_position]) //if an attempt to increase the value of the element
    {
        return ERROR_SIZE_INCREASED;
    }
    array[element_position] = new_value;
    int parent_position = ((element_position + 1) >> 1) - 1;
    // BUGFIX: check parent_position >= 0 BEFORE indexing with it — the
    // original condition order read array[-1] when called on the root.
    while ((parent_position >= 0) && (array[parent_position] > array[element_position]))
    {
        swap(array[parent_position], array[element_position]);
        element_position = parent_position;
        parent_position = (parent_position - 1) >> 1;
    }
    return 0;
}
/*
 * Raise the value stored at `element_position` to `new_value` and sift it
 * down. Returns 0 on success, ERROR_SIZE_DECREASED if new_value is smaller.
 */
template<class T>
int minHeap<T>::increase_key(int element_position, T new_value)
{
    if (new_value < array[element_position])
    {
        return ERROR_SIZE_DECREASED;  // refuse to decrease through this entry point
    }
    array[element_position] = new_value;
    heapify(element_position);  // the larger value may need to move down
    return 0;
}
/*
 * Drain the heap into `output_array` in ascending order.
 * NOTE: this empties the heap (size becomes 0); output_array must have room
 * for get_size() elements. Returns 0 on success, ERROR_SIZE_LESS_THAN_1 if empty.
 */
template<class T>
int minHeap<T>::heap_sort(T *output_array)
{
    if (size < 1)
    {
        return ERROR_SIZE_LESS_THAN_1;
    }
    int max_loop_count = size;
    for (int i = 0; i < max_loop_count; i++)
    {
        output_array[i] = extract_min();
    }
    // BUGFIX: the function previously fell off the end without a return value,
    // which is undefined behaviour for a non-void function.
    return 0;
}
/*
 * Return the element at `element_position` without removing it.
 * Returns ERROR_POSITION_LESS_THAN_0 / ERROR_POSITION_GREATER_THAN_HEAP_SIZE
 * (cast to T) for out-of-range positions.
 */
template<class T>
T minHeap<T>::show_element(int element_position)
{
    // BUGFIX: negative positions previously read out of bounds; the
    // ERROR_POSITION_LESS_THAN_0 sentinel existed but was never used.
    if (element_position < 0)
    {
        return ERROR_POSITION_LESS_THAN_0;
    }
    if (element_position > size - 1)
    {
        return ERROR_POSITION_GREATER_THAN_HEAP_SIZE;
    }
    return array[element_position];
}
/*
 * Return the maximum element by scanning the leaf slots (indices size/2 ..
 * size-1); in a min-heap the maximum is always a leaf.
 * NOTE(review): with size == 0 this reads array[0], which is uninitialized —
 * callers presumably never invoke it on an empty heap; confirm.
 * NOTE(review): `int_t` is assumed to come from common.hpp.
 */
template<class T>
T minHeap<T>::getMax()
{
if (size <= 1)
{
return array[0];
}
T maxe = array[size - 1];
// the limit here was k > 0 instead of size/2
for (int_t k = size-2; k >= size/2; k --)
{
if (array[k] > maxe)
{
maxe = array[k];
}
}
return maxe;
}
|
module StocksHelper
  # Renders the follow or unfollow button partial for the given stock,
  # depending on whether the current user already follows it.
  # Also exposes the stock to the partial via @stock.
  def render_button_for(stock)
    @stock = stock
    partial = stock.followed_by?(current_user) ? "unfollow_button" : "follow_button"
    render partial
  end
end
|
// MIT License. Copyright (c) 2020 CQFN
// https://github.com/g4s8/go-matchers/blob/master/LICENSE
// Package matchers provide testing primitives for matching targets against expected behavior.
package matchers
import (
"testing"
)
// Matcher verifies target
// Check reports whether the target satisfies the matcher; String describes
// the expected behavior for failure messages.
type Matcher interface {
	Check(interface{}) bool
	String() string
}
// Assertion is a statement with assert
type Assertion interface {
	That(stmt string, target interface{}, matcher Matcher)
}
// assertion is the default Assertion backed by a *testing.T.
type assertion struct {
	t *testing.T
}
// checker wraps a target/matcher pair into a subtest function that fails
// fatally when the matcher rejects the target.
func checker(target interface{}, matcher Matcher) func(*testing.T) {
	return func(t *testing.T) {
		t.Helper()
		if matcher.Check(target) {
			return
		}
		t.Fatalf("expected value `%s`, but was `%v`", matcher, target)
	}
}
// That runs the named statement as a subtest and checks the target against
// the matcher.
func (a *assertion) That(stmt string, target interface{}, matcher Matcher) {
	a.t.Helper()
	a.t.Run(stmt, checker(target, matcher))
}
// Assert creates new assertion
func Assert(t *testing.T) Assertion {
	a := assertion{t: t}
	return &a
}
|
package no.ntnu.ihb.sspgen.dsl
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable
import java.io.File
@EnabledIfEnvironmentVariable(named = "VDMCHECK_DIR", matches = ".*vdmcheck-1.*$")
// Runs the sspgen DSL against the VDMCheck tool; only enabled when
// VDMCHECK_DIR points at a vdmcheck-1.x installation.
class VDMTest {
    @Test
    fun test() {
        ssp("QuarterTruck") {
            resources {
                // NOTE(review): uses QuarterTruckTest's classloader to locate the
                // test FMUs — presumably intentional since the resources live next
                // to that test; confirm.
                val cl = QuarterTruckTest::class.java.classLoader
                val fmuPath = cl.getResource("quarter-truck")!!.file
                file("$fmuPath/chassis.fmu")
                file("$fmuPath/wheel.fmu")
                file("$fmuPath/ground.fmu")
            }
            ssd("quarter-truck") {
            }
        }.validate(pathToVdm)
    }
    private companion object {
        private val vdmDir = File(System.getenv("VDMCHECK_DIR"))
        // First fmi2vdm* entry inside the VDMCheck installation directory.
        private val pathToVdm: File = vdmDir.listFiles()!!.filter {
            it.name.startsWith("fmi2vdm")
        }.first()
    }
}
|
# run job scripts within the tool outputs directory
cd ${REPORT_FILES_PATH}
#========== build and run job 1 script ============
# NOTE: the heredoc delimiter is unquoted, so $X_f/$X_e/$X_O and the $(...)
# substitutions are expanded NOW (at script-generation time), not when
# curl-download.sh runs. The generated script therefore contains literal values.
# If the input file has more than 2 lines it is treated as the sequence itself
# and copied; otherwise line 1 is a URL fetched as twobit / gzip / plain fasta.
cat >curl-download.sh <<EOF
if [ $(wc -l <$X_f) -gt 2 ]; then
cp $X_f $X_O
else
if [ "$X_e" = "twobit" ]; then
cp $TOOL_INSTALL_DIR/twoBitToFa twoBitToFa
chmod +x twoBitToFa
./twoBitToFa $(head -1 $X_f) $X_O
elif [ "$X_e" = "gz" ]; then
curl $(head -1 $X_f) > output.fa.gz
gunzip -c output.fa.gz > $X_O
else
curl $(head -1 $X_f) > $X_O
fi
fi
EOF
# Execute the freshly generated download script.
sh curl-download.sh
|
package com.dmdirc.ktirc.messages.processors
import com.dmdirc.ktirc.events.IrcEvent
import com.dmdirc.ktirc.model.IrcMessage
// Translates raw IRC messages for a fixed set of commands into higher-level events.
internal interface MessageProcessor {
    /**
     * The messages which this handler can process.
     */
    val commands: Array<String>
    /**
     * Processes the given message.
     */
    fun process(message: IrcMessage): List<IrcEvent>
}
// Registry of all known processors; each IrcMessage is dispatched to the
// processor whose `commands` contains the message's command.
internal val messageProcessors = listOf(
    AccountProcessor(),
    AwayProcessor(),
    AuthenticationProcessor(),
    BatchProcessor(),
    CapabilityProcessor(),
    ChangeHostProcessor(),
    ISupportProcessor(),
    JoinProcessor(),
    KickProcessor(),
    ModeProcessor(),
    MotdProcessor(),
    NamesProcessor(),
    NickChangeErrorProcessor(),
    NickProcessor(),
    NoticeProcessor(),
    PartProcessor(),
    PingProcessor(),
    PrivmsgProcessor(),
    TopicProcessor(),
    QuitProcessor(),
    WelcomeProcessor()
)
|
#!/bin/bash
# Make sure only root can run our script
# (EUID 0 == root; the upgrade needs to chown files and restart services.)
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root" 1>&2
exit 1
fi
# Tag of Cypress / CVU to upgrade to.
export VERSION='v3.0.1'
# Stash local changes, fetch tags and check out tags/$VERSION, then restore
# the stash. Prefers running git as the `cypress` user; falls back to root.
# The throwaway -c user.name/-c user.email identity only exists so that
# `git stash` can create its internal commit objects.
function pull_git_tag() {
# Try to run the commands as the cypress user, if we get a nonzero return value then try again
# as root.
sudo -u cypress git -c user.name=tmp -c user.email=tmp@tmp.com stash
rc=$?
if [[ $rc == 0 ]]; then
echo "We have permission to pull cypress as the user cypress, using cypress to run git commands."
sudo -u cypress git fetch --tags
sudo -u cypress git checkout tags/$VERSION
# sudo -u cypress git fetch --all
# sudo -u cypress git checkout origin/cypress301
sudo -u cypress git stash pop
else
echo "We do NOT have permission to pull cypress as the user cypress, using root to run git commands."
# BUGFIX: `-c user.name=tmp` was passed twice here; the duplicate is removed
# so the identity matches the cypress-user branch above.
git -c user.name=tmp -c user.email=tmp@tmp.com stash
git fetch --tags
git checkout tags/$VERSION
# git fetch --all
# git checkout origin/cypress301
git stash pop
fi
}
# Shared post-checkout steps for Cypress and CVU: bundle install, then clear
# tmp and rebuild the asset pipeline, all in the production Rails env.
function cypress_cvu_shared_upgrade_commands() {
export RAILS_ENV=production
# This fixes permission issues caused by the previous upgrade script.
sudo chown -R cypress:cypress tmp public
# Try to run the commands as the cypress user, if we get a nonzero return value then try again
# as root.
# (-E/env PATH=$PATH keep the ruby toolchain selected above visible under sudo.)
sudo -E -u cypress env PATH=$PATH bundle install > /dev/null
rc=$?
if [[ $rc != 0 ]]; then
echo "We do NOT have permission to run bundle as the user cypress, using root to run bundle install."
bundle install
fi
sudo -E -u cypress env PATH=$PATH bundle exec rake tmp:clear
sudo -E -u cypress env PATH=$PATH bundle exec rake assets:clobber
sudo -E -u cypress env PATH=$PATH bundle exec rake assets:precompile
}
# Thin wrappers kept separate so product-specific steps can be added later;
# both currently delegate to the shared upgrade commands.
function upgrade_cypress() {
echo "Running Cypress Upgrade Commands"
cypress_cvu_shared_upgrade_commands
}
function upgrade_cvu() {
echo "Running Cypress Validation Utility Upgrade Commands"
cypress_cvu_shared_upgrade_commands
}
# If we find an /opt/cypress directory with a git repo in it then assume that cypress was installed via the chef recipe
# Locate a Cypress checkout (chef layout under /opt, or manual layout under
# /home/cypress) and set up PATH/rbenv accordingly.
if [ -d "/opt/cypress/.git" ]; then
export PATH=/opt/ruby_build/builds/opt/cypress/bin:$PATH
cd /opt/cypress
export CYPRESS_FOUND=true
elif [ -d "/home/cypress/cypress/.git" ]; then
export PATH=/home/cypress/.rbenv/bin:/home/cypress/.rbenv/shims:$PATH
eval "$(rbenv init -)"
# This is not the correct secret key, however we don't actually need the correct secret key, as none of the commands in
# cypress_cvu_shared_upgrade_commands actually require the real secret key.
export SECRET_KEY_BASE="xxxxxxxxxxxxx"
cd /home/cypress/cypress
export CYPRESS_FOUND=true
else
echo "Could not find Cypress. This warning can be safely ignored unless you had Cypress installed."
export CYPRESS_FOUND=false
fi
if [ "$CYPRESS_FOUND" = "true" ]; then
# We have established the location of cypress above, we can now run our commands to pull the correct tag and run upgrade commands
echo "Found Cypress directory"
echo "Fetching Cypress $VERSION"
pull_git_tag
echo "Running upgrade commands"
upgrade_cypress
echo "Restarting Cypress service..."
systemctl restart cypress
echo "Restarting Cypress Delayed Worker service..."
systemctl restart cypress_delayed_worker
fi
# Same dance for the Cypress Validation Utility, which may live alongside or
# instead of Cypress itself.
if [ -d "/opt/cypress-validation-utility/.git" ]; then
echo "Found cypress validation utility install directory."
export PATH=/opt/ruby_build/builds/opt/cypress-validation-utility/bin:$PATH
cd /opt/cypress-validation-utility
export CVU_FOUND=true
elif [ -d "/home/cypress/cypress-validation-utility/.git" ]; then
echo "Found cypress validation utility install directory."
cd /home/cypress/cypress-validation-utility
export CVU_FOUND=true
else
echo "Could not find the Cypress Validation Utility. This warning can be safely ignored unless you had the Cypress Validation Utility installed."
export CVU_FOUND=false
fi
if [ "$CVU_FOUND" = "true" ]; then
# We have established the location of the cypress validation utility and should be in its directory, now we can upgrade it
echo "Fetching Cypress Validation Utility $VERSION"
pull_git_tag
echo "Running upgrade commands..."
upgrade_cvu
echo "Restarting Cypress Validation Utility service..."
systemctl restart cypress-validation-utility
fi
echo "Restarting NGINX service..."
systemctl restart nginx
echo "Done!"
|
# Define stage
# Capistrano stage file for the staging environment.
set :stage, :staging
# Server info
# Single host serving web, app and db roles; deploys run as user `deploy`.
server 'www.rgentpl.com', user: 'deploy', group: 'console', roles: %w(web app db)
set :server_name, 'www.rgentpl.com rgentpl.com'
# Deploy branch
# Staging tracks the develop branch.
set :branch, :develop
|
/// String keys used by the API test fixtures.
class Api {
  static const String TEST_LIST1 = "list1";
  static const String TEST_LIST2 = "list2";
}
|
package com.kylins.obj.copy;
import android.text.TextUtils;
import com.kylins.obj.copy.annotation.DeepCopy;
import com.kylins.obj.copy.annotation.Ignore;
import com.kylins.obj.copy.annotation.Name;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.ParameterizedType;
import java.util.ArrayList;
import java.util.Collection;
/**
* Object Copy
* Created by zhouliangshun on 2016/8/4.
*/
public class ObjectCopy {
    /**
     * ObjectCopy,Only copy values
     * Creates a fresh instance of targetClass (via newInstance, which falls
     * back to an unsafe allocator when no accessible no-arg constructor
     * exists) and copies matching field values from source into it.
     *
     * @param targetClass target class
     * @param source source the values from the instance
     * @param <T> convert to the type
     * @return Target class instance
     */
    public static <T> T copy(Class<T> targetClass, Object source) {
        return copy(newInstance(targetClass), source);
    }
public static <T> T copy(Class<T> targetClass, Object source, Object... args) {
if (targetClass == null || source == null)
return null;
try {
Class[] argCls = new Class[args.length];
for (int i = 0; i < argCls.length; i++) {
argCls[i] = args[i].getClass();
}
return (T) copy(targetClass.getDeclaredConstructor(argCls).newInstance(), source);
} catch (InstantiationException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (NoSuchMethodException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
return null;
}
    /**
     * Copies values from source into the given target instance, field by field.
     * Only fields declared directly on target's class are considered (no
     * inherited fields). @Ignore skips a field, @Name maps it to a differently
     * named source field, and @DeepCopy recursively copies objects,
     * collections and arrays instead of sharing references.
     *
     * @return the (possibly partially) populated target, or null on null input
     */
    public static <T> T copy(T target, Object source) {
        if (target == null || source == null)
            return null;
        try {
            Class<T> targetClass = (Class<T>) target.getClass();
            Field[] fields = targetClass.getDeclaredFields();
            for (Field field : fields) {
                String name = field.getName();
                String sourceName = name;
                if (field.getAnnotation(Ignore.class) != null)
                    continue;
                Name changeName = field.getAnnotation(Name.class);
                if (changeName != null) {
                    if (!TextUtils.isEmpty(changeName.name())) {
                        sourceName = changeName.name();
                    }
                }
                DeepCopy deepCopy = field.getAnnotation(DeepCopy.class);
                if (deepCopy == null) {
                    // Shallow copy: share the reference / copy the primitive.
                    setFieldValue(field, target, getFieldValue(sourceName, source));
                } else {
                    if (!field.getType().isArray()) {
                        if (Collection.class.isAssignableFrom(field.getType())) {
                            setWithCollection(field, target, getFieldValue(sourceName, source));
                        } else {
                            setFieldValue(field, target, copy(field.getType(), getFieldValue(sourceName, source)));
                        }
                    } else {
                        setWithArray(field, target, getFieldValue(sourceName, source));
                    }
                }
            }
        } catch (Exception e) {
            // Best-effort copy: log and return whatever was copied so far.
            e.printStackTrace();
        }
        return target;
    }
    /**
     * Reads the named field from object, or returns null when it is missing.
     * NOTE(review): only looks at fields declared directly on object's class
     * (no superclass search, unlike getField below) — confirm this is intended.
     */
    public static Object getFieldValue(String field, Object object) {
        try {
            Field sourceField = object.getClass().getDeclaredField(field);
            return getFieldValue(sourceField, object);
        } catch (NoSuchFieldException e) {
            e.printStackTrace();
        }
        return null;
    }
public static Object getFieldValue(Field field, Object object) {
try {
field.setAccessible(true);
return field.get(object);
} catch (IllegalAccessException e) {
e.printStackTrace();
}
return null;
}
    /**
     * Deep-copies a collection value into the given field: each element is
     * copied into the field's declared generic element type.
     * NOTE(review): the destination is always an ArrayList regardless of the
     * field's declared collection type — assigning it to e.g. a Set field
     * would throw; confirm fields annotated @DeepCopy are List-compatible.
     */
    public static void setWithCollection(Field field, Object object, Object value) {
        try {
            if (value != null) {
                Collection collection = (Collection) value;
                ParameterizedType integerListType = (ParameterizedType) field.getGenericType();
                Class<?> type = (Class<?>) integerListType.getActualTypeArguments()[0];
                Collection newCollection = newInstance(ArrayList.class);
                for (Object t : collection) {
                    newCollection.add(copy(type, t));
                }
                field.setAccessible(true);
                field.set(object, newCollection);
            }
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
    /**
     * Deep-copies an array value into the given field: a new array of the
     * field's component type is allocated and each element copied into it.
     * NOTE(review): primitive component types would fail inside copy()
     * (newInstance on a primitive class) — presumably only object arrays are
     * annotated @DeepCopy; confirm.
     */
    public static void setWithArray(Field field, Object object, Object value) {
        try {
            if (value != null) {
                int leng = Array.getLength(value);
                Object newArray = Array.newInstance(field.getType().getComponentType(), leng);
                for (int i = 0; i < leng; i++) {
                    Object t = Array.get(value, i);
                    Array.set(newArray, i, copy(field.getType().getComponentType(), t));
                }
                field.setAccessible(true);
                field.set(object, newArray);
            }
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
public static void setFieldValue(Field field, Object object, Object value) {
try {
if (value != null) {
field.setAccessible(true);
field.set(object, value);
}
} catch (IllegalAccessException e) {
e.printStackTrace();
}
}
public static Field getField(String field, Class cls) {
if (cls != null) {
try {
return cls.getDeclaredField(field);
} catch (NoSuchFieldException e) {
e.printStackTrace();
getField(field, cls.getSuperclass());
}
}
return null;
}
    /**
     * Instantiates cls via its no-arg constructor; when none exists, falls
     * back to UnsafeAllocator, which creates the object WITHOUT running any
     * constructor. Returns null on failure.
     */
    public static <T> T newInstance(Class<T> cls) {
        Constructor constructors = null;
        try {
            constructors = cls.getConstructor();
            return (T) constructors.newInstance();
        } catch (NoSuchMethodException e) {
            // e.printStackTrace();
            // No public no-arg constructor: allocate without construction.
            try {
                return UnsafeAllocator.create().newInstance(cls);
            } catch (Exception e1) {
                e1.printStackTrace();
            }
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (InvocationTargetException e) {
            e.printStackTrace();
        }
        return null;
    }
}
|
package com.dhorowitz.openmovie.discover
import com.dhorowitz.openmovie.discover.data.model.MovieDTO
import com.dhorowitz.openmovie.discover.domain.model.Movie
import com.dhorowitz.openmovie.discover.presentation.model.DiscoverViewEntity
// Test fixture: a MovieDTO with overridable fields (positional order must
// match the MovieDTO constructor).
internal fun movieDTO(
    id: String = "id",
    overview: String = "id",
    title: String = "id",
    voteAverage: Double = 0.0,
    voteCount: Int = 0,
    posterPath: String = "posterPath"
) = MovieDTO(
    id,
    overview,
    title,
    voteAverage,
    voteCount,
    posterPath
)
// Test fixture: a domain Movie. Note the Movie constructor order is
// (id, title, overview, image, voteCount, voteAverage) — it differs from the
// parameter order above.
internal fun movie(
    id: String = "id",
    overview: String = "id",
    title: String = "id",
    voteAverage: Double = 0.0,
    voteCount: Int = 0,
    image: String = "https://image.tmdb.org/t/p/w500posterPath"
) = Movie(
    id,
    title,
    overview,
    image,
    voteCount,
    voteAverage
)
// Test fixture: the presentation-layer entity matching the fixtures above.
internal fun discoverViewEntity(
    id: String = "id",
    title: String = "id",
    image: String = "https://image.tmdb.org/t/p/w500posterPath"
) = DiscoverViewEntity(id, title, image)
|
require "test_helper"

# Verifies that RBS::Factory#type_name builds the same TypeName values as the
# test helper's type_name, for both relative and absolute (::-prefixed) names.
class RBS::FactoryTest < Minitest::Test
  include TestHelper
  def test_type_name
    factory = RBS::Factory.new()
    assert_equal type_name("Foo"), factory.type_name("Foo")
    assert_equal type_name("::Foo::Bar"), factory.type_name("::Foo::Bar")
  end
end
|
using egg.JsonBean;
using System;
namespace test {
// Manual smoke test for the egg.JsonBean library: exercises ToJson/SetJson,
// CloneTo/Clone/CloneFrom, indexer access, Parser.Parse round-trips, and a
// live HTTP fetch + HTML parse at the end.
class Program {
static void Main(string[] args) {
Console.WriteLine("Hello World!");
Classmate cm1;
Classmate cm2 = new Classmate();
Classmate cm3 = new Classmate();
// nv is built inside a using block; cm1 is cloned from it before disposal.
using (Classmate nv = new Classmate()) {
nv.Name = "2-1";
People jim = new People();
jim.Name = "Jim";
jim.Age = 18;
jim.IsMale = true;
nv.Peoples.Add(jim);
nv.Peoples.Add(new People() {
Name = "Han Meimei",
Age = 17,
IsMale = false
});
Console.WriteLine(cm2.ToJson());
// Round-trip nv through JSON into cm2, then diverge the two objects.
cm2.SetJson(nv.ToJson());
cm2.Name = "2-2";
cm2.Manager = new People();
Console.WriteLine(nv.ToJson());
Console.WriteLine(cm2.ToJson());
cm2.CloneTo(cm3);
cm3.Name = "2-3";
Console.WriteLine("------------");
Console.WriteLine(nv.ToJson());
Console.WriteLine(cm2.ToJson());
eggs.DebugLine("============begin=========");
Console.WriteLine(cm3.ToJson());
eggs.DebugLine("============end=========");
cm1 = (Classmate)nv.Clone();
}
// Dynamic member set through the JSON indexer plus clone checks.
cm3["Item"] = new JString("OK");
cm3.Rename = "3-3";
Classmate cm4 = new Classmate();
cm3.CloneTo(cm4);
cm4.Name = "2-4";
Console.WriteLine("------------");
Console.WriteLine(cm1.ToJson());
Console.WriteLine(cm2.ToJson());
Console.WriteLine(cm3.ToJson());
Console.WriteLine(cm4.ToJson());
var jim2 = new People();
jim2.CloneFrom((JObject)cm4.Peoples[1]);
Console.WriteLine(jim2.Name);
// Parse the serialized form back into a typed instance.
Classmate cm5 = (Classmate)Parser.Parse(cm4.ToJson(), typeof(Classmate));
cm5.Name = "2-5";
Console.WriteLine("------------");
Console.WriteLine(cm5.ToJson());
// Access of a missing key; GetNumber presumably yields a default — network
// and parser behavior below depend on the live Amazon page.
var cm5abc = cm5["abc"];
Console.WriteLine("cm5.abc=" + cm5abc.GetNumber());
string url = "https://www.amazon.com/";
string html = eggs.GetHttpText(url);
Console.WriteLine(html);
string ts = eggs.GetNow().ToString();
//egg.File.UTF8File.WriteAllText(it.GetWorkPath($"/html/{ts}.log"), html);
using (var doc = egg.Html.Parser.GetDocument(html)) {
var lis = doc.Body.GetElementsByClassName("a-spacing-micro");
foreach (var li in lis) {
Console.WriteLine(li.InnerHTML);
}
}
}
}
}
|
-- @testpoint: openGauss keyword "serializable" (non-reserved) used as a synonym object name; some cases are expected to fail with a reasonable error
-- Precondition
drop table if exists explain_test;
create table explain_test(id int,name varchar(10));
-- Keyword without quotes - succeeds
drop synonym if exists serializable;
create synonym serializable for explain_test;
insert into serializable values (1,'ada'),(2, 'bob');
update serializable set serializable.name='cici' where serializable.id=2;
select * from serializable;
drop synonym if exists serializable;
-- Keyword with double quotes - succeeds
drop synonym if exists "serializable";
create synonym "serializable" for explain_test;
drop synonym if exists "serializable";
-- Keyword with single quotes - expected to fail with a reasonable error
drop synonym if exists 'serializable';
create synonym 'serializable' for explain_test;
insert into 'serializable' values (1,'ada'),(2, 'bob');
update 'serializable' set 'serializable'.name='cici' where 'serializable'.id=2;
select * from 'serializable';
-- Keyword with backticks - expected to fail with a reasonable error
drop synonym if exists `serializable`;
create synonym `serializable` for explain_test;
insert into `serializable` values (1,'ada'),(2, 'bob');
update `serializable` set `serializable`.name='cici' where `serializable`.id=2;
select * from `serializable`;
drop table if exists explain_test;
|
package api
import (
"net/http"
"github.com/gorilla/mux"
"github.com/red010b37/bamboo/cmd/app/conf"
)
// InitMetaHandlers starts the meta api handlers
// Registers the v1 "errorcodes" and "coins" routes under the meta namespace.
func InitMetaHandlers(r *mux.Router, prefix string) {
	nameSpace := "meta"
	metaErrorCodePath := RouteBuilder(prefix, nameSpace, "v1", "errorcodes")
	OpenRouteHandler(metaErrorCodePath, r, metaErrorDisplayHandler())
	metaCoinPath := RouteBuilder(prefix, nameSpace, "v1", "coins")
	OpenRouteHandler(metaCoinPath, r, coinMetaHandler())
}
// coinMetaHandler returns the configured coin list to the frontend.
// (The previous comment was copy-pasted from metaErrorDisplayHandler.)
func coinMetaHandler() http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		appResp := Response{}
		appResp.Data = conf.AppConf.Coins
		appResp.Send(w)
	})
}
// metaErrorDisplayHandler displays all the application errors to frontend
func metaErrorDisplayHandler() http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		appResp := Response{}
		appResp.Data = AppRespErrors
		appResp.Send(w)
	})
}
|
package de.htwdd.htwdresden.utils.holders
import android.content.Context
import android.content.SharedPreferences
import android.util.Base64
import androidx.core.content.edit
import androidx.security.crypto.EncryptedSharedPreferences
import androidx.security.crypto.MasterKeys
import de.htwdd.htwdresden.utils.extensions.guard
import io.reactivex.subjects.BehaviorSubject
import java.nio.charset.Charset
// Singleton wrapper around EncryptedSharedPreferences holding the study
// token, auth token, onboarding flag and crashlytics opt-in. init(context)
// must be called once before any accessor; observers receive change
// notifications through onChanged().
class CryptoSharedPreferencesHolder private constructor() {
    private object Holder { val INSTANCE = CryptoSharedPreferencesHolder() }
    private lateinit var masterKeyAlias: String
    private lateinit var sharedPreferences: SharedPreferences
    // Which preference changed, for subscribers of onChanged().
    sealed class SubscribeType {
        object StudyToken: SubscribeType()
        object AuthToken: SubscribeType()
        object Crashlytics: SubscribeType()
    }
    companion object {
        private val subject = BehaviorSubject.create<SubscribeType>()
        val instance: CryptoSharedPreferencesHolder by lazy { Holder.INSTANCE }
        // Must run before any other member is used (lateinit fields).
        fun init(context: Context) {
            instance.masterKeyAlias = MasterKeys.getOrCreate(MasterKeys.AES256_GCM_SPEC)
            instance.sharedPreferences = EncryptedSharedPreferences.create(
                "htw_encrypted_shared_prefs",
                instance.masterKeyAlias,
                context,
                EncryptedSharedPreferences.PrefKeyEncryptionScheme.AES256_SIV,
                EncryptedSharedPreferences.PrefValueEncryptionScheme.AES256_GCM)
        }
        private const val STUDY_TOKEN = "STUDY_TOKEN"
        private const val AUTH_TOKEN = "AUTH_TOKEN"
        private const val IS_FIRST_RUN = "IS_FIRST_RUN"
        private const val HAS_CRASHLYTICS = "HAS_CRASHLYTICS"
    }
    fun putStudyToken(studyToken: String) {
        sharedPreferences.edit {
            subject.onNext(SubscribeType.StudyToken)
            putString(STUDY_TOKEN, studyToken)
        }
    }
    fun getStudyToken() = sharedPreferences.getString(STUDY_TOKEN, "")
    // Decoded study credentials, or null when no/invalid token is stored.
    fun getStudyAuth() = readAuthToken(getStudyToken())
    fun putAuthToken(authToken: String) {
        sharedPreferences.edit {
            subject.onNext(SubscribeType.AuthToken)
            putString(AUTH_TOKEN, authToken)
        }
    }
    fun getAuthToken() = sharedPreferences.getString(AUTH_TOKEN, "")
    fun setOnboarding(isNeeded: Boolean) = sharedPreferences.edit { putBoolean(IS_FIRST_RUN, isNeeded) }
    fun needsOnboarding() = sharedPreferences.getBoolean(IS_FIRST_RUN, true)
    fun hasCrashlytics() = sharedPreferences.getBoolean(HAS_CRASHLYTICS, false)
    fun setCrashlytics(active: Boolean) {
        sharedPreferences.edit {
            subject.onNext(SubscribeType.Crashlytics)
            putBoolean(HAS_CRASHLYTICS, active)
        }
    }
    // Stream of change notifications for the tokens and the crashlytics flag.
    fun onChanged() = subject
    fun clear() {
        sharedPreferences.edit {
            remove(STUDY_TOKEN)
            subject.onNext(SubscribeType.StudyToken)
            remove(AUTH_TOKEN)
            subject.onNext(SubscribeType.AuthToken)
            remove(HAS_CRASHLYTICS)
            subject.onNext(SubscribeType.Crashlytics)
            remove(IS_FIRST_RUN)
        }
    }
    // Decodes a Base64 token of the form "studyYear:major:group:graduation…".
    // Returns null for blank tokens or tokens with fewer than 4 segments.
    // NOTE(review): only the first character of the graduation segment is kept —
    // presumably intentional; confirm against the token producer.
    private fun readAuthToken(token: String?): StudyAuth? {
        token.guard { return null }
        val rawToken = String(Base64.decode(token, Base64.DEFAULT), Charset.forName("UTF-8"))
        val chunks = rawToken.split(":")
        if (chunks.size < 4) { return null }
        return StudyAuth(
            chunks[0],
            chunks[1],
            chunks[2],
            chunks[3].first().toString()
        )
    }
    //---------------------------------------------------------------------------------------------- Data Class
    data class StudyAuth(val studyYear: String, val major: String, val group: String, val graduation: String)
}
|
---
templateKey: blog-post
featuredpost: false
date: 2020-03-01T19:23:23.711Z
featuredimage: /img/quest_bg5.png
imgBg: quest_bg5
title: Jodi's Request Quest
description: Jodi needs a fresh cauliflower for a recipe she's making. She's asking you to bring her one.
reward: 350 & 1 Heart Jodi
tags:
- Mail
- spring
- Spring 19
- Jodi
- Cauliflower
- quest
---
|
# frozen_string_literal: true
require_relative '../../../test_helper'
SingleCov.covered!
# needs Integration at the end for minitest-spec-rails
describe 'Warden::Strategies::BasicStrategy Integration' do
  # Issues a GET with the given HTTP basic Authorization header (or nil).
  def perform_get(authorization)
    get "/", headers: {HTTP_AUTHORIZATION: authorization}
  end
  before do
    # UI wants to show github status
    stub_request(:get, "#{Rails.application.config.samson.github.status_url}/api/status.json").to_timeout
  end
  let!(:user) { users(:admin) }
  let(:valid_header) { "Basic #{Base64.encode64(user.email + ':' + user.token).strip}" }
  it "logs the user in" do
    perform_get valid_header
    assert_response :success
  end
  # BUGFIX: typo in the test description ("suppoed" -> "supposed to").
  it "does not set a session since basic auth requests are not supposed to log in a browser" do
    perform_get valid_header
    response.headers['Set-Cookie'].must_be_nil
  end
  it "does not check and fails without header" do
    assert_sql_queries(0) { perform_get nil }
    assert_response :redirect
  end
  it "checks and fails with invalid header" do
    assert_sql_queries(1) { perform_get(valid_header + Base64.encode64('foo')) }
    assert_response :redirect
  end
  it "does not check and fails with non matching header" do
    assert_sql_queries(0) { perform_get "oops" + valid_header }
    assert_response :redirect
  end
end
|
package de.qaware.refactobot.extractor.java.visitors
import com.github.javaparser.ast.CompilationUnit
import com.github.javaparser.ast.body.ConstructorDeclaration
import de.qaware.refactobot.extractor.java.JavaSimpleTypeReference
import de.qaware.refactobot.extractor.java.ReferenceExtractionContext
import de.qaware.refactobot.extractor.java.UnitReferenceVisitor
/**
 * Visitor for constructor declarations. Names of constructors must be adapted when the class is renamed.
 *
 * Only constructors whose grandparent node is the CompilationUnit — i.e.
 * constructors of the top-level type — produce a reference; nested-class
 * constructors are ignored here.
 * NOTE(review): the unchecked parentNode.get() calls assume both parents are
 * always present for a ConstructorDeclaration; confirm against javaparser.
 *
 * @author Alexander Krauss alexander.krauss@qaware.de
 */
class ConstructorDeclarationVisitor(context: ReferenceExtractionContext) : UnitReferenceVisitor(context) {
    override fun visit(decl: ConstructorDeclaration?, arg: Unit) {
        if (decl != null) {
            if (decl.parentNode.get().parentNode.get() is CompilationUnit) {
                // this is a constructor of the top-level type
                emit(JavaSimpleTypeReference(context.getCurrentFile(), context.getCurrentFile(), decl.name.toSpan()))
            }
        }
        super.visit(decl, arg)
    }
}
|
# payment_sample
A sample application to learn how to develop a payment feature.
## Note
This sample refers to the following book.
[Take My Money](https://pragprog.com/book/nrwebpay/take-my-money)
|
pub mod mpv {
extern crate execute;
use crate::api_structs::VolumeControl;
use crate::settings;
use std::io::prelude::*;
use std::os::unix::net::UnixStream;
use std::process::Command;
use serde_json::json;
use serde::{Serialize, Deserialize};
/// Serializes `command`, writes it to the mpv IPC socket and parses the JSON
/// reply. Panics (via unwrap) when the socket write fails or the reply is not
/// valid JSON.
fn send_command(command: serde_json::Value) -> serde_json::Value {
return serde_json::from_str(write_to_socket(command.to_string() + "\n").unwrap().as_str()).unwrap();
}
/// Unpauses playback by clearing mpv's `pause` property.
pub fn event_resume() -> Property {
    let result = send_command(json!({ "command": ["set_property", String::from("pause"), false] }));
    Property {
        error: result["error"].to_string().replace("\"", ""),
        data: result["data"].to_string(),
    }
}
/// Loads `target` (path or URL) via mpv's `loadfile` command; `mode` is the
/// loadfile mode (e.g. "replace", "append").
/// NOTE(review): unlike the other events, `error` is hard-coded to "success"
/// and `data` carries the reply's "event" field — confirm this matches the
/// loadfile reply shape rather than being a copy-paste oversight.
pub fn event_load(target: &str, mode: &str) -> Property {
let command = json!({ "command": ["loadfile", format!("{}",target), mode] });
let result = send_command(command);
let me = Property {
error : String::from("success"),
data : result["event"].to_string()
};
return me;
}
/// Pauses playback by setting mpv's `pause` property.
pub fn event_pause() -> Property {
    let result = send_command(json!({ "command": ["set_property", String::from("pause"), true] }));
    Property {
        error: result["error"].to_string().replace("\"", ""),
        data: result["data"].to_string(),
    }
}
/// Persists the current playback position: when a file is loaded (path query
/// succeeds), upserts path + time-pos into the video status store.
fn update_video_status() {
use mpv_webrpc::models::NewVideoStatus;
let path = event_property("path".to_string(), None);
if path.error == String::from("success") {
let time_json:String = event_property("time-pos".to_string(), None).data;
let path_json:String = event_property("path".to_string(), None).data;
// serde json supports only f64 - diesel supports only f32 for fields - *sigh*
let time : f64= time_json.parse().unwrap();
let convert = time as f32;
let video_status = NewVideoStatus {
path: &path_json.replace("\"", ""),
time: &convert,
};
video_status.upsert();
}
}
pub fn event_stop() -> Property {
update_video_status();
// Show the next playlist item (the backdrop image) instead of stopping
let command = json!({ "command": ["playlist-next"] });
let result = send_command(command);
let me = Property {
error : String::from("success"),
data : result["event"].to_string()
};
return me;
}
pub fn event_volume() -> Property {
event_property(String::from("volume"), None)
}
pub fn event_volume_change(volume_control: VolumeControl) -> Property {
event_property(String::from("volume"), Some(volume_control.value))
}
pub fn event_property(property: String, value:Option<String>) -> Property {
let command = match value {
None => {
json!({ "command": ["get_property", property] })
},
Some(value) => {
json!({ "command": ["set_property", property, value] })
},
};
let result = send_command(command);
let me = Property {
error : result["error"].to_string().replace("\"", ""),
data : result["data"].to_string()
};
return me;
}
#[derive( Debug, Serialize, Deserialize)]
pub struct Property {
pub error : String,
pub data : String
}
pub fn init() {
let settings = settings::init();
let title = std::env::var("TITLE").unwrap_or("MediaMate Player".to_string());
let mut mpv = Command::new("mpv");
let ipc_param = format!("--input-ipc-server={}", settings.socket);
println!("Starting parameter for mpv: {}", ipc_param);
mpv.arg("--idle=yes")
.arg("--title=".to_owned() + &title)
.arg(ipc_param)
.arg("--hwdec=mmal-copy")
.arg("--fullscreen")
.arg("--vo=gpu")
.arg("--keep-open")
.arg("--image-display-duration=inf")
.spawn()
.expect("OK");
}
pub fn write_to_socket(content: String) -> std::io::Result<String> {
let settings = settings::init();
let socket = settings.socket;
let mut stream = match UnixStream::connect(&socket) {
Err(e) => panic!("could not connect to socket {} - {}", e, &socket),
Ok(stream) => stream,
};
stream.write_all(&content.as_bytes())?;
let mut buf = [0; 1024];
let count = stream.read(&mut buf).unwrap();
let response = String::from_utf8(buf[..count].to_vec()).unwrap();
Ok(response)
}
}
|
"""Sorts the magic architecture candidate performance.
Parameters
----------
N : int
Cluster size.
tech : float
Technology node (16, 7, 5, 4, 3.0, 3.1).
3.0 corresponds to F3a in the paper and 3.1 to F3b.
log_dir : str
Directory holding the run logs.
arc_dir : str
Directory holding the architecture descriptions.
out_file : str
Specifies where to write the results.
sort_key : str
Specifies the sorting key: {delay, area, apd}.
ignore_circs : Optional[str], default ''
A space-separated list of circuits to ignore in sorting.
If the first word is ~, the list will be inverted.
get_median_dict : Optional[bool], default = False
Instead of a single number, print the dictionary of median delays for each circuit.
Returns
-------
None
"""
# Standard-library imports plus the shared experiment configuration
# (seeds, grid_sizes, ...) from conf.py one directory up.
import os
import argparse
import sys
sys.path.insert(0,'..')
from conf import *
parser = argparse.ArgumentParser()
parser.add_argument("--N")
parser.add_argument("--tech")
parser.add_argument("--log_dir")
parser.add_argument("--arc_dir")
parser.add_argument("--out_file")
parser.add_argument("--sort_key")
parser.add_argument("--ignore_circs")
parser.add_argument("--get_median_dict")
args = parser.parse_args()
# Filename convention appears to be
# <prefix>_T<tech>_N<size>_W<wire>_..._<circuit>_<seed>.<ext>; these helpers
# pull individual fields out of a log file name.
get_tech = lambda f : f.split('_')[1][1:]
get_N = lambda f : int(f.split('_')[2][1:])
get_wire = lambda f : f.split('_')[3][1:]
get_circ = lambda f : f.split('_')[-2]
get_seed = lambda f : f.split('_')[-1].rsplit('.', 1)[0]
get_arc = lambda f : '_'.join(f.split('_')[:4])
# Optional space-separated circuit filter; a leading "~" inverts it.
ignore_circs = []
try:
    ignore_circs = list(args.ignore_circs.split())
except:
    pass
INVERT_IGNORE = False
try:
    if ignore_circs[0] == "~":
        INVERT_IGNORE = True
except:
    pass
GET_MEDIAN_DICT = False
try:
    GET_MEDIAN_DICT = int(args.get_median_dict)
except:
    pass
# Expected sample counts used to reject incomplete runs below.
seed_no = len(seeds)
circ_no = len(ignore_circs) - 1 if INVERT_IGNORE else len(grid_sizes[8]) - len(ignore_circs)
# Collect per-architecture, per-circuit delay samples from the log files,
# keeping only files matching the requested N and tech.
res_dict = {}
for f in os.listdir(args.log_dir):
    try:
        if int(args.N) != get_N(f):
            continue
    except:
        continue
    if args.tech != get_tech(f):
        continue
    arc = get_arc(f)
    circ = get_circ(f)
    if (INVERT_IGNORE and not circ in ignore_circs) or (not INVERT_IGNORE and circ in ignore_circs):
        continue
    try:
        with open("%s/%s" % (args.log_dir, f), "r") as inf:
            td = float(inf.read().strip())
    except:
        continue
    try:
        res_dict[arc][circ].append(td)
    except:
        try:
            res_dict[arc].update({circ : [td]})
        except:
            res_dict.update({arc : {circ : [td]}})
# Aggregate per-architecture results: keep only architectures with complete
# circuit coverage and a full seed set, compute the geometric mean of the
# per-circuit median delays, then pair it with the layout area.
removal_list = set()
processed_res_dict = {}
for arc in res_dict:
    if len(res_dict[arc]) < circ_no:
        continue
    skip = False
    geom_td = 1.0
    for circ in res_dict[arc]:
        if len(res_dict[arc][circ]) < seed_no:
            removal_list.add(arc)
            skip = True
            break
        # BUG FIX: use integer division -- under Python 3, seed_no / 2 is a
        # float and raises TypeError when used as a list index.
        median = sorted(res_dict[arc][circ])[seed_no // 2]
        geom_td *= median
        if GET_MEDIAN_DICT:
            res_dict[arc][circ] = median
    if skip:
        continue
    # Geometric mean of the medians over all counted circuits.
    geom_td **= (1.0 / circ_no)
    dim = grid_sizes[get_N(arc)][circ]
    try:
        with open("%s/%s_W%d_H%d_padding.log" % (args.arc_dir, arc, dim, dim), "r") as inf:
            lines = inf.readlines()
    except:
        continue
    for line in lines:
        if line.startswith("Active dimensions:"):
            wa = int(line.split()[-4])
            ha = int(line.split()[-2])
        elif line.startswith("Metal dimensions:"):
            wm = int(line.split()[-4])
            hm = int(line.split()[-2])
            break
    # Bounding-box area in mm^2 (raw dimensions appear to be in um).
    # NOTE(review): wa/ha/wm/hm are unbound if the padding log is missing
    # the expected lines -- TODO confirm the log format is guaranteed.
    a = max(wa, wm) * max(ha, hm) / 1000000.0
    tech = get_tech(arc)
    try:
        processed_res_dict[tech].append((arc, geom_td, a, geom_td * a))
    except:
        processed_res_dict.update({tech : [(arc, geom_td, a, geom_td * a)]})
##########################################################################
def log_sort(res_dict):
    """Logs the sorting results.
    Parameters
    ----------
    res_dict : Dict[str, list]
        A dictionary of performance results per technology,
        each entry a tuple (arc, delay, area, apd).
    Returns
    -------
    None
    """
    # BUG FIX: previously indexed with the leftover loop variable ``tech``,
    # which is undefined (NameError) when no log file matched. The
    # collection loop only keeps entries matching args.tech anyway, so the
    # intended key is args.tech.
    txt = "#arc delay[ns] area[um2] apd[nsum2]\n"
    for line in res_dict[args.tech]:
        for entry in line:
            txt += str(entry) + ' '
        txt += "\n"
    # Drop the trailing newline when writing out.
    with open(args.out_file, "w") as outf:
        outf.write(txt[:-1])
##########################################################################
# Optionally dump the raw per-circuit median dictionary, then sort the
# processed results by the requested key and write them to out_file.
if GET_MEDIAN_DICT:
    print(res_dict)
sort_keys = ["delay", "area", "apd"]
# Tuples are (arc, delay, area, apd); offset by 1 to skip the arc name.
# The tuple itself is a tiebreaker for deterministic ordering.
processed_res_dict[args.tech].sort(key = lambda arc : [arc[1 + sort_keys.index(args.sort_key)], arc])
log_sort(processed_res_dict)
|
require 'spec_helper'
# Verifies that every HTTP status code registered in API::ERRORS is
# translated into its corresponding exception when a request is made.
describe API do
  describe "HTTP errors" do
    it "must raise exceptions" do
      API::ERRORS.each do |code, exception|
        stub_api_request(:any, 'endpoint') { "HTTP/1.1 #{code}\n" }
        -> { API.get 'endpoint' }.must_raise exception
      end
    end
  end
end
|
package main
import (
"github.com/liamg/tml"
)
func main() {
tml.Printf(
"<red>%s</red> <yellow>%s</yellow> <green><bold>%s</bold></green>\n",
"ERROR",
"WARNING",
"SUCCESS",
)
}
|
# Seed the Category lookup table.
Category.create(
  [
    'Community',
    'Funding',
    'Education & Training',
    'Incubation/Acceleration',
    'R & D',
    'Services',
    'Space',
    'Startup',
  ].map { |name| { name: name } }
)
# Seed the Stage lookup table.
Stage.create(
  [
    'Startup',
    'Idea',
    'Growth',
    'Mature',
  ].map { |name| { name: name } }
)
|
{{-- Alert panel component: background colour, title, slot content and the
     trailing title line are all supplied by the caller. Values are rendered
     with {!! !!} (unescaped) -- callers must pass trusted markup only. --}}
<div class="alert alert-danger" style="background-color: {!! $color !!}; padding-left: 100px;border: black 1px solid">
    <div>title:{!! $title !!}</div>
    <div>myglobal:{!! $myglobal !!}</div>
    {!! $slot !!}
    <div>end title:{!! $title !!}</div>
</div>
<br>
|
package basicauth
import (
"encoding/base64"
"net/http"
"strings"
"github.com/gobuffalo/buffalo"
"github.com/pkg/errors"
)
var (
	// ErrNoCreds is returned when no basic auth credentials are defined
	ErrNoCreds = errors.New("no basic auth credentials defined")
	// ErrAuthFail is returned when the client fails basic authentication
	ErrAuthFail = errors.New("invalid basic auth username or password")
	// ErrUnauthorized is returned whenever basic authentication fails,
	// regardless of the underlying cause.
	ErrUnauthorized = errors.New("Unauthorized")
)

// Authorizer is used to authenticate the basic auth username/password.
// Should return true/false and/or an error.
type Authorizer func(buffalo.Context, string, string) (bool, error)

// Middleware enables basic authentication
func Middleware(auth Authorizer) buffalo.MiddlewareFunc {
	return func(next buffalo.Handler) buffalo.Handler {
		return func(c buffalo.Context) error {
			// Expect "Authorization: Basic <base64(user:pass)>".
			token := strings.SplitN(c.Request().Header.Get("Authorization"), " ", 2)
			if len(token) != 2 {
				// Missing/malformed header: challenge the client so
				// browsers prompt for credentials.
				c.Response().Header().Set("WWW-Authenticate", `Basic realm="Basic Authentication"`)
				return c.Error(http.StatusUnauthorized, ErrUnauthorized)
			}
			// Decode the base64 payload; reject anything unparsable.
			b, err := base64.StdEncoding.DecodeString(token[1])
			if err != nil {
				return c.Error(http.StatusUnauthorized, ErrUnauthorized)
			}
			// Split into username and password at the first colon.
			pair := strings.SplitN(string(b), ":", 2)
			if len(pair) != 2 {
				return c.Error(http.StatusUnauthorized, ErrUnauthorized)
			}
			// Delegate the actual credential check to the caller-supplied
			// Authorizer; its error aborts the request with a stack trace.
			success, err := auth(c, pair[0], pair[1])
			if err != nil {
				return errors.WithStack(err)
			}
			if !success {
				return c.Error(http.StatusUnauthorized, ErrUnauthorized)
			}
			return next(c)
		}
	}
}
|
"use strict";
// Transpiler output: lazily requires "mod" on first access and memoizes
// the interop-wrapped module by overwriting the accessor function itself.
function mod() {
  const data = babelHelpers.interopRequireDefault(require("mod"));
  mod = function () {
    return data;
  };
  return data;
}
// Touch the named export, then the module wrapper itself.
mod().named;
mod();
|
#!/bin/sh
# One-shot bootstrap: installs the dev toolchain (rollup plus plugins and
# the qsrv dev server), then invites the user to delete this script.
npm install --save-dev \
  rollup @rollup/plugin-node-resolve \
  rollup-plugin-jsy rollup-plugin-dgnotify \
  qsrv
echo "You may now remove '$0'"
|
module Mumukit
  # Mixin for runners that execute code through an external command-line
  # limiter script, mapping the process exit status to a test status.
  module WithCommandLine
    # POSIX signal numbers used to classify why the limiter killed a command.
    SIGINT = 2
    SIGSEGV = 11
    SIGXCPU = 24
    # Maximum output size allowed for a command (from global config).
    def command_size_limit
      Mumukit.config.command_size_limit
    end
    # Maximum CPU time allowed for a command (from global config).
    def command_time_limit
      Mumukit.config.command_time_limit
    end
    # Path of the limiter wrapper script (from global config).
    def limit_command
      Mumukit.config.limit_script
    end
    # Runs +command+ under the limiter and returns [output, status], where
    # status is :passed, :aborted (resource limit hit) or :failed.
    def run_command(command)
      out = %x{#{limit_command} #{command_size_limit} #{command_time_limit} $(cat <<EOLIMIT
#{command}
EOLIMIT
)}
      case $?.exitstatus
      when 0 then [out, :passed]
      # NOTE(review): SIGINT and SIGSEGV both map to 'memory_exceeded' --
      # presumably the limiter signals memory overruns this way; confirm.
      when signal_status(SIGINT) then [I18n.t('mumukit.memory_exceeded'), :aborted]
      when signal_status(SIGSEGV) then [I18n.t('mumukit.memory_exceeded'), :aborted]
      when signal_status(SIGXCPU) then [I18n.t('mumukit.time_exceeded', limit: command_time_limit), :aborted]
      else [out, :failed]
      end
    end
    # Shells report death-by-signal as 128 + signal number.
    def signal_status(signal)
      # see http://tldp.org/LDP/abs/html/exitcodes.html
      128 + signal
    end
  end
end
|
@extends('layouts.profile')
@section('title', 'View Recruiter Profile')
@section('content')
{{-- Read-only recruiter profile view. All fields are user-supplied, so
     they are now rendered with Blade's escaped output syntax; the previous
     raw `<?php echo` statements were an XSS vector. --}}
<h1>{{ $profile[0]->first_name }} {{ $profile[0]->last_name }}</h1>
<div>
    <div>Email</div>
</div>
<div>
    <div>{{ $profile[0]->email }}</div>
</div>
<div>
    <div>Phone</div>
</div>
<div>
    <div>{{ $profile[0]->phone }}</div>
</div>
<div>
    <div>Company</div>
</div>
<div>
    <div>{{ $profile[0]->company }}</div>
</div>
<div>
    <div>Street Address</div>
</div>
<div>
    <div>{{ $profile[0]->street_address1 }}</div>
</div>
<div>
    <div>Street Address 2</div>
</div>
<div>
    <div>{{ $profile[0]->street_address2 }}</div>
</div>
<div>
    <div>City</div>
</div>
<div>
    <div>{{ $profile[0]->city }}</div>
</div>
<div>
    <div>State</div>
</div>
<div>
    <div>{{ $profile[0]->state }}</div>
</div>
<div>
    <div>Zip Code</div>
</div>
<div>
    <div>{{ $profile[0]->zip }}</div>
</div>
@endsection
|
#region BSD License
/*
 * Use of this source code is governed by a BSD-style
 * license that can be found in the LICENSE.md file or at
 * https://github.com/Wagnerp/Krypton-Toolkit-Suite-Extended-NET-5.471/blob/master/LICENSE
 *
 */
#endregion
namespace Core.VersionFramework.Classes
{
    /// <summary>
    /// Simple mutable record of an assembly's identity: short name,
    /// version string and full display name.
    /// </summary>
    public class AssemblyInformation
    {
        #region Properties
        /// <summary>Gets or sets the assembly's short name.</summary>
        public string Name { get; set; }

        /// <summary>Gets or sets the assembly's version string.</summary>
        public string Version { get; set; }

        /// <summary>Gets or sets the assembly's full display name.</summary>
        public string FullName { get; set; }
        #endregion
    }
}
|
import React from 'react';
import classnames from 'classnames';
import numeral from 'numeral';
import { Typography } from '@material-ui/core';
import useTranslation from 'next-translate/useTranslation';
import {
AreaChart,
Area,
XAxis,
YAxis,
CartesianGrid,
Tooltip,
ResponsiveContainer,
} from 'recharts';
import {
Box, CustomToolTip,
} from '@components';
import dayjs, { formatDayJs } from '@utils/dayjs';
import { useRecoilValue } from 'recoil';
import { readDate } from '@recoil/settings';
import { useStyles } from './styles';
import { usePrice } from './hooks';
/**
 * Home-page price history panel: renders the token price series provided
 * by usePrice() as a gradient-filled area chart, formatting timestamps
 * according to the user's date-format preference.
 */
const Price: React.FC<ComponentDefault> = (props) => {
  const {
    classes, theme,
  } = useStyles();
  const { t } = useTranslation('home');
  const {
    state,
    tickPriceFormatter,
    formatTime,
  } = usePrice();
  const dateFormat = useRecoilValue(readDate);
  // Pre-format each data point once: a short label for the X axis and a
  // full timestamp for the tooltip. Raw times are interpreted as UTC.
  const formatItems = state.items.map((x) => {
    return ({
      time: formatTime(dayjs.utc(x.time), dateFormat),
      fullTime: formatDayJs(dayjs.utc(x.time), dateFormat),
      value: x.value,
    });
  });
  return (
    <Box className={classnames(props.className, classes.root)}>
      <Typography variant="h2" className={classes.label}>
        {t('priceHistory')}
      </Typography>
      <div className={classes.chart}>
        <ResponsiveContainer width="99%">
          <AreaChart
            data={formatItems}
            margin={{
              top: 20,
              right: 30,
              left: 0,
              bottom: 0,
            }}
          >
            <defs>
              <linearGradient id="colorUv" x1="0" y1="0" x2="0" y2="1">
                <stop
                  offset="5%"
                  stopColor={theme.palette.custom.primaryData.one}
                  stopOpacity={0.8}
                />
                <stop
                  offset="95%"
                  stopColor={theme.palette.custom.primaryData.one}
                  stopOpacity={0.1}
                />
              </linearGradient>
            </defs>
            <CartesianGrid stroke={theme.palette.divider} />
            <XAxis
              dataKey="time"
              tickLine={false}
            />
            <YAxis
              tickLine={false}
              tickFormatter={tickPriceFormatter}
              tickCount={9}
              // domain={[0, 'dataMax + 40']}
              domain={['dataMin - 10', 'dataMax + 10']}
            />
            <Tooltip
              cursor={false}
              content={(
                <CustomToolTip>
                  {(x) => {
                    return (
                      <>
                        <Typography variant="caption">
                          {x.fullTime}
                        </Typography>
                        <Typography variant="body1">
                          $
                          {numeral(x.value).format('0,0.00')}
                        </Typography>
                      </>
                    );
                  }}
                </CustomToolTip>
              )}
            />
            <Area
              type="monotone"
              dataKey="value"
              stroke={theme.palette.custom.primaryData.one}
              // fill={Color(theme.palette.custom.primaryData.one).alpha(0.7).toString()}
              fillOpacity={1}
              fill="url(#colorUv)"
            />
          </AreaChart>
        </ResponsiveContainer>
      </div>
    </Box>
  );
};
export default Price;
|
YTSimple
========
A plain text API that will return the most popular video ID for a search.
Includes Redis caching.
## Configuration:
This app must be configured before it can be used.
**config.json**
```
{
"redis": {
"host": "127.0.0.1", // Redis hostname
"port": 6379, // Redis port
"pass": "" // Redis password
},
"port": 8080, // Application port (where it will bind to)
"addr": "0.0.0.0", // Application address (where it will bind to)
"api": {
"key": "ytAPIKeyGoesHere" // Youtube API key: https://console.developers.google.com/
}
}
```
|
#!/usr/bin/env bash
# Prints the details (certificate fingerprints etc.) of the Android debug
# keystore using the well-known default alias/passwords.
#
# Usage: script [--file=/path/to/debug.keystore]
FILE=~/.android/debug.keystore
#######################################
# Parse an optional --file= override.
for arg in "$@"; do
  case $arg in
    --file=*)
    FILE=${arg#*=}
    shift
    ;;
  esac
done
#######################################
if [ ! -f "$FILE" ]; then
  >&2 echo "File not found"
  # BUG FIX: previously fell through and ran keytool on the missing file.
  exit 1
fi
# $FILE is quoted so paths with spaces work.
keytool -list -v -keystore "$FILE" -alias androiddebugkey -storepass android -keypass android
|
// NOTE(review): the module name "linkButotndata" looks misspelled; fixing
// it requires renaming the file itself, so the import is left as-is.
import { LinkButtonData } from "./linkButotndata.models";
import { Link } from "./style";

/**
 * Renders a styled router link. Falls back to the root path and a
 * placeholder label when props are omitted.
 */
function LinkButton(props: LinkButtonData) {
  const { tolink = "/", value = "value" } = props;
  return <Link to={tolink}>{value}</Link>;
}

export default LinkButton;
|
---
layout: post
microblog: true
audio:
photo:
date: 2010-01-13 18:00:00 -0600
guid: http://craigmcclellan.micro.blog/2010/01/14/t7757368368.html
---
Perfect time to die iPhone, right before I go out of town for 4 days. Pray that I get it going later.
|
# `@telecraft/parser`
Default parser for the Telecraft project. Supports Vanilla 1.16.2 and PaperMC 1.16.2.
|
package v1alpha1
import (
"harmonycloud.cn/stellaris/pkg/apis/multicluster/common"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
)
// +genclient
// +genclient:nonNamespaced
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// +kubebuilder:resource:scope="Cluster"
// +kubebuilder:subresource:status
// Cluster is a cluster-scoped resource describing a member cluster
// registered with the multicluster control plane.
type Cluster struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ObjectMeta `json:"metadata,omitempty"`
	// Spec holds the desired configuration (the addons to report on).
	Spec ClusterSpec `json:"spec,omitempty"`
	// Status holds state observed via heartbeats from the member cluster.
	Status ClusterStatus `json:"status,omitempty"`
}
// ClusterSpec lists the addons whose info the member cluster reports.
type ClusterSpec struct {
	Addons []ClusterAddons `json:"addons,omitempty"`
}
// ClusterStatus captures the last observed state of a member cluster.
type ClusterStatus struct {
	Conditions []common.Condition `json:"conditions,omitempty"`
	// Time the control plane last received a heartbeat from the cluster.
	LastReceiveHeartBeatTimestamp metav1.Time `json:"lastReceiveHeartBeatTimestamp,omitempty"`
	LastUpdateTimestamp metav1.Time `json:"lastUpdateTimestamp,omitempty"`
	Healthy bool `json:"healthy,omitempty"`
	Status ClusterStatusType `json:"status,omitempty"`
}
// ClusterAddons pairs an addon name with its free-form status payload.
type ClusterAddons struct {
	Name string `json:"name"`
	// +kubebuilder:pruning:PreserveUnknownFields
	Info *runtime.RawExtension `json:"info"`
}
// ClusterStatusType is the connectivity state of a member cluster.
type ClusterStatusType string
const (
	OnlineStatus ClusterStatusType = "online"
	OfflineStatus ClusterStatusType = "offline"
)
// +genclient:nonNamespaced
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
// ClusterList is the list type for Cluster resources.
type ClusterList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata"`
	Items []Cluster `json:"items"`
}
|
import Vue from 'vue'
import Vuex from 'vuex'
import Metadata from 'src/store/metadata'
import * as actions from './actions'
import * as getters from './getters'
import * as mutations from './mutations'
// Build-time configuration; strict mode is enabled outside production.
let Config = require('../config/index')
const debug = process.env.NODE_ENV !== 'production'
if (debug) Vue.use(Vuex)
var nodeInfo = {}
const state = {
  app: {
    userInfo: {}
  },
  // Editor settings: defaults merged with whatever the user persisted in
  // localStorage (phoneSize and demoMode are never restored from storage).
  setting: Object.assign({
    demoMode: false,
    phoneSize: {
      name: 'Mobile S',
      width: '320px',
      height: '500px'
    },
    open: true, // grid overlay on/off
    line: true, // show smart alignment guide lines
    color: '#409EFF', // guide line colour
    phoneline: true, // show the phone frame
    sorb: true // snap to guides while dragging
  }, (() => {
    let editorSetting = {}
    try {
      editorSetting = JSON.parse(window.localStorage.getItem('EditorSetting')) || {}
    } catch (error) {}
    delete editorSetting.phoneSize
    delete editorSetting.demoMode
    return editorSetting
  })()),
  RootNodeInfo: nodeInfo,
  componentList: {},
  componentMap: {},
  pageInfo: {
    lastPage: '',
    currentPage: ''
  },
  DataHub: {},
  Config,
  Metadata,
  // Workspace view options: the active dock layout (persisted in
  // localStorage), the available layouts, and the dockable widget panels
  // (labels are user-facing strings and must stay as-is).
  viewOption: {
    currentLayout: (() => window.localStorage.getItem('dockLayout_type') || 'default')(),
    pageType: 0,
    layouts: [
      {
        type: 'default',
        name: '默认布局'
      },
      {
        type: 'desktop',
        name: 'PC 布局'
      },
      {
        type: 'flutter',
        name: 'flutter布局'
      },
      {
        type: 'develop',
        name: '开发者布局'
      },
      {
        type: 'custom',
        name: '自定义'
      }
    ],
    widgets: {
      'widgetComponent': '组件列表',
      'widgetCombinedComponents': '组合组件',
      'widgetComponentTree': '组件树',
      'widgetPageTemplate': '页面模板',
      'widgetScene': '场景',
      'widgetComponentInfo': '组件详情',
      'widgetPageInfo': '页面设置',
      'widgetAttribute': '属性',
      'widgetStyleEditor': '样式',
      'widgetScriptEditor': '脚本',
      'widgetAnimate': '动画',
      'widgetCodePanel': '脚本编辑',
      'widgetShop': '商城'
    }
  }
}
export default new Vuex.Store({
  strict: debug,
  state,
  actions,
  getters,
  mutations
})
|
#!/usr/bin/env ruby
require 'yard'
require 'json'
require 'optparse'
require 'stringio'
require 'active_support/core_ext/object/blank'
# Parses YARD output for AnalyticsEvents methods
class AnalyticsEventsDocumenter
  DEFAULT_DATABASE_PATH = '.yardoc'
  # Custom YARD tag recording an event's former name(s).
  PREVIOUS_EVENT_NAME_TAG = :'identity.idp.previous_event_name'
  # Params that may legitimately go undocumented.
  DOCUMENTATION_OPTIONAL_PARAMS = %w[
    pii_like_keypaths
  ]
  attr_reader :database_path
  # Parses CLI flags and runs the requested mode (--check or --json).
  # @return [(String, Integer)] returns a tuple of (output, exit_status)
  def self.run(argv)
    exit_status = 0
    output = StringIO.new
    check = false
    json = false
    help = false
    parser = OptionParser.new do |opts|
      opts.on('--check', 'Checks that all params are documented, will exit 1 if missing') do
        check = true
      end
      opts.on('--json') do
        json = true
      end
      opts.on('--help', 'print this help message') do
        help = true
      end
    end
    parser.parse!(argv)
    # First remaining positional arg (if any) is the yardoc database path.
    documenter = new(argv.first)
    if help || (!check && !json)
      output.puts parser
    elsif check
      missing_documentation = documenter.missing_documentation
      if missing_documentation.present?
        output.puts missing_documentation
        exit_status = 1
      else
        output.puts 'All AnalyticsEvents methods are documented! 🚀'
      end
    elsif json
      output.puts JSON.pretty_generate(documenter.as_json)
    end
    [ output.string.presence, exit_status ]
  end
  def initialize(database_path)
    @database_path = database_path || DEFAULT_DATABASE_PATH
  end
  # Checks for params that are missing documentation, and returns a list of
  # "file:line method param" style error strings (empty when all is well).
  # @return [Array<String>]
  def missing_documentation
    analytics_methods.flat_map do |method_object|
      # Keyword params are stored with a trailing ':' in YARD; strip it so
      # they compare equal to @param tag names.
      param_names = method_object.parameters.map { |p| p.first.chomp(':') }
      documented_params = method_object.tags('param').map(&:name)
      missing_attributes = param_names - documented_params - DOCUMENTATION_OPTIONAL_PARAMS
      error_prefix = "#{method_object.file}:#{method_object.line} #{method_object.name}"
      errors = []
      if !extract_event_name(method_object)
        errors << "#{error_prefix} event name not detected in track_event"
      end
      missing_attributes.each do |attribute|
        next if attribute.start_with?('**')
        errors << "#{error_prefix} #{attribute} (undocumented)"
      end
      # Every event method must accept **extra as its last parameter.
      if param_names.size > 0 && !param_names.last.start_with?('**')
        errors << "#{error_prefix} missing **extra"
      end
      if method_object.signature.end_with?('*)')
        errors << "#{error_prefix} don't use * as an argument, remove all args or name args"
      end
      errors
    end
  end
  # Serializes every event method (name, previous names, description,
  # documented attributes) for machine consumption.
  # @return [{ events: Array<Hash>}]
  def as_json
    events_json_summary = analytics_methods.map do |method_object|
      attributes = method_object.tags('param').map do |tag|
        {
          name: tag.name,
          types: tag.types,
          description: tag.text.presence,
        }
      end.compact
      {
        event_name: extract_event_name(method_object),
        previous_event_names: method_object.tags(PREVIOUS_EVENT_NAME_TAG).map(&:text),
        description: method_object.docstring.presence,
        attributes: attributes,
      }
    end
    { events: events_json_summary }
  end
  private
  # Naive attempt to pull tracked event string from source code
  def extract_event_name(method_object)
    m = /track_event\(\s*["'](?<event_name>[^"']+)["']/.match(method_object.source)
    m && m[:event_name]
  end
  # Lazily loads the YARD registry from the database path.
  def database
    @database ||= YARD::Serializers::YardocSerializer.new(database_path).deserialize('root')
  end
  # All methods defined on the AnalyticsEvents namespace.
  # @return [Array<YARD::CodeObjects::MethodObject>]
  def analytics_methods
    database.select do |_k, object|
      object.type == :method && object.namespace&.name == :AnalyticsEvents
    end.values
  end
end
# rubocop:disable Rails/Output
# rubocop:disable Rails/Exit
# CLI entry point: only runs when executed directly (not when required).
if $PROGRAM_NAME == __FILE__
  output, status = AnalyticsEventsDocumenter.run(ARGV)
  puts output
  exit status
end
# rubocop:enable Rails/Exit
# rubocop:enable Rails/Output
|
<?php
use Illuminate\Support\Facades\Route;
/*
|--------------------------------------------------------------------------
| Web Routes
|--------------------------------------------------------------------------
|
| Here is where you can register web routes for your application. These
| routes are loaded by the RouteServiceProvider within a group which
| contains the "web" middleware group. Now create something great!
|
*/
// Student ("siswa") area: every route below requires the mid_siswa
// middleware and lives under the /siswa URL prefix.
Route::group(['middleware'=>'mid_siswa'],function(){
    Route::group(['prefix'=>'siswa'],function(){
        // Student dashboard.
        Route::group(['prefix'=>'dashboard'],function(){
            Route::get('/','Siswa\DashboardController@main')->name('dashboard_siswa');
        });
        // Final report card ("rapor akhir") view and its data endpoint.
        Route::group(['prefix'=>'rapor_akhir'],function(){
            Route::get('/','Siswa\RaporController@main')->name('rapor_akhir');
            Route::post('/data','Siswa\RaporController@data')->name('data_rapor_akhir');
        });
        // Grade ("nilai") entry: paging endpoints plus per-type save actions.
        Route::group(['prefix'=>'nilai'],function(){
            Route::get('/','Siswa\IsianController@main')->name('nilai_siswa');
            Route::get('/main/{id_mengajar}','Siswa\IsianController@mengajar')->name('mengajar_nilai_siswa');
            Route::post('/get_pages','Siswa\IsianController@get_pages')->name('get_pages_siswa');
            Route::post('/pages1','Siswa\IsianController@pages1')->name('pages1_siswa');
            Route::post('/pages2','Siswa\IsianController@pages2')->name('pages2_siswa');
            Route::post('/simpan11','Siswa\IsianController@simpan11')->name('simpan_nilai_siswa_11');
            Route::post('/simpankd','Siswa\IsianController@simpankd')->name('simpan_nilai_siswa_kd');
            Route::post('/simpansd','Siswa\IsianController@simpansd')->name('simpan_nilai_siswa_sd');
        });
    });
});
|
import { Location } from 'history'
import React, { useCallback, useContext } from 'react'
import { useMutation } from '@apollo/client'
import { useModal } from 'react-modal-hook'
import { Link, useHistory, useLocation } from 'react-router-dom'
import ConfirmDialog from '../../components/ConfirmDialog'
import DropdownMenu from '../../components/DropdownMenu'
import Kbd from '../../components/Kbd'
import LinkIcon from '../../components/LinkIcon'
import { LocalConfigurationContext, SortBy, SortOrder } from '../../context/LocalConfigurationContext'
import { MessageContext } from '../../context/MessageContext'
import { getGQLError } from '../../helpers'
import { GetArticlesRequest, MarkAllArticlesAsReadRequest, MarkAllArticlesAsReadResponse } from '../models'
import { MarkAllArticlesAsRead } from '../queries'
import { updateCacheAfterMarkAllAsRead } from '../cache'
type Variant = 'unread' | 'history' | 'starred' | 'offline'
interface Props {
refresh: () => void
req: GetArticlesRequest
variant: Variant
}
/** Flips a sort order: 'asc' becomes 'desc', anything else becomes 'asc'. */
function revertSortOrder(order: SortOrder | null) {
  if (order === 'asc') {
    return 'desc'
  }
  return 'asc'
}
/** Toggles the sort criterion between 'key' (date) and 'stars'. */
function revertSortBy(by: SortBy | null) {
  if (by === 'key') {
    return 'stars'
  }
  return 'key'
}
/** Toggles an article status filter: 'unread' -> 'read', else 'unread'. */
function revertStatus(status: string | null) {
  if (status === 'unread') {
    return 'read'
  }
  return 'unread'
}
/** Label for the sort-by toggle: offers the criterion not currently active. */
function getSortByMessage(req: GetArticlesRequest) {
  if (req.sortBy === 'stars') {
    return 'Sort by date'
  }
  return 'Sort by stars'
}
/** Label describing what toggling the sort order would show. */
function getSortOrderMessage(req: GetArticlesRequest) {
  const ascending = req.sortOrder === 'asc'
  if (req.sortBy === 'stars') {
    return ascending ? 'More stars first' : 'Less stars first'
  }
  return ascending ? 'Recent articles first' : 'Older articles first'
}
/** Copy of `loc` whose query string has `by` set to the given sort key. */
function getLocationWithSortByParam(loc: Location, by: SortBy) {
  const query = new URLSearchParams(loc.search)
  query.set('by', by)
  return { ...loc, search: query.toString() }
}
/** Copy of `loc` whose query string has `sort` set to the given order. */
function getLocationWithSortOrderParam(loc: Location, order: SortOrder) {
  const query = new URLSearchParams(loc.search)
  query.set('sort', order)
  return { ...loc, search: query.toString() }
}
/** Copy of `loc` whose query string has `status` set to the given value. */
function getLocationWithStatusParam(loc: Location, status: 'read' | 'unread') {
  const query = new URLSearchParams(loc.search)
  query.set('status', status)
  return { ...loc, search: query.toString() }
}
/**
 * Dropdown menu of page-level options for an articles list: refresh,
 * sort-by / sort-order toggles, mark-all-as-read, and a read/unread status
 * switch. Each action also registers a keyboard shortcut via <Kbd>.
 */
export default (props: Props) => {
  const { refresh, req, variant } = props
  const loc = useLocation()
  const { push } = useHistory()
  const { showErrorMessage } = useContext(MessageContext)
  const { localConfiguration, updateLocalConfiguration } = useContext(LocalConfigurationContext)
  const [markAllArticlesAsReadMutation] = useMutation<MarkAllArticlesAsReadResponse, MarkAllArticlesAsReadRequest>(
    MarkAllArticlesAsRead
  )
  // Fires the mark-all mutation, patches the Apollo cache, then refreshes.
  const markAllAsRead = useCallback(async () => {
    try {
      await markAllArticlesAsReadMutation({
        variables: { category: req.category },
        update: updateCacheAfterMarkAllAsRead,
      })
      await refresh()
    } catch (err) {
      showErrorMessage(getGQLError(err))
    }
  }, [markAllArticlesAsReadMutation, req, refresh, showErrorMessage])
  // Persists the flipped sort-by preference keyed per category/variant;
  // no-op when the stored value already matches.
  const updateLocalConfigSortBy = useCallback(() => {
    const { sorting } = localConfiguration
    const by = revertSortBy(req.sortBy)
    const key = req.category ? `cat_${req.category}` : variant
    if (!Object.prototype.hasOwnProperty.call(sorting, key)) {
      sorting[key] = { order: 'desc', by }
    } else if (sorting[key].by !== by) {
      sorting[key].by = by
    } else {
      return
    }
    updateLocalConfiguration({ ...localConfiguration, sorting })
  }, [req, variant, localConfiguration, updateLocalConfiguration])
  // Same as above, but for the sort order.
  const updateLocalConfigSortOrder = useCallback(() => {
    const { sorting } = localConfiguration
    const order = revertSortOrder(req.sortOrder)
    const key = req.category ? `cat_${req.category}` : variant
    if (!Object.prototype.hasOwnProperty.call(sorting, key)) {
      sorting[key] = { order, by: 'key' }
    } else if (sorting[key].order !== order) {
      sorting[key].order = order
    } else {
      return
    }
    updateLocalConfiguration({ ...localConfiguration, sorting })
  }, [req, variant, localConfiguration, updateLocalConfiguration])
  // Keyboard shortcut handlers: persist the preference, then navigate to
  // the same location with the query parameter flipped.
  const toggleSortBy = useCallback(
    (event: KeyboardEvent) => {
      event.preventDefault()
      updateLocalConfigSortBy()
      push(getLocationWithSortByParam(loc, revertSortBy(req.sortBy)))
      return false
    },
    [loc, req, push, updateLocalConfigSortBy]
  )
  const toggleSortOrder = useCallback(
    (event: KeyboardEvent) => {
      event.preventDefault()
      updateLocalConfigSortOrder()
      push(getLocationWithSortOrderParam(loc, revertSortOrder(req.sortOrder)))
      return false
    },
    [loc, req, push, updateLocalConfigSortOrder]
  )
  const toggleStatus = useCallback(
    (event: KeyboardEvent) => {
      event.preventDefault()
      push(getLocationWithStatusParam(loc, revertStatus(req.status)))
      return false
    },
    [loc, req, push]
  )
  // Confirmation dialog shown before marking everything as read.
  const [showMarkAllAsReadDialog, hideMarkAllAsReadDialog] = useModal(() => (
    <ConfirmDialog
      title="Mark all as read?"
      confirmLabel="Do it"
      onConfirm={() => markAllAsRead().then(hideMarkAllAsReadDialog)}
      onCancel={hideMarkAllAsReadDialog}
    >
      Are you sure to mark ALL articles as read?
    </ConfirmDialog>
  ))
  return (
    <DropdownMenu title="Page options...">
      <ul>
        <li>
          <LinkIcon onClick={refresh} icon="refresh">
            <span>Refresh</span>
            <Kbd keys="shift+r" onKeypress={refresh} />
          </LinkIcon>
        </li>
        {variant === 'starred' && (
          <li>
            <LinkIcon
              as={Link}
              to={getLocationWithSortByParam(loc, revertSortBy(req.sortBy))}
              onClick={updateLocalConfigSortBy}
              icon="swap_horiz"
            >
              <span>{getSortByMessage(req)}</span>
              <Kbd keys="shift+b" onKeypress={toggleSortBy} />
            </LinkIcon>
          </li>
        )}
        <li>
          <LinkIcon
            as={Link}
            to={getLocationWithSortOrderParam(loc, revertSortOrder(req.sortOrder))}
            onClick={updateLocalConfigSortOrder}
            icon="sort"
          >
            <span>{getSortOrderMessage(req)}</span>
            <Kbd keys="shift+o" onKeypress={toggleSortOrder} />
          </LinkIcon>
        </li>
        {req.status === 'unread' && (
          <li>
            <LinkIcon onClick={showMarkAllAsReadDialog} icon="done_all">
              <span>Mark all as read</span>
              <Kbd keys="shift+m" onKeypress={showMarkAllAsReadDialog} />
            </LinkIcon>
          </li>
        )}
        {!!req.category && !!req.status && (
          <li>
            <LinkIcon as={Link} to={getLocationWithStatusParam(loc, revertStatus(req.status))} icon="history">
              <span>{req.status === 'read' ? 'View unread articles' : 'View read articles'}</span>
              <Kbd keys="shift+h" onKeypress={toggleStatus} />
            </LinkIcon>
          </li>
        )}
      </ul>
    </DropdownMenu>
  )
}
|
require 'logger'

module Nebula
  # Thin wrapper around ::Logger that writes to the configured Nebula log
  # file. NOTE(review): class_name is currently unused -- confirm whether it
  # was meant to become the logger's progname.
  class Logger < ::Logger
    def initialize(class_name)
      super(File.open(Nebula.log_path, "w+"))
    end

    # Runs the block, logs +msg+ at +level+ afterwards, and returns the
    # block's value.
    def with_logging(level, msg, &block)
      value = block.call
      send(level, msg)
      value
    end
  end
end
|
#!/bin/bash
#
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Copyright (c) 2015 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file is a tuned-down version of the scripts used to build Chrome Debian
# packages. See src/chrome/installer/linux/debian for the original.
set -e
set -o pipefail
if [ "$VERBOSE" ]; then
set -x
fi
set -u
# Create the Debian changelog file needed by dpkg-gencontrol.
# There is some sed/awk trickery involved to get the git commit log formatted
# and appended to the regular list of changes.
# Builds DEB_CHANGELOG from the template, appends the recent changes summary
# (if the build produced one) and installs the gzipped copy under
# /usr/share/doc in the staging tree.
gen_changelog() {
  rm -f "${DEB_CHANGELOG}"
  process_template "${SCRIPTDIR}/changelog.template" "${DEB_CHANGELOG}"
  # Add a line so debchange can update the timestamp and have one changelog
  # line, then use sed to add a summary of the changes since the last version
  # bump.
  debchange -a --nomultimaint -m --changelog "${DEB_CHANGELOG}" \
    "New release."
  if [ -f "${BUILDDIR}/installer/changes.txt" ]; then
    local formatted_changes=`awk '{printf " - %s\\\\\n",$0}' ${BUILDDIR}/installer/changes.txt`
    sed -i '/New release/{n;c\
'"${formatted_changes}"'
}' ${DEB_CHANGELOG}
  fi
  # Debian policy: ship the changelog gzipped at max compression, mode 644.
  GZLOG="${STAGEDIR}/usr/share/doc/${PACKAGE}/changelog.Debian.gz"
  mkdir -p "$(dirname "${GZLOG}")"
  gzip -9 -c "${DEB_CHANGELOG}" > "${GZLOG}"
  chmod 644 "${GZLOG}"
}
# Create the Debian control file needed by dpkg-deb.
# Reads DEB_CONTROL/DEB_CHANGELOG/DEB_FILES and writes the final control
# file into the staging DEBIAN directory, removing the intermediate file.
gen_control() {
  dpkg-gencontrol -v"${VERSIONFULL}" -c"${DEB_CONTROL}" -l"${DEB_CHANGELOG}" \
  -f"${DEB_FILES}" -p"${PACKAGE}" -P"${STAGEDIR}" \
  -O > "${STAGEDIR}/DEBIAN/control"
  rm -f "${DEB_CONTROL}"
}
# Setup the installation directory hierarchy in the package staging area.
# Extends the common staging prep with the Debian-specific DEBIAN/ and
# doc directories.
prep_staging_debian() {
  prep_staging_common
  install -m 755 -d "${STAGEDIR}/DEBIAN" \
    "${STAGEDIR}/usr/share/doc/${PACKAGE}"
}
# Put the package contents in the staging area.
# USR_BIN_SYMLINK_NAME is consumed by stage_install_common (defined in the
# sourced installer.include).
stage_install_debian() {
  local USR_BIN_SYMLINK_NAME="xwalk"
  prep_staging_debian
  stage_install_common
}
# Actually generate the package file.
# Computes shared-library dependencies with dpkg-shlibdeps, merges in the
# hand-maintained extras, then builds the .deb from the staging tree.
do_package() {
  echo "Packaging ${ARCHITECTURE}..."
  # Need a dummy debian/control file for dpkg-shlibdeps.
  DUMMY_STAGING_DIR="${TMPFILEDIR}/dummy_staging"
  mkdir "$DUMMY_STAGING_DIR"
  pushd "$DUMMY_STAGING_DIR"
  mkdir debian
  touch debian/control
  # Generate the dependencies,
  # TODO(mmoss): This is a workaround for a problem where dpkg-shlibdeps was
  # resolving deps using some of our build output shlibs (i.e.
  # out/Release/lib.target/libfreetype.so.6), and was then failing with:
  #   dpkg-shlibdeps: error: no dependency information found for ...
  # It's not clear if we ever want to look in LD_LIBRARY_PATH to resolve deps,
  # but it seems that we don't currently, so this is the most expediant fix.
  SAVE_LDLP=${LD_LIBRARY_PATH:-}
  unset LD_LIBRARY_PATH
  DPKG_SHLIB_DEPS=$(dpkg-shlibdeps -O "${STAGEDIR}/${INSTALLDIR}/${PROGNAME}" | \
    sed 's/^shlibs:Depends=//')
  if [ -n "$SAVE_LDLP" ]; then
    LD_LIBRARY_PATH=$SAVE_LDLP
  fi
  rm -rf "$DUMMY_STAGING_DIR"
  popd
  # Additional dependencies not in the dpkg-shlibdeps output.
  # - Pull a more recent version of NSS than required by runtime linking, for
  # security and stability updates in NSS.
  ADDITION_DEPS="ca-certificates, libnss3 (>= 3.14.3), lsb-base (>=3.2), \
  xdg-utils (>= 1.0.2)"
  # Fix-up libnspr dependency due to renaming in Ubuntu (the old package still
  # exists, but it was moved to "universe" repository, which isn't installed by
  # default).
  DPKG_SHLIB_DEPS=$(sed \
    's/\(libnspr4-0d ([^)]*)\), /\1 | libnspr4 (>= 4.9.5-0ubuntu0), /g' \
    <<< $DPKG_SHLIB_DEPS)
  COMMON_DEPS="${DPKG_SHLIB_DEPS}, ${ADDITION_DEPS}"
  COMMON_PREDEPS="dpkg (>= 1.14.0)"
  # Variables consumed by the control.template expansion below.
  PREDEPENDS="$COMMON_PREDEPS"
  DEPENDS="${COMMON_DEPS}"
  REPLACES=""
  CONFLICTS=""
  PROVIDES=""
  gen_changelog
  process_template "${SCRIPTDIR}/control.template" "${DEB_CONTROL}"
  export DEB_HOST_ARCH="${ARCHITECTURE}"
  if [ -f "${DEB_CONTROL}" ]; then
    gen_control
  fi
  # Build the .deb with xz compression at max level into the current dir.
  fakeroot dpkg-deb -Zxz -z9 -b "${STAGEDIR}" .
}
# Remove temporary files and unwanted packaging output.
cleanup() {
  echo "Cleaning..."
  # Both scratch trees can be removed with a single rm invocation.
  rm -rf "${STAGEDIR}" "${TMPFILEDIR}"
}
# Print command-line help for this packaging script.
usage() {
  echo "usage: $(basename $0) [-a target_arch] [-o 'dir'] "
  echo " [-b 'dir']"
  echo "-a arch package architecture (ia32 or x64)"
  echo "-o dir package output directory [${OUTPUTDIR}]"
  echo "-b dir build input directory [${BUILDDIR}]"
  echo "-h this help message"
}
# Parse command-line flags into OUTPUTDIR/BUILDDIR/TARGETARCH.
# NOTE(review): '-c' is declared in the getopts string but has no case
# branch below — confirm whether it is vestigial.
process_opts() {
  while getopts ":o:b:c:a:h" OPTNAME
  do
    case $OPTNAME in
      o )
        # Output directory; resolved to an absolute path and created.
        OUTPUTDIR=$(readlink -f "${OPTARG}")
        mkdir -p "${OUTPUTDIR}"
        ;;
      b )
        # Build input directory.
        BUILDDIR=$(readlink -f "${OPTARG}")
        ;;
      a )
        TARGETARCH="$OPTARG"
        ;;
      h )
        usage
        exit 0
        ;;
      \: )
        # Option present but its required argument is missing.
        echo "'-$OPTARG' needs an argument."
        usage
        exit 1
        ;;
      * )
        echo "invalid command-line option: $OPTARG"
        usage
        exit 1
        ;;
    esac
  done
}
#=========
# MAIN
#=========

# Resolve script location and create scratch areas; bail out early if the
# temp directories cannot be created.
SCRIPTDIR=$(readlink -f "$(dirname "$0")")
OUTPUTDIR="${PWD}"
STAGEDIR=$(mktemp -d -t deb.build.XXXXXX) || exit 1
TMPFILEDIR=$(mktemp -d -t deb.tmp.XXXXXX) || exit 1
DEB_CHANGELOG="${TMPFILEDIR}/changelog"
DEB_FILES="${TMPFILEDIR}/files"
DEB_CONTROL="${TMPFILEDIR}/control"

# Default target architecture to same as build host.
if [ "$(uname -m)" = "x86_64" ]; then
  TARGETARCH="x64"
else
  TARGETARCH="ia32"
fi

# call cleanup() on exit
trap cleanup 0

process_opts "$@"

# Fall back to the default build output location when -b was not given.
BUILDDIR=${BUILDDIR:=$(readlink -f "${SCRIPTDIR}/../../../../out/Release")}

# Pull version and package metadata generated by the build.
source ${BUILDDIR}/installer/common/installer.include
get_version_info
VERSIONFULL="${VERSION}-${PACKAGE_RELEASE}"

source ${BUILDDIR}/installer/common/crosswalk.info

# Some Debian packaging tools want these set.
export DEBFULLNAME="${MAINTNAME}"
export DEBEMAIL="${MAINTMAIL}"

# Make everything happen in the OUTPUTDIR.
cd "${OUTPUTDIR}"

# Map the friendly arch names onto Debian architecture strings and stage.
case "$TARGETARCH" in
  ia32 )
    export ARCHITECTURE="i386"
    stage_install_debian
    ;;
  x64 )
    export ARCHITECTURE="amd64"
    stage_install_debian
    ;;
  * )
    echo
    echo "ERROR: Don't know how to build DEBs for '$TARGETARCH'."
    echo
    exit 1
    ;;
esac

do_package
|
#!/bin/bash
#
# Tag and push a Docker image. On master, ":latest" is pushed and the
# content tag is "<commit-epoch>-<short-hash>"; on other branches the
# branch name is appended to that tag and ":latest" is not pushed.
function main {
  local image=$1
  # Prefer the CI-provided branch; fall back to the local HEAD's branch.
  local branch=${TRAVIS_BRANCH:-$(git rev-parse --abbrev-ref HEAD)}
  # Declare and assign separately so a git failure is not masked by
  # 'local' (with 'set -e' the combined form hides the exit status).
  local tag
  tag=$(git show -s --format=%ct-%h HEAD)
  # Log in only when credentials are provided (e.g. CI secrets).
  # Fix: POSIX marks '[ a -a b ]' obsolescent/ambiguous; use '&&' instead.
  if [ -n "${DOCKER_USERNAME}" ] && [ -n "${DOCKER_PASSWORD}" ]
  then
    docker login -u "${DOCKER_USERNAME}" -p "${DOCKER_PASSWORD}"
  fi
  if [ "${branch}" = "master" ]
  then
    docker push "${image}:latest"
  else
    tag="${tag}-${branch}"
  fi
  docker tag "${image}:latest" "${image}:${tag}"
  docker push "${image}:${tag}"
}
set -e
main "$@"
|
require "spec_helper"

# Specs for ItemStatus's symbol-to-id lookup and its caching behaviour.
describe ItemStatus do
  describe "the index operator - ItemStatus[:foo]" do
    it "returns the id for the corresponding status symbol" do
      ItemStatus[:unknown].should == 1
      ItemStatus[:valid].should == 2
      ItemStatus[:invalid].should == 3
    end

    it "does not hit the database on the second request for a key" do
      ItemStatus[:unknown] #first time (primes the cache)
      # NOTE(review): the stub looks redundant next to should_not_receive
      # on the following line — confirm it is intentional.
      ItemStatus.stub(:find_by_status)
      ItemStatus.should_not_receive(:find_by_status)
      ItemStatus[:unknown] #second time (should be served from the cache)
    end
  end
end
|
/*
* COPYRIGHT (C) 2017 Alpine Data Labs Inc. All Rights Reserved.
*/
package com.alpine.model.pack.ast.expressions
import com.alpine.common.serialization.json.TypeWrapper
import com.alpine.sql.SQLGenerator
import com.alpine.transformer.sql.ColumnarSQLExpression
/**
  * Created by Jennifer Thompson on 3/1/17.
  */
trait ASTExpression {
  /** Names of all input columns this expression (transitively) reads. */
  def inputNames: Set[String]
  /** Evaluates the expression against one row, keyed by column name. */
  def execute(input: Map[String, Any]): Any
  /** Renders the expression as a columnar SQL fragment via the generator. */
  def toColumnarSQL(sqlGenerator: SQLGenerator): ColumnarSQLExpression
}
object ASTExpression {
  // Implicit (un)wrapping so TypeWrapper[ASTExpression] (used for JSON
  // serialization) is interchangeable with the bare expression type.
  implicit def extractValue(t: TypeWrapper[ASTExpression]): ASTExpression = t.value
  implicit def wrapValue(value: ASTExpression): TypeWrapper[ASTExpression] = TypeWrapper(value)
}
// Base class for unary expressions.
// NOTE(review): "ASST" looks like a typo for "AST", but renaming would
// break subclasses defined elsewhere, so the name is kept as-is.
abstract class SingleArgumentASSTExpression extends ASTExpression {
  /** The single wrapped operand. */
  def argument: TypeWrapper[ASTExpression]
  def inputNames: Set[String] = argument.inputNames
}
// Base class for binary expressions (same "ASST" naming caveat as above).
abstract class BinaryASSTExpression extends ASTExpression {
  def left: TypeWrapper[ASTExpression]
  def right: TypeWrapper[ASTExpression]
  // @transient: derived from left/right, so recomputed instead of serialized.
  @transient lazy val inputNames: Set[String] = left.value.inputNames ++ right.value.inputNames
}
/**
  * Binary expression with SQL-style null propagation: if either operand
  * evaluates to null the whole expression is null, and the right operand
  * is only evaluated when the left one is non-null.
  */
abstract class BinaryASSTExpressionWithNullHandling extends BinaryASSTExpression {
  final override def execute(input: Map[String, Any]): Any = {
    val leftResult = left.value.execute(input)
    if (leftResult == null) {
      null
    } else {
      val rightResult = right.value.execute(input)
      if (rightResult == null) null
      else executeForNonNullValues(leftResult, rightResult)
    }
  }

  /** Combines two operand values, both guaranteed to be non-null. */
  def executeForNonNullValues(leftVal: Any, rightVal: Any): Any
}
/** Leaf expression referencing a single input column by name. */
case class NameReferenceExpression(name: String) extends ASTExpression {
  override def inputNames: Set[String] = Set(name)

  // Missing keys surface as NoSuchElementException (Map#apply semantics).
  override def execute(input: Map[String, Any]): Any = input(name)

  override def toColumnarSQL(sqlGenerator: SQLGenerator): ColumnarSQLExpression =
    ColumnarSQLExpression(sqlGenerator.quoteIdentifier(name))
}
|
@extends('master')
@section('title')
Rumah Tajwid Indonesia
@stop
@section('header')
@include('front.partials.logo')
@include('front.partials.nav')
@include('front.partials.news')
@stop
@section('content')
@include('front.partials.slide')
@include('front.partials.blogs')
@stop
@section('sidebar')
@include('front.partials.sidebar')
@stop
@section('footer')
@include('front.partials.footer')
@stop
|
import { recursiveObject } from "../common/common";
/**
 * Helper for inspecting and transforming a plain object.
 *
 * Note: despite the historical "array" wording in older docs, this class
 * operates on a single object value of type `T`.
 *
 * @example
 * const obj = new ObjectCoreProcessor<InterfaceModel>({ 1: 100, 2: 200 });
 */
export class ObjectCoreProcessor<T> {
  // Original value handed to the constructor (kept for reference).
  private actualArray: T;
  // Value all operations act on; mutated in place by merge()/setByNS().
  private curData: T;

  /**
   * @param arr - The object to wrap and operate on.
   */
  constructor(arr: T) {
    this.curData = this.actualArray = arr;
  }

  /**
   * Invokes `fn` once per own enumerable key of the wrapped object.
   * @param fn - Reviver receiving the key, its value and a running index.
   */
  public forEach(fn: (key: string, value: Object, index: number) => void): this {
    // Object.keys already yields own enumerable keys only, so the old
    // hasOwnProperty guard (which could never fail, yet still advanced the
    // index) has been dropped; the index now simply counts invocations.
    let idx = 0;
    for (const key of Object.keys(this.curData)) {
      fn(key, this.curData[key], idx);
      idx += 1;
    }
    return this;
  }

  /**
   * Returns a copy of the wrapped object.
   * @param deep - When true, performs a recursive clone via the project's
   *               recursiveObject helper; otherwise a shallow top-level copy.
   */
  public clone(deep = false): T {
    if (deep === true) {
      return recursiveObject<T>(this.curData);
    }
    return Object.assign({} as T, this.curData);
  }

  /**
   * Convenience wrapper for a deep clone with a fresh reference.
   */
  public deepClone(): T {
    return this.clone(true);
  }

  /**
   * Merges `value` into the wrapped object (mutating it in place) and
   * returns the combined object.
   * @param value - Properties to merge in.
   */
  public merge<G>(value: G): T & G {
    return Object.assign(this.curData, value);
  }

  /**
   * Hook for custom activity; calls `fn` with the wrapped object and this
   * instance so further operations can be piped.
   * @param fn - Reviver function to call with the object.
   */
  public process(fn: (object: T, instance: this) => void): this {
    fn(this.curData, this);
    return this;
  }

  /**
   * Deletes the value addressed by a namespace path ('a.b.c' or
   * ['a','b','c']).
   * Fix: previously a missing intermediate segment made the final `delete`
   * throw a TypeError on `undefined`; a missing path is now a no-op.
   * @param nameSpace - name space string or string array
   */
  public deleteByNS(nameSpace: string | string[]): this {
    const path = Array.isArray(nameSpace) ? nameSpace : nameSpace.split('.');
    const parent = this.getByNS<any>(path.slice(0, path.length - 1));
    if (parent != null) {
      delete parent[path[path.length - 1]];
    }
    return this;
  }

  /**
   * Returns the value at the specified namespace path, or `undefined` when
   * any segment along the path is missing.
   * @param nameSpace - name space string or string array
   */
  public getByNS<G>(nameSpace: string | string[]): G {
    const path = typeof nameSpace === 'string' ? nameSpace.split('.') : nameSpace;
    let cursor: any = this.curData;
    for (const segment of path) {
      cursor = cursor[segment];
      // Stop before dereferencing a missing intermediate value.
      if (typeof cursor === 'undefined') { break; }
    }
    return cursor as G;
  }

  /**
   * Sets `value` at the dotted namespace path, creating intermediate
   * objects for segments that do not yet exist.
   * @param nameSpace - dotted name space, e.g. 'a.b.c'
   * @param value - value to store at the final segment
   */
  public setByNS<G>(nameSpace: string, value: G): this {
    const path = nameSpace.split('.');
    let cursor: any = this.curData;
    for (const segment of path.slice(0, path.length - 1)) {
      if (!cursor.hasOwnProperty(segment)) {
        cursor[segment] = {};
      }
      cursor = cursor[segment];
    }
    cursor[path[path.length - 1]] = value;
    return this;
  }

  /**
   * Returns the wrapped object itself (same reference, not a copy).
   */
  public get(): T {
    return this.curData;
  }
}
/** Convenience factory returning a processor wrapping `arr`. */
export function ObjectProcessor<T>(arr: T): ObjectCoreProcessor<T> {
  const instance = new ObjectCoreProcessor<T>(arr);
  return instance;
}
|
# Pokemon Directory
A small piece of work to show off my Android development skills
## App demo
<img src="app-demo.gif" width="200px" style="margin: 0 0 0 30px" alt="App Demo" />
## Technical skills used
- Built with :heart: in `Kotlin`
- Follows `Model View Presenter` architecture
- `Retrofit` for networking
- `Glide` for image downloading and caching
- `Repository pattern` to access the data
- `EventBus` for event based messaging between the components
- `Dagger2` for DI
- `Room` Data Persistence library for data storage
- `Unit Tests` included
- `Instrumentation Tests` included
## API used
- [pokeapi.co](https://pokeapi.co) for getting the data about the Pokemons
## Recommendations for improvements
- List should be paginated. When scrolled down, it should fetch new items
- Making all data available locally by storing in Sqlite DB using Room so as to make the app work offline as well
- Implementing Sync operations using WorkManager to keep the data up to date at all times
- Testing using Firebase Test Lab to improve the quality of the app
- Writing more tests
## `main` source directory structure
Due to screen size constraints, adding only the structure of the `main` source directory here
<img src="https://raw.githubusercontent.com/jaydeepw/pokemon-directory/master/main-tree.png"
alt="Main source tree" />
|
package interval
import (
"gopkg.in/nowk/assert.v2"
"testing"
)
// TestInterval verifies the generated interval strings for every unit,
// both in the singular form (no argument) and with an explicit count.
func TestInterval(t *testing.T) {
	assert.Equal(t, Interval("every_minute"), Every.Minute())
	assert.Equal(t, Interval("every_5_minutes"), Every.Minute(5))
	assert.Equal(t, Interval("every_hour"), Every.Hour())
	assert.Equal(t, Interval("every_5_hours"), Every.Hour(5))
	assert.Equal(t, Interval("every_day"), Every.Day())
	assert.Equal(t, Interval("every_5_days"), Every.Day(5))
	assert.Equal(t, Interval("every_week"), Every.Week())
	assert.Equal(t, Interval("every_5_weeks"), Every.Week(5))
	assert.Equal(t, Interval("every_month"), Every.Month())
	assert.Equal(t, Interval("every_5_months"), Every.Month(5))
	assert.Equal(t, Interval("every_year"), Every.Year())
	assert.Equal(t, Interval("every_5_years"), Every.Year(5))
}
// TestZeroReturnsSingular checks that a zero count falls back to the
// singular form ("every_month" rather than "every_0_months").
func TestZeroReturnsSingular(t *testing.T) {
	every := New("every")
	assert.Equal(t, Interval("every_month"), every.when("month", 0))
}
// BenchmarkNoInt measures Every.Month() without an explicit count.
func BenchmarkNoInt(b *testing.B) {
	// Idiomatic benchmark loop: declare the counter in the for clause
	// rather than outside it.
	for i := 0; i < b.N; i++ {
		Every.Month()
	}
}
// BenchmarkWithInt measures Every.Month(n) with a varying count argument.
func BenchmarkWithInt(b *testing.B) {
	// Idiomatic benchmark loop: counter declared in the for clause.
	for i := 0; i < b.N; i++ {
		Every.Month(i)
	}
}
// BenchmarkNoInt   2000000   888 ns/op
// BenchmarkWithInt 1000000  1277 ns/op
|
SET ECHO off
REM NAME:TFSTSFRM.SQL
REM USAGE:"@path/tfstsfgm"
REM ------------------------------------------------------------------------
REM REQUIREMENTS:
REM SELECT ON DBA_FREE_SPACE
REM ------------------------------------------------------------------------
REM PURPOSE:
REM The following is a script that will determine how many extents
REM of contiguous free space you have in Oracle as well as the
REM total amount of free space you have in each tablespace. From
REM these results you can detect how fragmented your tablespace is.
REM
REM The ideal situation is to have one large free extent in your
REM tablespace. The more extents of free space there are in the
REM tablespace, the more likely you will run into fragmentation
REM problems. The size of the free extents is also very important.
REM If you have a lot of small extents (too small for any next
REM extent size) but the total bytes of free space is large, then
REM you may want to consider defragmentation options.
REM ------------------------------------------------------------------------
REM DISCLAIMER:
REM This script is provided for educational purposes only. It is NOT
REM supported by Oracle World Wide Technical Support.
REM The script has been tested and appears to work as intended.
REM You should always run new scripts on a test instance initially.
REM ------------------------------------------------------------------------
REM Main text of script follows:
REM Scratch table: one row per contiguous free-space chunk (and one
REM NULL-byte row per coalesced raw extent, used by the COUNT below).
create table SPACE_TEMP (
TABLESPACE_NAME        CHAR(30),
CONTIGUOUS_BYTES       NUMBER)
/
declare
  -- Walk free extents in (tablespace, block_id) order and coalesce
  -- physically adjacent extents into contiguous chunks.
  cursor query is select *
                  from dba_free_space
                  order by tablespace_name, block_id;
  this_row     query%rowtype;
  previous_row query%rowtype;
  -- Running byte total of the contiguous chunk being accumulated.
  total        number;
begin
  open query;
  fetch query into this_row;
  previous_row := this_row;
  total := previous_row.bytes;
  loop
    fetch query into this_row;
    exit when query%notfound;
    if this_row.block_id = previous_row.block_id + previous_row.blocks then
      -- Adjacent extent: extend the current chunk. The NULL-bytes row
      -- inserted here lets the later COUNT(*) reflect raw extents.
      total := total + this_row.bytes;
      insert into SPACE_TEMP (tablespace_name)
      values (previous_row.tablespace_name);
    else
      -- Gap found: close out the finished chunk and start a new one.
      insert into SPACE_TEMP values (previous_row.tablespace_name,
                                     total);
      total := this_row.bytes;
    end if;
    previous_row := this_row;
  end loop;
  -- Flush the final chunk.
  insert into SPACE_TEMP values (previous_row.tablespace_name,
                                 total);
  -- Fix: release the cursor; the original left it open.
  close query;
end;
.
/
set pagesize 60
set newpage 0
set echo off
ttitle center 'Contiguous Extents Report' skip 3
break on "TABLESPACE NAME" skip page duplicate
spool contig_free_space.lis
rem
column "CONTIGUOUS BYTES" format 999,999,999
column "COUNT"            format 999
column "TOTAL BYTES"      format 999,999,999
column "TODAY"   noprint  new_value new_today format a1
rem
rem One line per contiguous chunk, largest first within each tablespace
rem (the NULL-byte extent-marker rows are filtered out here).
select TABLESPACE_NAME  "TABLESPACE NAME",
       CONTIGUOUS_BYTES "CONTIGUOUS BYTES"
from SPACE_TEMP
where CONTIGUOUS_BYTES is not null
order by TABLESPACE_NAME, CONTIGUOUS_BYTES desc;

rem Summary: row count (includes extent-marker rows) and total free bytes
rem per tablespace; SUM ignores the NULL-byte rows.
select tablespace_name, count(*) "# OF EXTENTS",
       sum(contiguous_bytes) "TOTAL BYTES"
from space_temp
group by tablespace_name;

spool off

drop table SPACE_TEMP
/
|
# Credits: https://github.com/sriinampudi
import string


class Solution:
    def checkIfPangram(self, sentence: str) -> bool:
        """Return True iff ``sentence`` contains every letter a-z.

        The check is case-sensitive, matching the original behaviour:
        only lowercase occurrences count toward the pangram test.
        """
        # The original counted missing letters in a manual loop and returned
        # bool(False)/bool(True); all() expresses the same test directly and
        # short-circuits on the first missing letter.
        return all(letter in sentence for letter in string.ascii_lowercase)
|
/*
* Copyright [2013-2020] PayPal Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ml.shifu.shifu.util;
import java.util.concurrent.ThreadLocalRandom;
/**
 * A {@link Shuffler} implementation that provides permutation index mapping. A permutation of [0, {@link #recordSize})
 * is created internally to provide the mapping functionality.
 *
 * @author Junshi Guo
 */
public class PermutationShuffler implements Shuffler {

    /**
     * Max index range. Should be set on construction and never change for one instance.
     */
    private final int recordSize;

    /**
     * Internally held permutation mapping from index -> permutation[index].
     */
    private final int[] permutation;

    /**
     * @param recordSize number of records; must be positive.
     * @throws IllegalArgumentException if {@code recordSize} is not positive.
     */
    public PermutationShuffler(int recordSize) {
        // Fix: was 'assert recordSize > 0' — assertions are disabled by
        // default at runtime, so invalid sizes slipped through unchecked.
        if(recordSize <= 0) {
            throw new IllegalArgumentException("recordSize must be positive: " + recordSize);
        }
        this.recordSize = recordSize;
        this.permutation = new int[recordSize];
        this.refresh();
    }

    /**
     * Generate a fresh permutation using a Fisher-Yates shuffle driven by
     * {@link ThreadLocalRandom}.
     */
    @Override
    public void refresh() {
        for(int i = 0; i < recordSize; i++) {
            this.permutation[i] = i;
        }
        int temp, pos;
        for(int i = recordSize; i > 1; i--) {
            pos = ThreadLocalRandom.current().nextInt(i);
            temp = permutation[pos];
            permutation[pos] = permutation[i - 1];
            permutation[i - 1] = temp;
        }
    }

    /**
     * Maps a raw index to its shuffled position.
     *
     * @param i index in [0, recordSize)
     * @return the permuted index.
     * @throws IndexOutOfBoundsException if {@code i} is out of range.
     */
    @Override
    public int getIndex(int i) {
        // Fix: was a bare 'assert'; enforce the contract unconditionally.
        if(i < 0 || i >= recordSize) {
            throw new IndexOutOfBoundsException("index " + i + " not in [0, " + recordSize + ")");
        }
        return permutation[i];
    }

    @Override
    public int getRecordSize() {
        return this.recordSize;
    }
}
|
module Fission
  module Action
    module Snapshot

      class Lister

        # Internal: Creates a new SnapshotLister object. This accepts a VM
        # object.
        #
        # vm - An instance of VM
        #
        # Examples:
        #
        #   Fission::Action::SnapshotLister.new @my_vm
        #
        # Returns a new SnapshotLister object
        def initialize(vm)
          @vm = vm
        end

        # Internal: List the snapshots for a VM.
        #
        # Examples
        #
        #   @lister.snapshots.data
        #   # => ['snap 1', 'snap 2']
        #
        # Returns a Response with the result.
        # If successful, the Response's data attribute will be an Array of the
        # snapshot names (String).
        # If there is an error, an unsuccessful Response will be returned.
        def snapshots
          unless @vm.exists?
            return Response.new :code => 1, :message => 'VM does not exist'
          end

          conf_file_response = @vm.conf_file
          return conf_file_response unless conf_file_response.successful?

          # Build the vmrun invocation; stderr is folded into stdout so any
          # error text is captured in 'output' below.
          command = "#{vmrun_cmd} listSnapshots "
          command << "'#{conf_file_response.data}' 2>&1"

          command_exec = Fission::Action::ShellExecutor.new command
          result = command_exec.execute

          response = Response.new :code => result['process_status'].exitstatus

          if response.successful?
            response.data = parse_snapshot_names_from_output result['output']
          else
            response.message = result['output']
          end

          response
        end

        private

        # Internal: Helper for getting the configured vmrun_cmd value.
        #
        # Examples
        #
        #   @lister.vmrun_cmd
        #   # => "/foo/bar/vmrun -T fusion"
        #
        # Returns a String for the configured value of
        # Fission.config['vmrun_cmd'].
        def vmrun_cmd
          Fission.config['vmrun_cmd']
        end

        # Internal: Parses the output of the listSnapshot command.
        #
        # output - The output of the vmrun listSnapshot command.
        #
        # Examples:
        #
        #   @lister.parse_snapshot_names_from_output cmd_output
        #   # => ['snap_1', 'snap_2']
        #
        # Returns an Array with the list of snapshot names.
        def parse_snapshot_names_from_output(cmd_output)
          # Drop the 'Total snapshots:' header; each remaining line holds
          # one snapshot name, which is whitespace-trimmed.
          header_text = 'Total snapshots:'
          snaps = cmd_output.split("\n").select { |s| !s.include? header_text }
          snaps.map { |s| s.strip }
        end

      end
    end
  end
end
|
/**
 * Subnet wrapper class with protected member and basic checking/processing.
 *
 * Addresses and masks are IPv4 values held as JavaScript numbers. Bitwise
 * operators coerce operands to *signed* 32-bit integers, so values with the
 * high bit set (e.g. 0xC0000000) become negative inside these computations;
 * comparisons against 0 must therefore use exact equality.
 */
export class Subnet{
  /**
   * @param address  - network address (host bits assumed zero — TODO confirm callers)
   * @param mask     - network mask, e.g. 0xFFFFFF00 for a /24
   * @param wildcard - host mask (bitwise complement of the network mask)
   */
  constructor(private address: number, private mask: number, private wildcard: number){}

  /**
   * Return the network address.
   */
  public getAddress(): number{
    return this.address;
  }

  /**
   * Return the network mask.
   */
  public getMask(): number{
    return this.mask;
  }

  /**
   * Return the wildcard or host mask.
   */
  public getWildcard(): number{
    return this.wildcard;
  }

  /**
   * Return first address in subnet.
   */
  public getLowerBound(): number{
    return this.address;
  }

  /**
   * Return last address in subnet.
   * XOR with the wildcard fills in the host bits; this assumes the stored
   * address has its host bits cleared (otherwise OR would be needed).
   */
  public getUpperBound(): number{
    return this.address ^ this.wildcard;
  }

  /**
   * Return the amount of bits used for the network.
   */
  public getMaskBits(): number{
    // '>>> 0' reinterprets the mask as unsigned so toString(2) does not
    // emit a minus sign; then count the '1' digits.
    const binary = (this.mask >>> 0).toString(2);
    const matches = binary.match(/1/g) || [];
    return matches.length;
  }

  /**
   * Check whether `network`'s address falls inside this subnet.
   *
   * Fix: `^` yields a signed 32-bit integer, so for addresses with bit 31
   * set the XOR could be negative and the previous `<= 0` test reported a
   * false overlap. A match is exactly XOR === 0.
   *
   * @param network - subnet whose address is tested against this mask
   */
  public overlaps(network: Subnet): boolean{
    // Keep only the network-part bits of the other address under this mask.
    const overlappingBits = network.getAddress() & this.mask;
    // An exact match of the network parts means the address is inside.
    return (overlappingBits ^ this.address) === 0;
  }
}
|
class Task:
    """A class which represents a task.

    Arguments:
        name(str): The task name as defined in the configuration file.
        params(dict, optional): If not `None`, stores the task-relevant
            arguments as well.
    """

    def __init__(self, name, params=None):
        assert name.count(':') == 1 and name.count('-') == 1, \
            "Task name should be in <prefix>:<src1,...,srcN>-<trg1,...,trgN> format"
        self.name = name
        self.params = params
        # Split "<prefix>:<sources>-<targets>" into its three pieces.
        prefix, _, topology = name.partition(':')
        src_part, _, trg_part = topology.partition('-')
        self.prefix = prefix
        self.sources = src_part.split(',')
        self.targets = trg_part.split(',')
        self.multi_source = len(self.sources) > 1
        self.multi_target = len(self.targets) > 1
        # Convenience single-language aliases; only set when unambiguous.
        self.src = None if self.multi_source else self.sources[0]
        self.trg = None if self.multi_target else self.targets[0]

    def __repr__(self):
        return f'Task({self.name}, srcs={self.sources!r}, trgs={self.targets!r})'
class Batch:
    r"""A custom batch object used through data representation.

    Args:
        data(dict): A dictionary with keys representing the data sources
            as defined by the configuration file and values being the
            already collated `torch.Tensor` objects for a given batch.
        task(str): The task name associated with the batch.
        device(`torch.device`, optional): If given, the batch will be moved
            to the device right after instance creation.
        non_blocking(bool, optional): If `True`, the tensor copies will
            use the `non_blocking` argument.

    Notes:
        In the current code, moving to an appropriate device is handled
        by an explicit call to `.to()` method from the main loop or
        other relevant places.

    Returns:
        a `Batch` instance indexable with bracket notation. The batch size
        is accessible through the `.size` attribute.
    """

    def __init__(self, data, task, device=None, non_blocking=False):
        if device:
            self.data = {k: v.to(device, non_blocking=non_blocking) for k, v in data.items()}
        else:
            self.data = data
        self.task = Task(task)
        # All modalities must agree on dim 1, which is treated as the batch
        # dimension here (see the assert message below).
        dim1s = set([x.size(1) for x in data.values()])
        assert len(dim1s) == 1, \
            "Incompatible batch dimension (1) between modalities."
        self.size = dim1s.pop()

    def __getitem__(self, key):
        # Access one modality's tensor by its data-source key.
        return self.data[key]

    def to(self, device, non_blocking=False):
        # In-place move of every tensor to `device`.
        self.data.update(
            {k: v.to(device, non_blocking=non_blocking) for k, v in self.data.items()})

    def __repr__(self):
        s = f"Batch(size={self.size}, task={self.task})\n"
        for key, value in self.data.items():
            s += f"  {key:10s} -> {value.shape} - {value.device}\n"
        # Drop the trailing newline.
        return s[:-1]
|
import * as React from 'react'
// import {getRootFontSize} from '../../resources/js/utils'
// import Toast from '../common/components/Toast'
import {loadComponentStyle} from '../common/utils'
import styles from '../resources/admin/scss/index.scss'
import CustomLink from './components/CustomLink'
import adminConfig from './config'
// Admin shell: scales the root font with the viewport and renders the
// sidebar navigation around the routed children.
export default class App extends React.Component {
  componentDidMount () {
    // Keep rem-based layout proportional: 16px root font at a 1200px-wide
    // viewport, scaled linearly for other widths.
    const setRootFontSize = () => {
      let rootEleWidth = document.documentElement.clientWidth || document.body.clientWidth
      let rootFontSize = rootEleWidth * 16 / 1200
      // let rootFontSize = getRootFontSize();
      // if (appHeaderWidth >= 930) {
      //   rootFontSize = appHeaderWidth * 16 / 920
      // } else if () {
      // }
      document.documentElement.style.fontSize = `${rootFontSize}px`
    }
    setRootFontSize()
    // NOTE(review): this resize listener is never removed; App is assumed
    // to live for the whole page lifetime — confirm.
    window.addEventListener('resize', setRootFontSize)
    // this.showToast('success', '创建成功!')
    loadComponentStyle(styles)
  }

  render () {
    // Sidebar navigation entries come from the static admin config.
    let links = adminConfig.links.map((route, i) => (
      <CustomLink key={i} to={route.to} iconType={route.iconType} />
    ))
    return (
      <div className="admin-app">
        <aside>
          <div className="admin-nav-wrapper">
            <div className="admin-avatar-wrapper">
              <i className="admin-avatar"></i>
              <p className="admin-username">Jack</p>
            </div>
            <nav>
              <ul className="admin-nav">
                {links}
              </ul>
            </nav>
          </div>
          {/* NOTE(review): javascript: href is an accessibility anti-pattern;
              also no click handler is attached here — confirm intent. */}
          <a href="javascript:void(0);" className="login-out">
            <i className="admin-icon admin-login-out-icon"></i>
          </a>
        </aside>
        {this.props.children}
      </div>
    )
  }
}
|
// pages/feedback/feedback.js
// WeChat mini-program page: loads an expert (doctor) profile, lets the user
// edit the introduction text, and saves it back to the server.
var app = getApp();

Page({
  data: {
    docinfo: {},   // expert profile fetched from the backend
    message: '',   // editable introduction text bound to the form
  },

  onLoad: function (options) {
    var that = this
    var userinfo = wx.getStorageSync('userinfo') || {};
    var phone = wx.getStorageSync('phone')
    // Load the profile keyed by the locally stored phone number.
    that.getDocterMsg(phone)
  },

  // Fetch the expert profile and seed the editable message field with the
  // current introduction.
  getDocterMsg(phone) {
    var that = this
    wx.request({
      url: 'https://mfkapi.39yst.com/appInterface/kangaiduo/getExpertInfo/',
      data: {
        appid: app.globalData.appid,
        phone: phone
      },
      header: {
        'content-type': 'application/json'
      },
      method: 'POST',
      success: (res) => {
        console.log(res)
        that.setData({
          docinfo: res.data.msg,
          message: res.data.msg.introduction
        })
        console.log(that.data.docinfo)
      }
    })
  },

  // Save the profile (with the edited introduction) and navigate back.
  submitMsg() {
    var that = this
    wx.request({
      // url: app.globalData.ip + '?type=doctor_save',
      url:'https://mfkapi.39yst.com/appInterface/kangaiduo/saveExpertInfo/',
      data: {
        appid:app.globalData.appid,
        name: that.data.docinfo.name,
        mobile: that.data.docinfo.mobile,
        hospital: that.data.docinfo.hospital,
        department: that.data.docinfo.department,
        adept: that.data.docinfo.adept,
        identity_code: that.data.docinfo.identity_code,
        title: that.data.docinfo.title,
        qualification: that.data.docinfo.qualification,
        qualification2: that.data.docinfo.qualification2,
        cooperation: that.data.docinfo.cooperation,
        identity: that.data.docinfo.identity,
        // NOTE(review): identity2 is populated from docinfo.identity rather
        // than docinfo.identity2 — looks like a copy/paste slip; confirm.
        identity2: that.data.docinfo.identity,
        doctor_rank: that.data.docinfo.doctor_rank,
        describe: that.data.docinfo.describe,
        introduction: that.data.message
      },
      header: {
        'content-type': 'application/json'
      },
      method: 'POST',
      success: function (res) {
        console.log(res.data);
        that.setData({
          message: ''
        })
        wx.navigateBack({
          delta: 1
        })
      }
    })
  },

  // Input handler keeping data.message in sync with the textarea.
  savemessage: function (e) {
    var that = this;
    that.setData({
      message: e.detail.value
    })
  },

  // Show a long-running (10s) loading toast.
  loading: function () {
    wx.showToast({
      title: '加载中',
      icon: 'loading',
      duration: 10000
    })
  },
})
|
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.Foundation;
using Windows.Foundation.Collections;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Controls;
using Windows.UI.Xaml.Controls.Primitives;
using Windows.UI.Xaml.Data;
using Windows.UI.Xaml.Input;
using Windows.UI.Xaml.Media;
using Windows.UI.Xaml.Navigation;
using RPNCalc.Dialogs;
using RPNCalc.Controls;
namespace RPNCalc.Keyboards {
    /// <summary>
    /// Keyboard pane for working with named registers.
    /// </summary>
    public sealed partial class Registers : UserControl {
        public Registers() {
            InitializeComponent();
        }

        /// <summary>
        /// Prompts the user for a register name via the InsertRegister
        /// dialog and, when one is supplied, emits the corresponding
        /// calculator command as a global event.
        /// </summary>
        private async void OnInsertRegister(object sender, RoutedEventArgs e) {
            var dialog = new InsertRegister();
            await dialog.ShowAsync();
            var reg = dialog.RegisterName;
            if(reg != null) {
                // "f:$r<NAME>" — command string consumed by the calculator
                // engine; NOTE(review): exact semantics defined elsewhere.
                var struc = new CalcCommand($"f:$r{reg}", reg.ToLower());
                CalcCommandGlobalEvent.Emit(struc);
            }
        }
    }
}
|
import numpy as np
import pandas as pd
from scipy.spatial import distance
class Graph:
    """Molecular graph built from per-atom coordinates, bond records and
    per-atom partial charges for a single molecule."""

    def __init__(self, points_df, bonds, list_atoms, charges):
        # (n_atoms, 3) Cartesian coordinates.
        self.points = points_df[['x', 'y', 'z']].values
        self.bonds = bonds
        # Eleven partial-charge schemes per atom, in this fixed column order.
        self.charges = charges[['eem', 'mmff94', 'gasteiger', 'qeq', 'qtpie',
                                'eem2015ha', 'eem2015hm', 'eem2015hn',
                                'eem2015ba', 'eem2015bm', 'eem2015bn']].values
        # Pairwise Euclidean distance matrix, shape (n_atoms, n_atoms).
        self.dists = distance.cdist(self.points, self.points)
        self.add_features = np.asarray(points_df[['in_ring', 'aromatic']].values, dtype=np.float32)
        # Adjacency by distance threshold (< 1.5); the diagonal is True
        # because every atom is at distance 0 from itself.
        self.adj = self.dists < 1.5
        self.num_nodes = len(points_df)
        # Per-node list of neighbour indices (includes the node itself).
        self.connect_table = list(np.arange(self.num_nodes)[r] for r in self.adj)
        self.atoms = points_df['atom']
        # Element symbol -> one-hot index.
        self.dict_atoms = {at: i for i, at in enumerate(list_atoms)}

    def get_atoms_array(self):
        """Per-atom feature matrix: one-hot element, one-hot degree,
        ring/aromatic flags and the charge columns, as float32."""
        atom_index = [self.dict_atoms[atom] for atom in self.atoms]
        one_hot = np.identity(len(self.dict_atoms))[atom_index]
        # Degree = adjacency row sum minus the diagonal self-loop.
        bond = np.sum(self.adj, 1) - 1
        # NOTE(review): indexing with 'bond - 1' shifts degree 1 to row 0,
        # i.e. the raw row sum is effectively decremented twice — confirm
        # this encoding is intended.
        bonds = np.identity(len(self.dict_atoms))[bond - 1]
        return np.concatenate([one_hot, bonds, self.add_features, self.charges], axis=1).astype(np.float32)

    def get_bond_features(self):
        """(n, n, 8) tensor: 5 one-hot bond-type channels plus aromatic,
        in-ring and conjugated indicators, symmetric in atom order."""
        bond_types = np.zeros((self.num_nodes, self.num_nodes, 5))
        bond_aromatic = np.zeros((self.num_nodes, self.num_nodes, 1))
        bond_in_ring = np.zeros((self.num_nodes, self.num_nodes, 1))
        bond_conjugated = np.zeros((self.num_nodes, self.num_nodes, 1))
        for i, row in self.bonds.iterrows():
            # Each bond is written in both (i, j) and (j, i) positions.
            bond_types[row['atom_index_0'], row['atom_index_1'], row['bond_type']] = 1
            bond_types[row['atom_index_1'], row['atom_index_0'], row['bond_type']] = 1
            if row['aromatic']:
                bond_aromatic[row['atom_index_1'], row['atom_index_0'], 0] = 1
                bond_aromatic[row['atom_index_0'], row['atom_index_1'], 0] = 1
            if row['in_ring']:
                bond_in_ring[row['atom_index_1'], row['atom_index_0'], 0] = 1
                bond_in_ring[row['atom_index_0'], row['atom_index_1'], 0] = 1
            if row['conjugated']:
                bond_conjugated[row['atom_index_1'], row['atom_index_0'], 0] = 1
                bond_conjugated[row['atom_index_0'], row['atom_index_1'], 0] = 1
        return np.concatenate([bond_types, bond_aromatic, bond_in_ring, bond_conjugated], axis=2).astype(np.float32)

    def get_points(self):
        # Coordinates converted to float32 for model input.
        return self.points.astype(np.float32)

    def get_connect(self):
        # Neighbour index list per node (self-loop included).
        return self.connect_table

    def get_dists(self):
        return self.dists.astype(np.float32)

    def show(self):
        # Debug dump of the graph's main members.
        print('\npoint')
        print(self.points)
        print('\nconnect table')
        print(self.connect_table)
        print('\natoms')
        print(self.atoms)
def main():
    """Smoke test: build and print the Graph for one molecule from CSVs."""
    structures = pd.read_csv('../../../input/structures.csv')
    strs_gp = structures.groupby('molecule_name')
    bonds = pd.read_csv('../../../input/bonds.csv')
    bonds_gp = bonds.groupby('molecule_name')
    train_charges = pd.read_csv('../../../input/train_ob_charges.csv')
    train_charges_gp = train_charges.groupby('molecule_name')
    # All element symbols seen across the dataset define the one-hot space.
    list_atoms = list(set(structures['atom']))
    g = Graph(strs_gp.get_group('dsgdb9nsd_000001'),
              bonds_gp.get_group('dsgdb9nsd_000001'),
              list_atoms,
              train_charges_gp.get_group('dsgdb9nsd_000001'))
    g.show()
    print(g.get_atoms_array())


if __name__ == '__main__':
    main()
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
namespace Iot.Device.Max31865
{
    /// <summary>
    /// Bit flags of the MAX31865 configuration register (single byte).
    /// NOTE(review): bit meanings mirror the device register layout —
    /// confirm against the MAX31865 datasheet.
    /// </summary>
    [Flags]
    internal enum Configuration : byte
    {
        Filter60HZ = 0b_0000_0000,
        Filter50HZ = 0b_0000_0001,
        FaultStatus = 0b_0000_0010,
        TwoFourWire = 0b_0000_0000,
        ThreeWire = 0b_0001_0000,
        OneShot = 0b_0010_0000,
        ConversionModeAuto = 0b_0100_0000,
        Bias = 0b_1000_0000
    }
}
|
// ignore_for_file: constant_identifier_names

/// Central registry of backend endpoints used throughout the app.
class Urls {
  // --- Appwrite ---
  static const String APPWRITE_ENDPOINT = 'http://164.92.185.108/v1';
  static const String APPWRITE_PROJECTID = '6208205e15fb18447ce3';

  // --- API roots ---
  static const String DOMAIN = 'http://138.197.186.138';
  // Fix: was `DOMAIN + ''`, a no-op concatenation; the value is unchanged.
  static const String BASE_API = DOMAIN;
  static const String IMAGES_ROOT = DOMAIN + '/upload/';

  // --- Account & misc ---
  static const String UPLOAD_API = BASE_API + '/uploadfile';
  static const String SIGN_UP_API = BASE_API + '/clientregister';
  static const String OWNER_PROFILE_API = BASE_API + '/userprofile';
  static const String CREATE_TOKEN_API = BASE_API + '/login_check';
  static const String REPORT_API = BASE_API + '/report';
  static const String NOTIFICATION_API = BASE_API + '/notificationtoken';
  static const String COMPANYINFO_API = BASE_API + '/companyinfoforuser';
  static const String NOTIFICATIONNEWCHAT_API = BASE_API + '/notificationnewchat';
  static const String NOTIFICATIONTOADMIN_API = BASE_API + '/notificationtoadmin';

  // --- Stores & products ---
  static const String GET_TOP_PRODUCTS_API = BASE_API + '/productstopwanted';
  static const String GET_STORE_CATEGORIES_API = BASE_API + '/storecategories';
  static const String GET_STORE_CATEGORY_LIST_API =
      BASE_API + '/storeownerbycategoryid/';
  static const String GET_MOST_WANTED_STORE_PRODUCTS =
      BASE_API + '/productstopwantedofspecificstoreowner';
  static const String GET_PRODUCTS_CATEGORY_API = BASE_API + '/storeProductsCategory';
  static const String GET_PRODUCTS_BY_CATEGORY_API =
      BASE_API + '/productsbycategoryidandstoreownerprofileid';
  static const String GET_BEST_STORES_API = BASE_API + '/storeOwnerBest';
  static const String GET_STORE_PROFILE = BASE_API + '/storeownerprofilebyid/';

  // --- Orders & notifications ---
  static const String GET_MY_ORDERS = BASE_API + '/ordersbyclientid';
  static const String GET_MY_NOTIFICATION = BASE_API + '/notificationsLocal';
  static const String GET_ORDER_LOGS = BASE_API + '/orderLogs';
  static const String GET_ORDER_DETAILS = BASE_API + '/orderstatusbyordernumber';
  static const String POST_CLIENT_ORDER_API = BASE_API + '/clientorder';
  static const String POST_CLIENT_SEND_IT_ORDER_API = BASE_API + '/clientsendorder';
  static const String POST_CLIENT_PRIVATE_ORDER_API = BASE_API + '/clientSpecialOrder';
  static const String DELETE_CLIENT_ORDER_API = BASE_API + '/ordercancel';
  static const String UPDATE_CLIENT_ORDER_API = BASE_API + '/orderUpdatebyclient';
  static const String UPDATE_SPECIAL_CLIENT_ORDER_API =
      BASE_API + '/orderSpecialUpdateByClient';
  static const String UPDATE_SEND_CLIENT_ORDER_API =
      BASE_API + '/orderSendUpdateByClient';

  // --- Profile, search & ratings ---
  static const String GET_PROFILE_API = BASE_API + '/clientProfile';
  static const String POST_PROFILE_API = BASE_API + '/clientprofile';
  static const String CHECK_USER_ROLE = BASE_API + '/checkUserType';
  static const String GET_SEARCH_RESULT = BASE_API + '/clientFilter/';
  static const String RATE_STORE = BASE_API + '/ratingStoreByClient';
  static const String RATE_PRODUCT = BASE_API + '/ratingProductByClient';
  static const String RATE_CAPTAIN = BASE_API + '/ratingCaptainByClient';

  // --- Category drill-down ---
  static const String GET_SUBCATEGORIES_API =
      BASE_API + '/subcategoriesbystorecategoryid/';
  static const String GET_PRODUCTS_BY_CATEGORIES =
      BASE_API + '/productsbystoreproductcategoryid/';
  static const String GET_PRODUCTS_BY_SUBCATEGORIES =
      BASE_API + '/storeproductscategoryleveltwowithproducts/';
  static const String GET_PRODUCTS_BY_MAIN_CATEGORIES =
      BASE_API + '/productsbystorecategory/';
  static const String GET_PRODUCT_DETAILS_API = BASE_API + '/product';
}
|
package sg.edu.nus.leaveapplication.securityconfig;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.builders.WebSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.userdetails.UserDetailsService;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.web.authentication.AuthenticationSuccessHandler;
import sg.edu.nus.leaveapplication.util.UserDetailsServiceImpl;
/**
 * Spring Security configuration for the leave application.
 *
 * Authentication comes from two sources configured side by side:
 * a small set of in-memory demo users and a JDBC lookup against the
 * {@code credentials} table. Authorization maps URL patterns to the
 * STAFF / MANAGER / ADMIN roles.
 */
@Configuration
@EnableWebSecurity
@ComponentScan(basePackageClasses = UserDetailsServiceImpl.class)
public class WebSecurityConfig extends WebSecurityConfigurerAdapter {

    @Autowired
    private UserDetailsServiceImpl userDetailsService;

    @Autowired
    private DataSource dataSource;

    /** BCrypt hashing, shared by the in-memory and JDBC authentication providers. */
    @Bean
    public BCryptPasswordEncoder passwordEncoder() {
        return new BCryptPasswordEncoder();
    }

    /** Custom post-login redirect handler (see {@code defaultSuccessUrl} below). */
    @Bean
    public AuthenticationSuccessHandler myAuthenticationSuccessHandler() {
        return new MyAuthenticationSuccessHandler();
    }

    @Override
    protected void configure(AuthenticationManagerBuilder auth) throws Exception {
        // Hard-coded demo accounts; useful for local testing, kept alongside
        // the database-backed users below.
        auth.inMemoryAuthentication()
            .withUser("user1").password(passwordEncoder().encode("user1Pass")).roles("STAFF")
            .and()
            .withUser("user2").password(passwordEncoder().encode("user2Pass")).roles("STAFF")
            .and()
            .withUser("admin").password(passwordEncoder().encode("adminPass")).roles("ADMIN");
        // Database-backed users; the constant "1" column marks every row enabled.
        auth.jdbcAuthentication().dataSource(dataSource)
            .usersByUsernameQuery("select username,password,1 from credentials where username = ? ")
            .authoritiesByUsernameQuery("SELECT username, role from credentials where username = ? ")
            .passwordEncoder(passwordEncoder());
    }

    @Override
    protected void configure(HttpSecurity http) throws Exception {
        http.csrf().disable();
        http
            .authorizeRequests()
            .antMatchers("/resources/**", "/static/**", "/css/**", "/js/**", "/images/**", "/fonts/**").permitAll()
            .and()
            // BUG FIX: hasAnyRole() takes one role per vararg. The original passed the
            // single string "STAFF,MANAGER", which only matches a literal role named
            // "STAFF,MANAGER" — so neither STAFF nor MANAGER users could reach these URLs.
            .authorizeRequests().antMatchers("/home", "/create**", "/leaveedit**", "/leavehistory**",
                    "/leaveupdate**", "/cancel**", "/claim**", "/leaveapplicaiton**").hasAnyRole("STAFF", "MANAGER")
            .and()
            .authorizeRequests().antMatchers("/manager**", "/subleavehistory**").hasRole("MANAGER")
            .and()
            .authorizeRequests().antMatchers("/adminhome", "/edit**", "/delete**", "/update**", "/leavetype**", "/adduser",
                    "/addleavetype**", "/leavetype**", "/publicholiday**", "/emplist").hasRole("ADMIN")
            .and()
            .formLogin()
            .loginProcessingUrl("/j_spring_security_check")
            .loginPage("/login")
            .permitAll()
            .defaultSuccessUrl("/home")
            .failureUrl("/login?error=true")
            .successHandler(myAuthenticationSuccessHandler())
            .and()
            .logout().invalidateHttpSession(true).clearAuthentication(true)
            .logoutSuccessUrl("/login?logout=true")
            .permitAll()
            .and().exceptionHandling().accessDeniedPage("/denyaccess");
    }

    /** Expose the AuthenticationManager as a bean for programmatic authentication. */
    @Bean
    public AuthenticationManager customAuthenticationManager() throws Exception {
        return authenticationManager();
    }
}
|
# stream jenkins logs
[](https://crates.io/crates/stream-jenkins-logs)
A quasi-rewrite of [jenkins-log-stream](https://github.com/r-hub/jenkins-log-stream) in Rust.
## Installation
```bash
cargo install stream-jenkins-logs
```
## Usage
```bash
stream-jenkins-logs https://user:password@some.jenkins some-build
```
|
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
namespace Microsoft.Azure.IIoT.OpcUa.Protocol.Runtime {
using Microsoft.Azure.IIoT.Utils;
using Microsoft.Extensions.Configuration;
using System.Runtime.InteropServices;
/// <summary>
/// Security configuration
/// </summary>
public class SecurityConfig : ConfigBase, ISecurityConfig {

    /// <summary>
    /// Configuration
    /// </summary>
#pragma warning disable CS1591 // Missing XML comment for publicly visible type or member
    // Configuration key names read from IConfiguration.
    public const string ApplicationCertificateStorePathKey = "ApplicationCertificateStorePath";
    public const string ApplicationCertificateStoreTypeKey = "ApplicationCertificateStoreType";
    public const string ApplicationCertificateSubjectNameKey = "ApplicationCertificateSubjectName";
    public const string TrustedIssuerCertificatesPathKey = "TrustedIssuerCertificatesPath";
    public const string TrustedIssuerCertificatesTypeKey = "TrustedIssuerCertificatesType";
    public const string TrustedPeerCertificatesPathKey = "TrustedPeerCertificatesPath";
    public const string TrustedPeerCertificatesTypeKey = "TrustedPeerCertificatesType";
    public const string RejectedCertificateStorePathKey = "RejectedCertificateStorePath";
    public const string RejectedCertificateStoreTypeKey = "RejectedCertificateStoreType";
    public const string AutoAcceptUntrustedCertificatesKey = "AutoAcceptUntrustedCertificates";
    public const string RejectSha1SignedCertificatesKey = "RejectSha1SignedCertificates";
    public const string MinimumCertificateKeySizeKey = "MinimumCertificateKeySize";
#pragma warning restore CS1591 // Missing XML comment for publicly visible type or member

    /// <inheritdoc/>
    // Application certificate: platform-dependent defaults — Windows uses the
    // CurrentUser X509 store, other platforms a directory under /pki.
    public CertificateInfo ApplicationCertificate => new CertificateInfo {
        StorePath = GetStringOrDefault(ApplicationCertificateStorePathKey,
            () => RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ?
            "CurrentUser\\My" : "/pki/own"),
        StoreType = GetStringOrDefault(ApplicationCertificateStoreTypeKey,
            () => RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ?
            "X509Store" : "Directory"),
        SubjectName = GetStringOrDefault(ApplicationCertificateSubjectNameKey,
            () => "CN=Microsoft.Azure.IIoT, C=DE, S=Bav, O=Microsoft, DC=localhost")
    };

    /// <inheritdoc/>
    public CertificateStore TrustedIssuerCertificates => new CertificateStore {
        StorePath = GetStringOrDefault(TrustedIssuerCertificatesPathKey, () => "/pki/trusted"),
        StoreType = GetStringOrDefault(TrustedIssuerCertificatesTypeKey, () => "Directory"),
    };

    /// <inheritdoc/>
    public CertificateStore TrustedPeerCertificates => new CertificateStore {
        StorePath = GetStringOrDefault(TrustedPeerCertificatesPathKey, () => "/pki/trusted"),
        StoreType = GetStringOrDefault(TrustedPeerCertificatesTypeKey, () => "Directory"),
    };

    /// <inheritdoc/>
    // NOTE(review): the rejected store also defaults to "/pki/trusted" —
    // possibly a copy/paste slip for "/pki/rejected"; confirm intended layout.
    public CertificateStore RejectedCertificateStore => new CertificateStore {
        StorePath = GetStringOrDefault(RejectedCertificateStorePathKey, () => "/pki/trusted"),
        StoreType = GetStringOrDefault(RejectedCertificateStoreTypeKey, () => "Directory"),
    };

    /// <inheritdoc/>
    public bool AutoAcceptUntrustedCertificates =>
        GetBoolOrDefault(AutoAcceptUntrustedCertificatesKey, () => false);

    /// <inheritdoc/>
    public bool RejectSha1SignedCertificates =>
        GetBoolOrDefault(RejectSha1SignedCertificatesKey, () => false);

    /// <inheritdoc/>
    public ushort MinimumCertificateKeySize =>
        (ushort)GetIntOrDefault(MinimumCertificateKeySizeKey, () => 1024);

    /// <summary>
    /// Create configuration
    /// </summary>
    /// <param name="configuration"></param>
    public SecurityConfig(IConfiguration configuration) :
        base(configuration) {
    }
}
}
|
import Ractive from 'lib/ractive';
import emitter from 'lib/emitter';
import LS from 'lib/wallet/localStorage';
import initChoose from './choose';
import initCreate from './create';
import initCreatePassphrase from './create-passphrase';
import initCreatePassphraseConfirm from './create-passphrase-confirm';
import security from 'lib/wallet/security';
import template from './index.ract';
/**
 * Mounts the auth flow (choose / create / passphrase steps) into `el`
 * and wires step transitions driven by Ractive lifecycle events and
 * the global 'change-auth-step' emitter event.
 */
export default function(el) {
  const ractive = new Ractive({ el, template });

  const steps = {
    choose: initChoose(ractive.find('#auth_choose')),
    create: initCreate(ractive.find('#auth_create')),
    createPassphrase: initCreatePassphrase(ractive.find('#auth_create_passphrase')),
    createPassphraseConfirm: initCreatePassphraseConfirm(ractive.find('#auth_create_passphrase_confirm')),
  };

  let activeStep = steps.choose;

  // Defer the swap a tick so the outgoing step finishes hiding first.
  function transitionTo(nextStep, data) {
    setTimeout(() => {
      activeStep.hide();
      nextStep.show(data);
      activeStep = nextStep;
    });
  }

  ractive.on('before-show', () => {
    if (LS.isRegistered()) {
      steps.choose.showPin();
    } else {
      transitionTo(steps.choose);
    }
  });

  ractive.on('before-hide', () => {
    activeStep.hide();
    security.lock();
    // Close any still-open auth widgets so they don't outlive the view.
    const { passphraseWidget, pinWidget } = activeStep;
    if (passphraseWidget && !passphraseWidget.torndown) passphraseWidget.close();
    if (pinWidget && !pinWidget.torndown) pinWidget.close();
  });

  emitter.on('change-auth-step', (step, data) => transitionTo(steps[step], data));

  return ractive;
}
|
package main
import (
	"context"
	"net/http"
	"strings"
	"time"

	"github.com/google/go-github/github"
)
// githubTimeout defines how long to wait for a response from GitHub
// when checking for new SAM Local versions.
// NOTE: bare integer; callers multiply it by time.Second to get a Duration.
const githubTimeout = 3

// checkVersionResult contains information on the current version of AWS SAM CLI, and
// whether there are any newer versions available to upgrade to.
type checkVersionResult struct {
	IsUpToDate bool // true when the running version equals the latest release
	LatestVersion string // latest published release version, without the 'v' prefix
}
// checkVersion checks whether the current version of AWS SAM CLI is the latest
// published GitHub release. It compares the package-level `version` against the
// most recent release tag (leading 'v' stripped).
func checkVersion() (*checkVersionResult, error) {
	const RepoOwner = "awslabs"
	const RepoName = "aws-sam-local"

	// HTTP client with a bounded timeout so a slow GitHub response
	// cannot hang the CLI.
	client := &http.Client{
		Timeout: githubTimeout * time.Second,
	}

	// Get the latest version details from the GitHub releases API.
	gh := github.NewClient(client)
	releases, _, err := gh.Repositories.ListReleases(context.Background(), RepoOwner, RepoName, nil)
	if err != nil || len(releases) == 0 {
		return &checkVersionResult{}, err
	}

	latest := releases[0]
	// BUG FIX: TagName is a *string and may be nil, and the original
	// (*latest.TagName)[1:] also panicked on an empty tag. Guard the nil
	// and strip the 'v' prefix safely (v0.0.1 -> 0.0.1).
	if latest.TagName == nil {
		return &checkVersionResult{}, nil
	}
	latestVersion := strings.TrimPrefix(*latest.TagName, "v")

	return &checkVersionResult{
		LatestVersion: latestVersion,
		IsUpToDate:    version == latestVersion,
	}, nil
}
|
#include <stdio.h>
#include <math.h>
int fact(int);
/*
 * Reads x and n, then prints the partial sum of the exponential series:
 *   sum of x^i / i!  for i = 0..n.
 *
 * pow() requires <math.h>; the original file used it without the include
 * (implicit declaration / wrong result on some platforms).
 */
int main()
{
    int i, x, n;
    float sum = 0;

    printf("\nEnter x and n subsequently: ");
    scanf("%d %d", &x, &n);

    sum = 0;
    for (i = 0; i <= n; i++)
    {
        sum = sum + pow(x, i) / fact(i);
    }

    printf("\nRequired sum:%f", sum);
    return 0;
}
/*
 * fact - iterative factorial of j; fact(0) == fact(1) == 1.
 *
 * BUG FIX: the original loop read an undeclared variable `i`
 * (`for(j=1;i>=j;i--)` with `mul=mul*i`), which does not compile.
 * This version accumulates the product with a local counter.
 * Note: int overflows past fact(12); inputs here are small series terms.
 */
int fact(int j)
{
    int mul = 1;
    int i;
    for (i = 2; i <= j; i++)
    {
        mul = mul * i;
    }
    return mul;
}
|
using UnityEngine;
using System;
using UnityEditor;
/// <summary>
/// Extension helpers for <see cref="AnimationClip"/>: editing clip settings
/// and reading/writing the SpriteRenderer sprite keyframe curve.
/// </summary>
public static class AnimationClipExtensions
{
    // Curve binding that addresses SpriteRenderer.m_Sprite on the clip root.
    static EditorCurveBinding _spriteBinding = new EditorCurveBinding
    {
        type = typeof(SpriteRenderer),
        path = "",
        propertyName = "m_Sprite"
    };

    /// <summary>
    /// Edits the settings.
    /// </summary>
    /// <param name="clip">Clip.</param>
    /// <param name="editCallback">The edit callback to change the settings.</param>
    public static void EditSettings(this AnimationClip clip, Action<AnimationClipSettings> editCallback)
    {
        // Read-modify-write: the settings object is a detached copy, so the
        // mutated copy must be written back for the edit to take effect.
        AnimationClipSettings settings = AnimationUtility.GetAnimationClipSettings(clip);
        editCallback(settings);
        AnimationUtility.SetAnimationClipSettings(clip, settings);
    }

    /// <summary>
    /// Gets the sprite key frames of the animation clip.
    /// </summary>
    /// <returns>The key frames.</returns>
    /// <param name="clip">Clip.</param>
    public static ObjectReferenceKeyframe[] GetKeyFrames(this AnimationClip clip)
    {
        return AnimationUtility.GetObjectReferenceCurve(clip, _spriteBinding);
    }

    /// <summary>
    /// Sets the sprite key frames of the animation clip.
    /// </summary>
    /// <param name="clip">Clip.</param>
    /// <param name="keyFrames">Key frames.</param>
    public static void SetKeyFrames(this AnimationClip clip, ObjectReferenceKeyframe[] keyFrames)
    {
        AnimationUtility.SetObjectReferenceCurve(clip, _spriteBinding, keyFrames);
    }
}
|
package de.metas.materialtracking.qualityBasedInvoicing.ic.spi.impl;
import java.util.Iterator;
import java.util.Properties;
import org.adempiere.ad.dao.ICompositeQueryFilter;
import org.adempiere.ad.dao.IQueryBL;
import org.adempiere.ad.dao.IQueryBuilder;
import org.adempiere.ad.dao.IQueryFilter;
import org.adempiere.model.PlainContextAware;
import org.compiere.model.IQuery;
import org.eevolution.model.I_PP_Order;
import de.metas.document.engine.IDocument;
import de.metas.materialtracking.model.I_C_Invoice_Detail;
import de.metas.materialtracking.model.I_M_Material_Tracking;
import de.metas.util.Services;
/*
* #%L
* de.metas.materialtracking
* %%
* Copyright (C) 2015 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
/**
 * DAO for finding manufacturing orders ({@link I_PP_Order}) that are ready to
 * receive quality-based invoice candidates.
 */
public class PP_Order_MaterialTracking_HandlerDAO
{
    // Package-private on purpose: intended to be created only by the handler in this package.
    /* package */PP_Order_MaterialTracking_HandlerDAO()
    {
    }

    /**
     * Gets a filter which accepts only those {@link I_PP_Order}s which are invoiceable.
     *
     * More precisely, manufacturing orders which:
     * <ul>
     * <li>reference a M_Material_Tracking and
     * <li>are closed
     * </ul>
     *
     * @param contextProvider context carrier used when building the subqueries
     * @return filter matching invoiceable manufacturing orders
     */
    private IQueryFilter<I_PP_Order> getPP_OrderInvoiceableFilter(final Object contextProvider)
    {
        final IQueryBL queryBL = Services.get(IQueryBL.class);
        final ICompositeQueryFilter<I_PP_Order> filters = queryBL.createCompositeQueryFilter(I_PP_Order.class)
                // Only those manufacturing orders which are closed...
                .addEqualsFilter(I_PP_Order.COLUMN_DocStatus, IDocument.STATUS_Closed)
                // ...and only those which we didn't already explicitly look at
                .addEqualsFilter(de.metas.materialtracking.model.I_PP_Order.COLUMNNAME_IsInvoiceCandidate, false);

        //
        // only if they reference a M_Material_Tracking that is not yet processed
        {
            final IQuery<I_M_Material_Tracking> unprocessedMaterialTrackingsQuery = queryBL
                    .createQueryBuilder(I_M_Material_Tracking.class, contextProvider)
                    .addOnlyActiveRecordsFilter()
                    .addEqualsFilter(I_M_Material_Tracking.COLUMNNAME_Processed, false)
                    .create();
            filters.addInSubQueryFilter(de.metas.materialtracking.model.I_PP_Order.COLUMNNAME_M_Material_Tracking_ID,
                    I_M_Material_Tracking.COLUMNNAME_M_Material_Tracking_ID,
                    unprocessedMaterialTrackingsQuery);
        }

        //
        // Only those manufacturing orders which do not already have an invoice detail
        {
            // NOTE(review): this subquery selects invoice details that DO have an
            // invoice candidate (C_Invoice_Candidate_ID != null); orders referenced
            // by such details are excluded. Confirm that details without a candidate
            // are intentionally allowed to pass the filter.
            final IQuery<I_C_Invoice_Detail> invoiceDetailsWithPPOrderQuery = queryBL
                    .createQueryBuilder(I_C_Invoice_Detail.class, contextProvider)
                    .addOnlyActiveRecordsFilter()
                    .addNotEqualsFilter(I_C_Invoice_Detail.COLUMNNAME_C_Invoice_Candidate_ID, null)
                    .create();
            filters.addNotInSubQueryFilter(I_PP_Order.COLUMNNAME_PP_Order_ID,
                    I_C_Invoice_Detail.COLUMNNAME_PP_Order_ID,
                    invoiceDetailsWithPPOrderQuery);
        }
        return filters;
    }

    /**
     * Uses a <b>DB</b> query to validate if the given <code>ppOrder</code> is still invoiceable.
     *
     * @param ppOrder manufacturing order to re-check against the database
     *
     * @return true if given manufacturing order is invoiceable
     *
     * @see #getPP_OrderInvoiceableFilter(Object)
     */
    /* package */boolean isInvoiceable(final I_PP_Order ppOrder)
    {
        final IQueryBL queryBL = Services.get(IQueryBL.class);
        // Re-apply the invoiceable filter restricted to exactly this order's ID.
        final IQueryFilter<I_PP_Order> invoiceableFilter = getPP_OrderInvoiceableFilter(ppOrder);
        return queryBL.createQueryBuilder(I_PP_Order.class, ppOrder)
                .filter(invoiceableFilter)
                .addEqualsFilter(I_PP_Order.COLUMNNAME_PP_Order_ID, ppOrder.getPP_Order_ID())
                .create()
                .anyMatch();
    }

    /**
     * Retrieves PP_Orders which:
     * <ul>
     * <li>are closed
     * <li>have IsInvoicecandidate='N'
     * <li>don't have an invoice candidate linked to them
     * <li>reference an unprocessed M_Material_tracking
     * </ul>
     *
     * @param ctx context properties
     * @param limit maximum number of orders to fetch (guards batch size)
     * @param trxName transaction name
     * @return lazy iterator over the matching manufacturing orders
     */
    /* package */Iterator<de.metas.materialtracking.model.I_PP_Order> retrievePPOrdersWithMissingICs(final Properties ctx, final int limit, final String trxName)
    {
        final IQueryBL queryBL = Services.get(IQueryBL.class);
        final IQueryBuilder<I_PP_Order> ppOrderQueryBuilder = queryBL.createQueryBuilder(I_PP_Order.class, ctx, trxName)
                .addOnlyContextClient()
                .addOnlyActiveRecordsFilter();

        //
        // Only those manufacturing orders which are invoiceable
        ppOrderQueryBuilder.filter(getPP_OrderInvoiceableFilter(PlainContextAware.newWithTrxName(ctx, trxName)));

        //
        // Order by
        // (just to have a predictable order)
        ppOrderQueryBuilder.orderBy()
                .addColumn(I_PP_Order.COLUMN_PP_Order_ID);

        //
        // Execute query and return
        return ppOrderQueryBuilder
                .setLimit(limit)
                .create()
                .iterate(de.metas.materialtracking.model.I_PP_Order.class);
    }
}
|
import unwrap.EvaluatingUnwrapper
import utils.asList
import utils.getMappingOperationOrNull
/**
 * "find" operation: evaluates the input expression, then returns the first
 * element of the unwrapped data for which the mapping predicate evaluates to
 * true — or null when there is no predicate or no element matches.
 */
object Find : FunctionalLogicOperation, EvaluatingUnwrapper {
    override fun invoke(expression: Any?, data: Any?, evaluator: LogicEvaluator): Any? {
        val args = expression.asList
        val input = unwrapDataByEvaluation(args, data, evaluator)
        val predicate = args.getMappingOperationOrNull() ?: return null
        return input?.find { candidate ->
            evaluator.evaluateLogic(predicate, candidate) == true
        }
    }
}
|
package in.tombo.kashiki.view;
import com.jogamp.opengl.GL2;
/**
 * Base class for drawable scene elements: bundles position, rotation, scale
 * and color, and wraps the subclass drawing code in an OpenGL matrix
 * push/transform/pop cycle.
 */
public abstract class Base {
    private Position position = new Position();
    private Angle angle = new Angle();
    private Scale scale = new Scale();
    private Color color = new Color();

    /** Template method: applies transforms, runs subclass drawing, restores the matrix. */
    public void draw(GL2 gl) {
        preDraw(gl);
        innerDraw(gl);
        postDraw(gl);
    }

    /** Subclass hook: issue the actual GL drawing commands. */
    public abstract void innerDraw(GL2 gl);

    /** Pushes the matrix and applies translate, rotate, scale and color in that order. */
    public void preDraw(GL2 gl) {
        gl.glPushMatrix();
        position.updateTranslate(gl);
        angle.updateRotate(gl);
        scale.updateScale(gl);
        color.updateColor(gl);
    }

    /** Restores the matrix pushed in {@link #preDraw(GL2)}. */
    public void postDraw(GL2 gl) {
        gl.glPopMatrix();
    }

    // NOTE(review): true only when ALL four properties report animated; if the
    // intent was "any property is animating", this should use || — confirm with callers.
    public boolean isAnimated() {
        return position.isAnimated() && angle.isAnimated() && scale.isAnimated() && color.isAnimated();
    }

    public Position getPosition() {
        return position;
    }

    public void setPosition(Position position) {
        this.position = position;
    }

    public Angle getAngle() {
        return angle;
    }

    public void setAngle(Angle angle) {
        this.angle = angle;
    }

    public Scale getScale() {
        return scale;
    }

    public void setScale(Scale scale) {
        this.scale = scale;
    }

    public Color getColor() {
        return color;
    }

    public void setColor(Color color) {
        this.color = color;
    }
}
|
# maven.sh targetDir groupId artifactId version [type]
#
# Resolves a single Maven artifact by generating a throw-away pom that
# depends on it, letting maven download it, and copying the result into
# targetDir. type defaults to "war".
#
# Robustness fixes vs. the original: variable expansions are quoted so
# paths containing spaces don't word-split, and backticks are replaced
# with $(...).

if [ $# -lt 4 ];then
	echo "usage: maven.sh targetDir groupId artifactId version [type]"
	exit 1
fi

# Normalize targetDir to an absolute path (mvn runs from a temp dir below).
if [ "${1:0:1}" == "/" ];then
	targetDir="$1"
else
	targetDir="$(pwd)/$1"
fi

groupId=$2
artifactId=$3
version=$4

type=war
if [ $# -ge 5 ];then
	type=$5
fi

cwd="$(pwd)"
tmpdir="target/maventmp/$groupId/$artifactId/$version"
rm -rf "$tmpdir"
mkdir -p "$tmpdir"
cd "$tmpdir"

# Generate a dummy pom whose only dependency is the requested artifact.
# \${project.build.directory} is escaped so maven, not the shell, expands it.
cat <<-END > pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.dianping.platform</groupId>
<artifactId>sample-app1</artifactId>
<version>0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>$groupId</groupId>
<artifactId>$artifactId</artifactId>
<version>$version</version>
<type>$type</type>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-dependency-plugin</artifactId>
<configuration>
<outputDirectory>
\${project.build.directory}
</outputDirectory>
</configuration>
</plugin>
</plugins>
</build>
</project>
END

mvn -Dmaven.test.skip clean dependency:copy-dependencies

# Copy the resolved artifact(s) out; errors (e.g. no match) are ignored
# deliberately, matching the original best-effort behavior.
cp target/*.$type "$targetDir" >/dev/null 2>&1

cd "$cwd"
rm -rf "$tmpdir"
|
---
layout: slide
title: Boats
excerpt: "images of boats in a reveal.js slide format"
category: presentation
---
<section data-markdown>
<script type="text/template">
## Pictures of boats
</script>
</section>
|
---
name: VnManager
author: Micah686
tagline: "A program to keep track of your visual novels, with support for pulling in metadata"
project_url: https://github.com/micah686/VnManager
date_visited: 02.03.2021
order: 700
---

|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.