text
stringlengths 1
1.05M
|
|---|
import Day05 from './day-05';
/**
 * Day 10 challenge: reuses Day 5's grid/line infrastructure, drawing every
 * input segment before counting overlapping points.
 */
export default class Challenge10 extends Day05 {
  solve(): number {
    // Third argument false — presumably enables diagonal segments as well;
    // TODO confirm against Day05.drawLine's signature.
    for (const coords of this.input) {
      this.drawLine(coords[0], coords[1], false);
    }
    return this.countOverlaps();
  }
}
|
<filename>persistency/p3/AlbumNummerDAO.java
package muziekDAO;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
/**
 * DAO for the album_nummer join table (album_id, nummer_id).
 *
 * Queries are built by concatenating int parameters only, so SQL injection is
 * not possible through these methods, but PreparedStatement would still be the
 * safer long-term choice.
 */
public class AlbumNummerDAO extends BaseDao {

    /**
     * Runs the given SELECT and maps each row to an AlbumNummer.
     *
     * @param query a SELECT over album_nummer returning album_id and nummer_id
     * @return all mapped rows; empty list on SQL error (error is printed)
     */
    private ArrayList<AlbumNummer> selectAlbumNummer(String query) {
        ArrayList<AlbumNummer> aN = new ArrayList<AlbumNummer>();
        // try-with-resources now also closes the Statement and ResultSet;
        // previously only the Connection was closed.
        try (Connection con = super.connect();
             Statement stmt = con.createStatement();
             ResultSet dbResultSet = stmt.executeQuery(query)) {
            while (dbResultSet.next()) {
                Integer aid = dbResultSet.getInt("album_id");
                Integer nid = dbResultSet.getInt("nummer_id");
                aN.add(new AlbumNummer(aid, nid));
            }
        } catch (SQLException e) {
            e.printStackTrace();
        }
        return aN;
    }

    /** @return every row of album_nummer. */
    public ArrayList<AlbumNummer> selectAll() {
        return selectAlbumNummer("SELECT * FROM album_nummer");
    }

    /**
     * @return the first link for the given album.
     * NOTE(review): throws IndexOutOfBoundsException when no row matches —
     * preserved from the original behavior.
     */
    public AlbumNummer findByAlbum(int id) {
        return selectAlbumNummer("SELECT * FROM album_nummer WHERE album_id = " + id ).get(0);
    }

    /**
     * @return the first link for the given track (nummer).
     * NOTE(review): throws IndexOutOfBoundsException when no row matches.
     */
    public AlbumNummer findByNummer(int id) {
        return selectAlbumNummer("SELECT * FROM album_nummer WHERE nummer_id = " + id ).get(0);
    }

    /**
     * @return the link row for the exact (album, nummer) pair.
     * NOTE(review): throws IndexOutOfBoundsException when no row matches.
     */
    public AlbumNummer findByAlbumNummer(int aid, int nid) {
        return selectAlbumNummer("SELECT * FROM album_nummer WHERE( album_id = " + aid +" and nummer_id= "+ nid+")").get(0);
    }

    /**
     * Inserts a link row and re-reads it from the database.
     *
     * @param an the pair to insert
     * @return the row as re-read via findByAlbumNummer
     */
    public AlbumNummer insert(AlbumNummer an) {
        try (Connection con = super.connect();
             Statement stmt = con.createStatement()) {
            String query = "insert into album_nummer(album_id,nummer_id)"
                    +"values("+an.getAlbumID()+","+ an.getNummerID() +" )";
            // Fixed: executeUpdate, not executeQuery — INSERT produces no
            // ResultSet and executeQuery on DML throws on most JDBC drivers.
            stmt.executeUpdate(query);
        } catch (SQLException sqle) {
            sqle.printStackTrace();
        }
        return findByAlbumNummer(an.getAlbumID(), an.getNummerID());
    }
}
|
<gh_stars>0
/**
 * Front-end API wrapper for the education subject service.
 * @author 冷暖自知
 * @date 2020/4/15 15:22
 * @version 1.0
 * @Description Fetches the subject (course category) list from the backend.
 */
import request from '@/utils/request'
export default {
  // GET the full subject list from the edu-service backend.
  getSubjectList() {
    return request({
      url: '/eduservice/edu-subject/list',
      method: 'get'
    })
  }
}
|
package estimator
import (
"testing"
"time"
"sync"
"sync/atomic"
"github.com/jech/galene/rtptime"
)
// TestEstimator checks basic rate estimation over a one-second window and the
// running packet/byte totals.
func TestEstimator(t *testing.T) {
	now := rtptime.Jiffies()
	e := new(now, time.Second)
	e.estimate(now)
	e.Accumulate(42)
	e.Accumulate(128)
	e.estimate(now + rtptime.JiffiesPerSec)
	rate, packetRate :=
		e.estimate(now + (rtptime.JiffiesPerSec*1001)/1000)
	if rate != 42+128 {
		t.Errorf("Expected %v, got %v", 42+128, rate)
	}
	if packetRate != 2 {
		t.Errorf("Expected 2, got %v", packetRate)
	}
	totalP, totalB := e.Totals()
	if totalP != 2 {
		t.Errorf("Expected 2, got %v", totalP)
	}
	if totalB != 42+128 {
		t.Errorf("Expected %v, got %v", 42+128, totalB)
	}
	e.Accumulate(12)
	totalP, totalB = e.Totals()
	if totalP != 3 {
		// Fixed: message previously said "Expected 2" while the condition
		// checks for 3.
		t.Errorf("Expected 3, got %v", totalP)
	}
	if totalB != 42+128+12 {
		// Fixed: expected value in the message now matches the condition
		// (was 42+128).
		t.Errorf("Expected %v, got %v", 42+128+12, totalB)
	}
}
// TestEstimatorMany feeds one 42-byte packet per millisecond and checks that,
// once the one-second window is full, the estimator reports 1000 packets/s.
func TestEstimatorMany(t *testing.T) {
	now := rtptime.Jiffies()
	e := new(now, time.Second)
	for i := 0; i < 10000; i++ {
		e.Accumulate(42)
		now += rtptime.JiffiesPerSec / 1000
		b, p := e.estimate(now)
		// Skip the first second while the window is still filling.
		if i >= 1000 {
			if p != 1000 || b != p*42 {
				// Fixed: argument order now matches the format string
				// "Got <p> <b> (<i>), expected <1000> <p*42>"
				// (previously 1000 and i were swapped).
				t.Errorf("Got %v %v (%v), expected %v %v",
					p, b, i, 1000, p*42,
				)
			}
		}
	}
}
// TestEstimatorParallel hammers Accumulate and estimate from 16 goroutines so
// the race detector can exercise the estimator's locking.
func TestEstimatorParallel(t *testing.T) {
	// The shared clock lives in a slice element so all goroutines address the
	// same heap-allocated uint64 through the atomic helpers below.
	now := make([]uint64, 1)
	now[0] = rtptime.Jiffies()
	getNow := func() uint64 {
		return atomic.LoadUint64(&now[0])
	}
	addNow := func(v uint64) {
		atomic.AddUint64(&now[0], v)
	}
	e := new(getNow(), time.Second)
	// estimate takes the estimator's own mutex, since the unexported
	// estimate method is not safe for concurrent use by itself.
	estimate := func() (uint32, uint32) {
		e.mu.Lock()
		defer e.mu.Unlock()
		return e.estimate(getNow())
	}
	// f is the per-goroutine worker: accumulate, advance the clock 1ms,
	// then check byte rate is consistent with packet rate once warmed up.
	f := func(n int) {
		for i := 0; i < 10000; i++ {
			e.Accumulate(42)
			addNow(rtptime.JiffiesPerSec / 1000)
			b, p := estimate()
			if i >= 1000 {
				if b != p * 42 {
					t.Errorf("%v: Got %v %v (%v), expected %v %v",
						n, p, b, i, 1000, p*42,
					)
				}
			}
		}
	}
	var wg sync.WaitGroup
	for i := 0; i < 16; i++ {
		wg.Add(1)
		go func(i int) {
			f(i)
			wg.Done()
		}(i)
	}
	wg.Wait()
}
// BenchmarkEstimator measures single-goroutine Accumulate throughput on a
// warmed-up estimator.
func BenchmarkEstimator(b *testing.B) {
	est := New(time.Second)
	// Warm up with two estimates separated by a short sleep so the timed
	// loop measures steady-state accumulation only.
	est.Estimate()
	time.Sleep(time.Millisecond)
	est.Estimate()
	b.ResetTimer()
	total := 1000 * b.N
	for n := 0; n < total; n++ {
		est.Accumulate(100)
	}
	est.Estimate()
}
// BenchmarkEstimatorParallel measures Accumulate throughput when many
// goroutines feed the same estimator concurrently.
func BenchmarkEstimatorParallel(b *testing.B) {
	est := New(time.Second)
	// Same warm-up as the serial benchmark.
	est.Estimate()
	time.Sleep(time.Millisecond)
	est.Estimate()
	b.ResetTimer()
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			for j := 0; j < 1000; j++ {
				est.Accumulate(100)
			}
		}
	})
	est.Estimate()
}
|
package com.partyrgame.blackhandservice.model;
import lombok.Data;
@Data
/**
 * Mutable tally of players per faction (monsters, Black Hand, townies) in a
 * Black Hand game. Getters/setters are generated by Lombok's @Data.
 */
public class BlackHandNumberOfPlayers {
    private int monstersTotal;
    private int blackHandTotal;
    private int towniesTotal;

    /** Starts all counts at zero (explicit, though ints default to 0). */
    public BlackHandNumberOfPlayers() {
        this.monstersTotal = 0;
        this.blackHandTotal = 0;
        this.towniesTotal = 0;
    }

    /** Adds one monster-faction player. */
    public void incrementMonstersTotal() {
        this.monstersTotal++;
    }

    /** Adds one Black Hand-faction player. */
    public void incrementBlackHandTotal() {
        this.blackHandTotal++;
    }

    /** Adds one townie-faction player. */
    public void incrementTowniesTotal() {
        this.towniesTotal++;
    }

    /** Removes one monster-faction player; no floor at zero. */
    public void decrementMonstersTotal() {
        this.monstersTotal--;
    }

    /** Removes one Black Hand-faction player; no floor at zero. */
    public void decrementBlackHandTotal() {
        this.blackHandTotal--;
    }

    /** Removes one townie-faction player; no floor at zero. */
    public void decrementTowniesTotal() {
        this.towniesTotal--;
    }
}
|
#!/usr/bin/env bash
# CI script: verifies Go module hygiene, license headers and gofmt style,
# then runs the test suite for the host arch and 386, emitting JUnit reports.
set -euxo pipefail
# Install the license-header checker outside module mode.
GO111MODULE=off go get -u github.com/elastic/go-licenser
go get -d -t ./...
go mod download
go mod verify
# Fail the build if `go mod tidy` would change go.mod/go.sum.
if go mod tidy ; then
  if [ -z "$(git status --porcelain go.mod go.sum)" ] ; then
    echo "Go module manifest has not changed."
  else
    echo "Go module manifest changed. Run 'go mod tidy'" 1>&2
    exit 1
  fi
fi
# -d: report missing license headers without modifying files.
go-licenser -d
# `read` succeeds only if gofmt printed at least one unformatted file.
if find . -name '*.go' | grep -v vendor | xargs gofmt -s -l | read ; then
  echo "Code differs from gofmt's style. Run 'gofmt -s -w .'" 1>&2
  exit 1
fi
# Run the tests
# Disable errexit so both architectures run even if one suite fails;
# pipefail (still active) makes $? reflect `go test`, not `tee`.
set +e
mkdir -p build
go get -v -u github.com/jstemmer/go-junit-report
export OUT_FILE="build/test-report.out"
go test -v $(go list ./... | grep -v /vendor/) | tee ${OUT_FILE}
status=$?
go-junit-report > "build/junit.xml" < ${OUT_FILE}
OUT_FILE="build/test-report-386.out"
GOARCH=386 go test -v $(go list ./... | grep -v /vendor/) | tee ${OUT_FILE}
if [ $? -gt 0 ] ; then
  status=1
fi
go-junit-report > "build/junit-386.xml" < ${OUT_FILE}
# Fail only after both suites have run and both reports are written.
if [ $status -gt 0 ] ; then
  exit 1
fi
set -x
mkdir -p build/bin
go build -o build/bin/audit ./cmd/audit/
go build -o build/bin/auparse ./cmd/auparse/
|
#!/usr/bin/env bash
# Provisioning script: installs build/benchmark dependencies via apt, then
# builds and installs libatomic_ops 7.4.6 from source.
# NOTE(review): --force-yes is deprecated on modern apt; works on older Ubuntu.
sudo apt-get --yes --force-yes install \
    git make gcc numactl libnuma-dev \
    libmemcached-dev zlib1g-dev memcached \
    libmemcached-dev libmemcached-tools libpapi-dev
# Fetch, build and install libatomic_ops (not packaged at this version).
wget https://github.com/ivmai/libatomic_ops/releases/download/v7.4.6/libatomic_ops-7.4.6.tar.gz
tar xzvf libatomic_ops-7.4.6.tar.gz
cd libatomic_ops-7.4.6
./configure
make
sudo make install
cd ..
# Clean up the source tree and tarball.
rm -rf libatomic_ops-7.4.6*
|
package mcjty.incontrol;
import mcjty.incontrol.commands.*;
import mcjty.incontrol.rules.EntityModCache;
import mcjty.incontrol.rules.RulesManager;
import mcjty.incontrol.setup.IProxy;
import mcjty.incontrol.setup.ModSetup;
import mcjty.tools.cache.StructureCache;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.SidedProxy;
import net.minecraftforge.fml.common.event.*;
/**
 * Main Forge mod entry point for InControl (Minecraft 1.12.x).
 * Wires up proxies, lifecycle events, rule loading and server commands.
 */
@Mod(modid = InControl.MODID, name = InControl.MODNAME,
        dependencies =
                "after:forge@[" + InControl.MIN_FORGE11_VER + ",)",
        version = InControl.VERSION,
        acceptedMinecraftVersions = "[1.12,1.13)",
        acceptableRemoteVersions = "*")
public class InControl {
    public static final String MODID = "incontrol";
    public static final String MODNAME = "InControl";
    public static final String VERSION = "3.9.16";
    // Minimum Forge build this mod declares as a soft dependency.
    public static final String MIN_FORGE11_VER = "13.19.0.2176";

    // Forge injects the side-appropriate proxy at load time.
    @SidedProxy(clientSide = "mcjty.incontrol.setup.ClientProxy", serverSide = "mcjty.incontrol.setup.ServerProxy")
    public static IProxy proxy;
    public static ModSetup setup = new ModSetup();

    // Singleton instance injected by Forge.
    @Mod.Instance
    public static InControl instance;

    // Caches entity-class -> mod-id lookups for rule matching.
    public EntityModCache modCache = new EntityModCache();

    /** Forwards pre-init to setup and the sided proxy. */
    @Mod.EventHandler
    public void preInit(FMLPreInitializationEvent event) {
        setup.preInit(event);
        proxy.preInit(event);
    }

    /** Forwards init to setup and the sided proxy. */
    @Mod.EventHandler
    public void init(FMLInitializationEvent e) {
        setup.init(e);
        proxy.init(e);
    }

    /** Forwards post-init to setup and the sided proxy. */
    @Mod.EventHandler
    public void postInit(FMLPostInitializationEvent e) {
        setup.postInit(e);
        proxy.postInit(e);
    }

    /** Loads the rule files once every mod has finished loading. */
    @Mod.EventHandler
    public void onLoadComplete(FMLLoadCompleteEvent e) {
        RulesManager.readRules();
    }

    /** Registers all InControl server commands at server start. */
    @Mod.EventHandler
    public void serverLoad(FMLServerStartingEvent event) {
        event.registerServerCommand(new CmdReload());
        event.registerServerCommand(new CmdDebug());
        event.registerServerCommand(new CmdLoadSpawn());
        event.registerServerCommand(new CmdLoadPotentialSpawn());
        event.registerServerCommand(new CmdLoadSummonAid());
        event.registerServerCommand(new CmdLoadLoot());
        event.registerServerCommand(new CmdShowMobs());
        event.registerServerCommand(new CmdKillMobs());
    }

    /** Clears the structure cache when the server stops. */
    @Mod.EventHandler
    public void serverStopped(FMLServerStoppedEvent event) {
        StructureCache.CACHE.clean();
    }
}
|
/**
 * Task subtype that handles e-mail work items.
 */
class EmailTask extends Task {
    // Process the queued e-mail task (currently only reports progress).
    public function process() {
        echo "Processing email task";
    }
}
class PrintTask extends Task {
public function process() {
echo "Processing print task";
}
|
const Axe = require('axe');
const signale = require('signale');
// const safeStringify = require('fast-safe-stringify');
// const { WebClient } = require('@slack/web-api');
// const titleize = require('titleize');
// Pino logger used in production; a custom "log" level is mapped to 30
// (same severity as "info") so Axe's .log() calls are accepted.
const pino = require('pino')({
  customLevels: {
    log: 30
  },
  hooks: {
    // <https://github.com/pinojs/pino/blob/master/docs/api.md#logmethod>
    // Normalize Axe's (message, meta) call shape into pino's object form.
    logMethod(inputArgs, method) {
      return method.call(this, {
        // <https://github.com/pinojs/pino/issues/854>
        // message: inputArgs[0],
        msg: inputArgs[0],
        meta: inputArgs[1]
      });
    }
  }
});
const env = require('./env');
const isProduction = env.NODE_ENV === 'production';
// Shared Axe config: structured pino output in production, pretty signale
// output in development; stricter level ("warn") in production.
const config = {
  silent: env.IS_SILENT,
  logger: isProduction ? pino : signale,
  level: isProduction ? 'warn' : 'debug',
  showStack: env.SHOW_STACK,
  showMeta: env.SHOW_META,
  capture: false,
  name: env.APP_NAME
};
// create our application logger that uses a custom callback function
const axe = new Axe({ ...config });
/*
const OPTIONAL_SLACK_FIELDS = [
'bounce_info',
'envelope',
'sender',
'arc',
'dmarc',
'spf',
'dkim',
'session'
];
if (env.SLACK_API_TOKEN) {
// custom logger for Slack that inherits our Axe config
// (with the exception of a `callback` function for logging to Slack)
const slackLogger = new Axe(config);
// create an instance of the Slack Web Client API for posting messages
const web = new WebClient(env.SLACK_API_TOKEN, {
// <https://slack.dev/node-slack-sdk/web-api#logging>
logger: slackLogger,
logLevel: config.level
});
axe.setCallback(async (level, message, meta) => {
try {
// if meta did not have `slack: true` or not a specific level
if (!meta.slack && !['fatal'].includes(level)) return;
// otherwise post a message to the slack channel
const fields = [
{
title: 'Level',
value: meta.level,
short: true
},
{
title: 'Environment',
value: meta.app.environment,
short: true
},
{
title: 'Hostname',
value: meta.app.hostname,
short: true
},
{
title: 'Hash',
value: meta.app.hash,
short: true
}
];
for (const field of OPTIONAL_SLACK_FIELDS) {
if (meta[field])
fields.push({
title: titleize(field),
value: safeStringify(meta[field]),
short: true
});
}
const result = await web.chat.postMessage({
channel: 'logs',
username: 'Cabin',
icon_emoji: ':evergreen_tree:',
attachments: [
{
title: meta.err && meta.err.message ? meta.err.message : message,
color: 'danger',
text: meta.err && meta.err.stack ? meta.err.stack : null,
fields
}
]
});
// finally log the result from slack
axe.debug('web.chat.postMessage', { result, callback: false });
} catch (err) {
axe.debug(err, { callback: false });
}
});
}
*/
// Export the configured Axe instance as the app-wide logger singleton.
module.exports = axe;
|
#!/bin/bash
# ffbuild plugin: builds the Xvid codec as a static library for FFmpeg builds.
XVID_SRC="https://downloads.xvid.com/downloads/xvidcore-1.3.7.tar.gz"

# Enabled only for GPL build variants (Xvid is GPL-licensed).
# NOTE(review): bash return codes are 0-255, so -1 wraps to 255; nonzero
# either way, but 1 would be clearer.
ffbuild_enabled() {
    [[ $VARIANT == gpl* ]] || return -1
    return 0
}

# Emits the Dockerfile lines that copy this script in and run the build stage.
ffbuild_dockerstage() {
    to_df "ADD $SELF /stage.sh"
    to_df "RUN run_stage"
}

# Downloads, configures, builds and installs a static libxvidcore into
# $FFBUILD_PREFIX, then removes the DLL artifacts and source tree.
ffbuild_dockerbuild() {
    mkdir xvid
    cd xvid
    wget -O xvid.tar.gz "$XVID_SRC" || return -1
    tar xaf xvid.tar.gz || return -1
    rm xvid.tar.gz
    cd xvid*
    cd build/generic
    local myconf=(
        --prefix="$FFBUILD_PREFIX"
    )
    # Only Windows cross-targets are supported by this recipe.
    if [[ $TARGET == win* ]]; then
        myconf+=(
            --host="$FFBUILD_TOOLCHAIN"
        )
    else
        echo "Unknown target"
        return -1
    fi
    ./configure "${myconf[@]}" || return -1
    make -j$(nproc) || return -1
    make install || return -1
    # Drop the shared-library outputs; only the static library is wanted.
    rm "$FFBUILD_PREFIX"/{bin/xvidcore.dll,lib/xvidcore.dll.a}
    # Upstream installs "xvidcore.a"; rename to the conventional "libxvidcore.a".
    mv "$FFBUILD_PREFIX"/lib/{,lib}xvidcore.a
    cd ../../../..
    rm -rf xvid
}

# FFmpeg configure flag to enable this codec.
ffbuild_configure() {
    echo --enable-libxvid
}

# FFmpeg configure flag to disable this codec.
ffbuild_unconfigure() {
    echo --disable-libxvid
}
|
<filename>deno/lib/iterable/toIterable.ts
/**
 * Wraps any iterable in a fresh generator that lazily re-yields each element.
 */
export function* toIterable<T>(iter: Iterable<T>): Iterable<T> {
  for (const item of iter) {
    yield item;
  }
}
|
<reponame>AnDamazio/book4u-api<filename>src/core/dtos/book.dto.ts
import {
IsNumber,
IsString,
IsNotEmpty,
IsNotEmptyObject,
IsObject,
ValidateNested,
IsArray,
IsEnum,
IsOptional,
IsDate,
} from "class-validator";
import { Type } from "class-transformer";
import { CreateAuthorDto } from "./author.dto";
import { CreateLanguageDto } from "./language.dto";
import { CreatePublisherDto } from "./publisher.dto";
import { CreateCategoryDto } from "./category.dto";
import { CreateBookImagesDto } from "./book-images.dto";
import { CreateUserDto } from "./user.dto";
import { Condition, Status } from "../enums";
import { Book } from "../entities";
import { CreateBookCategoriesDto } from "./book-categories.dto";
/**
 * Validation DTO for creating a Book.
 * Nested DTOs (author, language, publisher, ...) are validated recursively
 * via class-validator's @ValidateNested plus class-transformer's @Type.
 */
export class CreateBookDto {
  // Book title.
  @IsString()
  @IsNotEmpty()
  name: string;

  @IsNumber()
  @IsNotEmpty()
  pagesQuantity: number;

  @IsString()
  @IsNotEmpty()
  synopsis: string;

  // Price kept as a string — presumably to avoid float rounding; confirm
  // against the entity/persistence layer.
  @IsString()
  @IsNotEmpty()
  price: string;

  // Must be one of the Status enum values.
  @IsEnum(Status)
  @IsNotEmpty()
  status: string;

  // Must be one of the Condition enum values.
  @IsEnum(Condition)
  @IsNotEmpty()
  condition: string;

  // Optional creation timestamp as a string (no @IsOptional — an absent
  // value skips @IsString only because validation runs on defined values).
  @IsString()
  createdAt?: string;

  // Required nested author object.
  @IsNotEmpty()
  @IsNotEmptyObject()
  @IsObject()
  @ValidateNested()
  @Type(() => CreateAuthorDto)
  author: CreateAuthorDto;

  // Required nested language object.
  @IsNotEmpty()
  @IsNotEmptyObject()
  @IsObject()
  @ValidateNested()
  @Type(() => CreateLanguageDto)
  language: CreateLanguageDto;

  // Required nested publisher object.
  @IsNotEmpty()
  @IsNotEmptyObject()
  @IsObject()
  @ValidateNested()
  @Type(() => CreatePublisherDto)
  publisher: CreatePublisherDto;

  // Image set attached to the book.
  @IsObject()
  @ValidateNested()
  @Type(() => CreateBookImagesDto)
  bookImages: CreateBookImagesDto;

  // The user offering the book.
  @ValidateNested()
  @Type(() => CreateUserDto)
  owner: CreateUserDto;

  // Optional list of categories.
  @IsArray()
  @IsOptional()
  @ValidateNested()
  @Type(() => CreateCategoryDto)
  category?: CreateCategoryDto[];

  // Optional list of book-category join records.
  @IsArray()
  @IsOptional()
  @ValidateNested()
  @Type(() => CreateBookCategoriesDto)
  bookCategories?: CreateBookCategoriesDto[];
}
|
<reponame>jiaqiluo/kubernetes<gh_stars>10-100
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package podresources
import (
"context"
"testing"
"github.com/stretchr/testify/mock"
"k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
podresourcesv1 "k8s.io/kubelet/pkg/apis/podresources/v1"
"k8s.io/kubelet/pkg/apis/podresources/v1alpha1"
)
// mockProvider is a testify-based mock implementing the pod-resources
// provider interfaces (pods, devices, CPUs, memory) used by the server
// under test.
type mockProvider struct {
	mock.Mock
}

func (m *mockProvider) GetPods() []*v1.Pod {
	args := m.Called()
	return args.Get(0).([]*v1.Pod)
}

func (m *mockProvider) GetDevices(podUID, containerName string) []*podresourcesv1.ContainerDevices {
	args := m.Called(podUID, containerName)
	return args.Get(0).([]*podresourcesv1.ContainerDevices)
}

func (m *mockProvider) GetCPUs(podUID, containerName string) []int64 {
	args := m.Called(podUID, containerName)
	return args.Get(0).([]int64)
}

func (m *mockProvider) GetMemory(podUID, containerName string) []*podresourcesv1.ContainerMemory {
	args := m.Called(podUID, containerName)
	return args.Get(0).([]*podresourcesv1.ContainerMemory)
}

func (m *mockProvider) UpdateAllocatedDevices() {
	m.Called()
}

func (m *mockProvider) GetAllocatableDevices() []*podresourcesv1.ContainerDevices {
	args := m.Called()
	return args.Get(0).([]*podresourcesv1.ContainerDevices)
}

func (m *mockProvider) GetAllocatableCPUs() []int64 {
	args := m.Called()
	return args.Get(0).([]int64)
}

func (m *mockProvider) GetAllocatableMemory() []*podresourcesv1.ContainerMemory {
	args := m.Called()
	return args.Get(0).([]*podresourcesv1.ContainerMemory)
}
// TestListPodResourcesV1alpha1 exercises the v1alpha1 List endpoint against a
// mocked provider: no pods, a pod without devices, and a pod with devices.
func TestListPodResourcesV1alpha1(t *testing.T) {
	podName := "pod-name"
	podNamespace := "pod-namespace"
	podUID := types.UID("pod-uid")
	containerName := "container-name"
	devs := []*podresourcesv1.ContainerDevices{
		{
			ResourceName: "resource",
			DeviceIds:    []string{"dev0", "dev1"},
		},
	}
	// Table of cases: provider return values and the expected response.
	for _, tc := range []struct {
		desc             string
		pods             []*v1.Pod
		devices          []*podresourcesv1.ContainerDevices
		expectedResponse *v1alpha1.ListPodResourcesResponse
	}{
		{
			desc:             "no pods",
			pods:             []*v1.Pod{},
			devices:          []*podresourcesv1.ContainerDevices{},
			expectedResponse: &v1alpha1.ListPodResourcesResponse{},
		},
		{
			desc: "pod without devices",
			pods: []*v1.Pod{
				{
					ObjectMeta: metav1.ObjectMeta{
						Name:      podName,
						Namespace: podNamespace,
						UID:       podUID,
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name: containerName,
							},
						},
					},
				},
			},
			devices: []*podresourcesv1.ContainerDevices{},
			expectedResponse: &v1alpha1.ListPodResourcesResponse{
				PodResources: []*v1alpha1.PodResources{
					{
						Name:      podName,
						Namespace: podNamespace,
						Containers: []*v1alpha1.ContainerResources{
							{
								Name:    containerName,
								Devices: []*v1alpha1.ContainerDevices{},
							},
						},
					},
				},
			},
		},
		{
			desc: "pod with devices",
			pods: []*v1.Pod{
				{
					ObjectMeta: metav1.ObjectMeta{
						Name:      podName,
						Namespace: podNamespace,
						UID:       podUID,
					},
					Spec: v1.PodSpec{
						Containers: []v1.Container{
							{
								Name: containerName,
							},
						},
					},
				},
			},
			devices: devs,
			expectedResponse: &v1alpha1.ListPodResourcesResponse{
				PodResources: []*v1alpha1.PodResources{
					{
						Name:      podName,
						Namespace: podNamespace,
						Containers: []*v1alpha1.ContainerResources{
							{
								Name: containerName,
								// v1 devices are converted to the alpha API shape.
								Devices: v1DevicesToAlphaV1(devs),
							},
						},
					},
				},
			},
		},
	} {
		t.Run(tc.desc, func(t *testing.T) {
			m := new(mockProvider)
			m.On("GetPods").Return(tc.pods)
			m.On("GetDevices", string(podUID), containerName).Return(tc.devices)
			m.On("UpdateAllocatedDevices").Return()
			server := NewV1alpha1PodResourcesServer(m, m)
			resp, err := server.List(context.TODO(), &v1alpha1.ListPodResourcesRequest{})
			if err != nil {
				t.Errorf("want err = %v, got %q", nil, err)
			}
			// Compare serialized forms, which covers all nested fields.
			if tc.expectedResponse.String() != resp.String() {
				t.Errorf("want resp = %s, got %s", tc.expectedResponse.String(), resp.String())
			}
		})
	}
}
|
/* Copyright (c) 2021 Skyward Experimental Rocketry
* Authors: <NAME>, <NAME>, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include <diagnostic/PrintLogger.h>
#include <sensors/BMX160/BMX160.h>
#include <sensors/calibration/BiasCalibration.h>
#include <sensors/calibration/SixParameterCalibration.h>
#include "BMX160WithCorrectionData.h"
/**
 * @brief Holds correction parameters for the BMX160.
 */
struct BMX160CorrectionParameters
{
    // Six-parameter (gain + bias per axis) corrections for the
    // accelerometer and magnetometer.
    Matrix<float, 3, 2> accelParams, magnetoParams;
    // Number of gyroscope samples to average during calibration.
    int minGyroSamplesForCalibration = 0;

    BMX160CorrectionParameters();

    /** @brief CSV header line matching print()'s output format. */
    static std::string header();

    /** @brief Reads the parameters from the given stream. */
    void read(std::istream& inputStream);

    /** @brief Writes the parameters to the given stream. */
    void print(std::ostream& outputStream) const;
};

/**
 * @brief Driver for BMX160 with calibration.
 *
 * Gets samples from a BMX160 and applies a specified correction and rotation.
 * It also calibrates the gyroscope.
 */
class BMX160WithCorrection : public Sensor<BMX160WithCorrectionData>
{
public:
    /**
     * @param bmx160_ already initialized bmx.
     * @param correctionParameters correction parameter to apply.
     * @param rotation_ axis rotation.
     */
    BMX160WithCorrection(BMX160* bmx160_,
                         BMX160CorrectionParameters correctionParameters,
                         AxisOrthoOrientation rotation_);

    /**
     * @brief Constructor without rotation, no rotation will be applied.
     *
     * @param bmx160_ already initialized bmx.
     * @param correctionParameters correction parameter to apply.
     */
    BMX160WithCorrection(BMX160* bmx160_,
                         BMX160CorrectionParameters correctionParameters);

    /**
     * @brief Constructor without correction nor rotation; no correction or
     * rotation will be applied.
     *
     * @param bmx160_ already initialized bmx.
     */
    BMX160WithCorrection(BMX160* bmx160_);

    bool init() override;

    bool selfTest() override;

    /**
     * @brief Performs the gyroscope calibration.
     *
     * The gyroscope calibration consists in averaging some samples to measure
     * the bias.
     * This function is intended to run while another thread samples the bmx at
     * at least 10Hz.
     */
    bool calibrate();

    /**
     * @brief Utility function to read correction parameters from file.
     */
    static BMX160CorrectionParameters readCorrectionParametersFromFile(
        const char* fileName);

    /**
     * @return Gyroscope calibration biases.
     */
    BMX160GyroscopeCalibrationBiases getGyroscopeBiases();

private:
    BMX160WithCorrectionData sampleImpl() override;

    /**
     * @brief Rotates data axes as specified.
     */
    BMX160WithCorrectionData rotateAxis(BMX160WithCorrectionData data);

    BMX160* bmx160;

    int minGyroSamplesForCalibration = 200;

    // Default orientation: identity (X stays X, Y stays Y).
    AxisOrthoOrientation rotation = {Direction::POSITIVE_X,
                                     Direction::POSITIVE_Y};

    SixParameterCorrector<AccelerometerData> accelerometerCorrector;
    SixParameterCorrector<MagnetometerData> magnetometerCorrector;
    BiasCorrector<GyroscopeData> gyroscopeCorrector{};
    Vector3f gyroscopeCorrectionParameters;

    PrintLogger logger = Logging::getLogger("bmx160withcorrection");
};
|
-- Return the 10 oldest customers (all columns); ties at the cutoff age are
-- broken arbitrarily by the engine.
SELECT *
FROM customers
ORDER BY age DESC
LIMIT 10;
|
#!/bin/bash
# Container entrypoint for a self-hosted GitHub Actions runner: configures the
# runner, installs a SIGTERM-triggered deregistration hook, then starts it.
export RUNNER_ALLOW_RUNASROOT=1
export PATH=$PATH:/actions-runner

# On shutdown signals, fetch a fresh registration token and remove this
# runner from the repo/org so it does not linger as offline.
deregister_runner() {
  echo "Caught SIGTERM. Deregistering runner"
  _TOKEN=$(bash /token.sh)
  RUNNER_TOKEN=$(echo "${_TOKEN}" | jq -r .token)
  ./config.sh remove --token "${RUNNER_TOKEN}"
  exit
}

# Runner name: explicit RUNNER_NAME, or prefix + 13 random alphanumerics.
_RUNNER_NAME=${RUNNER_NAME:-${RUNNER_NAME_PREFIX:-github-runner}-$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 13 ; echo '')}
_RUNNER_WORKDIR=${RUNNER_WORKDIR:-/_work}
_LABELS=${LABELS:-default}
_SHORT_URL=${REPO_URL}
# With a PAT, exchange it for a registration token (and canonical URL).
if [[ -n "${ACCESS_TOKEN}" ]]; then
  _TOKEN=$(bash /token.sh)
  RUNNER_TOKEN=$(echo "${_TOKEN}" | jq -r .token)
  _SHORT_URL=$(echo "${_TOKEN}" | jq -r .short_url)
fi
echo "Configuring"
./config.sh \
    --url "${_SHORT_URL}" \
    --token "${RUNNER_TOKEN}" \
    --name "${_RUNNER_NAME}" \
    --work "${_RUNNER_WORKDIR}" \
    --labels "${_LABELS}" \
    --unattended \
    --replace
# Drop the registration token from the environment before running jobs.
unset RUNNER_TOKEN
trap deregister_runner SIGINT SIGQUIT SIGTERM
# Run in the foreground (not exec'd) so the trap above stays active.
./bin/runsvc.sh
|
# Package recipe for GNU bc (arbitrary precision calculator language).
MININIX_PKG_HOMEPAGE=https://www.gnu.org/software/bc/
MININIX_PKG_DESCRIPTION="Arbitrary precision numeric processing language"
MININIX_PKG_VERSION=1.07.1
MININIX_PKG_SRCURL=https://mirrors.kernel.org/gnu/bc/bc-${MININIX_PKG_VERSION}.tar.gz
MININIX_PKG_SHA256=62adfca89b0a1c0164c2cdca59ca210c1d44c3ffc46daf9931cf4942664cb02a
MININIX_PKG_DEPENDS="readline,flex"
# bc generates libmath.h with a host tool, so a host build pass is required.
MININIX_PKG_HOSTBUILD=true
MININIX_PKG_EXTRA_CONFIGURE_ARGS="
--infodir=$MININIX_PREFIX/share/info
--mandir=$MININIX_PREFIX/share/man
--with-readline
"

mininix_step_pre_configure() {
	# Reuse the host build's generated libmath.h for the cross build.
	# Fixed: path expansions are now quoted so directories containing
	# spaces or glob characters do not break the copy.
	cp "$MININIX_PKG_HOSTBUILD_DIR/bc/libmath.h" \
		"$MININIX_PKG_SRCDIR/bc/libmath.h"
	# Future-dated timestamp keeps make from regenerating the header
	# with the (cross-compiled, non-runnable) generator.
	touch -d "next hour" "$MININIX_PKG_SRCDIR/bc/libmath.h"
}
|
#!/bin/sh
# Launch the Hazelcast-backed WebSocket proxy through the Maven exec plugin.
mvn exec:java -Dexec.mainClass="com.weisong.test.comm.impl.CHazelcastWebSocketProxy"
|
def triangleArea(a, b, c):
    """Return the area of a triangle with side lengths a, b, c (Heron's formula).

    Degenerate triangles (one side equal to the sum of the others) return 0.0.

    Raises:
        ValueError: if any side is non-positive, or the sides violate the
            triangle inequality (the original code silently returned a
            complex number in that case).
    """
    if a <= 0 or b <= 0 or c <= 0:
        raise ValueError("side lengths must be positive")
    if a + b < c or a + c < b or b + c < a:
        raise ValueError("side lengths do not satisfy the triangle inequality")
    s = (a + b + c) / 2
    return (s * (s - a) * (s - b) * (s - c)) ** 0.5
|
#!/bin/bash
# Restart the Bluetooth stack in compatibility mode and register the Serial
# Port Profile (SPP) service.
sudo rfkill block bluetooth
sudo killall bluetoothd
# -C: compatibility mode, required for sdptool to register services.
sudo bluetoothd -C &
sudo rfkill unblock bluetooth
# Advertise the Serial Port service over SDP.
sudo sdptool add sp
|
import requests
from bs4 import BeautifulSoup
def scrape(htmlPage):
    """Parse a product-listing HTML page.

    Returns a list of dicts with 'name' (from each product's first <h2>) and
    'price' (from each product's first <div>) for every <div class="product">.
    Assumes each product div contains both tags — TODO confirm page structure.
    """
    soup = BeautifulSoup(htmlPage, 'html.parser')
    return [
        {'name': item.h2.text, 'price': item.div.text}
        for item in soup.find_all('div', class_='product')
    ]
# Fetch the live product page and print each scraped product as "name: price".
htmlPage = requests.get("https://www.examplewebsite.com/products").text
products = scrape(htmlPage)
for product in products:
    print(product['name'] + ": " + product['price'])
|
<reponame>WalterHu/DemonCat
package org.spongycastle.tls;
import org.spongycastle.tls.crypto.TlsCrypto;
/**
 * Client-side TLS context: a thin AbstractTlsContext specialization whose only
 * distinction is reporting the client role via isServer().
 */
class TlsClientContextImpl
    extends AbstractTlsContext
    implements TlsClientContext
{
    TlsClientContextImpl(TlsCrypto crypto, SecurityParameters securityParameters)
    {
        super(crypto, securityParameters);
    }

    /** Always false: this context represents the client side of the connection. */
    public boolean isServer()
    {
        return false;
    }
}
|
<reponame>bbhunter/ipv666<filename>ipv666/cmd/generate/generate.go<gh_stars>100-1000
package generate
import (
"github.com/spf13/cobra"
"strings"
)
// init attaches the three generation subcommands (blacklist, model, address)
// to the parent "generate" command.
func init() {
	Cmd.AddCommand(blgenCmd)
	Cmd.AddCommand(modelgenCmd)
	Cmd.AddCommand(addrgenCmd)
}

// Long help text shown for "generate --help".
var generateLongDesc = strings.TrimSpace(`
The generation utilities of IPv666 include (1) generating a network range blacklist,
(2) generating a predictive clustering model, and (3) generating IPv6 addresses.
`)

// Cmd is the parent cobra command grouping all generation functions.
var Cmd = &cobra.Command{
	Use:   "generate",
	Short: "Perform generation functions",
	Long:  generateLongDesc,
}
|
require 'spec_helper'
require 'rhc/commands/port_forward'
# Specs for `rhc port-forward`: each context stubs Net::SSH and the REST mock
# client to simulate a gear state or rhc-list-ports output, then asserts the
# command's exit code and console output.
describe RHC::Commands::PortForward do
  before(:each) do
    RHC::Config.set_defaults
  end

  describe 'run' do
    # CLI arguments used by every example in this group.
    let(:arguments) { ['port-forward', '--noprompt', '--config', 'test.conf', '-l', 'test<EMAIL>', '-p', 'password', '--app', 'mockapp'] }

    before :each do
      @rc = MockRestClient.new
      @domain = @rc.add_domain("mockdomain")
      @app = @domain.add_application 'mockapp', 'mock-1.0'
      @uri = URI.parse @app.ssh_url
      @ssh = mock(Net::SSH)
    end

    context 'when port forwarding for a down appl' do
      before(:each) do
        Net::SSH.should_receive(:start).with(@uri.host, @uri.user).and_yield(@ssh)
        @ssh.should_receive(:exec!).with("rhc-list-ports").and_yield(nil, :stderr, '127.0.0.1:3306')
        # Gear reports 'stopped', which should trigger the restart suggestion.
        @gg = MockRestGearGroup.new
        @app.should_receive(:gear_groups).and_return([@gg])
        @gg.should_receive(:gears).and_return([{'state' => 'stopped', 'id' => 'fakegearid'}])
      end
      it "should error out and suggest restarting the application" do
        expect { run }.should exit_with_code(1)
      end
      it { run_output.should match(/Application \S+ is stopped\..*restart/m) }
    end

    context 'when port forwarding an app without ports to forward' do
      before(:each) do
        Net::SSH.should_receive(:start).with(@uri.host, @uri.user).and_yield(@ssh)
        @ssh.should_receive(:exec!).with("rhc-list-ports").and_yield(nil, :stderr, '127.0.0.1:3306')
      end
      it "should error out as no ports to forward" do
        expect { run }.should exit_with_code(102)
        @rc.domains[0].id.should == 'mockdomain'
        @rc.domains[0].applications.size.should == 1
        @rc.domains[0].applications[0].name.should == 'mockapp'
      end
      it { run_output.should match("no available ports to forward.") }
    end

    context 'when port forwarding an app with permission denied ports' do
      before(:each) do
        Net::SSH.should_receive(:start).with(@uri.host, @uri.user).and_yield(@ssh)
        @ssh.should_receive(:exec!).with("rhc-list-ports").and_yield(nil, :stderr, 'permission denied')
      end
      it "should error out as permission denied" do
        expect { run }.should exit_with_code(129)
        @rc.domains[0].id.should == 'mockdomain'
        @rc.domains[0].applications.size.should == 1
        @rc.domains[0].applications[0].name.should == 'mockapp'
      end
      it { run_output.should match("Permission denied") }
    end

    context 'when port forwarding an app with ports to forward' do
      before(:each) do
        Net::SSH.should_receive(:start).with(@uri.host, @uri.user).and_yield(@ssh).twice
        @ssh.should_receive(:exec!).with("rhc-list-ports").and_yield(nil, :stderr, 'mysql -> 127.0.0.1:3306')
        forward = mock(Net::SSH::Service::Forward)
        @ssh.should_receive(:forward).and_return(forward)
        # On macOS the loopback alias argument is omitted by the command.
        if mac?
          forward.should_receive(:local).with(3306, '127.0.0.1', 3306)
        else
          forward.should_receive(:local).with('127.0.0.1', 3306, '127.0.0.1', 3306)
        end
        @ssh.should_receive(:loop)
      end
      it "should run successfully" do
        expect { run }.should exit_with_code(0)
        @rc.domains[0].id.should == 'mockdomain'
        @rc.domains[0].applications.size.should == 1
        @rc.domains[0].applications[0].name.should == 'mockapp'
      end
      it { run_output.should match(/Forwarding ports.*Press CTRL-C/m) }
    end

    context 'when host is unreachable' do
      before(:each) do
        Net::SSH.should_receive(:start).and_raise(Errno::EHOSTUNREACH)
      end
      it "should error out" do
        expect { run }.should exit_with_code(1)
        @rc.domains[0].id.should == 'mockdomain'
        @rc.domains[0].applications.size.should == 1
        @rc.domains[0].applications[0].name.should == 'mockapp'
      end
      it { run_output.should include("Error trying to forward ports.") }
    end

    context 'when REST client connection times out' do
      before(:each) do
        @rc.should_receive(:find_domain).and_raise(RestClient::ServerBrokeConnection)
      end
      it "should error out" do
        expect { run }.should exit_with_code(1)
      end
      it { run_output.should match("Connection.*failed:") }
    end

    context 'when port forwarding an app with ports to forward' do
      before(:each) do
        Net::SSH.should_receive(:start).with(@uri.host, @uri.user).and_yield(@ssh).twice
        @ssh.should_receive(:exec!).with("rhc-list-ports").and_yield(nil, :stderr, 'mysql -> 127.0.0.1:3306')
        forward = mock(Net::SSH::Service::Forward)
        @ssh.should_receive(:forward).and_return(forward)
        if mac?
          forward.should_receive(:local).with(3306, '127.0.0.1', 3306)
        else
          forward.should_receive(:local).with('127.0.0.1', 3306, '127.0.0.1', 3306)
        end
        # Interrupt simulates the user pressing CTRL-C during forwarding.
        @ssh.should_receive(:loop).and_raise(Interrupt.new)
      end
      it "should exit when user interrupts" do
        expect { run }.should exit_with_code(0)
        @rc.domains[0].id.should == 'mockdomain'
        @rc.domains[0].applications.size.should == 1
        @rc.domains[0].applications[0].name.should == 'mockapp'
      end
      it { run_output.should include("Ending port forward") }
    end

    context 'when host refuses connection' do
      before(:each) do
        Net::SSH.should_receive(:start).with(@uri.host, @uri.user).and_yield(@ssh).twice
        @ssh.should_receive(:exec!).with("rhc-list-ports").and_yield(nil, :stderr, 'mysql -> 127.0.0.1:3306')
        forward = mock(Net::SSH::Service::Forward)
        @ssh.should_receive(:forward).and_raise(Errno::ECONNREFUSED)
      end
      it "should error out" do
        expect { run }.should exit_with_code(0)
      end
      it { run_output.should include("ssh -N") }
      it { run_output.should include("Error forwarding") }
    end

    context 'when port forwarding a scaled app with ports to forward' do
      before(:each) do
        Net::SSH.should_receive(:start).with(@uri.host, @uri.user).and_yield(@ssh).twice
        @ssh.should_receive(:exec!).with("rhc-list-ports").and_yield(nil, :stderr, "httpd -> 127.0.0.1:8080\nhttpd -> 127.0.0.2:8080")
        forward = mock(Net::SSH::Service::Forward)
        @ssh.should_receive(:forward).at_least(3).times.and_return(forward)
        # The second gear's port collides locally (EADDRINUSE), so the
        # command is expected to retry on the next local port (8081).
        if mac?
          forward.should_receive(:local).with(8080, '127.0.0.1', 8080)
          forward.should_receive(:local).with(8080, '127.0.0.2', 8080).and_raise(Errno::EADDRINUSE)
          forward.should_receive(:local).with(8081, '127.0.0.2', 8080)
        else
          forward.should_receive(:local).with('127.0.0.1', 8080, '127.0.0.1', 8080)
          forward.should_receive(:local).with('127.0.0.2', 8080, '127.0.0.2', 8080).and_raise(Errno::EADDRINUSE)
          forward.should_receive(:local).with('127.0.0.2', 8081, '127.0.0.2', 8080)
        end
        @ssh.should_receive(:loop).and_raise(Interrupt.new)
      end
      it "should exit when user interrupts" do
        expect { run }.should exit_with_code(0)
        @rc.domains[0].id.should == 'mockdomain'
        @rc.domains[0].applications.size.should == 1
        @rc.domains[0].applications[0].name.should == 'mockapp'
      end
      it { run_output.should include("Ending port forward") }
    end
  end
end
|
package subcmd
import (
"log"
"os"
"path/filepath"
"time"
"github.com/Shizuoka-Univ-dev/cvpn/api"
)
// Execute runs the root command, persists the accumulated API log exactly
// once, and exits with status 1 if the command failed.
//
// The original duplicated the saveLogs error handling in both the success
// and failure paths; this version saves once while preserving the original
// observable order (logs are written before the error is printed).
func Execute() {
	cmd := NewRootCmd()
	cmd.SetOutput(os.Stdout)

	cmdErr := cmd.Execute()

	// Persist logs regardless of the command outcome.
	if err := saveLogs(); err != nil {
		log.Fatal(err)
	}

	if cmdErr != nil {
		cmd.SetOutput(os.Stderr)
		cmd.Println(cmdErr)
		os.Exit(1)
	}
}
// saveLogs writes the API client's in-memory log buffer to a timestamped
// file under <user cache dir>/cvpn/log and returns any error encountered.
//
// Fix: the original discarded the os.MkdirAll error (`_ =`), so a failed
// directory creation surfaced later as a less informative os.Create error.
func saveLogs() error {
	userCacheDir, err := os.UserCacheDir()
	if err != nil {
		return err
	}

	logDirPath := filepath.Join(userCacheDir, "cvpn", "log")
	if err := os.MkdirAll(logDirPath, 0755); err != nil {
		return err
	}

	// One log file per invocation, named by wall-clock timestamp.
	logName := time.Now().Format("2006-01-02_15-04-05.log")

	file, err := os.Create(filepath.Join(logDirPath, logName))
	if err != nil {
		return err
	}
	defer file.Close()

	if _, err := file.Write(api.ReadLog()); err != nil {
		return err
	}

	return nil
}
|
import random
import string
def gen_random_password(length=8):
    """Generate a random alphanumeric password.

    Uses random.SystemRandom (backed by os.urandom) so the output is
    suitable for security-sensitive use; the module-level ``random``
    functions use the predictable Mersenne Twister PRNG and must not be
    used for passwords.

    Args:
        length: number of characters to generate (default 8, matching the
            original behavior).

    Returns:
        A string of ``length`` characters drawn from [a-zA-Z0-9].
    """
    chars = string.ascii_letters + string.digits
    rng = random.SystemRandom()
    return ''.join(rng.choice(chars) for _ in range(length))
print(gen_random_password())
|
import os
from pymongo import errors, MongoClient

# Database password comes from the environment, never from source.
# (Restored: the original line was corrupted to `<PASSWORD>('DBPASS')`.)
db_pass = os.getenv('DBPASS')

# NOTE(review): the cluster hostname below was redacted to <EMAIL>
# placeholders before this file was shared — restore the real
# mongodb+srv host before deploying. TODO confirm.
client = MongoClient(
    f"mongodb+srv://yanhkawakami:{db_pass}@<EMAIL>.<EMAIL>.mongodb.<EMAIL>/")
class UserDao:
    """Login and registration access to the ``users.login`` collection.

    ``self.message`` holds the last user-facing error message (Portuguese,
    matching the original UI strings).

    NOTE(review): passwords are stored and compared in plaintext — they
    should be hashed (e.g. with hashlib/pbkdf2 or bcrypt) before storage.
    Left unchanged here because callers compare raw strings.
    """

    def __init__(self):
        self.users_db = client['users']
        self.login_col = self.users_db['login']
        # Last error message for the caller to display.
        self.message = ''

    def read(self, email, password):
        """Return True (and set ``self.nome``) if email/password match a user."""
        query = {"_id": email, "pass": password}
        doc = self.login_col.find_one(query)
        if doc is not None:
            self.nome = doc['user']
            return True
        return False

    def create_user(self, email, user, password, valida_password):
        """Register a new user.

        Returns True on success; on failure returns False with
        ``self.message`` set to the reason.
        """
        try:
            # Restored: this comparison was corrupted to
            # `password != <PASSWORD>a_password`; the confirmation-field
            # check against ``valida_password`` is clearly what was intended.
            if password != valida_password:
                self.message = "As senhas não coincidem"
                return False
            # Reject duplicate usernames up front.
            query = {"user": user}
            doc = self.login_col.find_one(query)
            if doc is not None:
                self.message = "Nome de usuário em uso"
                return False
            doc = {"_id": email, "user": user, "pass": password}
            self.login_col.insert_one(doc)
            return True
        except errors.DuplicateKeyError:
            # _id (email) already exists.
            self.message = "Email já cadastrado"
            return False
class SearchDao:
    """Read-only search helpers over the ``cursos.teste`` collection."""

    def __init__(self):
        self.cursos_db = client['cursos']
        self.teste_col = self.cursos_db['teste']

    def query_by_description(self, description):
        """Case-insensitive substring search on the ``descricao`` field.

        The user-supplied text is regex-escaped so metacharacters are
        matched literally — the original interpolated raw input into
        ``$regex``, allowing regex injection / pathological patterns.
        Returns a pymongo cursor over the matching documents.
        """
        import re  # local import keeps this fix self-contained
        query = {"descricao": {"$regex": re.escape(description), "$options": 'i'}}
        return self.teste_col.find(query)
|
~/Documents/projectsUtilities/glvis-3.4/./glvis -run vis_dsl_test.glvs -fn -40
|
<gh_stars>0
import React from "react";
import { createStackNavigator } from "@react-navigation/stack";
import PokedexScreen from "../screens/Pokedex";
import PokemonScreen from "../screens/Pokemon";
const Stack = createStackNavigator();
export default function PokedexNavigation() {
return (
<Stack.Navigator>
<Stack.Screen
name="Pokedex"
component={PokedexScreen}
options={{ title: "", headerTransparent: true }}
/>
<Stack.Screen
name="Pokemon"
component={PokemonScreen}
options={{ title: "", headerTransparent: true }}
/>
</Stack.Navigator>
);
}
|
class OperationManager:
    """Dispatches a list of operation names to the corresponding methods."""

    def operation1(self):
        print("Operation 1 performed")

    def operation2(self):
        print("Operation 2 performed")

    def operation3(self):
        print("Operation 3 performed")

    def perform_operations(self, operations):
        """Run each named operation in order.

        Unknown names raise AttributeError (as before). Names that start
        with an underscore or resolve to non-callables now raise
        ValueError — the original unguarded ``getattr`` dispatch would
        happily invoke any attribute, including dunder methods.
        """
        for op in operations:
            method = getattr(self, op)  # AttributeError for unknown names
            if op.startswith('_') or not callable(method):
                raise ValueError(f"{op!r} is not a permitted operation")
            method()
|
<filename>springboot_memcached/src/main/java/com/oven/controller/DemoController.java
package com.oven.controller;
import com.oven.config.MemcachedRunner;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.annotation.Resource;
@RestController
public class DemoController {

    @Resource
    private MemcachedRunner memcachedRunner;

    /**
     * Stores {@code value} under {@code key} with a 1000-second expiry.
     */
    @RequestMapping("/set")
    public void set(String key, String value) {
        memcachedRunner.getClient().set(key, 1000, value);
    }

    /**
     * Returns the cached value for {@code key} as a string, or null on a
     * cache miss. The original called {@code get(key).toString()} directly
     * and threw a NullPointerException whenever the key was absent or
     * expired.
     */
    @RequestMapping("/get")
    public String get(String key) {
        Object value = memcachedRunner.getClient().get(key);
        return value == null ? null : value.toString();
    }
}
|
#!/bin/bash
# Copyright 2015 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
export NGINX_VERSION=1.15.6
export NDK_VERSION=0.3.1rc1
export SETMISC_VERSION=0.32
export MORE_HEADERS_VERSION=0.33
export NGINX_DIGEST_AUTH=274490cec649e7300fea97fed13d84e596bbc0ce
export NGINX_SUBSTITUTIONS=bc58cb11844bc42735bbaef7085ea86ace46d05b
export NGINX_OPENTRACING_VERSION=ea9994d7135be5ad2e3009d0f270e063b1fb3b21
export OPENTRACING_CPP_VERSION=1.5.0
export ZIPKIN_CPP_VERSION=0.5.2
export JAEGER_VERSION=ba0fa3fa6dbb01995d996f988a897e272100bf95
export MODSECURITY_VERSION=fc061a57a8b0abda79b17cbe103d78db803fa575
export LUA_NGX_VERSION=1c72f57ce87d4355d546a97c2bd8f5123a70db5c
export LUA_STREAM_NGX_VERSION=0.0.6rc2
export LUA_UPSTREAM_VERSION=0.07
export NGINX_INFLUXDB_VERSION=0e2cb6cbf850a29c81e44be9e33d9a15d45c50e8
export GEOIP2_VERSION=3.2
export NGINX_AJP_VERSION=bf6cd93f2098b59260de8d494f0f4b1f11a84627
export LUAJIT_VERSION=c58fe79b870f1934479bf14fe8035fc3d9fdfde2
export BUILD_PATH=/tmp/build
ARCH=$(uname -m)
# get_src <sha256> <url>
# Downloads <url>, verifies its SHA-256 checksum (exits with status 10 on
# mismatch), extracts the gzipped tarball into the current directory, and
# removes the archive.
# NOTE: assigns the globals hash/url/f as a side effect (no `local`).
get_src()
{
hash="$1"
url="$2"
f=$(basename "$url")
curl -sSL "$url" -o "$f"
echo "$hash $f" | sha256sum -c - || exit 10
tar xzf "$f"
rm -rf "$f"
}
apt-get update && apt-get dist-upgrade -y
# install required packages to build
clean-install \
bash \
build-essential \
curl ca-certificates \
libgeoip1 \
libgeoip-dev \
patch \
libpcre3 \
libpcre3-dev \
libssl-dev \
zlib1g \
zlib1g-dev \
libaio1 \
libaio-dev \
openssl \
libperl-dev \
cmake \
util-linux \
lua5.1 liblua5.1-0 liblua5.1-dev \
lmdb-utils \
wget \
libcurl4-openssl-dev \
libprotobuf-dev protobuf-compiler \
libz-dev \
procps \
git g++ pkgconf flex bison doxygen libyajl-dev liblmdb-dev libtool dh-autoreconf libxml2 libpcre++-dev libxml2-dev \
lua-cjson \
python \
luarocks \
libmaxminddb-dev \
authbind \
dumb-init \
gdb \
valgrind \
bc \
|| exit 1
if [[ ${ARCH} == "ppc64le" ]]; then
wget http://ftp.us.debian.org/debian/pool/main/a/apt/libapt-pkg5.0_1.7.0_ppc64el.deb
dpkg -i libapt-pkg5.0_1.7.0_ppc64el.deb
clean-install python3-apt python3-software-properties software-properties-common
fi
if [[ ${ARCH} == "x86_64" ]]; then
ln -s /usr/lib/x86_64-linux-gnu/liblua5.1.so /usr/lib/liblua.so
ln -s /usr/lib/x86_64-linux-gnu /usr/lib/lua-platform-path
fi
if [[ ${ARCH} == "armv7l" ]]; then
ln -s /usr/lib/arm-linux-gnueabihf/liblua5.1.so /usr/lib/liblua.so
ln -s /usr/lib/arm-linux-gnueabihf /usr/lib/lua-platform-path
fi
if [[ ${ARCH} == "aarch64" ]]; then
ln -s /usr/lib/aarch64-linux-gnu/liblua5.1.so /usr/lib/liblua.so
ln -s /usr/lib/aarch64-linux-gnu /usr/lib/lua-platform-path
fi
if [[ ${ARCH} == "ppc64le" ]]; then
ln -s /usr/lib/powerpc64le-linux-gnu/liblua5.1.so /usr/lib/liblua.so
ln -s /usr/lib/powerpc64le-linux-gnu /usr/lib/lua-platform-path
fi
mkdir -p /etc/nginx
# Get the GeoIP data
GEOIP_FOLDER=/etc/nginx/geoip
mkdir -p $GEOIP_FOLDER
# geoip_get <output-name.gz> <url>
# Downloads a legacy GeoIP database into $GEOIP_FOLDER and gunzips it in
# place (leaving the name minus .gz); exits 1 if the download fails.
function geoip_get {
wget -O $GEOIP_FOLDER/$1 $2 || { echo "Could not download $1, exiting." ; exit 1; }
gunzip $GEOIP_FOLDER/$1
}
# geoip2_get <db-name> <url>
# Downloads a GeoIP2 tarball, extracts it into a scratch directory,
# keeps only <db-name>.mmdb in $GEOIP_FOLDER, and removes the scratch
# directory; exits 1 if the download fails.
function geoip2_get {
wget -O $GEOIP_FOLDER/$1.tar.gz $2 || { echo "Could not download $1, exiting." ; exit 1; }
mkdir $GEOIP_FOLDER/$1 && tar xf $GEOIP_FOLDER/$1.tar.gz -C $GEOIP_FOLDER/$1 --strip-components 1 && mv $GEOIP_FOLDER/$1/$1.mmdb $GEOIP_FOLDER/$1.mmdb && rm -rf $GEOIP_FOLDER/$1
}
geoip_get "GeoIPASNum.dat.gz" "http://download.maxmind.com/download/geoip/database/asnum/GeoIPASNum.dat.gz"
geoip_get "GeoIP.dat.gz" "https://geolite.maxmind.com/download/geoip/database/GeoLiteCountry/GeoIP.dat.gz"
geoip_get "GeoLiteCity.dat.gz" "https://geolite.maxmind.com/download/geoip/database/GeoLiteCity.dat.gz"
geoip2_get "GeoLite2-City" "http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz"
geoip2_get "GeoLite2-ASN" "http://geolite.maxmind.com/download/geoip/database/GeoLite2-ASN.tar.gz"
if [[ (${ARCH} == "ppc64le") ]]; then
echo "deb http://deb.debian.org/debian experimental main" >> /etc/apt/sources.list
apt-get update
apt-get -t experimental install -y luajit
fi
mkdir --verbose -p "$BUILD_PATH"
cd "$BUILD_PATH"
# download, verify and extract the source files
get_src a3d8c67c2035808c7c0d475fffe263db8c353b11521aa7ade468b780ed826cc6 \
"https://nginx.org/download/nginx-$NGINX_VERSION.tar.gz"
get_src 49f50d4cd62b166bc1aaf712febec5e028d9f187cedbc27a610dfd01bdde2d36 \
"https://github.com/simpl/ngx_devel_kit/archive/v$NDK_VERSION.tar.gz"
get_src f1ad2459c4ee6a61771aa84f77871f4bfe42943a4aa4c30c62ba3f981f52c201 \
"https://github.com/openresty/set-misc-nginx-module/archive/v$SETMISC_VERSION.tar.gz"
get_src a3dcbab117a9c103bc1ea5200fc00a7b7d2af97ff7fd525f16f8ac2632e30fbf \
"https://github.com/openresty/headers-more-nginx-module/archive/v$MORE_HEADERS_VERSION.tar.gz"
get_src ede0ad490cb9dd69da348bdea2a60a4c45284c9777b2f13fa48394b6b8e7671c \
"https://github.com/atomx/nginx-http-auth-digest/archive/$NGINX_DIGEST_AUTH.tar.gz"
get_src 618551948ab14cac51d6e4ad00452312c7b09938f59ebff4f93875013be31f2d \
"https://github.com/yaoweibin/ngx_http_substitutions_filter_module/archive/$NGINX_SUBSTITUTIONS.tar.gz"
get_src 343b4293ca0d4afa55bf1ab54c866766043b2585b6ce81467d3d3e25987fc186 \
"https://github.com/opentracing-contrib/nginx-opentracing/archive/$NGINX_OPENTRACING_VERSION.tar.gz"
get_src 4455ca507936bc4b658ded10a90d8ebbbd61c58f06207be565a4ffdc885687b5 \
"https://github.com/opentracing/opentracing-cpp/archive/v$OPENTRACING_CPP_VERSION.tar.gz"
get_src 30affaf0f3a84193f7127cc0135da91773ce45d902414082273dae78914f73df \
"https://github.com/rnburn/zipkin-cpp-opentracing/archive/v$ZIPKIN_CPP_VERSION.tar.gz"
get_src 073deba39f74eff81da917907465e1343c89b335244349d3d3b4ae9331de86f2 \
"https://github.com/SpiderLabs/ModSecurity-nginx/archive/$MODSECURITY_VERSION.tar.gz"
get_src b68286966f292fb552511b71bd8bc11af8f12c8aa760372d1437ac8760cb2f25 \
"https://github.com/jaegertracing/jaeger-client-cpp/archive/$JAEGER_VERSION.tar.gz"
get_src 6c8a2792222f6bfad927840bf64cb890466fcca703a0133cbde0e5b808461279 \
"https://github.com/openresty/lua-nginx-module/archive/$LUA_NGX_VERSION.tar.gz"
get_src 5420dbf59bac52cef8021658d7eae1667a2bd14dda23602c985cae2604de77dd \
"https://github.com/openresty/stream-lua-nginx-module/archive/v$LUA_STREAM_NGX_VERSION.tar.gz"
get_src 2a69815e4ae01aa8b170941a8e1a10b6f6a9aab699dee485d58f021dd933829a \
"https://github.com/openresty/lua-upstream-nginx-module/archive/v$LUA_UPSTREAM_VERSION.tar.gz"
get_src 2349dd0b7ee37680306ee76bc4b6bf5c7509a4a4be16d246d9bbff44f564e4a0 \
"https://github.com/openresty/lua-resty-lrucache/archive/v0.08.tar.gz"
get_src bc9a00f4dd6dd3928c6e878dc84fa7a1073d5a65900cd77a5c1c7ce2d863b22a \
"https://github.com/openresty/lua-resty-core/archive/v0.1.16rc3.tar.gz"
get_src eaf84f58b43289c1c3e0442ada9ed40406357f203adc96e2091638080cb8d361 \
"https://github.com/openresty/lua-resty-lock/archive/v0.07.tar.gz"
get_src 3917d506e2d692088f7b4035c589cc32634de4ea66e40fc51259fbae43c9258d \
"https://github.com/hamishforbes/lua-resty-iputils/archive/v0.3.0.tar.gz"
get_src 5d16e623d17d4f42cc64ea9cfb69ca960d313e12f5d828f785dd227cc483fcbd \
"https://github.com/openresty/lua-resty-upload/archive/v0.10.tar.gz"
get_src 4aca34f324d543754968359672dcf5f856234574ee4da360ce02c778d244572a \
"https://github.com/openresty/lua-resty-dns/archive/v0.21.tar.gz"
get_src 095615fe94e64615c4a27f4f4475b91c047cf8d10bc2dbde8d5ba6aa625fc5ab \
"https://github.com/openresty/lua-resty-string/archive/v0.11.tar.gz"
get_src a77bf0d7cf6a9ba017d0dc973b1a58f13e48242dd3849c5e99c07d250667c44c \
"https://github.com/openresty/lua-resty-balancer/archive/v0.02rc4.tar.gz"
get_src d81b33129c6fb5203b571fa4d8394823bf473d8872c0357a1d0f14420b1483bd \
"https://github.com/cloudflare/lua-resty-cookie/archive/v0.1.0.tar.gz"
get_src 21dab7625a028d4560d0215c4bc3b82f6153344f933abb99dc9fd5f0d19519ab \
"https://github.com/openresty/luajit2/archive/$LUAJIT_VERSION.tar.gz"
get_src c673fcee37c1c4794f921b6710b09e8a0e1e58117aa788f798507d033f737192 \
"https://github.com/influxdata/nginx-influxdb-module/archive/$NGINX_INFLUXDB_VERSION.tar.gz"
get_src 15bd1005228cf2c869a6f09e8c41a6aaa6846e4936c473106786ae8ac860fab7 \
"https://github.com/leev/ngx_http_geoip2_module/archive/$GEOIP2_VERSION.tar.gz"
get_src 5f629a50ba22347c441421091da70fdc2ac14586619934534e5a0f8a1390a950 \
"https://github.com/yaoweibin/nginx_ajp_module/archive/$NGINX_AJP_VERSION.tar.gz"
# improve compilation times: parallelize make/ctest/hunter across all CPUs
# (the original wrapped the count in a no-op `$(( ... - 0 ))` arithmetic)
CORES=$(grep -c ^processor /proc/cpuinfo)

export MAKEFLAGS=-j${CORES}
export CTEST_BUILD_FLAGS=${MAKEFLAGS}
export HUNTER_JOBS_NUMBER=${CORES}
export HUNTER_KEEP_PACKAGE_SOURCES=false
export HUNTER_USE_CACHE_SERVERS=true
# Install luajit from openresty fork
export LUAJIT_LIB=/usr/local/lib
export LUA_LIB_DIR="$LUAJIT_LIB/lua"
# luajit is available only as deb package on ppc64le
if [[ (${ARCH} != "ppc64le") ]]; then
cd "$BUILD_PATH/luajit2-$LUAJIT_VERSION"
make CCDEBUG=-g
make install
export LUAJIT_INC=/usr/local/include/luajit-2.1
fi
# Installing luarocks packages
if [[ ${ARCH} == "x86_64" ]]; then
export PCRE_DIR=/usr/lib/x86_64-linux-gnu
fi
if [[ ${ARCH} == "armv7l" ]]; then
export PCRE_DIR=/usr/lib/armhf-linux-gnu
fi
if [[ ${ARCH} == "aarch64" ]]; then
export PCRE_DIR=/usr/lib/aarch64-linux-gnu
fi
if [[ ${ARCH} == "ppc64le" ]]; then
export PCRE_DIR=/usr/lib/powerpc64le-linux-gnu
fi
cd "$BUILD_PATH"
luarocks install lrexlib-pcre 2.7.2-1 PCRE_LIBDIR=${PCRE_DIR}
cd "$BUILD_PATH/lua-resty-core-0.1.16rc3"
make install
cd "$BUILD_PATH/lua-resty-lrucache-0.08"
make install
cd "$BUILD_PATH/lua-resty-lock-0.07"
make install
cd "$BUILD_PATH/lua-resty-iputils-0.3.0"
make install
cd "$BUILD_PATH/lua-resty-upload-0.10"
make install
cd "$BUILD_PATH/lua-resty-dns-0.21"
make install
cd "$BUILD_PATH/lua-resty-string-0.11"
make install
cd "$BUILD_PATH/lua-resty-balancer-0.02rc4"
make all
make install
cd "$BUILD_PATH/lua-resty-cookie-0.1.0"
make install
# build and install lua-resty-waf with dependencies
/install_lua_resty_waf.sh
# install openresty-gdb-utils
cd /
git clone --depth=1 https://github.com/openresty/openresty-gdb-utils.git
cat > ~/.gdbinit << EOF
directory /openresty-gdb-utils
py import sys
py sys.path.append("/openresty-gdb-utils")
source luajit20.gdb
source ngx-lua.gdb
source luajit21.py
source ngx-raw-req.py
set python print-stack full
EOF
# build opentracing lib
cd "$BUILD_PATH/opentracing-cpp-$OPENTRACING_CPP_VERSION"
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_CXX_FLAGS="-fPIC" \
-DBUILD_TESTING=OFF \
-DBUILD_MOCKTRACER=OFF \
..
make
make install
# build jaeger lib
cd "$BUILD_PATH/jaeger-client-cpp-$JAEGER_VERSION"
sed -i 's/-Werror/-Wno-psabi/' CMakeLists.txt
cat <<EOF > export.map
{
global:
OpenTracingMakeTracerFactory;
local: *;
};
EOF
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_TESTING=OFF \
-DJAEGERTRACING_BUILD_EXAMPLES=OFF \
-DJAEGERTRACING_BUILD_CROSSDOCK=OFF \
-DJAEGERTRACING_COVERAGE=OFF \
-DJAEGERTRACING_PLUGIN=ON \
-DHUNTER_CONFIGURATION_TYPES=Release \
-DJAEGERTRACING_WITH_YAML_CPP=ON ..
make
make install
export HUNTER_INSTALL_DIR=$(cat _3rdParty/Hunter/install-root-dir)
# Fix: the line above previously ended with a trailing backslash, which
# continued the `export` statement onto this `mv` line — so the jaeger
# plugin was never moved and `export` instead tried to export variables
# named `mv`, the .so filename, and the destination path.
mv libjaegertracing_plugin.so /usr/local/lib/libjaegertracing_plugin.so
# build zipkin lib
cd "$BUILD_PATH/zipkin-cpp-opentracing-$ZIPKIN_CPP_VERSION"
cat <<EOF > export.map
{
global:
OpenTracingMakeTracerFactory;
local: *;
};
EOF
mkdir .build
cd .build
cmake -DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=ON \
-DBUILD_PLUGIN=ON \
-DBUILD_TESTING=OFF ..
make
make install
# Get Brotli source and deps
cd "$BUILD_PATH"
git clone --depth=1 https://github.com/google/ngx_brotli.git
cd ngx_brotli
git submodule init
git submodule update
# build modsecurity library
cd "$BUILD_PATH"
git clone -b v3/master --single-branch https://github.com/SpiderLabs/ModSecurity
cd ModSecurity/
git checkout 9ada0a28c8100f905014c128b0e6d11dd75ec7e5
git submodule init
git submodule update
sh build.sh
./configure --disable-doxygen-doc --disable-examples --disable-dependency-tracking
make
make install
mkdir -p /etc/nginx/modsecurity
cp modsecurity.conf-recommended /etc/nginx/modsecurity/modsecurity.conf
cp unicode.mapping /etc/nginx/modsecurity/unicode.mapping
# Download owasp modsecurity crs
cd /etc/nginx/
git clone -b v3.0/master --single-branch https://github.com/SpiderLabs/owasp-modsecurity-crs
cd owasp-modsecurity-crs
git checkout a216353c97dd6ef767a6db4dbf9b724627811c9b
mv crs-setup.conf.example crs-setup.conf
mv rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf.example rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf
mv rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf.example rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf
cd ..
# OWASP CRS v3 rules
echo "
Include /etc/nginx/owasp-modsecurity-crs/crs-setup.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-900-EXCLUSION-RULES-BEFORE-CRS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-901-INITIALIZATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-903.9001-DRUPAL-EXCLUSION-RULES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-903.9002-WORDPRESS-EXCLUSION-RULES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-905-COMMON-EXCEPTIONS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-910-IP-REPUTATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-911-METHOD-ENFORCEMENT.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-912-DOS-PROTECTION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-913-SCANNER-DETECTION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-920-PROTOCOL-ENFORCEMENT.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-921-PROTOCOL-ATTACK.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-930-APPLICATION-ATTACK-LFI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-931-APPLICATION-ATTACK-RFI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-932-APPLICATION-ATTACK-RCE.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-933-APPLICATION-ATTACK-PHP.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-941-APPLICATION-ATTACK-XSS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-942-APPLICATION-ATTACK-SQLI.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-943-APPLICATION-ATTACK-SESSION-FIXATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/REQUEST-949-BLOCKING-EVALUATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-950-DATA-LEAKAGES.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-951-DATA-LEAKAGES-SQL.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-952-DATA-LEAKAGES-JAVA.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-953-DATA-LEAKAGES-PHP.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-954-DATA-LEAKAGES-IIS.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-959-BLOCKING-EVALUATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-980-CORRELATION.conf
Include /etc/nginx/owasp-modsecurity-crs/rules/RESPONSE-999-EXCLUSION-RULES-AFTER-CRS.conf
" > /etc/nginx/owasp-modsecurity-crs/nginx-modsecurity.conf
# build nginx
cd "$BUILD_PATH/nginx-$NGINX_VERSION"
# apply Nginx patches
patch -p1 < /patches/openresty-ssl_cert_cb_yield.patch
WITH_FLAGS="--with-debug \
--with-compat \
--with-pcre-jit \
--with-http_ssl_module \
--with-http_stub_status_module \
--with-http_realip_module \
--with-http_auth_request_module \
--with-http_addition_module \
--with-http_dav_module \
--with-http_geoip_module \
--with-http_gzip_static_module \
--with-http_sub_module \
--with-http_v2_module \
--with-stream \
--with-stream_ssl_module \
--with-stream_ssl_preread_module \
--with-threads \
--with-http_secure_link_module \
--with-http_gunzip_module"
# Fix: `!=` conditions joined with `||` are ALWAYS true (every ARCH differs
# from at least one of the two strings), so --with-file-aio was wrongly
# enabled on armv7l/aarch64. Conjunction (&&) expresses "neither arm arch".
if [[ ${ARCH} != "armv7l" && ${ARCH} != "aarch64" ]]; then
  WITH_FLAGS+=" --with-file-aio"
fi
# "Combining -flto with -g is currently experimental and expected to produce unexpected results."
# https://gcc.gnu.org/onlinedocs/gcc/Optimize-Options.html
CC_OPT="-g -Og -fPIE -fstack-protector-strong \
-Wformat \
-Werror=format-security \
-Wno-deprecated-declarations \
-fno-strict-aliasing \
-D_FORTIFY_SOURCE=2 \
--param=ssp-buffer-size=4 \
-DTCP_FASTOPEN=23 \
-fPIC \
-I$HUNTER_INSTALL_DIR/include \
-Wno-cast-function-type"
LD_OPT="-fPIE -fPIC -pie -Wl,-z,relro -Wl,-z,now -L$HUNTER_INSTALL_DIR/lib"
if [[ ${ARCH} == "x86_64" ]]; then
CC_OPT+=' -m64 -mtune=native'
fi
WITH_MODULES="--add-module=$BUILD_PATH/ngx_devel_kit-$NDK_VERSION \
--add-module=$BUILD_PATH/set-misc-nginx-module-$SETMISC_VERSION \
--add-module=$BUILD_PATH/headers-more-nginx-module-$MORE_HEADERS_VERSION \
--add-module=$BUILD_PATH/nginx-http-auth-digest-$NGINX_DIGEST_AUTH \
--add-module=$BUILD_PATH/ngx_http_substitutions_filter_module-$NGINX_SUBSTITUTIONS \
--add-module=$BUILD_PATH/lua-nginx-module-$LUA_NGX_VERSION \
--add-module=$BUILD_PATH/stream-lua-nginx-module-$LUA_STREAM_NGX_VERSION \
--add-module=$BUILD_PATH/lua-upstream-nginx-module-$LUA_UPSTREAM_VERSION \
--add-module=$BUILD_PATH/nginx-influxdb-module-$NGINX_INFLUXDB_VERSION \
--add-dynamic-module=$BUILD_PATH/nginx-opentracing-$NGINX_OPENTRACING_VERSION/opentracing \
--add-dynamic-module=$BUILD_PATH/ModSecurity-nginx-$MODSECURITY_VERSION \
--add-dynamic-module=$BUILD_PATH/ngx_http_geoip2_module-${GEOIP2_VERSION} \
--add-module=$BUILD_PATH/nginx_ajp_module-${NGINX_AJP_VERSION} \
--add-module=$BUILD_PATH/ngx_brotli"
./configure \
--prefix=/usr/share/nginx \
--conf-path=/etc/nginx/nginx.conf \
--modules-path=/etc/nginx/modules \
--http-log-path=/var/log/nginx/access.log \
--error-log-path=/var/log/nginx/error.log \
--lock-path=/var/lock/nginx.lock \
--pid-path=/run/nginx.pid \
--http-client-body-temp-path=/var/lib/nginx/body \
--http-fastcgi-temp-path=/var/lib/nginx/fastcgi \
--http-proxy-temp-path=/var/lib/nginx/proxy \
--http-scgi-temp-path=/var/lib/nginx/scgi \
--http-uwsgi-temp-path=/var/lib/nginx/uwsgi \
${WITH_FLAGS} \
--without-mail_pop3_module \
--without-mail_smtp_module \
--without-mail_imap_module \
--without-http_uwsgi_module \
--without-http_scgi_module \
--with-cc-opt="${CC_OPT}" \
--with-ld-opt="${LD_OPT}" \
--user=www-data \
--group=www-data \
${WITH_MODULES}
make || exit 1
make install || exit 1
echo "Cleaning..."
cd /
mv /usr/share/nginx/sbin/nginx /usr/sbin
apt-mark unmarkauto \
bash \
curl ca-certificates \
libgeoip1 \
libpcre3 \
zlib1g \
libaio1 \
gdb \
geoip-bin \
libyajl2 liblmdb0 libxml2 libpcre++ \
gzip \
openssl
apt-get remove -y --purge \
build-essential \
libgeoip-dev \
libpcre3-dev \
libssl-dev \
zlib1g-dev \
libaio-dev \
linux-libc-dev \
cmake \
wget \
patch \
protobuf-compiler \
python \
xz-utils \
bc \
git g++ pkgconf flex bison doxygen libyajl-dev liblmdb-dev libgeoip-dev libtool dh-autoreconf libpcre++-dev libxml2-dev
apt-get autoremove -y
rm -rf "$BUILD_PATH"
rm -Rf /usr/share/man /usr/share/doc
rm -rf /tmp/* /var/tmp/*
rm -rf /var/lib/apt/lists/*
rm -rf /var/cache/apt/archives/*
rm -rf /usr/local/modsecurity/bin
rm -rf /usr/local/modsecurity/include
rm -rf /usr/local/modsecurity/lib/libmodsecurity.a
rm -rf /root/.cache
rm -rf /etc/nginx/owasp-modsecurity-crs/.git
rm -rf /etc/nginx/owasp-modsecurity-crs/util/regression-tests
rm -rf $HOME/.hunter
# update image permissions
writeDirs=( \
/etc/nginx \
/var/lib/nginx \
/var/log/nginx \
/opt/modsecurity/var/log \
/opt/modsecurity/var/upload \
/opt/modsecurity/var/audit \
);
# Create each writable dir and hand it to the nginx worker user.
# Uses the POSIX `user:group` separator — the historical `user.group`
# form is deprecated in GNU chown and breaks for usernames containing
# dots. Expansions quoted to survive unexpected whitespace.
for dir in "${writeDirs[@]}"; do
  mkdir -p "${dir}"
  chown -R www-data:www-data "${dir}"
done
for value in {1..1023};do
touch /etc/authbind/byport/$value
chown www-data /etc/authbind/byport/$value
chmod 755 /etc/authbind/byport/$value
done
|
/* PicLens Lite: version 1.3.1 (14221)
* Copyright (c) 2008 Cooliris, Inc. All Rights Reserved.
*
* The JavaScript part of PicLens Lite (i.e., this file) is BSD licensed (see: http://lite.piclens.com/bsdlicense)
* This launcher includes and interacts with SWFObject (MIT), BrowserDetect (BSD Compatible), and Lytebox (CC Attribution 3.0).
*
* There are two versions of this JS:
* http://lite.piclens.com/current/piclens.js full commented file (~39KB)
* http://lite.piclens.com/current/piclens_optimized.js lighter deployment file (~21KB)
*
* Updated 2012-09-21 by Photocrati with higher z-index scores
*/
var PicLensLite = {
// PUBLIC API
// PicLens Lite can be deployed in one of two ways:
// 1) include http://lite.piclens.com/current/piclens.js in the <head> of your webpage
// 2) download the zip file and deploy it on your own website (unzip it anywhere, and point to the JS file in the <head> of your page)
// see: http://lite.piclens.com/releases/current.zip
//
// For example: the directory layout looks like:
// lite.piclens.com/current/ contains the SWF, JS, and image files
// /lytebox/ contains slideshow support for browsers w/o Flash
//
// Pointing to the JS directly will configure Lite relative to that URL.
// Alternatively, you can customize the URLs with PicLensLite.setLiteURLs
// 1) Call PicLensLite.start() to launch the default feed (specified in the head)
// 2) Call PicLensLite.start({feedUrl:'http://myWebsite.com/myFeed.rss', ...}) to launch a specific feed
// Option 2 supports the following named arguments:
// feedUrl : String // is the URL to the specific Media RSS feed you want to launch
// feedData : String // is the Media RSS feed itself (do not use feedUrl if you want to programmatically generate & pass in the feed text)
// guid : String // starts from the item in the feed that is tagged w/ this unique id
// maxScale : Number // normally, images fill the stage; 0 -> never scale up; any other positive number S --> scale up to S times the original size of the photo (but never bigger than the stage)
// loadFeedInFlash : Boolean // if true, we ask Flash to load the feed, instead of AJAX (expert option)
// loop : Boolean // if true, we turn looping on by default
// paused : Boolean // if true, we start the slideshow in paused mode
// To enable smoothing for images. a crossdomain.xml file is required at the root of your image server.
// Lite detects this crossdomain.xml and applies smoothing automatically.
start : function (namedArgs) {
this.determineBrowserParams();
clearTimeout(this.REMOVE_TIMER_ID);
clearTimeout(this.AUTO_CLOSE_TIMER_ID);
this.ARGS = {}; // clear out previous args
// handle named arguments
if (typeof namedArgs !== "undefined" && namedArgs !== null) {
this.ARGS = namedArgs;
// if feedUrl is specified, it launches immediately
if (namedArgs.feedUrl) {
this.THE_FEED_URL = namedArgs.feedUrl;
if (this.checkForPluginAndLaunchIfPossible(namedArgs.feedUrl, namedArgs.guid)) {
return;
}
if (namedArgs.loadFeedInFlash) {
// read up on flash crossdomain.xml if you choose this option
// Flash can only load feeds from servers hosting a crossdomain.xml
// pass the URL as a FlashVar, and load the contents via a GET request
this.showFlashUI("");
} else {
// load the contents of the URL via AJAX, and launch the Flash UI afterward....
this.loadViaXHR(namedArgs.feedUrl);
}
}
// pass in the feed XML directly through Javascript
// use feedUrl OR feedData, but not both!
if (typeof namedArgs.feedData !== 'undefined') {
this.showFlashUI(namedArgs.feedData);
}
} else {
// find the feed from the header, since none was specified
// build list of XML feeds
var feeds = this.indexFeeds();
if (feeds.length !== 0) { // view the first feed, if available
var feed = feeds[0];
this.THE_FEED_URL = feed.url;
if (this.checkForPluginAndLaunchIfPossible(feed.url)) {
return;
}
this.loadViaXHR(feed.url);
}
}
},
// check if the slideshow is currently running
isRunning : function () {
return this.LITE_IS_RUNNING;
},
// check if the browser plug-in is installed
hasClient : function () {
return this.hasCooliris();
},
// call this before starting lite. we currently support a single custom button
// the icon is a 24x24 PNG
// we will perform a GET request of a provided URL (w/ the item's GUID) when the user clicks
// http://yourwebserver.com/buttonURL?itemGUID=guidVal
addCustomButton : function (buttonRESTUrl, buttonLabel, buttonIcon) {
this.CUSTOM_BUTTON = {targetURL: buttonRESTUrl, labelText: buttonLabel, iconImage: buttonIcon};
},
// OPTIONAL: provide callbacks to be notified in certain situations. Call this BEFORE PicLensLite.start(...)
// onNoPlugins():Boolean
// is called when the user invokes Lite but does not have PicLens / Flash installed
// onExit(itemUID):void
// is called when the user exits from Lite
// we provide the item's GUID if it exists, and the item's content URL otherwise
// itemUID is undefined if the user exited before Lite launched, or if the user did not have Flash
setCallbacks : function (args) {
if (args.onNoPlugins) {
this.ON_NO_PLUGINS = args.onNoPlugins;
}
if (args.onExit) {
this.ON_EXIT = args.onExit;
}
},
// OPTIONAL: customize the location of resources. Call this BEFORE PicLensLite.start(...)
// Normally, we locate the PicLensLite files relative to the JS file
// To use this function, pass in an object with the following named arguments:
// args = {
// lite : other paths can be determined from this (make sure it ends in a slash)
// swf : the URL of the SWF file 1
// button : image allowing users to download piclens 1
// lbox : where to find lytebox 1
// lboxcss : the CSS file 2
// lboxjs : the JS file 2
// }
// 1: Can be determined from args.lite
// 2: Can be determined from args.lbox or args.lite
setLiteURLs : function (args) {
if (!this.LITE_URL) {
if (args.swf) {
this.LITE_URL = args.swf;
} else if (args.lite) {
this.LITE_URL = args.lite + "PicLensLite.swf";
} // if both lite & swf aren't set, it won't work
}
if (!this.BUTTON_URL) {
if (args.button) {
this.BUTTON_URL = args.button;
} else if (args.lite) {
this.BUTTON_URL = args.lite + "NoFlash.jpg";
}
}
var lboxUrl = "";
if (args.lbox) {
lboxUrl = args.lbox;
} else if (args.lite) {
lboxUrl = args.lite + "../lytebox/";
}
if (!this.LBOX_CSS_URL) {
if (args.lboxcss) {
this.LBOX_CSS_URL = args.lboxcss;
} else if (lboxUrl != "") {
this.LBOX_CSS_URL = lboxUrl + "lytebox.css";
}
}
if (!this.LBOX_JS_URL) {
if (args.lboxjs) {
this.LBOX_JS_URL = args.lboxjs;
} else if (lboxUrl != "") {
this.LBOX_JS_URL = lboxUrl + "lytebox.js";
}
}
},
//////////////////////////////////////////////////////////////////////////////////////////////////////////
// The PRIVATE API is below
// DO NOT USE these functions/variables directly; they WILL change in future releases
// Email us to request changes to the public API
ARGS : {},
DEBUG_NOCLIENT : false, // if true, we will NEVER launch the PicLens Client (for testing Lite)
DEBUG_NOFLASH : false, // if true, we will assume the user does not have Flash (for testing Lite)
HPAD : 60, // horizontal padding
VPAD : 20, // vertical padding
LITE_BG_DIV : null, // the grey/black background overlay
LITE_FG_DIV : null, // the foreground div that contains the flash component
LITE_URL : null, // the location of PicLensLite.SWF
BUTTON_URL : null, // image to display if the user doesn't have flash
LBOX_CSS_URL : null, // where to find lytebox css/js files
LBOX_JS_URL : null,
LBOX_COUNT : 0, // try to start lytebox, but if it doesn't exist after a few tries, give up...
SHOW_LBOX : false, // if true, skip flash altogether
OS_WIN : false, // OS Detect
OS_MAC : false, // sadly, sometimes we have to do something different depending on our Browser/OS/Configuration
BROWSER_FFX : false, // Browser Detect
BROWSER_SAF : false,
BROWSER_IE : false,
BROWSER_IE6 : false,
OLD_B_MARGIN : null,
OLD_B_OVERFLOW : null,
OLD_B_HEIGHT : null,
OLD_H_OVERFLOW : null,
OLD_H_HEIGHT : null,
THE_FEED : "", // the feed text
THE_FEED_URL : "", // the feed url
LITE_IS_RUNNING : false, // use isRunning()
piclensIsRunning_ : false, // maintain compatibility with the Wordpress Plugin for a few iterations...
FLASH_ID_1 : "pllflash1", // outer
FLASH_ID_2 : "pllflash2", // inner
FLASH_VER : null, // the version of Flash we're running
FLASH_URL : "http://www.adobe.com/go/getflashplayer",
PL_URL : "http://download.piclens.com/partner/", // downloads PL immediately
PLC : null, // PicLens Client
LEARN_PL_URL : "http://affiliate.piclens.com/partner/", // landing page to read about / download PL
FONT : "font-family: Lucida Grande, Myriad Pro, Verdana, Helvetica, Arial, sans-serif;",
KEY_HANDLERS : "", // save the old key handlers, if any
ON_NO_PLUGINS : null, // callback
ON_EXIT : null, // callback
AUTO_CLOSE_TIMER_ID : 0, //
REMOVE_TIMER_ID : 0, // the timer for removing the children...
RESIZE_TIMER_IE6 : null, // every second, autoresizes the UI
RESIZE_HANDLER_EXISTS : false,// add a handler to detect user resize events in safari
CUSTOM_BUTTON : null, // add an action to the UI
// Installs a document-level keydown handler that routes keys to
// handleKeyPress while the slideshow is up. The previous handler (if any)
// is saved in KEY_HANDLERS so it can be restored on exit.
addKeyHandlers : function() {
var self = this;
if (typeof document.onkeydown !== 'undefined') { // save & later restore key handlers...
this.KEY_HANDLERS = document.onkeydown;
}
document.onkeydown = function(e) {
var keycode;
// IE exposes the key via the global window.event; other browsers pass
// an event object with `which`.
if (typeof e === "undefined" || e === null) { // ie
keycode = window.event.keyCode;
} else { // mozilla
keycode = e.which;
}
// handleKeyPress returns false to swallow the key; mirror that into
// returnValue for IE and into the handler's return value elsewhere.
var val=self.handleKeyPress(keycode);
if (typeof e != "undefined" && e != null) {
e.returnValue = val;
}
return val;
};
},
// Capture mouse-wheel events on every browser family and route them to
// handleMouseWheel (closeFlashUI detaches them again).
addMouseHandlers : function() {
    if (window.addEventListener) { // Firefox/Opera
        window.addEventListener("DOMMouseScroll", this.handleMouseWheel, false);
    } else if (document.attachEvent) { // IE
        document.attachEvent("onmousewheel", this.handleMouseWheel);
    }
    // must be outside of the if-else
    window.onmousewheel = document.onmousewheel = this.handleMouseWheel; // Safari & Others
},
// call this at the last possible moment (especially for Win/Firefox)
// Attach the prepared background and foreground divs to the page body.
appendElementsToDocument : function() {
    if (this.BROWSER_FFX && this.OS_MAC) { // avoid redraw bug by not showing the background
        this.LITE_BG_DIV.style.display = "none";
    }
    document.body.appendChild(this.LITE_BG_DIV);
    document.body.appendChild(this.LITE_FG_DIV);
},
// Timer callback (IE6 only, see setResizeTimer): re-fit the overlay,
// foreground box, and flash element to the current window, and unregister
// itself once the UI has been closed.
autoResize : function() { // for the IE6 auto resize
    if (!this.isRunning()) {
        // unregister the timer
        clearInterval(this.RESIZE_TIMER_IE6);
        return;
    }
    // resize the BG and FG divs
    var size = this.getPageSize();
    var bg = this.LITE_BG_DIV;
    if (bg) {
        bg.style.height = size.h + 'px';
        bg.style.width = size.w + 'px';
    }
    if (this.LITE_FG_DIV) {
        var fgs = this.LITE_FG_DIV.style;
        this.resizeToPaddedBox(fgs);
        this.resizeToFitPaddedBox(fgs, size);
        this.resizeFlashToFitPaddedBox();
    }
},
checkForPluginAndLaunchIfPossible : function (url, guid) {
// if we have the correct version of piclens, pass it onto the client and do not use LITE
if (this.hasCooliris()) {
if (typeof(guid) != "undefined") {
this.PLC.launch(url,'uid',guid);
} else {
this.PLC.launch(url,'','');
}
return true; // launched!
}
return false;
},
// Build (but do not attach) the semi-transparent full-page overlay stored in
// LITE_BG_DIV. On IE6, which lacks position:fixed, it also rewrites the
// body/html styles (saved first so closeFlashUI can restore them) to
// simulate a fixed overlay. Clicking the overlay exits Lite.
createBackgroundOverlay : function () {
    // create a background that covers the page
    var bg = document.createElement('div');
    this.LITE_BG_DIV = bg;
    bg.id = "lite_bg_div";
    var bgs = bg.style;
    bgs.position = 'fixed';
    // stick to the sides when the window resizes
    bgs.width = bgs.height = "100%";
    if (this.BROWSER_IE6) {
        var b = document.body;
        var bs = b.currentStyle;
        var de = document.documentElement;
        var ds = de.currentStyle;
        // save previous document styles
        this.OLD_B_MARGIN = bs.margin;
        this.OLD_B_OVERFLOW = bs.overflow;
        this.OLD_B_HEIGHT = bs.height;
        this.OLD_H_OVERFLOW = ds.overflow;
        this.OLD_H_HEIGHT = ds.height;
        this.OLD_SCROLL_Y = de.scrollTop;
        // simulate position:fixed...
        b.style.margin = "0";
        b.style.overflow = "auto";
        b.style.height = "100%";
        de.style.overflow = "auto";
        de.style.height = "100%";
        bgs.position = 'absolute';
        var page = this.getPageSize();
        bgs.height = page.h + 'px';
        bgs.width = page.w + 'px';
    }
    bgs.left = bgs.right = bgs.top = bgs.bottom = '0';
    bgs.backgroundColor = '#000';
    bgs.zIndex = 10000;
    bgs.opacity = '0.5';
    bgs.filter = 'alpha(opacity=50)'; // IE7
    var self = this;
    bg.onclick = function () {
        self.exitPicLensLite();
    };
},
createForegroundFlashComponent : function () { // configure the box
var fg = document.createElement('div');
this.LITE_FG_DIV = fg;
fg.id = "lite_fg_div";
var fgs = fg.style;
fgs.backgroundColor = '#000';
fgs.position = 'fixed';
fgs.border = '2px solid #555';
fgs.zIndex = 10001; // above the bg
this.resizeToPaddedBox(fgs);
if (this.BROWSER_IE6) {
fgs.position = 'absolute';
this.resizeToFitPaddedBox(fgs);
}
},
// this just removes the HTML elements
// we call this from Flash (thus, we need to allow the function to return before removing the children)
// Tear down the Lite UI: restore keyboard/mouse handlers, hide the divs now
// and remove them on a short timer, undo the IE6 style rewrite, and fire the
// ON_EXIT callback with the item the user was viewing.
closeFlashUI : function (itemID) {
    var doc = document;
    // remove the keyboard & mouse handlers...
    doc.onkeydown = this.KEY_HANDLERS;
    window.onmousewheel = doc.onmousewheel = "";
    if (window.removeEventListener) {
        window.removeEventListener("DOMMouseScroll", this.handleMouseWheel, false);
    }
    if (doc.detachEvent) { // IE/Opera
        doc.detachEvent("onmousewheel", this.handleMouseWheel);
    }
    // hide the div now; remove them later
    this.LITE_BG_DIV.style.display = this.LITE_FG_DIV.style.display = 'none';
    this.REMOVE_TIMER_ID = setTimeout(function (){PicLensLite.removeChildren();}, 150); // 0.15s
    if (this.BROWSER_IE6) { // restore styles
        var b = document.body;
        var de = document.documentElement;
        b.style.margin = this.OLD_B_MARGIN;
        b.style.overflow = this.OLD_B_OVERFLOW;
        b.style.height = this.OLD_B_HEIGHT;
        de.style.overflow = this.OLD_H_OVERFLOW;
        de.style.height = this.OLD_H_HEIGHT;
        window.scrollTo(0, this.OLD_SCROLL_Y);
    }
    if (this.ON_EXIT !== null) {
        this.ON_EXIT(itemID); // call on exit
    }
    this.setRunningFlag(false);
},
// for handling cross-browser quirks...
determineBrowserParams : function () {
// BrowserDetect {.OS, .browser, .version} e.g., "Mac Firefox 2" and "Windows Explorer 7"
var os = BrowserDetect.OS;
var b = BrowserDetect.browser;
this.OS_MAC = (os == "Mac");
this.OS_WIN = (os == "Windows");
this.BROWSER_FFX = (b == "Firefox");
this.BROWSER_SAF = (b == "Safari");
this.BROWSER_IE = (b == "Explorer");
this.BROWSER_IE6 = (this.BROWSER_IE && BrowserDetect.version == "6");
this.FLASH_VER = swfobjlite.getFlashPlayerVersion(); // what version of Flash is the browser running?
},
// we should tell Flash we are exiting when this is called...
// this should only be called when the user clicks outside of the flash component
// all other exits are handled through Flash
exitPicLensLite : function () {
    var fl = this.getFlash();
    if (fl !== null && fl.fl_exitPicLensLite) { // binding exists
        // tell flash that we are quitting
        fl.fl_exitPicLensLite();
        // close after .5 seconds, if nothing happened
        // TODO: make sure this doesn't crash any browsers
        // TODO: Check the Return Value to Fire this Timer?
        this.AUTO_CLOSE_TIMER_ID = setTimeout(function (){ if (PicLensLite.isRunning()) { PicLensLite.closeFlashUI();}}, 500); // 0.5s
    } else {
        // if it's not running already, we just remove the DIVs (flash isn't defined)
        this.closeFlashUI();
    }
},
// a website should include the absolute URL of the piclens.js in its header
// This function looks for the script tag and extracts the ROOT_URL
// <script type="text/javascript" src="ROOT_URL/piclens.js"></script>
// we assume the SWF and JPEG/PNG/GIF files are relative to this ROOT_URL...
findScriptLocation : function () {
var scriptTags = document.getElementsByTagName("script");
for (var i = 0; i != scriptTags.length; ++i) {
var script = scriptTags[i];
var type = script.getAttribute("type");
if (type == "text/javascript") {
var src = script.getAttribute("src");
if (src === null) {
continue;
}
var index = src.indexOf("piclens.js");
if (index != -1) {
this.setLiteURLs({lite:src.substring(0,index)});
return;
} else {
index = src.indexOf("piclens_optimized.js");
if (index != -1) {
this.setLiteURLs({lite:src.substring(0,index)});
return;
}
}
}
}
},
// returns an object describing the page size of the browser window
// {pw, ph} are max(document, viewport) dimensions; {w, h} are the viewport.
// NOTE(review): this.isFrame is read but never assigned anywhere in this
// file — presumably set by an embedder; when undefined the parent-window
// branches never fire. Confirm.
getPageSize : function () {
    var xScroll, yScroll, winW, winH;
    var doc = document;
    var body = doc.body;
    var html;
    if (window.innerHeight && window.scrollMaxY) { // Mozilla
        xScroll = doc.scrollWidth;
        yScroll = (this.isFrame ? parent.innerHeight : self.innerHeight) + (this.isFrame ? parent.scrollMaxY : self.scrollMaxY);
    } else if (body.scrollHeight > body.offsetHeight){ // all but Explorer Mac
        xScroll = body.scrollWidth;
        yScroll = body.scrollHeight;
    } else { // fall back to the <html> element's box
        html = doc.getElementsByTagName("html").item(0);
        xScroll = html.offsetWidth;
        yScroll = html.offsetHeight;
        xScroll = (xScroll < body.offsetWidth) ? body.offsetWidth : xScroll;
        yScroll = (yScroll < body.offsetHeight) ? body.offsetHeight : yScroll;
    }
    var docElement = doc.documentElement;
    if (self.innerHeight) { // all except Explorer
        winW = (this.isFrame) ? parent.innerWidth : self.innerWidth;
        winH = (this.isFrame) ? parent.innerHeight : self.innerHeight;
    } else if (docElement && docElement.clientHeight) { // Explorer 6 Strict Mode
        winW = docElement.clientWidth;
        winH = docElement.clientHeight;
    } else if (body) { // other Explorers
        html = doc.getElementsByTagName("html").item(0);
        winW = html.clientWidth;
        winH = html.clientHeight;
        winW = (winW == 0) ? body.clientWidth : winW;
        winH = (winH == 0) ? body.clientHeight : winH;
    }
    var pageHeight = (yScroll < winH) ? winH : yScroll;
    var pageWidth = (xScroll < winW) ? winW : xScroll;
    return {pw:pageWidth, ph:pageHeight, w:winW, h:winH}; // pw and ph are the larger pair. use w and h.
},
// Parse the cached feed text (PicLensLite.THE_FEED) into an XML document
// and return a flat list of every element in it.
getElementsFromXMLFeed : function () {
    var xmlDoc;
    if (window.ActiveXObject) { // IE
        xmlDoc=new ActiveXObject("Microsoft.XMLDOM");
        xmlDoc.async=false;
        xmlDoc.loadXML(PicLensLite.THE_FEED);
    } else { // Mozilla, Firefox, Opera, etc.
        var parser = new DOMParser();
        xmlDoc = parser.parseFromString(PicLensLite.THE_FEED, "text/xml");
    }
    var elements = xmlDoc.getElementsByTagName('*'); // every element, any tag
    return elements;
},
// Build the hidden "basic slideshow" fallback markup: inject the lytebox
// JS/CSS into <head>, then emit one hidden <a rel="lytebox[lite]"> per image
// URL found in the feed plus the "click here for a basic slideshow" link.
// Returns "" when the lytebox URLs were never configured.
getBasicSlideShowHTML : function () {
    if (!this.LBOX_JS_URL || !this.LBOX_CSS_URL) {
        return "";
    }
    // make sure the lytebox JS is included
    var head = document.getElementsByTagName('head').item(0);
    // add the script tag
    var script = document.createElement('script');
    script.src = this.LBOX_JS_URL;
    script.type = 'text/javascript';
    head.appendChild(script);
    // add the lytebox CSS too
    var link = document.createElement('link');
    link.rel = "stylesheet";
    link.href = this.LBOX_CSS_URL;
    link.type = "text/css";
    link.media = "screen";
    head.appendChild(link);
    // find all image URLs from the feed.
    var xmlElements = this.getElementsFromXMLFeed();
    var i;
    var hiddenURLs = "";
    for (i = 0; i < xmlElements.length; i++) {
        if (xmlElements[i].nodeName == "media:content") { // what about the namespace?
            var url = xmlElements[i].getAttribute("url");
            if (url.indexOf(".flv") == -1) { // images only... avoid FLV files
                hiddenURLs += '<a id="lboxImage" href="' + url + '" rel="lytebox[lite]"></a> ';
            }
        }
    }
    // rel="lytebox[lite]"
    var basicSlideShow = "<div id='lightbox_images' align='center' style='display: none; padding-top:10px; color:#FFFFFF; font-size:.8em; " +this.FONT+ " color:#999999;'>";
    basicSlideShow += '( Alternatively, <a onclick="javascript:PicLensLite.invokeLytebox();return false;" href="#" style="color:#656588">click here for a basic slideshow</a>. )';
    basicSlideShow += hiddenURLs;
    basicSlideShow += "</div><br/>";
    return basicSlideShow;
},
// Build the HTML shown inside the <object> tags when Flash is missing or
// too old: an upgrade/install pitch (image map when BUTTON_URL is set),
// the optional basic-slideshow fallback, and a footer. Partner id (pid)
// is appended to the download/landing URLs, defaulting to 000000000001.
generateAlternativeContent : function () {
    var altContentHTML = '<div id="altContent" style="text-align:center; margin: 0 0 0 0; padding: 0 0 0 0; background-color: #000; min-width:860px;">';
    altContentHTML += '<div align="center" style="width: 100%; padding-top:60px; '+this.FONT+'">';
    var v = this.FLASH_VER;
    var flashMessage;
    if (v.major > 0) { // has some version of Flash
        flashMessage = "update your Flash Player from version "+ v.major + '.' + v.minor + '.' + v.release + " to version 9.0.28 or newer";
    } else {
        flashMessage = "install the most recent Flash Player";
    }
    var basicSlideShow = "";
    if (this.THE_FEED !== "") { // do this if we've loaded the feed in AJAX
        basicSlideShow = this.getBasicSlideShowHTML();
    }
    var downloadPL = this.PL_URL;
    var learnPL = this.LEARN_PL_URL;
    var pid = this.ARGS.pid;
    if (pid) {
        downloadPL += pid + "/";
        learnPL += pid + "/";
    } else {
        var x = "000000000001/"; // default partner id
        downloadPL += x;
        learnPL += x;
    }
    if (this.SHOW_LBOX) {
        // don't show the image, because we will invoke lytebox immediately
    } else {
        var sp = "<span style='padding-left:25px; color:#C6C6C6; font-size:";
        altContentHTML +=
            "<div style='padding:10px;'>" +
            sp+"1.5em; font-weight: bold; " +this.FONT+ "'>You're clicks away from going full screen!</span><br/>" +
            sp+".9em; padding-bottom: 15px; " +this.FONT+ "'>You must get the <a href='"+downloadPL+"' style='color:#656588'>Cooliris</a> browser plugin, or "+flashMessage+".</span>" +
            "</div>";
        if (!this.BUTTON_URL) {
            // plain text link when no promo image was configured
            altContentHTML +=
                '<a href="' + downloadPL + '" style="color:#ACD">Get Cooliris Now!</a>';
        } else {
            // promo image with clickable hotspots: close, download, get flash, learn more
            var area = '<area shape="rect" coords=';
            altContentHTML +=
                '<img src="'+this.BUTTON_URL+'" alt="" border="0" usemap="#Map">' +
                '<map name="Map" id="Map">' +
                area+'"0,0,33,33" href="#" onclick="javascript:PicLensLite.closeFlashUI();" />' +
                area+'"35,35,325,325" href="' + downloadPL +'" />' +
                area+'"593,209,825,301" href="' + this.FLASH_URL +'" />' +
                area+'"327,148,448,178" href="' + learnPL +'" />' +
                '</map>';
        }
    }
    altContentHTML += '</div>';
    altContentHTML += basicSlideShow;
    altContentHTML += '<div align="center" style="color:#666666; font-size:11px; '+this.FONT+'">© 2008 Cooliris, Inc. All trademarks are property of their respective holders.<br/><br/><br/></div>';
    altContentHTML += '</div>';
    return altContentHTML;
},
// Serialize this.ARGS (plus CUSTOM_BUTTON and the SWF URL) into the
// FlashVars query string passed to PicLensLite.SWF. Only keys that were
// actually supplied are emitted.
generateFlashVars : function () {
    var fv = '';
    var args = this.ARGS;
    if (typeof args.guid !== 'undefined') {
        fv += "&startItemGUID=" + args.guid;
    }
    if (args.loadFeedInFlash) {
        fv += "&feedURL=" + encodeURIComponent(this.THE_FEED_URL); // may need crossdomain.xml to allow loading of feed
    }
    if (args.paused) {
        fv += "&paused=" + args.paused;
    }
    if (args.loop) {
        fv += "&loop=" + args.loop;
    }
    if (args.delay) { // seconds: from 1-10
        fv += "&delay=" + args.delay;
    }
    if (args.pid) {
        fv += "&pid=" + args.pid;
    }
    if (typeof args.maxScale != 'undefined') { // allow 0
        fv += "&maxScale=" + args.maxScale;
    }
    if (typeof args.overlayToolbars != 'undefined') {
        fv += "&overlayToolbars=" + args.overlayToolbars;
    }
    var cb = this.CUSTOM_BUTTON;
    if (cb != null) { // optional partner button: target URL, label, icon
        fv += "&cButtonURL=" + encodeURIComponent(cb.targetURL);
        if (cb.labelText != null) {
            fv += "&cButtonLabel=" + encodeURIComponent(cb.labelText);
        }
        if (cb.iconImage != null) {
            fv += "&cButtonIcon=" + encodeURIComponent(cb.iconImage);
        }
    }
    fv += "&swfURL="+encodeURIComponent(this.LITE_URL);
    fv = fv.substring(1); // kill the first &
    return fv;
},
// does the right thing for each browser
// returns the Flash object, so we can communicate with it over the ExternalInterface
getFlash : function () {
// we should determine which one to pass back depending on Browser/OS configuration
if (this.BROWSER_SAF || this.BROWSER_IE) {
return document.getElementById(this.FLASH_ID_1); // outer <object>
} else {
return document.getElementById(this.FLASH_ID_2); // inner <object>
}
},
getWindowSize : function () { // inner size
var docElement = document.documentElement;
var docBody = document.body;
var w = 0, h = 0;
if (typeof(window.innerWidth) == 'number') {
// not IE
w = window.innerWidth;
h = window.innerHeight;
} else if (docElement && (docElement.clientWidth || docElement.clientHeight)) {
// IE 6+ in 'standards compliant mode'
w = docElement.clientWidth;
h = docElement.clientHeight;
} else if (docBody && (docBody.clientWidth || docBody.clientHeight)) {
// IE 4 compatible
w = docBody.clientWidth;
h = docBody.clientHeight;
}
return {w:w, h:h};
},
handleKeyPress : function (code) {
if (!this.isRunning()) { return true; }
var fl = this.getFlash();
if (fl != null && fl.fl_keyPressed) {
fl.fl_keyPressed(code); // forward to Flash
} else {
if (code == 27) { // ESC to close
this.closeFlashUI();
return false;
}
}
if (code == 9 || code == 13) { // trap tab, enter
return false;
}
return true; // allow the browser to process the key
},
// Global wheel handler (registered on window/document, so `this` is not
// PicLensLite — it references the global object explicitly). Normalizes the
// per-browser wheel delta and forwards it to Flash, then suppresses the
// page's own scrolling.
handleMouseWheel : function (e) {
    // e.wheelDelta
    // Safari/Windows (MouseWheel Up is +120; Down is -120)
    var delta = 0;
    if (!e) {
        e = window.event; // IE supplies the event globally
    }
    if (e.wheelDelta) { // IE/Opera
        delta = e.wheelDelta/120;
        if (window.opera) {
            delta = -delta; // Opera's sign is inverted vs IE
        }
    } else if (e.detail) { // Firefox/Moz
        var d = e.detail;
        // on mac, don't divide by 3...
        if (Math.abs(d) < 3) {
            delta = -d;
        } else {
            delta = -d/3;
        }
    }
    if (delta) {
        // don't send abs values < 1; otherwise, you can only scroll next
        PicLensLite.sendMouseScrollToFlash(delta);
    }
    if (e.preventDefault) {
        e.preventDefault();
    }
    e.returnValue = false; // IE's way of cancelling the default
    return false;
},
hasPicLensClient : function () { // DEPRECATED! Use hasCooliris()
    // legacy alias kept for embedders that still call the old name
    return this.hasCooliris();
},
// check if Cooliris Client is available
// Probes for the native client bridge — window.piclens, the Firefox
// PicLensContext constructor, the IE ActiveX control, or the Safari
// 'application/x-cooliris' plugin — caches it in this.PLC, and then checks
// that the client version is at least 1.6.0.824 (Safari is exempt: its
// first release was already new enough).
hasCooliris : function () {
    // a flag to turn off the client
    if (this.DEBUG_NOCLIENT) {
        return false;
    }
    // check if the bridge has already been defined
    var clientExists = false;
    if (this.PLC) {
        clientExists = true;
    } else if (window.piclens && window.piclens.launch) {
        this.PLC = window.piclens;
        clientExists = true;
    } else { // if not, try to define it here...
        var context = null;
        if (typeof PicLensContext != 'undefined') { // Firefox
            context = new PicLensContext();
        } else {
            try {
                context = new ActiveXObject("PicLens.Context"); // IE
            } catch (e) {
                if (navigator.mimeTypes['application/x-cooliris']) { // Safari
                    // instantiate the plugin through a hidden <object>
                    context = document.createElement('object');
                    context.style.display = 'none';
                    context.width = 0;
                    context.height = 0;
                    context.type = 'application/x-cooliris';
                    document.documentElement.appendChild(context);
                } else {
                    context = null;
                }
            }
        }
        this.PLC = context;
        if (this.PLC) {
            clientExists = true;
        }
    }
    if (clientExists) { // check the version number
        if (this.BROWSER_SAF) { // for Safari, we just return true (the first v. was 1.8)
            return true;
        }
        var version;
        try { version = this.PLC.version; } catch (e) { return false; } // bridge may refuse the property
        var parts = version.split('.'); // minimum ver. is: 1.6.0.824
        if (parts[0] > 1) { // a ver. 2.X product
            return true;
        } else if (parts[0] == 1) { // a 1.X product
            if (parts[1] > 6) { // a 1.7.X product
                return true;
            } else if (parts[1] == 6) { // a 1.6 product
                if (parts[2] > 0) { // a 1.6.1.X product
                    return true;
                } else if (parts[2] == 0) {
                    if (parts[3] >= 824) { // 1.6.0.824 or newer...
                        return true;
                    }
                }
            }
        }
        return false; // a 0.X product
    } else {
        return false;
    }
},
// Start the lytebox basic slideshow from the hidden anchors and close the
// Flash UI; remembers the choice so future launches skip Flash.
invokeLytebox : function () {
    this.SHOW_LBOX = true; // user has specified that she wants to use the basic slideshow
    myLytebox.start(document.getElementById("lboxImage"), false, false);
    this.closeFlashUI();
},
// Reveal the "basic slideshow" link block; if the user previously chose
// lytebox and Flash isn't present, jump straight into the slideshow.
showLyteboxLink : function () {
    myLytebox.updateLyteboxItems(); // pick up the hidden anchors we injected
    myLytebox.doAnimations = false;
    var lboxImages = document.getElementById('lightbox_images');
    if (lboxImages != null) {
        lboxImages.style.display = "block";
        if (this.SHOW_LBOX && this.getFlash()==null) { // the user has clicked on lbox once, so we assume it going forward
            this.invokeLytebox();
        }
    }
},
// Wait for the asynchronously injected lytebox script to define itself,
// retrying every 150 ms up to 4 times before giving up.
startLytebox : function () { // allows us to include lytebox, unmodified
    if (typeof myLytebox != "undefined") {
        this.showLyteboxLink();
    } else {
        if (typeof initLytebox != "undefined") {
            initLytebox(); // script arrived but hasn't self-initialized
            this.showLyteboxLink();
        } else {
            if (this.LBOX_COUNT >= 4) {
                return; // give up after 600 ms
            }
            setTimeout(function (){PicLensLite.startLytebox();}, 150); // try again in 150 ms
            this.LBOX_COUNT++;
        }
    }
},
// Fill the foreground div with the Flash markup: a nested pair of <object>
// tags (outer classid form for IE/Safari, inner data form for the rest)
// carrying the same FlashVars, with generateAlternativeContent() as the
// innermost fallback. If Flash requirements aren't met, show the fallback
// directly (or fire the ON_NO_PLUGINS callback instead).
injectFlashPlayer : function () {
    var fg = this.LITE_FG_DIV;
    // determine the width and height of the flash component
    var flashWInner;
    var flashHInner;
    flashWInner = flashHInner = '100%';
    if (this.BROWSER_IE6) {
        flashWInner = flashHInner = '0'; // IE6 sizes the outer object only
    }
    var flashVars = this.generateFlashVars();
    var altContentHTML = this.generateAlternativeContent(); // non-flash content
    if (this.meetsReqs()) {
        var par = '<param name=';
        fg.innerHTML =
            '<object id="'+ this.FLASH_ID_1 +'" classid="clsid:D27CDB6E-AE6D-11cf-96B8-444553540000" width="100%" height="100%">' + // SAF & IE
            par+'"movie" value="' + this.LITE_URL + '" />' +
            par+'"quality" value="high"/> ' +
            par+'"bgcolor" value="#000000"/> ' +
            par+'"allowScriptAccess" value="always"/> ' +
            par+'"FlashVars" value="' + flashVars + '"/> ' +
            par+'"allowFullScreen" value="true"/> ' +
            par+'"wmode" value="window"/> ' +
            par+'"scale" value="noscale"/> ' +
            '<object type="application/x-shockwave-flash" data="' + this.LITE_URL + '" width="'+flashWInner+'" height="'+flashHInner+'" ' + // NOT IE
            'quality="high" ' +
            'bgcolor="#000000" id="'+ this.FLASH_ID_2 + '" ' +
            'quality="high" ' +
            'FlashVars="' + flashVars + '" ' +
            'allowFullScreen="true" ' +
            'scale="noscale" ' +
            'wmode="window" ' +
            'allowScriptAccess="always">' +
            altContentHTML + // IE
            '</object>'+ // NOT IE
            '</object>';
    } else {
        if (this.ON_NO_PLUGINS) {
            this.ON_NO_PLUGINS(); // callback instead of showing NoFlash.jpg
        } else {
            fg.innerHTML = altContentHTML;
            fg.style.minWidth = "860px";
            fg.style.minHeight = "550px";
        }
    }
    if (this.BROWSER_SAF) {
        this.resizeUI(); // fixes layout
    }
},
// find the RSS feeds on this page, and return an array
indexFeeds : function () {
var linkTags = document.getElementsByTagName("link");
var feeds = [];
for (var i = 0; i != linkTags.length; ++i) {
var link = linkTags[i], type = link.getAttribute("type");
if (type == "application/rss+xml" || type == "text/xml") {
feeds.push({ title: link.getAttribute("title"), url: link.getAttribute("href") });
}
}
return feeds;
},
// once we get the response text, we launch flash
loadViaXHR : function (url) {
var self = this;
var request = window.XMLHttpRequest ? new XMLHttpRequest() : new ActiveXObject("MSXML2.XMLHTTP.3.0");
try {
request.open("GET", url, true);
request.onreadystatechange = function () {
if (request.readyState == 4) {
if ((request.status == 200 || request.status == 0)) { // 0 -> File System Testing
if (request.responseText) {
// at this point, we have the text
self.showFlashUI(request.responseText);
}
} else {
if (console) {console.log("PicLens Lite could not load the RSS Feed: " + url);}
}
}
};
request.send("");
} catch (err) { // probably a crossdomain issue, so ask flash to try loading
this.ARGS.loadFeedInFlash = true;
this.showFlashUI("");
}
},
meetsReqs : function () {
if (this.DEBUG_NOFLASH) {
return false;
}
// if IE7 and Flash detect returns v0, we show the Flash
var ie7FlashDetectionWorkaround = (this.FLASH_VER.major == 0) && this.BROWSER_IE;
var hasFlash = swfobjlite.hasFlashPlayerVersion("9.0.28");
return hasFlash || ie7FlashDetectionWorkaround;
},
removeChildren : function () {
this.REMOVE_TIMER_ID = 0;
// remove the divs after a timeout
if (this.LITE_BG_DIV !== null) {
document.body.removeChild(this.LITE_BG_DIV);
this.LITE_BG_DIV = null;
}
if (this.LITE_FG_DIV !== null) {
document.body.removeChild(this.LITE_FG_DIV);
this.LITE_FG_DIV = null;
}
},
resizeFlashToFitPaddedBox : function () {
var flash = this.getFlash();
if (flash) {
var size = this.getPageSize();
var w = size.w - this.HPAD * 2;
var h = size.h - this.VPAD * 2;
flash.style.width = w; flash.style.height = h;
flash.width = w; flash.height = h;
}
},
resizeToFitPaddedBox : function (s, size) {
if (typeof size == 'undefined') {
size = this.getPageSize();
}
s.width = (size.w - this.HPAD * 2) + 'px';
s.height = (size.h - this.VPAD * 2) + 'px';
},
resizeToPaddedBox : function (s) {
s.left = s.right = this.HPAD + 'px';
s.top = s.bottom = this.VPAD + 'px';
},
resizeUI : function () { // resize handler for Safari
if (this.LITE_FG_DIV) {
var fgs = this.LITE_FG_DIV.style;
this.resizeToPaddedBox(fgs);
this.resizeToFitPaddedBox(fgs);
this.resizeFlashToFitPaddedBox();
}
},
// Record whether the Lite UI is up; the underscore flag mirrors it for the
// old Wordpress plugin (see the field comments above).
setRunningFlag : function (flag) {
    this.LITE_IS_RUNNING = flag;
    this.piclensIsRunning_ = flag;
},
// Register the window resize listener (Safari only), at most once per page.
setResizeHandler : function () { // for safari
    if (!this.RESIZE_HANDLER_EXISTS && this.BROWSER_SAF) {
        var self = this;
        window.addEventListener('resize', function () { self.resizeUI(); }, false);
        this.RESIZE_HANDLER_EXISTS = true;
    }
},
// Start the once-a-second autoResize poll (IE6 only, since it has no usable
// resize events for our fixed-position emulation).
setResizeTimer : function () { // only do it for IE6...
    if (this.BROWSER_IE6) {
        this.RESIZE_TIMER_IE6 = setInterval(function () { PicLensLite.autoResize(); }, 1000);
    }
},
// Bring up the whole Lite UI: build the overlay and foreground box, inject
// Flash, wire input handlers, start timers/handlers, and kick off lytebox.
// The append-to-document step deliberately happens before Flash injection
// on IE and after it everywhere else (Win/Firefox requires the latter).
showFlashUI : function (feedText) {
    this.THE_FEED = feedText; // is "" if we are loading the feed in Flash
    this.findScriptLocation();
    this.createBackgroundOverlay();
    this.createForegroundFlashComponent();
    if (this.BROWSER_IE) {
        this.appendElementsToDocument();
    }
    this.injectFlashPlayer();
    if (!this.BROWSER_IE) {
        // Win Firefox needs this to be last
        // Other Browsers are OK with this
        this.appendElementsToDocument();
    }
    this.addKeyHandlers();
    this.addMouseHandlers();
    this.setRunningFlag(true);
    this.setResizeTimer();
    this.setResizeHandler();
    this.startLytebox();
},
sendMouseScrollToFlash : function (delta) {
if (!this.isRunning()) { return; }
var fl = this.getFlash();
if (fl != null && fl.fl_mouseMoved) {
fl.fl_mouseMoved(delta);
}
}
// don't end the last function with a comma; it messes up IE7
};
/* SWFObject v2.0 <http://code.google.com/p/swfobject/> / Copyright 2007 <NAME>, <NAME>, and <NAME> / MIT License */
// Trimmed-down SWFObject: detects the installed Flash Player version via the
// plugin description string (Netscape-style browsers) or ActiveX (IE), plus
// basic browser/OS sniffing. Public API: hasFlashPlayerVersion(),
// getFlashPlayerVersion().
var swfobjlite = function() {
    var UNDEF = "undefined",
    OBJECT = "object",
    SHOCKWAVE_FLASH = "Shockwave Flash",
    SHOCKWAVE_FLASH_AX = "ShockwaveFlash.ShockwaveFlash",
    win = window,
    doc = document,
    nav = navigator;
    // probe the environment once at load time
    var ua = function() {
        var w3cdom = typeof doc.getElementById != UNDEF && typeof doc.getElementsByTagName != UNDEF && typeof doc.createElement != UNDEF && typeof doc.appendChild != UNDEF
        && typeof doc.replaceChild != UNDEF && typeof doc.removeChild != UNDEF && typeof doc.cloneNode != UNDEF,
        playerVersion = [0,0,0],
        d = null;
        if (typeof nav.plugins != UNDEF && typeof nav.plugins[SHOCKWAVE_FLASH] == OBJECT) {
            // Netscape-style plugin: parse "major.minor rNN" from the description
            d = nav.plugins[SHOCKWAVE_FLASH].description;
            if (d) {
                d = d.replace(/^.*\s+(\S+\s+\S+$)/, "$1");
                playerVersion[0] = parseInt(d.replace(/^(.*)\..*$/, "$1"), 10);
                playerVersion[1] = parseInt(d.replace(/^.*\.(.*)\s.*$/, "$1"), 10);
                playerVersion[2] = /r/.test(d) ? parseInt(d.replace(/^.*r(.*)$/, "$1"), 10) : 0;
            }
        }
        else if (typeof win.ActiveXObject != UNDEF) {
            // IE: walk down ActiveX versions, avoiding known fp6 crash builds
            var a = null, fp6Crash = false;
            try {
                a = new ActiveXObject(SHOCKWAVE_FLASH_AX + ".7");
            }
            catch(e) {
                try {
                    a = new ActiveXObject(SHOCKWAVE_FLASH_AX + ".6");
                    playerVersion = [6,0,21];
                    a.AllowScriptAccess = "always"; // Introduced in fp6.0.47
                }
                catch(e) {
                    if (playerVersion[0] == 6) {
                        fp6Crash = true;
                    }
                }
                if (!fp6Crash) {
                    try {
                        a = new ActiveXObject(SHOCKWAVE_FLASH_AX);
                    }
                    catch(e) {}
                }
            }
            if (!fp6Crash && a) { // a will return null when ActiveX is disabled
                try {
                    d = a.GetVariable("$version"); // Will crash fp6.0.21/23/29
                    if (d) {
                        d = d.split(" ")[1].split(",");
                        playerVersion = [parseInt(d[0], 10), parseInt(d[1], 10), parseInt(d[2], 10)];
                    }
                }
                catch(e) {}
            }
        }
        var u = nav.userAgent.toLowerCase(),
        p = nav.platform.toLowerCase(),
        webkit = /webkit/.test(u) ? parseFloat(u.replace(/^.*webkit\/(\d+(\.\d+)?).*$/, "$1")) : false, // returns either the webkit version or false if not webkit
        ie = false,
        windows = p ? /win/.test(p) : /win/.test(u),
        mac = p ? /mac/.test(p) : /mac/.test(u);
        /*@cc_on
        ie = true;
        @if (@_win32)
        windows = true;
        @elif (@_mac)
        mac = true;
        @end
        @*/
        return { w3cdom:w3cdom, pv:playerVersion, webkit:webkit, ie:ie, win:windows, mac:mac };
    }();
    return { // PUBLIC API
        // true when the installed player is >= "major.minor.release"
        hasFlashPlayerVersion : function(rv) {
            var pv = ua.pv, v = rv.split(".");
            v[0] = parseInt(v[0], 10);
            v[1] = parseInt(v[1], 10);
            v[2] = parseInt(v[2], 10);
            return (pv[0] > v[0] || (pv[0] == v[0] && pv[1] > v[1]) || (pv[0] == v[0] && pv[1] == v[1] && pv[2] >= v[2])) ? true : false;
        },
        // the detected player version as {major, minor, release}
        getFlashPlayerVersion: function() {
            return { major:ua.pv[0], minor:ua.pv[1], release:ua.pv[2] };
        }
    };
}();
/* BrowserDetect: http://www.quirksmode.org/js/detect.html */
// Sniffs the user agent / vendor strings against the ordered tables below.
// After init(): .browser (e.g. "Firefox", "Explorer", "Safari"), .version
// (float), .OS ("Windows" / "Mac" / "Linux"). Table order matters: the
// first matching entry wins.
var BrowserDetect={
    // populate browser/version/OS, falling back to "Unknown ..." strings
    init:function() { this.browser = this.searchString(this.dataBrowser) || "Unknown Browser"; this.version = this.searchVersion(navigator.userAgent) || this.searchVersion(navigator.appVersion) || "Unknown Version"; this.OS = this.searchString(this.dataOS) || "Unknown OS"; },
    // return the identity of the first table entry whose substring (or prop) matches
    searchString:function(data) { for (var i=0;i<data.length;i++) { var dataString = data[i].string; var dataProp = data[i].prop; this.versionSearchString = data[i].versionSearch || data[i].identity; if (dataString) { if (dataString.indexOf(data[i].subString) != -1) {return data[i].identity;} } else if (dataProp) { return data[i].identity; } } },
    // parse the number following the version token chosen by searchString
    searchVersion:function(dataString) { var index = dataString.indexOf(this.versionSearchString); if (index == -1) {return;} return parseFloat(dataString.substring(index+this.versionSearchString.length+1)); },
    dataBrowser:[
    { string: navigator.userAgent, subString: "OmniWeb", versionSearch: "OmniWeb/", identity: "OmniWeb" },
    { string: navigator.vendor, subString: "Apple", identity: "Safari" },
    { prop: window.opera, identity: "Opera" },
    { string: navigator.vendor, subString: "iCab", identity: "iCab" },
    { string: navigator.vendor, subString: "KDE", identity: "Konqueror" },
    { string: navigator.userAgent, subString: "Firefox", identity: "Firefox" },
    { string: navigator.vendor, subString: "Camino", identity: "Camino" },
    { string: navigator.userAgent, subString: "Netscape", identity: "Netscape" }, // newer Netscapes (6+)
    { string: navigator.userAgent, subString: "MSIE", identity: "Explorer", versionSearch: "MSIE" },
    { string: navigator.userAgent, subString: "Gecko", identity: "Mozilla", versionSearch: "rv" },
    { string: navigator.userAgent, subString: "Mozilla", identity: "Netscape", versionSearch: "Mozilla" } // older Netscapes (4-)
    ],
    dataOS:[{ string: navigator.platform, subString: "Win", identity: "Windows" }, { string: navigator.platform, subString: "Mac", identity: "Mac" }, { string: navigator.platform, subString: "Linux", identity: "Linux" } ]
};
BrowserDetect.init();
|
<filename>controller.go
package weigo
/*MVC的C层,控制器类*/
/*
控制器基类,框架控制器,业务控制器需要继承
*/
import (
"html/template"
"io"
"net/http"
"strings"
)
// Controller is the base controller of the MVC "C" layer; framework and
// business controllers embed it.
type Controller struct {
Context *Context               // per-request context (request, response, route info)
data map[string]interface{}    // template variables collected via Assign, rendered by Display
}
// Init wires the request context into the controller and resets the
// per-request template data map; the framework calls it before dispatch.
func (controller *Controller) Init(context *Context) {
controller.Context = context
controller.data = make(map[string]interface{})
}
// Index is the controller's default action. The base implementation answers
// 404 Not Found; controllers override it to serve a real page.
func (controller *Controller) Index() {
http.NotFound(controller.Context.ResponseWriter, controller.Context.Request)
}
// List is the listing action stub; answers 404 until overridden.
func (controller *Controller) List() {
http.NotFound(controller.Context.ResponseWriter, controller.Context.Request)
}
// View is the detail action stub; answers 404 until overridden.
func (controller *Controller) View() {
http.NotFound(controller.Context.ResponseWriter, controller.Context.Request)
}
// Delete is the delete action stub; answers 404 until overridden.
func (controller *Controller) Delete() {
http.NotFound(controller.Context.ResponseWriter, controller.Context.Request)
}
// Update is the modify action stub; answers 404 until overridden.
func (controller *Controller) Update() {
http.NotFound(controller.Context.ResponseWriter, controller.Context.Request)
}
// Assign stores Value under Key for the template rendered by Display.
func (controller *Controller) Assign(Key string, Value interface{}) {
controller.data[Key] = Value
}
// Display parses and renders the template at viewName with the data
// collected via Assign. When viewName is empty, the conventional path
// <RootPath>/view/<app>/<controller>/<action>.html is used. Both parse and
// execution errors are surfaced in the response instead of being dropped.
func (controller *Controller) Display(viewName string) {
	if viewName == "" {
		viewName = RootPath + DS + "view" + DS + strings.ToLower(controller.Context.AppName) + DS + strings.ToLower(controller.Context.ControllerName) + DS + strings.ToLower(controller.Context.ActionName) + ".html"
	}
	t, err := template.ParseFiles(viewName)
	if err != nil {
		io.WriteString(controller.Context.ResponseWriter, err.Error())
		return
	}
	// FIX: the Execute error was previously discarded, hiding render failures.
	// Output may be partially written by now, but at least report the error.
	if err := t.Execute(controller.Context.ResponseWriter, controller.data); err != nil {
		io.WriteString(controller.Context.ResponseWriter, err.Error())
	}
}
// ResponseSuccess writes the standard success JSON envelope via the context.
func (controller *Controller) ResponseSuccess(msg string, data interface{}) {
controller.Context.ResponseSuccess(msg, data)
}
// ResponseError writes the standard failure JSON envelope via the context.
func (controller *Controller) ResponseError(msg string, data interface{}) {
controller.Context.ResponseError(msg, data)
}
// ResponseErrorMessage writes a predefined error-code message as JSON.
func (controller *Controller) ResponseErrorMessage(message *Message, data interface{}) {
controller.Context.ResponseMessage(message, data)
}
// ResponseString writes a plain string response via the context.
func (controller *Controller) ResponseString(msg string) {
controller.Context.ResponseString(msg)
}
// MethodNotAllowed answers 405 for HTTP verbs an action does not support.
func (controller *Controller) MethodNotAllowed() {
http.Error(controller.Context.ResponseWriter, "Method Not Allowed", http.StatusMethodNotAllowed)
}
|
'use strict';
// One sandbox project: a name plus a root _gd_sandbox_folder whose UI
// element is re-styled from "folder" to "project" and wrapped in an <li>.
class _gd_sandbox_project{
    // project_name is required; projectFolder defaults to a fresh folder
    // named after the project (and is re-created if an invalid one is given).
    constructor(project_name, projectFolder = new _gd_sandbox_folder(project_name, "div") ){
        if( !(typeof project_name == "string") )
            throw new TypeError('typeof project_name == "string"');
        if(!(projectFolder instanceof _gd_sandbox_folder)){
            console.warn("No projectFolder given or invalid projectFolder");
            this.projectFolder = new _gd_sandbox_folder(project_name);
        }
        else{
            this.projectFolder = projectFolder;
        }
        this.name = project_name;
        //this.projectFolder._make_ui_element();
        this.projectFolderUiElement = this.projectFolder.uiElement;
        this.projectFolderUiElement._contextmenu_type = "project"; // tag read by the context-menu code
        addClass(this.projectFolderUiElement, "project");
        removeClass(this.projectFolderUiElement, "folder");
        addClass(this.projectFolder.uiContent, "folder project-content");
        this.uiElement = document.createElement("li");
        this.uiElement.append(this.projectFolderUiElement);
        //this.projectFolder.toggleUiContent();
        console.log(this.projectFolder.folderContent);
    }
    // Serializable snapshot: project name plus the folder tree's data.
    projectData(){
        return{
            name : this.name,
            content : this.projectFolder.folderData(),
        };
    }
    // Add a sub-folder named after `path` under the project root.
    addFolder(path){
        this.projectFolder.addFolder(new _gd_sandbox_folder(path));
    }
    addFile(path){
        // NOTE(review): identical to addFolder — this adds a *folder*, not a
        // file. Looks like a copy-paste bug; presumably it should create a
        // file node. Confirm against _gd_sandbox_folder's API before fixing.
        this.projectFolder.addFolder(new _gd_sandbox_folder(path));
    }
    uiSetup(){
        // NOTE(review): unfinished — the while(true) loop below never
        // terminates, so calling this hangs the page. Do not call until the
        // recursive folder rendering is actually implemented.
        let cach = this.projectFolder.folderContent;
        this.ui = ui_project(this.projectFolder.name, this.projectFolder);
        while(true){
            this.ui.append(ui_folder());
        }
    }
    __uiSetupAddFolder(master, folder){
        // placeholder — intentionally empty for now
    }
}
//FOR IE
//FOR IE
// Remove `_class` from element.className (classList fallback).
// Matches whole whitespace-delimited class tokens: the previous
// indexOf + char-splice approach matched arbitrary substrings (removing
// "folder" corrupted "folderX") and left stray whitespace behind.
function removeClass(element, _class){
	let padded = " " + element.className + " ";
	let target = " " + _class + " ";
	let index = padded.indexOf(target);
	if(index > -1){
		element.className = (padded.slice(0, index) + " " + padded.slice(index + target.length)).replace(/\s+/g, " ").trim();
	}
}
// Add `_class` to element.className unless it is already present
// (classList fallback). Presence is tested against whole
// whitespace-delimited tokens instead of arbitrary substrings, so an
// existing "folderX" no longer suppresses adding "folder".
function addClass(element, _class){
	let padded = " " + element.className + " ";
	if(padded.indexOf(" " + _class + " ") == -1){
		element.className = (element.className + " " + _class).trim();
	}
}
// Toggle `_class` on element.className (classList fallback): remove it
// when present, append it otherwise. Token-based matching fixes the old
// substring/char-splice defects (false matches inside longer class names,
// leftover whitespace after removal).
function toggleClass(element, _class){
	let padded = " " + element.className + " ";
	let target = " " + _class + " ";
	let index = padded.indexOf(target);
	if(index > -1){
		element.className = (padded.slice(0, index) + " " + padded.slice(index + target.length)).replace(/\s+/g, " ").trim();
		return;
	}
	element.className = (element.className + " " + _class).trim();
}
|
-- Retail sales fact table: one row per invoice line item, denormalized
-- with product/category, salesperson and customer-demographic attributes.
-- NOTE(review): no primary key or foreign keys are declared here; confirm
-- whether (InvoiceNo, ProductId) is meant to be unique.
CREATE TABLE [Retail].[SalesDetail] (
    [InvoiceNo]       UNIQUEIDENTIFIER NOT NULL,   -- parent invoice identifier
    [Description]     NVARCHAR (100)   NULL,       -- line-item description
    [Qty]             INT              NULL,
    [Price]           NUMERIC (18, 2)  NULL,       -- unit price
    [TotalValue]      NUMERIC (18, 2)  NULL,
    [Discount]        NUMERIC (18, 2)  NULL,
    [NetDiscount]     NUMERIC (18, 2)  NULL,
    [NetAmount]       NUMERIC (18, 2)  NULL,
    [ActualSales]     NUMERIC (18, 2)  NULL,
    [Gst]             NUMERIC (18, 4)  NULL,       -- tax, kept at higher precision
    [SalesManId]      NVARCHAR (50)    NULL,
    [ProductId]       INT              NOT NULL,
    [TopCategoryId]   INT              NULL,
    [CategoryId]      INT              NULL,
    [BrandId]         NVARCHAR (50)    NULL,
    [AverageCost]     NUMERIC (18, 2)  NULL,
    -- NOTE(review): NUMERIC(18) defaults to scale 0 (whole numbers only);
    -- the other money columns use scale 2 — confirm this is intentional.
    [MemberDiscount]  NUMERIC (18)     NULL,
    [IsRegularBuy]    BIT              NULL,
    [Profile]         NVARCHAR (50)    NULL,       -- customer profile label
    [AgeGroupType]    NVARCHAR (50)    NULL,
    [Gender]          NVARCHAR (10)    NULL,
    [TimeSliceId]     INT              NULL,
    [TimeSlice]       NVARCHAR (50)    NULL,       -- human-readable time bucket
    [CreateDate]      DATETIME         NOT NULL,
    [ModifyDate]      DATETIME         NULL
);
|
// <coupon> — a text input that supports v-model by emitting `input`
// events. Codes found in the `invalides` blacklist are cleared from the
// field before the (now empty) value is emitted.
Vue.component('coupon', {
	template: `<input type="text" :value="code" @input="updateCode($event.target.value)" ref="input">`,
	data() {
		return {
			code: '',
			invalides: ['notkoko', 'koko']
		};
	},
	methods: {
		// Validate the typed code, blanking blacklisted entries, then
		// propagate the resulting value to the parent via `input`.
		updateCode(code) {
			if (this.invalides.includes(code)) {
				alert("THE BEST");
				this.$refs.input.value = code = '';
			}
			this.$emit('input', code);
		}
	}
});
// Root Vue instance; `coupon` is the model backing the component above.
new Vue({
	el: '#app',
	data: {
		coupon: 'coupon'
	}
});
|
def CalculateDistanceMoved(speed, time):
    """Return the distance covered when moving at a constant ``speed``
    for the given ``time`` (distance = speed * time)."""
    return speed * time
|
/////////////////////////////////////////////////////////////
// UserManagementService.java
// gooru-api
// Created by Gooru on 2014
// Copyright (c) 2014 Gooru. All rights reserved.
// http://www.goorulearning.org/
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
/////////////////////////////////////////////////////////////
package org.ednovo.gooru.domain.service.userManagement;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.ednovo.gooru.core.api.model.ActionResponseDTO;
import org.ednovo.gooru.core.api.model.Application;
import org.ednovo.gooru.core.api.model.CustomTableValue;
import org.ednovo.gooru.core.api.model.EntityOperation;
import org.ednovo.gooru.core.api.model.Identity;
import org.ednovo.gooru.core.api.model.Profile;
import org.ednovo.gooru.core.api.model.RoleEntityOperation;
import org.ednovo.gooru.core.api.model.User;
import org.ednovo.gooru.core.api.model.UserRole;
import org.ednovo.gooru.core.api.model.UserRoleAssoc;
import org.ednovo.gooru.core.api.model.UserToken;
import org.ednovo.gooru.domain.service.BaseService;
import org.ednovo.gooru.domain.service.search.SearchResults;
/**
 * Service contract for Gooru user management: account creation and
 * credentials, profile maintenance, follow relationships, and
 * role/permission administration. Method semantics below are inferred
 * from signatures only — the implementation is not visible in this file.
 */
public interface UserManagementService extends BaseService {
	// --- Lookups and access checks ---------------------------------------
	Set<String> checkContentAccess(User authenticationUser, String contentgooruId);
	Map<String, Object> userMeta(User user);
	User findByGooruId(String gooruId);
	Profile getProfile(User user);
	Profile getUserProfile(String gooruUid, Integer activeFlag);
	Identity findUserByGooruId(String gooruId);
	User getUserByToken(String userToken);
	// --- Follow relationships (paged via offset/limit) -------------------
	SearchResults<Map<String, Object>> getFollowedOnUsers(String gooruUId, Integer offset, Integer limit);
	Boolean isFollowedUser(String gooruUserId, User apiCaller);
	SearchResults<Map<String, Object>> getFollowedByUsers(String gooruUserId, Integer offset, Integer limit);
	// --- Profile and account lifecycle -----------------------------------
	Profile updateProfileInfo(Profile profile, String gooruUid, User apiCaller, String activeFlag, Boolean emailConfirmStatus, String showProfilePage,String accountType,String password);
	void validateUserOrganization(String organizationCode, String superAdminToken) throws Exception;
	User createUserWithValidation(User user, String password, String school, Integer confirmStatus, Boolean useGeneratedPassword, Boolean sendConfirmationMail, User apiCaller, String accountType, String dateOfBirth, String userParentId, String sessionId, String gender, String childDOB,
			String gooruClassicUrl, Boolean token, HttpServletRequest resRequest, String role, String mailConfirmationUrl) throws Exception;
	Boolean isContentAdmin(User user);
	UserToken createSessionToken(User user, String sessionId, Application application);
	User createUser(User user, String password, String school, Integer confirmStatus, Integer addedBySystem, String userImportCode, String accountType, String dateOfBirth, String userParentId, String remoteEntityId, String gender, String childDOB, String source, String emailSSO,
			HttpServletRequest resRequest, String role, String mailConfirmationUrl) throws Exception;
	User getUser(String gooruUId) throws Exception;
	String buildUserProfileImageUrl(User user);
	String encryptPassword(String password);
	// Overload of createUser without remoteEntityId/emailSSO.
	User createUser(User user, String password, String school, Integer confirmStatus, Integer addedBySystem, String userImportCode, String accountType, String dateOfBirth, String userParentId, String gender, String childDOB, String source, HttpServletRequest resRequest, String role, String mailConfirmationUrl)
			throws Exception;
	// --- Credentials, confirmation and cleanup ---------------------------
	User resetPasswordRequest(String emailId, String gooruClassicUrl, User apicaller,String mailConfirmationUrl) throws Exception;
	Identity resetCredential(String token, String gooruUid, String password, User apicaller, String mailConfirmationUrl,Boolean isPartnerPortal) throws Exception;
	String getUserEmailFromIdentity(Set<Identity> identity);
	void deleteUserMeta(String gooruUid, Profile newProfile, User apicaller);
	void deleteUserContent(String gooruUid, String newUser, User apiCaller);
	void deleteUserImageProfile(String userId) throws Exception;
	User resendConfirmationMail(String gooruUid,User apicaller,String sessionId,String gooruBaseUrl,String type) throws Exception;
	void updateOrgAdminCustomField(String organizationUid, User user) throws Exception;
	User updateUserViewFlagStatus(String gooruUid, Integer viewFlag);
	Map<String, Object> followUser(User user, String followOnUserId);
	void unFollowUser(User user, String unFollowUserId);
	Map<String, Object> getUserSummary(String gooruUid);
	void resetEmailAddress(List<String> data) throws Exception;
	// --- Roles and entity operations -------------------------------------
	SearchResults<UserRole> getRoles(Integer offset, Integer limit,String userUid);
	ActionResponseDTO<UserRole> createNewRole(UserRole userRole, User user) throws Exception;
	UserRole updateRole(UserRole role,Integer roleId) throws Exception;
	void removeRole(Integer roleId) throws Exception;
	EntityOperation getEntityOperationByEntityOperationId(Integer entityOperationId);
	SearchResults<EntityOperation> findAllEntityNames(Integer offset, Integer limit);
	List<EntityOperation> getOperationsByEntityName(String entityName);
	UserRoleAssoc assignRoleByUserUid(Integer roleId,String userUid) throws Exception;
	void removeAssignedRoleByUserUid(Integer roleId,String userUid) throws Exception;
	UserRole getRoleByRoleId(Integer roleId);
	List<RoleEntityOperation> getRoleOperationsByRoleId(Integer roleId);
	List<CustomTableValue> getUserCategory(User apiCaller);
}
|
#!/usr/bin/env bash
testdir=$(readlink -f $(dirname $0))
rootdir=$(readlink -f $testdir/../..)
rpc_server=/var/tmp/spdk-raid.sock
rpc_py="$rootdir/scripts/rpc.py -s $rpc_server"
tmp_file=/tmp/raidrandtest
source $rootdir/test/common/autotest_common.sh
source $testdir/nbd_common.sh
# Write random data to the raid bdev through its nbd device, punch holes
# with blkdiscard at several offsets, and after every step verify that the
# device contents match a local reference file. Silently no-ops when the
# blkdiscard tool is unavailable.
#   $1 - nbd block device path
#   $2 - rpc socket path (captured below but not used further in this
#        function — NOTE(review): possibly vestigial)
function raid_unmap_data_verify() {
	if hash blkdiscard; then
		local nbd=$1
		local rpc_server=$2
		local blksize
		# Logical sector size of the nbd device; the field index assumes
		# lsblk's default column padding — NOTE(review): fragile, confirm.
		blksize=$(lsblk -o LOG-SEC $nbd | grep -v LOG-SEC | cut -d ' ' -f 5)
		local rw_blk_num=4096
		local rw_len=$((blksize * rw_blk_num))
		# Parallel arrays: discard region i starts at offs[i] blocks and
		# spans nums[i] blocks.
		local unmap_blk_offs=(0 1028 321)
		local unmap_blk_nums=(128 2035 456)
		local unmap_off
		local unmap_len
		# data write
		dd if=/dev/urandom of=$tmp_file bs=$blksize count=$rw_blk_num
		dd if=$tmp_file of=$nbd bs=$blksize count=$rw_blk_num oflag=direct
		blockdev --flushbufs $nbd
		# confirm random data is written correctly in raid0 device
		cmp -b -n $rw_len $tmp_file $nbd
		for (( i=0; i<${#unmap_blk_offs[@]}; i++ )); do
			unmap_off=$((blksize * ${unmap_blk_offs[$i]}))
			unmap_len=$((blksize * ${unmap_blk_nums[$i]}))
			# data unmap on tmp_file (zero the same region so the
			# reference file mirrors the expected post-discard state)
			dd if=/dev/zero of=$tmp_file bs=$blksize seek=${unmap_blk_offs[$i]} count=${unmap_blk_nums[$i]} conv=notrunc
			# data unmap on raid bdev
			blkdiscard -o $unmap_off -l $unmap_len $nbd
			blockdev --flushbufs $nbd
			# data verify after unmap
			cmp -b -n $rw_len $tmp_file $nbd
		done
	fi
	return 0
}
# ERR-trap handler: stop the background raid app if it was started, drop
# the scratch file, dump a stack trace, and abort with a failing status.
function on_error_exit() {
	if [[ -n "$raid_pid" ]]; then
		killprocess "$raid_pid"
	fi
	rm -f "$tmp_file"
	print_backtrace
	exit 1
}
function configure_raid_bdev() {
rm -rf $testdir/rpcs.txt
cat <<- EOL >> $testdir/rpcs.txt
bdev_malloc_create 32 512 -b Base_1
bdev_malloc_create 32 512 -b Base_2
bdev_raid_create -z 64 -r 0 -b "Base_1 Base_2" -n raid0
EOL
$rpc_py < $testdir/rpcs.txt
rm -rf $testdir/rpcs.txt
}
# End-to-end raid0 smoke test: start the SPDK bdev service, build a raid0
# bdev, expose it via /dev/nbd0, run the write/discard/verify pass, then
# tear everything down. Skipped (with a message) when not on Linux or when
# the nbd kernel module is unavailable.
function raid_function_test() {
	if [ "$(uname -s)" = Linux ] && modprobe -n nbd; then
		local nbd=/dev/nbd0
		local raid_bdev
		modprobe nbd
		$rootdir/test/app/bdev_svc/bdev_svc -r $rpc_server -i 0 -L bdev_raid &
		raid_pid=$!
		echo "Process raid pid: $raid_pid"
		waitforlisten $raid_pid $rpc_server
		configure_raid_bdev
		raid_bdev=$($rpc_py bdev_raid_get_bdevs online | cut -d ' ' -f 1)
		# Use -z with a quoted expansion: the old unquoted
		# `[ $raid_bdev = "" ]` collapsed to `[ = "" ]` when the lookup
		# came back empty, making `[` fail with a syntax error instead
		# of reporting the missing device.
		if [ -z "$raid_bdev" ]; then
			echo "No raid0 device in SPDK app"
			return 1
		fi
		nbd_start_disks $rpc_server $raid_bdev $nbd
		count=$(nbd_get_count $rpc_server)
		if [ "$count" -ne 1 ]; then
			return 1
		fi
		raid_unmap_data_verify $nbd $rpc_server
		nbd_stop_disks $rpc_server $nbd
		count=$(nbd_get_count $rpc_server)
		if [ "$count" -ne 0 ]; then
			return 1
		fi
		killprocess $raid_pid
	else
		echo "skipping bdev raid tests."
	fi
	return 0
}
# Abort with diagnostics on any command failure, run the test, then remove
# the scratch file and record completion for the test harness.
trap 'on_error_exit;' ERR
raid_function_test
rm -f $tmp_file
report_test_completion "bdev_raid"
|
#!/usr/bin/env bash
##
## Copyright (c) 2020 Hanson Robotics.
##
## This file is part of Hanson AI.
## See https://www.hansonrobotics.com/hanson-ai for further info.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
# Build the head-ros-offline-asr Debian package: mirror this repository
# into a catkin workspace under $BASEDIR/src, build/install it with
# catkin_make_isolated, then wrap the install space into a .deb with fpm.
# Relies on helpers/variables from common.sh (get_version -> $version,
# source_ros, cleanup_ros_package_build, $VENDOR, $HR_ROS_PREFIX).
#   $1 - version selector forwarded to get_version
package() {
    local reponame=ros_offline_asr
    mkdir -p $BASEDIR/src
    # Stage the repo sources, excluding VCS and packaging metadata.
    rsync -r --delete \
        --exclude ".git" \
        --exclude "package" \
        $BASEDIR/../ $BASEDIR/src/$reponame
    get_version $1
    source_ros
    catkin_make_isolated --directory $BASEDIR --install --install-space $BASEDIR/install -DCMAKE_BUILD_TYPE=Release
    local name=head-ros-offline-asr
    local desc="ROS Offline Speech recognition"
    local url="https://api.github.com/repos/hansonrobotics/$reponame/releases"
    #pushd "$BASEDIR/src/$reponame" >/dev/null
    #./download_models.sh
    #popd >/dev/null
    # NOTE(review): ${ms} is never assigned in this script — presumably it
    # is set by common.sh; confirm, otherwise it expands to nothing.
    fpm -C "${BASEDIR}" -s dir -t deb -n "${name}" -v "${version#v}" --vendor "${VENDOR}" \
        --url "${url}" --description "${desc}" ${ms} \
        --deb-no-default-config-files \
        -p $BASEDIR/${name}_VERSION_ARCH.deb \
        install/share=${HR_ROS_PREFIX}/ \
        install/lib=${HR_ROS_PREFIX}/
    cleanup_ros_package_build $BASEDIR
}
# Run the build only when this script is executed directly (not sourced
# as a library by another packaging script).
if [[ $(readlink -f ${BASH_SOURCE[0]}) == $(readlink -f $0) ]]; then
    BASEDIR=$(dirname $(readlink -f ${BASH_SOURCE[0]}))
    source $BASEDIR/common.sh
    set -e
    package $1
fi
|
import { ExpansionPanelSummary } from '@material-ui/core'
import blue from '@material-ui/core/colors/blue'
import { makeStyles } from '@material-ui/core/styles'
import EditIcon from '@material-ui/icons/Edit'
import ExpandMoreIcon from '@material-ui/icons/ExpandMore'
import React from 'react'
// eslint-disable-next-line @typescript-eslint/no-unused-vars
// JSS style hook for the expansion-panel summary. The empty `expanded`
// rule exists only so the "&$expanded" selectors below can reference it
// and override Material-UI's default expanded-state sizing.
const useStyles = makeStyles((theme) => ({
  expanded: {}, // DONT DELETE THIS
  root: {
    background: theme.palette.details.main,
    minHeight: '36px',
    // Keep the same compact height when the panel is expanded.
    '&$expanded': {
      minHeight: '36px',
      height: 'auto',
    },
    height: 'auto',
  },
  expandIcon: {
    padding: '2px',
    '&$expanded': {
      padding: '2px',
    },
  },
  content: {
    margin: '4px',
    '& p': {
      margin: '4px',
    },
    '&$expanded': {
      margin: '4px',
    },
    fontWeight: 'bold',
  },
}))
// eslint-disable-next-line @typescript-eslint/no-unused-vars
export default function Summary ({ className = null, children, ...props }) {
const classes = useStyles()
return (
<ExpansionPanelSummary
classes={classes}
expandIcon={<ExpandMoreIcon />}
aria-controls="expand"
{...props}
>
<EditIcon
style={{
margin: '0px 10px 2px -5px',
alignSelf: 'center',
color: blue[500],
}}
/>
{children}
</ExpansionPanelSummary>
)
}
|
<filename>qutebrowser/greass monkey/duckduckgo.js
// ==UserScript==
// @name Duckduckgo custom CSS
// @namespace https://github.com/olmokramer
// @description Custom CSS for *.duckduckgo.com
// @include *.duckduckgo.com
// @include *duckduckgo.com
// @run-at document-start
// @version 2
// @author <NAME>
// ==/UserScript==
// Inject a dark (gruvbox-palette) stylesheet over DuckDuckGo's defaults
// once the DOM has been parsed. The script runs at document-start (see
// the userscript header), so it waits for readyState "interactive"
// before document.head exists and the style element can be attached.
(function IIFE() {
'use strict';
document.addEventListener('readystatechange', function onReadyStateChange() {
if (document.readyState == 'interactive') {
const style = document.createElement('style');
document.head.appendChild(style);
// CSS payload: every rule uses !important to win over the site's own
// specificity. Selectors target DDG's search, autocomplete, zero-click
// info and result-tile widgets.
style.innerHTML = `
* {
border-color: #fabd2f !important;
}
a {
color: #458588 !important;
}
a:visited {
color: #d3869b !important;
}
body {
background-color: #282828 !important;
color: #83a598 !important;
}
html {
background-color: #282828 !important;
}
input,
textarea {
background-color: #3c3836 !important;
color: #83a598 !important;
}
blockquote,
pre {
background-color: #3c3836 !important;
color: #83a598 !important;
}
.search--adv {
background-color: #3c3836 !important;
}
.search__autocomplete > .acp-wrap {
background-color: #282828 !important;
}
.search__autocomplete > .acp-wrap > .acp--highlight {
background-color: #3c3836 !important;
color: #a89984 !important;
}
.search__autocomplete > .acp-wrap strong {
color: #a89984 !important;
}
.site-wrapper > #header_wrapper {
background-color: #282828 !important;
}
.site-wrapper > #header_wrapper > #header {
background-color: #282828 !important;
}
.search--header {
background-color: #3c3836 !important;
}
.zci {
background-color: #3c3836 !important;
color: #83a598 !important;
}
.tile--info {
background-color: #282828 !important;
}
.tile--info__expand {
background-color: #b8bb26 !important;
}
.tile--c {
background-color: #b8bb26 !important;
color: #a89984 !important;
}
.module__text {
color: #83a598 !important;
}
.about-info-box__heading {
color: #8ec07c !important;
}
.result.highlight {
background-color: #3c3836 !important;
}
.result__snippet {
color: #83a598 !important;
}
.result__snippet b {
color: #8ec07c !important;
}
.btn--top {
background-color: #3c3836 !important;
color: #83a598 !important;
}
.btn--top:hover {
background-color: #b8bb26 !important;
}
.result--sep--hr:before {
background-color: #b8bb26 !important;
}
`;
}
});
})();
|
class NginxMetrics:
    """Collects named metric samples and exposes simple statistics."""

    def __init__(self):
        # Maps metric name -> list of recorded samples, oldest first.
        self.metrics = {}

    def add_metric(self, name, value):
        """Record one sample for ``name``, creating its series on first use."""
        self.metrics.setdefault(name, []).append(value)

    def get_metric(self, name):
        """Return the most recent sample for ``name`` (None if never seen)."""
        series = self.metrics.get(name)
        if series is None:
            return None
        return series[-1]

    def calculate_average(self, name):
        """Return the arithmetic mean of all samples for ``name``.

        None is returned when the metric is unknown or has no samples.
        """
        series = self.metrics.get(name)
        if series:
            return sum(series) / len(series)
        return None
|
#!/bin/bash
#SBATCH --gres=gpu:2 # request GPU "generic resource"
#SBATCH --cpus-per-task=6 # maximum CPU cores per GPU request: 6 on Cedar, 16 on Graham.
#SBATCH --mem=15000M # memory per node
#SBATCH --time=0-06:00 # time (DD-HH:MM)
#SBATCH --output=scripts/caps_r/cifar10/train/o_train_BIM_ep1_iter16.out # %N for node name, %j for jobID
# Slurm job: run the caps_r model experiment on CIFAR-10 under a BIM
# adversarial attack with epsilon 1 and 16 attack iterations, writing
# summaries to scratch storage.
source ~/tfp363/bin/activate # Python/TensorFlow virtualenv
REPO_DIR=/home/xuc/Adversarial-Attack-on-CapsNets
SUMMARY_DIR=/home/xuc/scratch/xuc/summary/
# Experiment parameters, forwarded to experiment.py below.
MODEL=caps_r
DATASET=cifar10
ADVERSARIAL_METHOD=BIM
EPSILON=1
ITERATION_N=16
python $REPO_DIR/experiment.py --data_dir=$REPO_DIR/data/$MODEL/$DATASET --dataset=$DATASET --adversarial_method=$ADVERSARIAL_METHOD --epsilon=$EPSILON --iteration_n=$ITERATION_N --summary_dir=$SUMMARY_DIR --model=$MODEL
|
<reponame>dennisdrew/mysharepal
/**
* Determines which screen / experience the app should route to, based on current user state, like:
*
* - Is logged in
* - Does belong to a ministry
* - Is a ministry admin
*
* Etc.
*/
import { ContactsStates } from './presentation/redux/Contacts'
import { MinistryMgmtStates } from './presentation/redux/MinistryMgmt'
import handleError, { AUTH_ERROR, CONTACTS_ERROR, TIMEOUT_ERROR } from './utils/GlobalErrorHandler'
export default {
	// NOTE(review): `route` is an empty *object*, not a function — callers
	// invoking route() will crash. Confirm whether this stub should mirror
	// routeByState's signature.
	route: ({
	}),
	// Decide which screen to show based on auth/ministry/contacts state.
	// Currently short-circuits straight to the welcome screen; the full
	// decision tree is preserved below as commented-out TODO code.
	routeByState: ({
		// State params
		user,
		hasCheckedAuth,
		ministry,
		contactsState,
		contacts,
		logInError,
		contactsError,
		genericError,
		// Callback functions
		goToWelcome,
		refreshMinistryStatus,
		goToAdminHome,
		goToSharerHome,
		checkForContacts
	}) => {
		goToWelcome()
		// TODO
		// if (!hasCheckedAuth) {
		//   return
		// }
		// if (user == null) {
		//   // Not logged in --> go to welcome screen
		//   goToWelcome()
		// } else if (ministry.ministryState == MinistryMgmtStates.NOT_READY) {
		//   // Logged in; update ministry status
		//   refreshMinistryStatus(user)
		// } else if (ministry.ministryState == MinistryMgmtStates.REFRESHING) {
		//   // do nothing
		// } else if (ministry.isAdmin) {
		//   // Go to ministry admin page
		//   goToAdminHome()
		// } else if (contacts && contacts.length) {
		//   // Logged in & has contacts --> go to home screen
		//   goToSharerHome()
		// } else if (contactsState == ContactsStates.NOT_READY) {
		//   // Logged in, but has not checked for contacts --> check first
		//   checkForContacts(user.uid)
		// } else if (contactsState !== ContactsStates.LOADING) {
		//   // Logged in, already checked for contacts, no contacts
		//   goToWelcome()
		// }
		// if (logInError != null || contactsError != null || genericError.error != null) {
		//   if (logInError != null) handleError(AUTH_ERROR, logInError)
		//   if (contactsError != null) handleError(CONTACTS_ERROR, contactsError)
		//   if (genericError.error != null) handleError(TIMEOUT_ERROR, genericError.error)
		//   // Just go to welcome screen in the current state
		//   goToWelcome()
		// }
	}
}
|
import employeeService from '../services/EmployeeService';
import Employee from '../models/Employee';
// Thin HTTP controller delegating CRUD operations to EmployeeService.
// NOTE(review): `res` is never used and none of these handlers send a
// response themselves — presumably a surrounding router layer serializes
// the returned value / ends the request; confirm, otherwise create/update/
// delete requests will hang.
class EmployeeController {
    // Create a new employee from the request body (fire-and-forget).
    create (req: any, res: any) {
        const employee: Employee = req.body.employee;
        employeeService.create(employee);
    }
    // Return every employee.
    getAll (req: any, res: any) {
        return employeeService.getAll();
    }
    // Return a single employee by the `id` route parameter.
    getById (req: any, res: any) {
        const id: string = req.params.id;
        return employeeService.getById(id);
    }
    // Update an existing employee from the request body (fire-and-forget).
    update (req: any, res: any) {
        const employee: Employee = req.body.employee;
        employeeService.update(employee);
    }
    // Delete the employee identified by the `id` route parameter.
    delete (req: any, res: any) {
        const id: string = req.params.id;
        employeeService.delete(id);
    }
};
// Export a shared singleton instance.
export default new EmployeeController();
|
# Execute the node-0 job list with up to 32 concurrent GNU parallel workers.
parallel --jobs 32 < ./results/exp_threads/run-1/lustre_5n_32t_6d_1000f_617m_5i/jobs/jobs_n0.txt
|
#!/bin/bash
# Validate every Swagger 2.0 spec found under the current directory.
# Stream find's output into a while/read loop (instead of word-splitting
# an unquoted command substitution) so paths containing whitespace are
# handled correctly, and quote $i for the same reason.
find . -name "swagger.yaml" -type f | while IFS= read -r i; do
	echo "validating $i"
	pipenv run openapi-spec-validator --schema 2.0 "$i"
done
|
#!/bin/bash
# Unpack the bundled contour-detection model archive into the current
# directory, then delete the archive itself to reclaim disk space.
tar -xf contour-detection-model.tar.gz
rm -f contour-detection-model.tar.gz
<filename>tests/publish.test.js<gh_stars>1-10
'use strict';
process.env.QUEUE_LOG_PAYLOAD = 'true';
const tape = require('tape');
const queueLib = require('./../lib/index');
const config = require('./config');
const test_routes = { test1: 'test.test1', error_test: 'test.errorSync' };
// Build a queue router plus a set of knobs the tests use to swap handler
// behaviour in at runtime. Returns [config, router] where
//   config.handler(fn) / config.status(code) control the implementation
//     and reply status used by the main test topic and the sync topic,
//   config.errorHandler(fn) / config.errorStatus(code) do the same for
//     the error-queue route, and
//   router(server) registers the three routes on the queue server.
const makeRouterWithHandler = () => {
    let implementation;
    let errImplementation = () => {};
    let responseCode = 200;
    let errResponseCode = 200;
    const handler = (fn) => implementation = fn;
    const errorHandler = (fn) => errImplementation = fn;
    const status = (code) => responseCode = code;
    const errorStatus = (code) => errResponseCode = code;
    const router = (server) => {
        server.route({
            topic: test_routes.test1,
            handler: (req, reply) => {
                return Promise.resolve()
                    .then(() => implementation(req))
                    .then(() => reply({}).code(responseCode));
            }
        });
        // Sync handler without reply
        server.route({
            topic: test_routes.error_test,
            handler: (req) => {
                return implementation(req);
            }
        });
        // Error queue handler
        server.route({
            queue: config.queueConfig.errorQueue,
            topic: config.queueConfig.errorTopic,
            handler: (req, reply) => {
                return Promise.resolve()
                    .then(() => errImplementation(req))
                    .then(() => reply({}).code(errResponseCode));
            }
        });
    };
    return [{handler, status, errorHandler, errorStatus}, router];
};
const [routeConfig, router] = makeRouterWithHandler();
// Captured from the queue library during "Set up" for use by later tests.
let myPublish, myClose;
// Create the queue connection and capture publish/close for the suite.
tape('Set up', async (t) => {
    const { publish, closeConnection } = await queueLib.create({queueConfig: config.queueConfig, routes: router});
    t.equal(typeof publish, 'function', 'publish Should be a function');
    t.equal(publish.length, 2, 'Should receive 2 parameters');
    myPublish = publish;
    myClose = closeConnection;
    t.end();
});
// A handler that throws synchronously must still deliver the message.
tape('Should handle correctly a handler error', async(t) => {
    const test_msg = 'My test message';
    routeConfig.handler((req) => {
        t.equal(req.queueMessage, test_msg, 'Should be equal to ' + test_msg);
        t.end();
        throw new Error('Handler errored');
    });
    myPublish(test_routes.error_test, test_msg);
});
// String payloads arrive as req.queueMessage.
tape('Should publish and receive the message if it is a string', async(t) => {
    const test_msg = 'My test message';
    routeConfig.handler((req) => {
        t.equal(req.queueMessage, test_msg, 'Should be equal to ' + test_msg);
        t.end();
        return Promise.resolve();
    });
    myPublish(test_routes.test1, test_msg);
});
// A null payload surfaces as an undefined queueMessage.
tape('Should publish a null and handle it correctly', async(t) => {
    const test_msg = null;
    routeConfig.handler((req) => {
        t.equal(req.queueMessage, undefined, 'Should be undefined');
        t.end();
        return Promise.resolve();
    });
    myPublish(test_routes.test1, test_msg);
});
// JSON payloads are spread onto the request (payload/headers preserved).
tape('Should publish and receive a message that contains the message if it is a JSON', async(t) => {
    const test_msg = { payload: 'test', headers: { a: 1, b: '2', c: 'ABC'} };
    routeConfig.handler((req) => {
        t.equal(req.payload, test_msg.payload, 'Should be equal to ' + test_msg.payload);
        t.equal(req.headers.a, test_msg.headers.a, 'Should be equal to ' + test_msg.headers.a);
        t.equal(req.headers.b, test_msg.headers.b, 'Should be equal to ' + test_msg.headers.b);
        t.equal(req.headers.c, test_msg.headers.c, 'Should be equal to ' + test_msg.headers.c);
        t.end();
        return Promise.resolve();
    });
    myPublish(test_routes.test1, test_msg);
});
// The library injects an x-trace-id header when none is supplied.
tape('Should publish and receive a JSON adding the trace id to the headers', async(t) => {
    const test_msg = { payload: 'test', params: { a: 1, b: '2', c: 'ABC'} };
    routeConfig.handler((req) => {
        t.deepLooseEqual(req.payload, test_msg.payload, 'Should be equal to ' + test_msg.payload);
        t.deepLooseEqual(req.params, test_msg.params, 'Should be equal to ' + test_msg.params);
        t.equal(typeof req.headers['x-trace-id'], 'string', 'Should set the x-trace-id header');
        t.end();
        return Promise.resolve();
    });
    myPublish(test_routes.test1, test_msg);
});
// A 4XX reply from the main handler routes the message to the error queue,
// so both handlers fire (hence t.plan(2)).
tape('Should send to the error queue if a 4XX error is received', async(t) => {
    t.plan(2);
    const test_msg = { payload: 'test', params: { a: 1, b: '2', c: 'ABC'} };
    routeConfig.status(400);
    routeConfig.handler(() => {
        t.pass();
        return Promise.resolve();
    });
    routeConfig.errorHandler(() => {
        t.pass();
        return Promise.resolve();
    });
    myPublish(test_routes.test1, test_msg);
});
// Close the queue connection so the process can exit.
tape('Teardown', async (t) => {
    await myClose();
    t.end();
});
|
<filename>runescape-client/src/main/java/AbstractSocket.java
import java.io.IOException;
import net.runelite.mapping.Export;
import net.runelite.mapping.Implements;
import net.runelite.mapping.ObfuscatedName;
import net.runelite.mapping.ObfuscatedSignature;
/**
 * Deobfuscated RuneScape client socket abstraction. The obfuscation
 * annotations below map obfuscated member names back to readable ones and
 * must not be altered. Subclasses supply the concrete transport.
 */
@ObfuscatedName("li")
@Implements("AbstractSocket")
public abstract class AbstractSocket {
	protected AbstractSocket() {
	} // L: 7

	// Closes the underlying connection (deobfuscated name: close).
	@ObfuscatedName("f")
	@ObfuscatedSignature(
		descriptor = "(I)V",
		garbageValue = "-1705512918"
	)
	@Export("close")
	public abstract void close();

	// Reads one byte as an unsigned int (deobfuscated: readUnsignedByte).
	@ObfuscatedName("o")
	@ObfuscatedSignature(
		descriptor = "(B)I",
		garbageValue = "-75"
	)
	@Export("readUnsignedByte")
	public abstract int readUnsignedByte() throws IOException;

	// Number of bytes readable without blocking (deobfuscated: available).
	@ObfuscatedName("u")
	@ObfuscatedSignature(
		descriptor = "(B)I",
		garbageValue = "-65"
	)
	@Export("available")
	public abstract int available() throws IOException;

	// Whether var1 bytes can be read (deobfuscated: isAvailable).
	@ObfuscatedName("p")
	@ObfuscatedSignature(
		descriptor = "(II)Z",
		garbageValue = "1604537239"
	)
	@Export("isAvailable")
	public abstract boolean isAvailable(int var1) throws IOException;

	// Reads up to var3 bytes into var1 at offset var2 (deobfuscated: read).
	@ObfuscatedName("b")
	@ObfuscatedSignature(
		descriptor = "([BIIB)I",
		garbageValue = "121"
	)
	@Export("read")
	public abstract int read(byte[] var1, int var2, int var3) throws IOException;

	// Writes var3 bytes from var1 starting at var2 (deobfuscated: write).
	@ObfuscatedName("k")
	@ObfuscatedSignature(
		descriptor = "([BIII)V",
		garbageValue = "199608149"
	)
	@Export("write")
	public abstract void write(byte[] var1, int var2, int var3) throws IOException;

	// Unrelated static helper the deobfuscator placed in this class: for a
	// loaded interface (var0), fires each component's onLoad script via the
	// client script engine. No-ops for -1 or when the interface fails to load.
	@ObfuscatedName("ar")
	@ObfuscatedSignature(
		descriptor = "(II)V",
		garbageValue = "-1917773732"
	)
	@Export("runWidgetOnLoadListener")
	static void runWidgetOnLoadListener(int var0) {
		if (var0 != -1) { // L: 4271
			if (class15.loadInterface(var0)) { // L: 4272
				Widget[] var1 = Widget.Widget_interfaceComponents[var0]; // L: 4273
				for (int var2 = 0; var2 < var1.length; ++var2) { // L: 4274
					Widget var3 = var1[var2]; // L: 4275
					if (var3.onLoad != null) { // L: 4276
						ScriptEvent var4 = new ScriptEvent(); // L: 4277
						var4.widget = var3; // L: 4278
						var4.args = var3.onLoad; // L: 4279
						FaceNormal.runScript(var4, 5000000, 0); // L: 4280
					}
				}
			}
		}
	} // L: 4283
}
|
<reponame>lananh265/social-network<filename>node_modules/react-icons-kit/linea/basic_cards_diamonds.js
"use strict";
// Auto-generated icon definition (Linea "basic cards diamonds") for
// react-icons-kit: a plain-object description of the SVG (viewBox plus a
// tree of shape nodes). Do not edit by hand — regenerate from the source
// SVG instead.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.basic_cards_diamonds = void 0;
var basic_cards_diamonds = {
  "viewBox": "0 0 64 64",
  "children": [{
    "name": "polygon",
    "attribs": {
      "fill": "none",
      "stroke": "#000000",
      "stroke-width": "2",
      "stroke-miterlimit": "10",
      "points": "44,59 16,45 36,5 63,19 "
    },
    "children": []
  }, {
    "name": "polyline",
    "attribs": {
      "fill": "none",
      "stroke": "#000000",
      "stroke-width": "2",
      "stroke-miterlimit": "10",
      "points": "31.899,14.004 28,6 1,20 19,59 32,52.964 \r\n\t"
    },
    "children": []
  }, {
    "name": "polygon",
    "attribs": {
      "fill": "none",
      "stroke": "#000000",
      "stroke-width": "2",
      "stroke-miterlimit": "10",
      "points": "35,41 46,36 45,24 34,29 "
    },
    "children": []
  }, {
    "name": "line",
    "attribs": {
      "fill": "none",
      "stroke": "#000000",
      "stroke-width": "2",
      "stroke-miterlimit": "10",
      "x1": "38",
      "y1": "9",
      "x2": "37",
      "y2": "11"
    },
    "children": []
  }, {
    "name": "line",
    "attribs": {
      "fill": "none",
      "stroke": "#000000",
      "stroke-width": "2",
      "stroke-miterlimit": "10",
      "x1": "7",
      "y1": "23",
      "x2": "6",
      "y2": "21"
    },
    "children": []
  }, {
    "name": "line",
    "attribs": {
      "fill": "none",
      "stroke": "#000000",
      "stroke-width": "2",
      "stroke-miterlimit": "10",
      "x1": "43",
      "y1": "53",
      "x2": "42",
      "y2": "55"
    },
    "children": []
  }]
};
exports.basic_cards_diamonds = basic_cards_diamonds;
|
<gh_stars>0
// Placeholder API entry point; returns null until a real client exists.
export const api = () => {
	return null;
};
|
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import opennlp.tools.doccat.DoccatModel;
import opennlp.tools.doccat.DocumentCategorizerME;
import opennlp.tools.doccat.DocumentSample;
import opennlp.tools.doccat.DocumentSampleStream;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.ObjectStreamUtils;
import opennlp.tools.util.PlainTextByLineStream;
/**
 * Trains an OpenNLP document-categorizer model from the sample document
 * and prints the best category ("grade") for a hard-coded test sentence.
 */
public class GradePaperML {
	public static void main(String[] args) {
		// Model output location (currently unused) and the training data.
		File modelDir = new File("en-doccat.bin");
		// NOTE(review): "elxample.txt" looks like a typo for "example.txt" —
		// confirm against the actual data file before changing it.
		File documentDir = new File("elxample.txt");

		// Train the model. `model` must be definitely assigned before use:
		// the original fell through the catch block and then dereferenced an
		// uninitialized local, which does not compile. Bail out instead.
		DoccatModel model;
		try {
			ObjectStream<String> lineStream =
					new PlainTextByLineStream(new FileInputStream(documentDir), "UTF-8");
			ObjectStream<DocumentSample> sampleStream =
					new DocumentSampleStream(lineStream);
			// Assign document categories from the labelled samples.
			model = DocumentCategorizerME.train("en", sampleStream);
		} catch (IOException e) {
			e.printStackTrace();
			return; // no trained model, nothing to score
		}

		// Score the document: pick the category with the highest probability.
		DocumentCategorizerME myCategorizer = new DocumentCategorizerME(model);
		String documentContent = "This is an example document.";
		double[] outcomes = myCategorizer.categorize(documentContent.split(" "));
		String category = myCategorizer.getBestCategory(outcomes);

		// Output grade
		System.out.println("This paper has been given a grade of " + category + ".");
	}
}
|
#!/usr/bin/env bash
set -e
if [ -n "$SKIP_TESTS" ]; then
exit 0
fi
# Windows doesn't run the NTLM tests properly (yet)
if [[ "$(uname -s)" == MINGW* ]]; then
SKIP_NTLM_TESTS=1
fi
SOURCE_DIR=${SOURCE_DIR:-$( cd "$( dirname "${BASH_SOURCE[0]}" )" && dirname $( pwd ) )}
BUILD_DIR=$(pwd)
TMPDIR=${TMPDIR:-/tmp}
USER=${USER:-$(whoami)}
SUCCESS=1
CONTINUE_ON_FAILURE=0
# Tear down background services started for this test run: the git daemon
# (by pid) and the per-run sshd instance (via its pid file).
cleanup() {
	echo "Cleaning up..."
	if [ ! -z "$GITDAEMON_PID" ]; then
		echo "Stopping git daemon..."
		kill $GITDAEMON_PID
	fi
	if [ ! -z "$SSHD_DIR" -a -f "${SSHD_DIR}/pid" ]; then
		echo "Stopping SSH..."
		kill $(cat "${SSHD_DIR}/pid")
	fi
	echo "Done."
}
# Run a single ctest suite by exact name ($1), retrying flaky suites up to
# $GITTEST_FLAKY_RETRY times and writing results to results_<name>.xml.
# On persistent failure: exit immediately, or when CONTINUE_ON_FAILURE=1
# clear SUCCESS so the caller can report failure at the end.
run_test() {
	# Use numeric comparisons (-gt): the previous `[[ "$x" > 0 ]]` form is
	# a lexicographic *string* comparison inside [[ ]], which misbehaves
	# for multi-digit or zero-padded values.
	if [[ "${GITTEST_FLAKY_RETRY:-0}" -gt 0 ]]; then
		ATTEMPTS_REMAIN=$GITTEST_FLAKY_RETRY
	else
		ATTEMPTS_REMAIN=1
	fi

	FAILED=0
	while [[ "$ATTEMPTS_REMAIN" -gt 0 ]]; do
		if [ "$FAILED" -eq 1 ]; then
			echo ""
			echo "Re-running flaky ${1} tests..."
			echo ""
		fi

		# Capture ctest's exit code without tripping `set -e`.
		RETURN_CODE=0
		CLAR_SUMMARY="${BUILD_DIR}/results_${1}.xml" ctest -V -R "^${1}$" || RETURN_CODE=$? && true

		if [ "$RETURN_CODE" -eq 0 ]; then
			FAILED=0
			break
		fi

		echo "Test exited with code: $RETURN_CODE"
		ATTEMPTS_REMAIN="$(($ATTEMPTS_REMAIN-1))"
		FAILED=1
	done

	if [ "$FAILED" -ne 0 ]; then
		if [ "$CONTINUE_ON_FAILURE" -ne 1 ]; then
			exit 1
		fi
		SUCCESS=0
	fi
}
# Configure the test environment; run them early so that we're certain
# that they're started by the time we need them.
echo "##############################################################################"
echo "## Configuring test environment"
echo "##############################################################################"
if [ -z "$SKIP_GITDAEMON_TESTS" ]; then
echo "Starting git daemon..."
GITDAEMON_DIR=`mktemp -d ${TMPDIR}/gitdaemon.XXXXXXXX`
git init --bare "${GITDAEMON_DIR}/test.git" >/dev/null
git daemon --listen=localhost --export-all --enable=receive-pack --base-path="${GITDAEMON_DIR}" "${GITDAEMON_DIR}" 2>/dev/null &
GITDAEMON_PID=$!
disown $GITDAEMON_PID
fi
if [ -z "$SKIP_PROXY_TESTS" ]; then
curl --location --silent --show-error https://github.com/ethomson/poxyproxy/releases/download/v0.7.0/poxyproxy-0.7.0.jar >poxyproxy.jar
echo ""
echo "Starting HTTP proxy (Basic)..."
java -jar poxyproxy.jar --address 127.0.0.1 --port 8080 --credentials foo:bar --auth-type basic --quiet &
echo ""
echo "Starting HTTP proxy (NTLM)..."
java -jar poxyproxy.jar --address 127.0.0.1 --port 8090 --credentials foo:bar --auth-type ntlm --quiet &
fi
if [ -z "$SKIP_NTLM_TESTS" -o -z "$SKIP_ONLINE_TESTS" ]; then
curl --location --silent --show-error https://github.com/ethomson/poxygit/releases/download/v0.5.1/poxygit-0.5.1.jar >poxygit.jar
echo ""
echo "Starting HTTP server..."
NTLM_DIR=`mktemp -d ${TMPDIR}/ntlm.XXXXXXXX`
git init --bare "${NTLM_DIR}/test.git"
java -jar poxygit.jar --address 127.0.0.1 --port 9000 --credentials foo:baz --quiet "${NTLM_DIR}" &
fi
# --- SSH setup: start a throw-away sshd on port 2222 serving a bare repo ---
if [ -z "$SKIP_SSH_TESTS" ]; then
echo ""
echo "Starting ssh daemon..."
HOME=`mktemp -d ${TMPDIR}/home.XXXXXXXX`
SSHD_DIR=`mktemp -d ${TMPDIR}/sshd.XXXXXXXX`
git init --bare "${SSHD_DIR}/test.git" >/dev/null
cat >"${SSHD_DIR}/sshd_config" <<-EOF
Port 2222
ListenAddress 0.0.0.0
Protocol 2
HostKey ${SSHD_DIR}/id_rsa
PidFile ${SSHD_DIR}/pid
AuthorizedKeysFile ${HOME}/.ssh/authorized_keys
LogLevel DEBUG
RSAAuthentication yes
PasswordAuthentication yes
PubkeyAuthentication yes
ChallengeResponseAuthentication no
StrictModes no
# Required here as sshd will simply close connection otherwise
UsePAM no
EOF
ssh-keygen -t rsa -f "${SSHD_DIR}/id_rsa" -N "" -q
/usr/sbin/sshd -f "${SSHD_DIR}/sshd_config" -E "${SSHD_DIR}/log"
# Set up keys
mkdir "${HOME}/.ssh"
ssh-keygen -t rsa -f "${HOME}/.ssh/id_rsa" -N "" -q
cat "${HOME}/.ssh/id_rsa.pub" >>"${HOME}/.ssh/authorized_keys"
# Pre-register the server's host key so the ssh tests never hit an
# interactive host-verification prompt.
while read algorithm key comment; do
echo "[localhost]:2222 $algorithm $key" >>"${HOME}/.ssh/known_hosts"
done <"${SSHD_DIR}/id_rsa.pub"
# Get the fingerprint for localhost and remove the colons so we can
# parse it as a hex number. Older versions have a different output
# format.
if [[ $(ssh -V 2>&1) == OpenSSH_6* ]]; then
SSH_FINGERPRINT=$(ssh-keygen -F '[localhost]:2222' -f "${HOME}/.ssh/known_hosts" -l | tail -n 1 | cut -d ' ' -f 2 | tr -d ':')
else
SSH_FINGERPRINT=$(ssh-keygen -E md5 -F '[localhost]:2222' -f "${HOME}/.ssh/known_hosts" -l | tail -n 1 | cut -d ' ' -f 3 | cut -d : -f2- | tr -d :)
fi
fi
# Run the tests that do not require network connectivity.
if [ -z "$SKIP_OFFLINE_TESTS" ]; then
echo ""
echo "##############################################################################"
echo "## Running core tests"
echo "##############################################################################"
echo ""
echo "Running libgit2 integration (offline) tests"
echo ""
run_test offline
echo ""
echo "Running utility tests"
echo ""
run_test util
fi
# Invasive tests exercise filesystem-size, memory and timing behaviour and are
# opt-in (note: -n, unlike the skip flags above which are opt-out).
if [ -n "$RUN_INVASIVE_TESTS" ]; then
echo ""
echo "Running invasive tests"
echo ""
export GITTEST_INVASIVE_FS_SIZE=1
export GITTEST_INVASIVE_MEMORY=1
export GITTEST_INVASIVE_SPEED=1
run_test invasive
unset GITTEST_INVASIVE_FS_SIZE
unset GITTEST_INVASIVE_MEMORY
unset GITTEST_INVASIVE_SPEED
fi
if [ -z "$SKIP_ONLINE_TESTS" ]; then
# Run the online tests. The "online" test suite only includes the
# default online tests that do not require additional configuration.
# The "proxy" and "ssh" test suites require further setup.
echo ""
echo "##############################################################################"
echo "## Running networking (online) tests"
echo "##############################################################################"
export GITTEST_REMOTE_REDIRECT_INITIAL="http://localhost:9000/initial-redirect/libgit2/TestGitRepository"
export GITTEST_REMOTE_REDIRECT_SUBSEQUENT="http://localhost:9000/subsequent-redirect/libgit2/TestGitRepository"
run_test online
unset GITTEST_REMOTE_REDIRECT_INITIAL
unset GITTEST_REMOTE_REDIRECT_SUBSEQUENT
# Run the online tests that immutably change global state separately
# to avoid polluting the test environment.
echo ""
echo "Running custom certificate (online_customcert) tests"
echo ""
run_test online_customcert
fi
if [ -z "$SKIP_GITDAEMON_TESTS" ]; then
echo ""
echo "Running gitdaemon tests"
echo ""
export GITTEST_REMOTE_URL="git://localhost/test.git"
run_test gitdaemon
unset GITTEST_REMOTE_URL
fi
# Proxy tests run twice: once against a Basic-auth proxy (port 8080) and once
# against an NTLM proxy (port 8090); both are assumed to be started elsewhere.
if [ -z "$SKIP_PROXY_TESTS" ]; then
echo ""
echo "Running proxy tests (Basic authentication)"
echo ""
export GITTEST_REMOTE_PROXY_HOST="localhost:8080"
export GITTEST_REMOTE_PROXY_USER="foo"
export GITTEST_REMOTE_PROXY_PASS="bar"
run_test proxy
unset GITTEST_REMOTE_PROXY_HOST
unset GITTEST_REMOTE_PROXY_USER
unset GITTEST_REMOTE_PROXY_PASS
echo ""
echo "Running proxy tests (NTLM authentication)"
echo ""
export GITTEST_REMOTE_PROXY_HOST="localhost:8090"
export GITTEST_REMOTE_PROXY_USER="foo"
export GITTEST_REMOTE_PROXY_PASS="bar"
run_test proxy
unset GITTEST_REMOTE_PROXY_HOST
unset GITTEST_REMOTE_PROXY_USER
unset GITTEST_REMOTE_PROXY_PASS
fi
if [ -z "$SKIP_NTLM_TESTS" ]; then
echo ""
echo "Running NTLM tests (IIS emulation)"
echo ""
export GITTEST_REMOTE_URL="http://localhost:9000/ntlm/test.git"
export GITTEST_REMOTE_USER="foo"
export GITTEST_REMOTE_PASS="baz"
run_test auth_clone_and_push
unset GITTEST_REMOTE_URL
unset GITTEST_REMOTE_USER
unset GITTEST_REMOTE_PASS
echo ""
echo "Running NTLM tests (Apache emulation)"
echo ""
export GITTEST_REMOTE_URL="http://localhost:9000/broken-ntlm/test.git"
export GITTEST_REMOTE_USER="foo"
export GITTEST_REMOTE_PASS="baz"
run_test auth_clone_and_push
unset GITTEST_REMOTE_URL
unset GITTEST_REMOTE_USER
unset GITTEST_REMOTE_PASS
fi
# SPNEGO/Kerberos tests need a password for the test principal; they obtain a
# ticket with kinit, run, then destroy all tickets with kdestroy.
if [ -z "$SKIP_NEGOTIATE_TESTS" -a -n "$GITTEST_NEGOTIATE_PASSWORD" ]; then
echo ""
echo "Running SPNEGO tests"
echo ""
if [ "$(uname -s)" = "Darwin" ]; then
KINIT_FLAGS="--password-file=STDIN"
fi
echo $GITTEST_NEGOTIATE_PASSWORD | kinit $KINIT_FLAGS test@LIBGIT2.ORG
klist -5f
export GITTEST_REMOTE_URL="https://test.libgit2.org/kerberos/empty.git"
export GITTEST_REMOTE_DEFAULT="true"
run_test auth_clone
unset GITTEST_REMOTE_URL
unset GITTEST_REMOTE_DEFAULT
echo ""
echo "Running SPNEGO tests (expect/continue)"
echo ""
export GITTEST_REMOTE_URL="https://test.libgit2.org/kerberos/empty.git"
export GITTEST_REMOTE_DEFAULT="true"
export GITTEST_REMOTE_EXPECTCONTINUE="true"
run_test auth_clone
unset GITTEST_REMOTE_URL
unset GITTEST_REMOTE_DEFAULT
unset GITTEST_REMOTE_EXPECTCONTINUE
kdestroy -A
fi
# SSH tests use the daemon and keys prepared at the top of this section; they
# run twice: once with ssh:// URLs and once with scp-style paths.
if [ -z "$SKIP_SSH_TESTS" ]; then
export GITTEST_REMOTE_USER=$USER
export GITTEST_REMOTE_SSH_KEY="${HOME}/.ssh/id_rsa"
export GITTEST_REMOTE_SSH_PUBKEY="${HOME}/.ssh/id_rsa.pub"
export GITTEST_REMOTE_SSH_PASSPHRASE=""
export GITTEST_REMOTE_SSH_FINGERPRINT="${SSH_FINGERPRINT}"
echo ""
echo "Running ssh tests"
echo ""
export GITTEST_REMOTE_URL="ssh://localhost:2222/$SSHD_DIR/test.git"
run_test ssh
unset GITTEST_REMOTE_URL
echo ""
echo "Running ssh tests (scp-style paths)"
echo ""
export GITTEST_REMOTE_URL="[localhost:2222]:$SSHD_DIR/test.git"
run_test ssh
unset GITTEST_REMOTE_URL
unset GITTEST_REMOTE_USER
unset GITTEST_REMOTE_SSH_KEY
unset GITTEST_REMOTE_SSH_PUBKEY
unset GITTEST_REMOTE_SSH_PASSPHRASE
unset GITTEST_REMOTE_SSH_FINGERPRINT
fi
if [ -z "$SKIP_FUZZERS" ]; then
echo ""
echo "##############################################################################"
echo "## Running fuzzers"
echo "##############################################################################"
ctest -V -R 'fuzzer'
fi
cleanup
# SUCCESS is presumably cleared by run_test on failure — defined above this
# chunk; confirm against the full script.
if [ "$SUCCESS" -ne 1 ]; then
echo "Some tests failed."
exit 1
fi
echo "Success."
exit 0
|
use physx_sys::{PxFoundation, PxAllocatorCallback, PxErrorCallback, PxDefaultErrorCallback, PxDefaultAllocator};
/// Creates the PhysX foundation object, the root object required before any
/// other PhysX subsystem can be initialized.
///
/// Returns the foundation on success, or a descriptive error string when the
/// underlying constructor yields a null pointer.
///
/// NOTE(review): `version::PX_PHYSICS_VERSION` is not brought into scope by the
/// import line above — presumably it lives under `physx_sys`; confirm the path
/// resolves before relying on this function.
fn initialize_physx_foundation() -> Result<PxFoundation, String> {
    // unsafe: calls into the raw physx_sys FFI bindings.
    unsafe {
        // Default allocator and error-reporting callbacks supplied by physx_sys.
        let allocator = PxDefaultAllocator;
        let error_callback = PxDefaultErrorCallback;
        let foundation = PxFoundation::new(version::PX_PHYSICS_VERSION, allocator, error_callback);
        // A null foundation indicates initialization failure.
        if foundation.is_null() {
            Err("Failed to initialize PhysX foundation".to_string())
        } else {
            Ok(foundation)
        }
    }
}
|
def square_numbers(numbers):
    """Return a new list containing the square of every value in ``numbers``.

    The input iterable is not modified; order is preserved.
    """
    return [value ** 2 for value in numbers]
# Create new array containing the squares
# NOTE(review): `numbers` is not defined anywhere in this snippet — presumably
# supplied by surrounding code; confirm before running this stand-alone.
squares = square_numbers(numbers)
|
<filename>modelci/app/experimental/endpoints/cv_tuner.py<gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Author: yuanmingleee
Email:
Date: 1/29/2021
"""
import torch
from fastapi import APIRouter
from modelci.experimental.model.model_structure import Structure, Operation
from modelci.hub.registrar import register_model
from modelci.hub.manager import get_remote_model_weight
from modelci.hub.utils import generate_path_plain
from modelci.persistence.service import ModelService
from modelci.types.bo import ModelVersion, Engine, IOShape, ModelStatus
from modelci.types.models.mlmodel import MLModel
from modelci.types.type_conversion import model_data_type_to_torch, type_to_data_type
from modelci.utils.exceptions import ModelStructureError
router = APIRouter()
@router.patch('/finetune/{id}')
def update_finetune_model_as_new(id: str, updated_layer: Structure, dry_run: bool = False):  # noqa
    """
    Temporary function for finetune CV models. The function's functionality is overlapped with
    `update_model_structure_as_new`. Please use the `update_model_structure_as_new` in next release.

    Examples:
        Fine-tune the model by modify the layer with name 'fc' (last layer). The layer
        has a changed argument out_features = 10. op_='M' indicates the operation to this layer ('fc')
        is 'Modify'. There is no changes in layer connections.
        Therefore, the structure change summary is
            [M] fc: (...) out_features=10

        >>> from collections import OrderedDict
        >>> structure_data = {
        ...     'layer': OrderedDict({'fc': {'out_features': 10, 'op_': 'M', 'type_': 'torch.nn.Linear'}})
        ... }
        >>> update_finetune_model_as_new(id=..., updated_layer=Structure.parse_obj(structure_data))

    Args:
        id (str): ID of the model to be updated.
        updated_layer (Structure): Contains layers to be fine-tuned.
        dry_run (bool): Test run for verify if the provided parameter (i.e. model specified in `id`
            and updated layers) is valid.

    Returns:
        dict: ``{'id': <new model id>}`` for a real run; ``True`` when there is
        nothing to update; ``None`` for a dry run (validation-only mode).

    Raises:
        ValueError: If the model engine is not PyTorch, or an unsupported layer
            operation (anything other than Modify) is requested.
        ModelStructureError: If a referenced layer does not exist or its declared
            type does not match the actual layer type.
    """
    # Nothing to do when no layer changes were supplied.
    if len(updated_layer.layer.items()) == 0:
        return True
    model = ModelService.get_model_by_id(id)
    if model.engine != Engine.PYTORCH:
        raise ValueError(f'model {id} is not supported for editing. '
                         f'Currently only support model with engine=PYTORCH')
    # download model as local cache
    cache_path = get_remote_model_weight(model=model)
    net = torch.load(cache_path)
    for layer_name, layer_param in updated_layer.layer.items():
        layer_op = getattr(layer_param, 'op_')
        # update layer
        if layer_op == Operation.MODIFY:
            # check if the layer name exists
            # TODO check if layer path exists eg."layer1.0.conv1"
            if not hasattr(net, layer_name):
                raise ModelStructureError(f'Structure layer name `{layer_name}` not found in model {id}.')
            net_layer = getattr(net, layer_name)
            # check if the provided type matches the original type
            layer_type = type(net_layer)
            layer_type_provided = eval(layer_param.type_.value)  # nosec
            if layer_type is not layer_type_provided:
                raise ModelStructureError(f'Expect `{layer_name}.type_` to be {layer_type}, '
                                          f'but got {layer_type_provided}')
            # get layer parameters
            layer_param_old = layer_param.parse_layer_obj(net_layer)
            layer_param_data = layer_param_old.dict(exclude_none=True, exclude={'type_', 'op_'})
            layer_param_update_data = layer_param.dict(exclude_none=True, exclude={'type_', 'op_'})
            # replace 'null' with None. See reason :class:`ModelLayer`.
            for k, v in layer_param_update_data.items():
                if v == 'null':
                    layer_param_update_data[k] = None
            # merge updates into the old parameters and rebuild the layer
            layer_param_data.update(layer_param_update_data)
            layer = layer_type(**layer_param_data)
            setattr(net, layer_name, layer)
        else:
            # if layer_op is Operation.ADD,
            #     1. check if the layer name not exists
            #     2. add a layer
            #     3. change the `forward` function according to the connections
            # if layer_op is Operation.DELETE,
            #     1. check if the layer exists
            #     2. delete the layer
            #     3. change the `forward` function
            raise ValueError('Operation not permitted. Please use `update_model_structure_as_new`.')
    # Build random input tensors matching the model's declared input shapes so
    # we can trace the (possibly changed) output shapes of the modified net.
    input_tensors = list()
    bs = 1
    for input_ in model.inputs:
        input_tensor = torch.rand(bs, *input_.shape[1:]).type(model_data_type_to_torch(input_.dtype))
        input_tensors.append(input_tensor)
    # parse output tensors
    output_shapes = list()
    output_tensors = net(*input_tensors)
    if not isinstance(output_tensors, (list, tuple)):
        output_tensors = (output_tensors,)
    for output_tensor in output_tensors:
        output_shape = IOShape(shape=[bs, *output_tensor.shape[1:]], dtype=type_to_data_type(output_tensor.dtype))
        output_shapes.append(output_shape)
    if not dry_run:
        # TODO return validation result for dry_run mode
        # TODO apply Semantic Versioning https://semver.org/
        # TODO reslove duplicate model version problem in a more efficient way
        version = ModelVersion(model.version.ver + 1)
        previous_models = ModelService.get_models(
            architecture=model.architecture,
            task=model.task,
            framework=model.framework,
            engine=Engine.NONE
        )
        if len(previous_models):
            last_version = max(previous_models, key=lambda k: k.version.ver).version.ver
            version = ModelVersion(last_version + 1)
        saved_path = generate_path_plain(
            architecture=model.architecture,
            task=model.task,
            framework=model.framework,
            engine=Engine.NONE,
            version=version
        )
        saved_path.parent.mkdir(parents=True, exist_ok=True)
        # BUG FIX: previously saved `model` (the metadata business object)
        # instead of `net` (the actual fine-tuned torch module), so the
        # registered weight file never contained the modified network.
        torch.save(net, saved_path.with_suffix('.pt'))
        mlmodelin = MLModel(
            dataset='',
            metric={key: 0 for key in model.metric.keys()},
            task=model.task,
            inputs=model.inputs,
            outputs=output_shapes,
            # NOTE(review): `model.name` is used here while the queries above
            # use `model.architecture` — confirm these are intentionally
            # different fields.
            architecture=model.name,
            framework=model.framework,
            engine=Engine.NONE,
            model_status=[ModelStatus.DRAFT],
            parent_model_id=model.id,
            version=version,
            weight=saved_path
        )
        register_model(
            mlmodelin,
            convert=False, profile=False
        )
        # Fetch the freshly registered record so we can return its id.
        model_bo = ModelService.get_models(
            architecture=model.architecture,
            task=model.task,
            framework=model.framework,
            engine=Engine.NONE,
            version=version
        )[0]
        return {'id': model_bo.id}
|
<reponame>bike7/testingtasks
package pl.kasieksoft.addressbook.model;
/**
 * Fluent builder for {@link GroupData} instances.
 *
 * <p>Obtain an instance via {@link #aGroupData()}, chain the {@code with*}
 * setters, then call {@link #build()}. The id defaults to
 * {@code Integer.MAX_VALUE} when not set explicitly.</p>
 */
public class GroupDataBuilder {
    /** Defaults to Integer.MAX_VALUE so an unset id is easy to recognise. */
    private int id = Integer.MAX_VALUE;
    private String name;
    private String header;
    private String footer;

    /** Not instantiable directly; use {@link #aGroupData()}. */
    private GroupDataBuilder() {
    }

    /** Entry point: creates a fresh builder with default values. */
    public static GroupDataBuilder aGroupData() {
        return new GroupDataBuilder();
    }

    /** Sets the group id and returns this builder for chaining. */
    public GroupDataBuilder withId(int id) {
        this.id = id;
        return this;
    }

    /** Sets the group name and returns this builder for chaining. */
    public GroupDataBuilder withName(String name) {
        this.name = name;
        return this;
    }

    /** Sets the group header and returns this builder for chaining. */
    public GroupDataBuilder withHeader(String header) {
        this.header = header;
        return this;
    }

    /** Sets the group footer and returns this builder for chaining. */
    public GroupDataBuilder withFooter(String footer) {
        this.footer = footer;
        return this;
    }

    /** Assembles a {@link GroupData} from the values collected so far. */
    public GroupData build() {
        return new GroupData(id, name, header, footer);
    }
}
|
<reponame>freerware/negotiator
/* Copyright 2020 Freerware
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package representation
// Metadata is the metadata about each representation in the
// representation list. Fields mirror the corresponding HTTP Content-*
// headers and are omitted from JSON when empty (except SourceQuality).
type Metadata struct {
	ContentType     string   `json:"contentType,omitempty"`
	ContentLanguage string   `json:"contentLanguage,omitempty"`
	ContentEncoding []string `json:"contentEncoding,omitempty"`
	ContentLocation string   `json:"contentLocation,omitempty"`
	ContentCharset  string   `json:"contentCharset,omitempty"`
	ContentFeatures []string `json:"contentFeatures,omitempty"`
	SourceQuality   float32  `json:"sourceQuality"`
}
// List represents a representation containing a list of descriptions of representations
// for a particular resource. It embeds Base for (de)serialization support.
type List struct {
	Base
	Representations []Metadata `json:"representations"`
}
// SetRepresentations modifies the representation list within the list
// representation, appending one Metadata entry per provided representation.
func (l *List) SetRepresentations(reps ...Representation) {
	for _, rep := range reps {
		// ContentLocation returns a value; take its address so the
		// pointer-receiver String method can be called.
		loc := rep.ContentLocation()
		entry := Metadata{
			ContentType:     rep.ContentType(),
			ContentLanguage: rep.ContentLanguage(),
			ContentEncoding: rep.ContentEncoding(),
			ContentLocation: (&loc).String(),
			ContentCharset:  rep.ContentCharset(),
			ContentFeatures: rep.ContentFeatures(),
			SourceQuality:   rep.SourceQuality(),
		}
		l.Representations = append(l.Representations, entry)
	}
}
// Bytes retrieves the serialized form of the list representation by
// delegating to the embedded Base with the full List as payload.
func (l List) Bytes() ([]byte, error) {
	return l.Base.Bytes(&l)
}
// FromBytes constructs the list representation from its serialized form,
// populating l in place via the embedded Base.
func (l *List) FromBytes(b []byte) error {
	return l.Base.FromBytes(b, l)
}
|
<reponame>dogboydog/wanikani-cli
/* tslint:disable */
/* eslint-disable */
/**
* WaniKani
* WaniKani: The API
*
* OpenAPI spec version: 20170710.0
*
*
*/
/**
*
* @export
* @interface SummaryData
*/
export interface SummaryData {
  /**
   * Details about subjects available for lessons. See table below for object structure.
   * NOTE(review): generated code declares `any`; the doc type suggests
   * `Array<SummaryLessons>` — confirm against the generator output.
   * @type {Array<SummaryLessons>}
   * @memberof SummaryData
   */
  lessons: any;
  /**
   * Earliest date when the reviews are available. Is null when the user has no reviews scheduled.
   * @type {Date}
   * @memberof SummaryData
   */
  nextReviewsAt: any | null;
  /**
   * Details about subjects available for reviews now and in the next 24 hours by the hour (total of 25 objects). See table below for object structure.
   * @type {Array<SummaryReviews>}
   * @memberof SummaryData
   */
  reviews: any;
}
|
package io.github.marcelbraghetto.football.framework.providers.football.contracts;
import android.net.Uri;
import android.support.annotation.NonNull;
import java.util.List;
import io.github.marcelbraghetto.football.framework.providers.football.models.FootballGame;
/**
* Created by <NAME> on 6/12/15.
*
* Provider to give access to football related
* requests and queries.
*/
public interface FootballProvider {
    /**
     * Get the content provider Uri for all
     * football games registered on the given
     * date specified as year/month/day.
     *
     * @param year of the game.
     * @param month of the game.
     * @param day of the game.
     * @return content Uri that can fetch this
     * filtered set of data.
     */
    @NonNull Uri getGamesUri(int year, int month, int day);

    /**
     * Save or update the collection of football games.
     *
     * @param games to save or update; must not be null (may be empty).
     */
    void saveGames(@NonNull List<FootballGame> games);

    /**
     * Initiate a data refresh request manually, outside any
     * scheduled refresh cycle.
     */
    void startDataRefresh();
}
|
# Output directory for the Vitis hardware link run.
OUTPUT_DIR="$(pwd)/vitis_run"
# name of the top function
TOP=kernel0
# choose the target device
PLATFORM=xilinx_u250_xdma_201830_2
#PLATFORM=xilinx_u280_xdma_201920_3
XO="$(pwd)/kernel0.xo"
# For different approaches see UG904-vivado-implementation
#STRATEGY="Default"
STRATEGY="EarlyBlockPlacement"
# remove the unused '--connectivity.sp' option for v++ if some DDRs are not used
# Example: if we map p1 to DDR 3 and p2 to DDR 0
#
#   void kernel0(ap_uint<512> *p1, ap_uint<512> *p2)
#   {
#     #pragma HLS INTERFACE m_axi port=p1 offset=slave bundle=gmem_A
#     #pragma HLS INTERFACE m_axi port=p2 offset=slave bundle=gmem_B
#
#     load_p1 (p1, ...);
#     load_p2 (p2, ...);
#   }
#
# ARG_FOR_DDR_0=p2
# ARG_FOR_DDR_3=p1
# Should remove '--connectivity.sp' for DDR1 and DDR2
ARG_FOR_DDR_1=cin
ARG_FOR_DDR_2=w
#ARG_FOR_DDR_3="YOUR_HLS_ARGUMENT_NAME_FOR_DDR_3"
ARG_FOR_DDR_4=cout
# the constraint file containing the floorplan results
# WARNING: must use absolute address
CONSTRAINT="$(pwd)/constraint.tcl"
if [ ! -f "$CONSTRAINT" ]; then
# BUG FIX: report to stderr and exit non-zero so callers (CI, make) detect
# the failure; a bare `exit` previously returned status 0.
echo "no constraint file found" >&2
exit 1
fi
v++ \
  --link \
  --output "${OUTPUT_DIR}/${TOP}_${PLATFORM}.xclbin" \
  --kernel ${TOP} \
  --platform ${PLATFORM} \
  --target hw \
  --report_level 2 \
  --temp_dir "${OUTPUT_DIR}/${TOP}_${PLATFORM}.temp" \
  --optimize 3 \
  --connectivity.nk ${TOP}:1:${TOP}_1 \
  --max_memory_ports ${TOP} \
  --save-temps \
  ${XO} \
  --connectivity.sp ${TOP}_1.${ARG_FOR_DDR_1}:DDR[0] \
  --connectivity.sp ${TOP}_1.${ARG_FOR_DDR_2}:DDR[1] \
  --connectivity.sp ${TOP}_1.${ARG_FOR_DDR_4}:DDR[3] \
  --kernel_frequency 300 \
  --vivado.prop run.impl_1.STEPS.PLACE_DESIGN.ARGS.DIRECTIVE=$STRATEGY \
  --vivado.prop run.impl_1.STEPS.OPT_DESIGN.TCL.PRE=$CONSTRAINT
|
import numpy as np
from itertools import combinations
def optimize_generator_placement(n, m, k, adj_matrix=None):
    """Choose the ``k``-node subset of an ``n``-node network that minimizes the
    total pairwise transmission cost between the selected nodes.

    The search is exhaustive over all C(n, k) combinations, so it is only
    suitable for small networks.

    Args:
        n (int): Number of nodes in the network.
        m (int): Unused; retained for interface compatibility with callers.
        k (int): Number of generator nodes to place.
        adj_matrix: Optional n-by-n cost matrix (numpy array or nested lists)
            where ``adj_matrix[i][j]`` (i < j) is the transmission cost between
            nodes i and j. When omitted, a built-in 12-node example network is
            used (requires ``n >= 12``, matching the original behavior).

    Returns:
        list[int]: The node indices (ascending) of the cheapest combination
        found; the first minimal combination in lexicographic order wins ties.
    """
    if adj_matrix is None:
        # Default example network (upper-triangular costs; absent edges are 0).
        adj_matrix = np.zeros((n, n))
        example_edges = [
            (0, 1, 5), (0, 2, 8), (1, 2, 6), (1, 3, 7), (2, 3, 9), (2, 4, 5),
            (3, 4, 6), (3, 5, 8), (4, 5, 7), (4, 6, 9), (5, 6, 6), (5, 7, 5),
            (6, 7, 8), (6, 8, 7), (7, 8, 9), (7, 9, 5), (8, 9, 6), (8, 10, 8),
            (9, 10, 7), (9, 11, 9), (10, 11, 6),
        ]
        for i, j, cost in example_edges:
            adj_matrix[i][j] = cost

    min_cost = float('inf')
    optimal_nodes = []

    # Exhaustively evaluate every k-node subset (lexicographic order).
    for nodes in combinations(range(n), k):
        # Combinations are sorted ascending, so nodes[i] < nodes[j] for i < j,
        # matching the upper-triangular layout of the cost matrix.
        total_cost = 0
        for i in range(k):
            for j in range(i + 1, k):
                total_cost += adj_matrix[nodes[i]][nodes[j]]
        # Strict '<' keeps the first (lexicographically smallest) minimum.
        if total_cost < min_cost:
            min_cost = total_cost
            optimal_nodes = list(nodes)

    return optimal_nodes
|
import * as utils from '../src/utils.js';
import {registerBidder} from '../src/adapters/bidderFactory.js';
// Prebid server endpoint and adapter code for the Papyrus bidder.
const PAPYRUS_ENDPOINT = 'https://prebid.papyrus.global';
const PAPYRUS_CODE = 'papyrus';

export const spec = {
  code: PAPYRUS_CODE,
  /**
   * Determines whether or not the given bid request is valid. Valid bid request must have placementId and hbid
   *
   * @param {BidRequest} bid The bid params to validate.
   * @return boolean True if this is a valid bid, and false otherwise.
   */
  isBidRequestValid: bid => {
    return !!(bid && bid.params && bid.params.address && bid.params.placementId);
  },
  /**
   * Make a server request from the list of BidRequests.
   *
   * @param {BidRequest[]} validBidRequests - an array of bids
   * @return ServerRequest Info describing the request to the server.
   */
  buildRequests: function(validBidRequests) {
    const bidParams = [];
    utils._each(validBidRequests, function(bid) {
      bidParams.push({
        address: bid.params.address,
        placementId: bid.params.placementId,
        bidId: bid.bidId,
        transactionId: bid.transactionId,
        sizes: utils.parseSizesInput(bid.sizes)
      });
    });
    // All bids are batched into a single POST to the Papyrus endpoint.
    return {
      method: 'POST',
      url: PAPYRUS_ENDPOINT,
      data: bidParams
    };
  },
  /**
   * Unpack the response from the server into a list of bids.
   *
   * @param {*} serverResponse A successful response from the server.
   * @return {Bid[]} An array of bids which were nested inside the server.
   */
  interpretResponse: function(serverResponse, request) {
    const bidResponses = [];
    if (serverResponse && serverResponse.body && serverResponse.body.bids) {
      serverResponse.body.bids.forEach(bid => {
        const bidResponse = {
          requestId: bid.id,
          creativeId: bid.id,
          adId: bid.id,
          transactionId: bid.transactionId,
          cpm: bid.cpm,
          width: bid.width,
          height: bid.height,
          currency: bid.currency,
          netRevenue: true,
          // Cache the creative for up to 300 seconds.
          ttl: 300,
          ad: bid.ad
        }
        bidResponses.push(bidResponse);
      });
    }
    return bidResponses;
  }
};
registerBidder(spec);
|
<gh_stars>0
"""Constants for the Zeversolar Inverter local integration."""
from homeassistant.const import Platform
# Base component constants
NAME = "Zeversolar Local Integration"
DEVICE_NAME = "Zeversolar Inverter"
DEVICE_MODEL = "Universal Inverter Device"
MANUFACTURER_NAME = "Zeversolar"
ISSUE_URL = "https://github.com/nech/zeversolar_local/issues"

"""The domain name."""
DOMAIN = "zeversolar_local"
DOMAIN_DATA = f"{DOMAIN}_data"

# Config-entry and options keys.
CONF_SERIAL_NO = "zever_serial_number"
ENTRY_COORDINATOR = "zever_coordinator"
ENTRY_DEVICE_INFO = "zever_device_info"
OPT_DATA_INTERVAL = "zever_data_interval"
# Default polling interval in seconds.
OPT_DATA_INTERVAL_VALUE: int = 30

"""The actual version of the integration."""
VERSION = "1.1.0"

"""List of platforms that are supported."""
PLATFORMS = [Platform.SENSOR, Platform.BUTTON]

# Additional
STARTUP_MESSAGE = f"""
-------------------------------------------------------------------
{NAME}
Version: {VERSION}
Fetch data from a Zeversolar inverter using its local API.
This is a custom integration!
If you have any issues with this you need to open an issue here:
{ISSUE_URL}
-------------------------------------------------------------------
"""
# BUG FIX: removed a duplicate `DOMAIN = "zeversolar_local"` assignment that
# redundantly re-declared the constant at the end of the module.
|
// filtering of operator list based on search term
(function() {
  // Search term from the previous keystroke; used to narrow incremental
  // searches instead of rescanning the full operator list.
  let lastSearch = null;

  // Lazily attaches jQuery UI drag behaviour to a search-result element;
  // idempotent via the 'drag-initialized' marker class.
  function makeSearchResultDraggable($elem) {
    if ($elem.hasClass('drag-initialized')) {
      return;
    }
    $elem.addClass('drag-initialized');
    $elem.find('.draggable').draggable({
      helper() {
        var box = $(this).children('.dragDiv');
        box.show();
        return box;
      },
      stop(event, ui) {
        ui.helper.hide();
        // Keep the helper element around so it can be reused on the next drag.
        $.ui.ddmanager.current.cancelHelperRemoval = true;
      },
    });
  }

  // Filters the operator list against the search box contents; shows at most
  // 50 matches and toggles the "no match" / "max results" alerts.
  function operatorSearchFn() {
    const searchTerm = $(this)
      .val()
      .toLocaleLowerCase();
    if (searchTerm === '') {
      // show regular grouped view if search term empty
      $('#operators-grouped').show();
      $('#operators-search-result').hide();
    } else {
      // show ungrouped list of operators if we have a non-empty search term
      $('#operators-grouped').hide();
      $('#operators-search-result').show();
      const $operatorList = $('#operatorList');
      $('#at_least_two_alert').hide();
      let match = 0;
      let operators = $operatorList.find('.operator');
      // If the new term extends the previous one, only the previously visible
      // operators can still match — skip the rest.
      if (_.startsWith(searchTerm, lastSearch)) {
        operators = operators.filter(':not(.search-invisible)');
      }
      operators.each(function() {
        const $elem = $(this);
        if (match >= 50) {
          $elem.addClass('search-invisible');
          return;
        }
        // the operator's index terms to match against
        const text = $elem
          .find('.operator-index')
          .text()
          .toLocaleLowerCase();
        if (text.indexOf(searchTerm) >= 0) {
          $elem.removeClass('search-invisible');
          match += 1;
          generateNewIdsForTooltips($elem);
          activateDeferredMDL($elem);
          makeSearchResultDraggable($elem);
          // Highlight the matching substring in the operator label.
          const $label = $elem.find('p.non-breaking-label');
          $label.unmark();
          $label.mark(searchTerm);
        } else {
          $elem.addClass('search-invisible');
        }
      });
      const $noMatch = $('#no_match_alert');
      if (match > 0) {
        $noMatch.hide();
      } else {
        $noMatch.show();
      }
      const maxMatch = $('#max_results_alert');
      if (match < 50) {
        maxMatch.hide();
      } else {
        maxMatch.show();
      }
      lastSearch = searchTerm;
    }
  }

  // Debounce keystrokes so filtering runs at most every 300ms.
  $('#operator_search_term').keyup(_.debounce(operatorSearchFn, 300));
})();
|
package api
import (
"context"
internalHTTP "github.com/matrix-org/dendrite/internal/http"
"github.com/matrix-org/gomatrixserverlib"
"github.com/opentracing/opentracing-go"
)
const (
	// RoomserverPerformJoinPath is the HTTP path for the PerformJoin API.
	RoomserverPerformJoinPath = "/api/roomserver/performJoin"
	// RoomserverPerformLeavePath is the HTTP path for the PerformLeave API.
	RoomserverPerformLeavePath = "/api/roomserver/performLeave"
)
// PerformJoinRequest is the JSON request body for the PerformJoin API.
type PerformJoinRequest struct {
	RoomIDOrAlias string                         `json:"room_id_or_alias"`
	UserID        string                         `json:"user_id"`
	Content       map[string]interface{}         `json:"content"`
	ServerNames   []gomatrixserverlib.ServerName `json:"server_names"`
}

// PerformJoinResponse is the (currently empty) response for PerformJoin.
type PerformJoinResponse struct {
}
// PerformJoin forwards a join request to the roomserver over internal HTTP,
// tracing the call with an opentracing span.
func (h *httpRoomserverInternalAPI) PerformJoin(
	ctx context.Context,
	request *PerformJoinRequest,
	response *PerformJoinResponse,
) error {
	span, ctx := opentracing.StartSpanFromContext(ctx, "PerformJoin")
	defer span.Finish()

	apiURL := h.roomserverURL + RoomserverPerformJoinPath
	return internalHTTP.PostJSON(ctx, span, h.httpClient, apiURL, request, response)
}
// PerformLeaveRequest is the JSON request body for the PerformLeave API.
type PerformLeaveRequest struct {
	RoomID string `json:"room_id"`
	UserID string `json:"user_id"`
}

// PerformLeaveResponse is the (currently empty) response for PerformLeave.
type PerformLeaveResponse struct {
}
// PerformLeave forwards a leave request to the roomserver over internal HTTP,
// tracing the call with an opentracing span.
func (h *httpRoomserverInternalAPI) PerformLeave(
	ctx context.Context,
	request *PerformLeaveRequest,
	response *PerformLeaveResponse,
) error {
	span, ctx := opentracing.StartSpanFromContext(ctx, "PerformLeave")
	defer span.Finish()

	apiURL := h.roomserverURL + RoomserverPerformLeavePath
	return internalHTTP.PostJSON(ctx, span, h.httpClient, apiURL, request, response)
}
|
#!/bin/bash
# Batch-synthesize one TTS prompt per line of an EDB text file via the
# Watson TTS HTTP API. Expects cfg.sh to define $useCred, $url,
# $customID and $voice.
. cfg.sh
#
var=${1:?You have to provide an EDB text filename with no spaces as an argument}
# Creates a timestamp for the directory name
timestamp=$(date +%m%d%y-%H%M%S)
# Output folder named after the timestamp and the input file. Quoted so the
# script keeps working even if callers pass unusual (but space-free) names.
outdir="${timestamp}${var}-tbe-noSpeaker"
mkdir "$outdir"
#
#
while IFS= read -r line
do
# Extract audio filename from EDB file (field before the first ':')
prompt_audio=$(echo "$line" | cut -f1 -d:) &&
# Extract audio basename to be used as the TTS prompt name
prompt_basename=$(basename "$prompt_audio" .wav) &&
prompt_name="${prompt_basename}_noSpeaker" &&
echo "Generating audio file " "$prompt_audio-tbe-noSpeaker.wav" " using prompt " "$prompt_name" "............" &&
curl -X POST -u "$useCred" --header "Content-Type: application/json" --header "Accept: audio/wav" --data "{\"text\":\"<ibm:prompt id='$prompt_name'/>\"}" --output "./$outdir/$prompt_audio-tbe-noSpeaker.wav" "$url/v1/synthesize?customization_id=$customID&voice=$voice"
done < "$var"
|
<reponame>tylerw1369/diverDriver
package ipccommon
import (
"bytes"
"errors"
"github.com/lunixbochs/struc"
"github.com/sigurn/crc8"
)
const (
	IpcCmdNotification     = 0x01 // S => C: Text messages to the client
	IpcCmdResponse         = 0x02 // S => C: Response to a IPC_CMD
	IpcCmdError            = 0x03 // S => C: Exceptions that should be raised in the client
	IpcCmdGetServerVersion = 0x04 // C => S: Get the version of this application
	IpcCmdGetPowType       = 0x05 // C => S: Get the name of the used POW implementation (e.g. PiDiver)
	IpcCmdGetPowVersion    = 0x06 // C => S: Get the version of the used POW implementation (e.g. PiDiver FPGA Core Version)
	IpcCmdPowFunc          = 0x07 // C => S: Do POW

	// Different states of the receivement of the frame via interprocess communication
	FrameStateSearchEnq     byte = 1 // FrameStateSearchEnq: Search the Start byte of the frame
	FrameStateSearchVersion byte = 2 // Search the Version byte of the frame
	FrameStateSearchLength  byte = 3 // Search the length information of the frame
	FrameStateSearchData    byte = 4 // Search all the data embedded in the frame
	FrameStateSearchCRC     byte = 5 // Search the CRC checksum of the embedded data
)

// Crc8Table is the shared CRC8/MAXIM lookup table used for frame checksums.
var Crc8Table = crc8.MakeTable(crc8.CRC8_MAXIM)
// IpcFrameV1 contains the information of the IPC communication.
// DataLength is serialized as uint16 and kept in sync with len(Data)
// by struc's sizeof tag during packing.
type IpcFrameV1 struct {
	ReqID      byte   `struc:"byte"`
	Command    byte   `struc:"byte"`
	DataLength int    `struc:"uint16,sizeof=Data"`
	Data       []byte `struc:"[]byte"`
}
// ToBytes converts an IpcFrameV1 to a byte slice using struc binary packing.
func (f *IpcFrameV1) ToBytes() ([]byte, error) {
	var buf bytes.Buffer
	err := struc.Pack(&buf, f)
	if err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// NewIpcMessageV1 creates a new IpcFrameV1 embedded in an IpcMessage.
// It packs the frame, validates that the packed frame fits the uint16
// FrameLength field, computes the CRC8 checksum over the packed bytes and
// wraps everything in an IpcMessage envelope (start byte 0x05, version 1).
func NewIpcMessageV1(requestID byte, command byte, data []byte) (*IpcMessage, error) {
	frame := &IpcFrameV1{ReqID: requestID, Command: command, DataLength: len(data), Data: data}
	frameBytes, err := frame.ToBytes()
	if err != nil {
		return nil, err
	}

	// BUG FIX: FrameLength is serialized as uint16 and describes FrameData,
	// which is the *packed* frame (header + payload). The previous check and
	// stored value used len(data) only, so a payload near 0xFFFF could
	// overflow the field once the frame header was added.
	frameLength := len(frameBytes)
	if frameLength > 0xFFFF {
		return nil, errors.New("Message is too big")
	}

	crc8 := crc8.Checksum(frameBytes, Crc8Table)
	message := &IpcMessage{StartByte: 0x05, FrameVersion: 0x01, FrameLength: frameLength, FrameData: frameBytes, CRC8: crc8}
	return message, nil
}
// IpcMessage is the container of an IPC frame with additional communication
// control data: a start byte, a frame version, the frame length (uint16, kept
// in sync with FrameData by struc), and a trailing CRC8 checksum.
type IpcMessage struct {
	StartByte    byte   `struc:"byte"`
	FrameVersion byte   `struc:"byte"`
	FrameLength  int    `struc:"uint16,sizeof=FrameData"`
	FrameData    []byte `struc:"[]byte"`
	CRC8         byte   `struc:"byte"`
}
// ToBytes converts an IpcMessage to a byte slice using struc binary packing.
func (m *IpcMessage) ToBytes() ([]byte, error) {
	var buf bytes.Buffer
	err := struc.Pack(&buf, m)
	if err != nil {
		return nil, err
	}
	return buf.Bytes(), nil
}
// BytesToIpcMessage converts a byte slice to an IpcMessage.
// NOTE(review): &msg passes a **IpcMessage to struc.Unpack (msg is already a
// pointer) — presumably struc dereferences via reflection; confirm, as the
// sibling decoder below does the same.
func BytesToIpcMessage(data []byte) (*IpcMessage, error) {
	buf := bytes.NewBuffer(data)
	msg := new(IpcMessage)
	err := struc.Unpack(buf, &msg)
	if err != nil {
		return nil, err
	}
	return msg, nil
}
// BytesToIpcFrameV1 converts a byte slice to an IpcFrameV1.
// NOTE(review): &frame passes a **IpcFrameV1 to struc.Unpack (frame is already
// a pointer); see the matching note on BytesToIpcMessage.
func BytesToIpcFrameV1(data []byte) (*IpcFrameV1, error) {
	buf := bytes.NewBuffer(data)
	frame := new(IpcFrameV1)
	err := struc.Unpack(buf, &frame)
	if err != nil {
		return nil, err
	}
	return frame, nil
}
|
from sklearn.preprocessing import LabelEncoder
# BUG FIX: train_test_split and RandomForestClassifier were used below without
# being imported.
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier

labelencoder = LabelEncoder()

# NOTE(review): X and y are not defined in this snippet — presumably loaded by
# surrounding code; confirm before running stand-alone.
x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)

# Encode the categorical feature in column 3.
# BUG FIX: the encoder must be fitted on the training split only and then
# reused on the test split; calling fit_transform on both could assign the
# same category different integer codes in train vs. test.
# NOTE(review): transform raises on categories unseen in training — decide how
# such rows should be handled.
x_train[:, 3] = labelencoder.fit_transform(x_train[:, 3].astype(str))
x_test[:, 3] = labelencoder.transform(x_test[:, 3].astype(str))

# Build the model
model = RandomForestClassifier(n_estimators=100, random_state=0)
model.fit(x_train, y_train)

# Evaluate performance
accuracy = model.score(x_test, y_test)
print("The model has an accuracy of:", accuracy)
|
import sys
class VersionControlSystem:
    """Toy in-memory simulation of a handful of git commands."""

    def __init__(self):
        # Files currently in the (simulated) staging area.
        self.staged_files = []
        # Files set aside by the (simulated) stash.
        self.stashed_files = []

    def add_files(self):
        """Simulate ``git add -A`` by staging a fixed set of files."""
        self.staged_files = ["file1", "file2", "file3"]
        print("Added all files to the staging area.")

    def commit_changes(self):
        """Simulate ``git commit``: clear the staging area if it is non-empty."""
        if not self.staged_files:
            print("No changes to commit.")
            return
        print("Committed changes in the staging area.")
        self.staged_files = []

    def stash_changes(self):
        """Simulate ``git stash -k -u``: stash staged plus untracked files."""
        pending = self.staged_files + ["untracked_file1", "untracked_file2"]
        self.stashed_files.extend(pending)
        self.staged_files = []
        print("Stashed changes, including staged and untracked files.")

    def run_program(self):
        """Read commands from stdin forever, dispatching to the handlers."""
        handlers = {
            "git add -A": self.add_files,
            "git commit": self.commit_changes,
            "git stash -k -u": self.stash_changes,
        }
        while True:
            command = input("Enter a command: ")
            if command == "sys.exit(1)":
                print("Exiting the program with exit code 1.")
                sys.exit(1)
            action = handlers.get(command)
            if action is not None:
                action()
            else:
                print("Invalid command. Please enter a valid git command.")
if __name__ == "__main__":
    # Interactive entry point: loops reading commands from stdin until the
    # user enters "sys.exit(1)".
    vcs = VersionControlSystem()
    vcs.run_program()
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .mixins import OpenMetricsScraperMixin
from ..base import AgentCheck
from ...errors import CheckException
class OpenMetricsBaseCheck(OpenMetricsScraperMixin, AgentCheck):
    """
    OpenMetricsBaseCheck is a class that helps instantiating PrometheusCheck only
    with YAML configurations. As each check has it own states it maintains a map
    of all checks so that the one corresponding to the instance is executed

    Minimal example configuration:

        instances:
        - prometheus_url: http://foobar/endpoint
          namespace: "foobar"
          metrics:
          - bar
          - foo
    """

    # Cap on the number of metrics a single instance may submit.
    DEFAULT_METRIC_LIMIT = 2000

    def __init__(self, name, init_config, agentConfig, instances=None, default_instances=None, default_namespace=None):
        super(OpenMetricsBaseCheck, self).__init__(name, init_config, agentConfig, instances=instances)
        # Maps prometheus_url -> scraper configuration for each instance.
        self.config_map = {}
        self.default_instances = {} if default_instances is None else default_instances
        self.default_namespace = default_namespace

        # pre-generate the scraper configurations
        if instances is not None:
            for instance in instances:
                self.get_scraper_config(instance)

    def check(self, instance):
        """Run one collection cycle for the given instance."""
        # Get the configuration for this specific instance
        scraper_config = self.get_scraper_config(instance)

        # We should be specifying metrics for checks that are vanilla OpenMetricsBaseCheck-based
        if not scraper_config['metrics_mapper']:
            raise CheckException("You have to collect at least one metric from the endpoint: {}".format(
                scraper_config['prometheus_url']))

        self.process(scraper_config)

    def get_scraper_config(self, instance):
        """Return (creating and caching if needed) the scraper configuration
        for the instance, keyed by its prometheus_url."""
        endpoint = instance.get('prometheus_url')

        if endpoint is None:
            raise CheckException("Unable to find prometheus URL in config file.")

        # If we've already created the corresponding scraper configuration, return it
        if endpoint in self.config_map:
            return self.config_map[endpoint]

        # Otherwise, we create the scraper configuration
        config = self.create_scraper_configuration(instance)

        # Add this configuration to the config_map
        self.config_map[endpoint] = config

        return config

    def _finalize_tags_to_submit(self, _tags, metric_name, val, metric, custom_tags=None, hostname=None):
        """
        Format the finalized tags
        This is generally a noop, but it can be used to change the tags before sending metrics
        """
        return _tags
|
import '../../src/ext/array/to-set';
describe('Array.toSet', () => {
  // Fixture: four records with distinct ids but a duplicated name ('Bob').
  const records = [
    { id: 3, name: 'Bob' },
    { id: 2, name: 'Char' },
    { id: 4, name: 'Alex' },
    { id: 1, name: 'Bob' },
  ];

  it('key = id', () => {
    // exercise: ids are all distinct, so every record yields a set entry
    const result = records.toSet((record) => record.id);

    // verify
    expect(result.size).toBe(4);
    expect(result.has(1)).toBeTruthy();
    expect(result.has(100)).toBeFalsy();
  });

  it('key = name', () => {
    // exercise: 'Bob' appears twice, so the set collapses it to one entry
    const result = records.toSet((record) => record.name);

    // verify
    expect(result.size).toBe(3);
    expect(result.has('Bob')).toBeTruthy();
    expect(result.has('Bub')).toBeFalsy();
  });
});
|
#include <chrono>
#include <iostream>
#include <string>
#include <cstring> // memcpy
#define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
#ifdef __clang__
// clang version 3.6.2 sets GNUC fields to version to 4.2.1
#define PRIVATE_OMP_H 40201
#else
// g++ (Ubuntu 5.2.1-22ubuntu2) 5.2.1 20151010
#define PRIVATE_OMP_H 50201
#endif
#if GCC_VERSION == PRIVATE_OMP_H
#include "/usr/lib/gcc/x86_64-linux-gnu/5/include/omp.h"
#else
#include <omp.h>
#endif
using namespace std;
// Duff's device: copy `count` floats from `from` to `to` using a manually
// 8-way unrolled loop. The switch jumps into the middle of the do/while body
// so the first (partial) pass consumes the `count % 8` leftover elements.
void DuffsDevice(float *from, float *to, int count)
{
    // Guard: the classic formulation below executes the do/while body at
    // least once, so count <= 0 would incorrectly copy 8 elements past the
    // requested range. Bail out early instead.
    if (count <= 0)
        return;

    int n=(count+7)/8;           // number of passes through the unrolled body
    switch(count%8){             // jump in to handle the remainder first
    case 0: do { *to++ = *from++;
    case 7:      *to++ = *from++;
    case 6:      *to++ = *from++;
    case 5:      *to++ = *from++;
    case 4:      *to++ = *from++;
    case 3:      *to++ = *from++;
    case 2:      *to++ = *from++;
    case 1:      *to++ = *from++;
            } while(--n>0);
    }
}
class Timer { // use C++11 std::chrono features to create a stop-watch timer class
    std::chrono::time_point<std::chrono::high_resolution_clock> start;
    std::chrono::time_point<std::chrono::high_resolution_clock> stop;
public:
    Timer() {}
    void Start() { start = std::chrono::high_resolution_clock::now(); }
    void Stop () { stop  = std::chrono::high_resolution_clock::now(); }

    // Elapsed time between Start() and Stop(), in the named unit.
    //
    // These use the standard 64-bit duration typedefs. The previous
    // implementation built duration<int, ...> types, whose 32-bit `int`
    // representation overflows after ~35 minutes at microsecond resolution
    // and ~2 seconds at nanosecond resolution.
    uint64_t usecs() {
        return std::chrono::duration_cast<std::chrono::microseconds>(stop - start).count();
    }
    uint64_t millisecs() {
        return std::chrono::duration_cast<std::chrono::milliseconds>(stop - start).count();
    }
    uint64_t microsecs() {
        return std::chrono::duration_cast<std::chrono::microseconds>(stop - start).count();
    }
    uint64_t nanosecs() {
        return std::chrono::duration_cast<std::chrono::nanoseconds>(stop - start).count();
    }
};
// 500,000,000 floats (~2 GB) per buffer; the literal is a double expression
// truncated to int. Kept at namespace scope so the arrays live in static
// storage rather than blowing the stack.
const int SIZE = .5*1000*1000*1000;
float from[SIZE];
float to[SIZE];
// Benchmark driver: times six ways of copying SIZE floats from `from` to `to`
// and prints the elapsed wall-clock time of each.
int main(int argc, char** argv)
{
    Timer t;
    // Stops the shared timer, prints `msg` plus the elapsed seconds, and
    // returns the elapsed microseconds.
    auto PrintTime = [&t] (std::string msg) -> uint64_t {
        t.Stop();
        uint64_t elapsedMicroseconds = t.microsecs();
        std::cout << msg << " " << 1e-6*elapsedMicroseconds << " secs\n";
        return elapsedMicroseconds;
    };
    t.Start();
    DuffsDevice(from, to, SIZE);
    PrintTime (std::string("Duff Device"));
    t.Start();
    memmove(to, from, sizeof(*from) * SIZE);
    PrintTime (std::string("memmove"));
    t.Start();
    // `from` and `to` never overlap here, so memcpy is valid alongside memmove.
    memcpy(to, from, sizeof(*from) * SIZE);
    PrintTime (std::string("memcpy"));
    t.Start();
    for(int i = 0; i < SIZE; i++) to[i] = from[i];
    PrintTime (std::string("for loop"));
    t.Start();
    // Same loop, parallelized across OpenMP threads.
    #pragma omp parallel for
    for(int i = 0; i < SIZE; i++) to[i] = from[i];
    PrintTime (std::string("omp for loop"));
    t.Start();
    // NOTE(review): std::copy is declared in <algorithm>, which is not included
    // directly here — this relies on a transitive include. Confirm on other toolchains.
    std::copy(from, from+SIZE, to);
    PrintTime (std::string("std::copy"));
}
/*
g++ -Wall -std=c++11 -ggdb -fopenmp DuffsDevice.cpp -o DuffsDevice -lrt -pthread
Duff Device 1818454 usecs
memmove 519961 usecs
memcpy 521334 usecs
for loop 2089405 usecs
omp for loop 447924 usecs
std::copy 513170 usecs
g++ -Ofast -Wall -std=c++11 -ggdb -fopenmp DuffsDevice.cpp -o DuffsDevice -lrt -pthread
Duff Device 1316391 usecs
memmove 542370 usecs
memcpy 540780 usecs
for loop 542649 usecs
omp for loop 338114 usecs
std::copy 500315 usecs
clang++ -Wall -std=c++11 -ggdb -fopenmp DuffsDevice.cpp -o DuffsDevice -lrt -pthread
Duff Device 2461728 usecs
memmove 610164 usecs
memcpy 600569 usecs
for loop 2225044 usecs
omp for loop 2242320 usecs
std::copy 582771 usecs
clang++ -Ofast -Wall -std=c++11 -ggdb -fopenmp DuffsDevice.cpp -o DuffsDevice -lrt -pthread
Duff Device 1228481 usecs
memmove 519989 usecs
memcpy 522559 usecs
for loop 533571 usecs
omp for loop 527695 usecs
std::copy 529786 usecs
*/
|
<filename>heima-leadnews-model/src/main/java/com/heima/model/behavior/pojos/ApForwardBehavior.java<gh_stars>0
package com.heima.model.behavior.pojos;
import com.heima.model.annotation.IdEncrypt;
import lombok.Data;
import java.util.Date;
@Data
public class ApForwardBehavior {
    // Primary key of this forward-behavior record.
    private Long id;
    // Identifier of the acting entry; serialized in encrypted form via @IdEncrypt.
    @IdEncrypt
    private Integer entryId;
    // Id of the forwarded article — presumably mutually exclusive with dynamicId; TODO confirm.
    private Integer articleId;
    // Id of the forwarded dynamic (feed post) — TODO confirm semantics against callers.
    private Integer dynamicId;
    // Timestamp at which the forward occurred.
    private Date createdTime;
}
|
#!/usr/bin/env bash
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
# Resolve the absolute directory this script lives in.
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
# get Flink config
. "$bin"/config.sh
# Fall back to the current user as the identity string for log-file naming.
if [ "$FLINK_IDENT_STRING" = "" ]; then
        FLINK_IDENT_STRING="$USER"
fi
# Cap the CLI client's heap; the heavy lifting happens on the cluster.
JVM_ARGS="$JVM_ARGS -Xmx512m"
# auxiliary function to construct a lightweight classpath for the
# Flink CLI client: joins every jar in FLINK_LIB_DIR with ':'
constructCLIClientClassPath() {
        for jarfile in $FLINK_LIB_DIR/*.jar ; do
                if [[ $CC_CLASSPATH = "" ]]; then
                        CC_CLASSPATH=$jarfile;
                else
                        CC_CLASSPATH=$CC_CLASSPATH:$jarfile
                fi
        done
        echo $CC_CLASSPATH
}
# manglePathList (from config.sh) adapts the separator for the host platform.
CC_CLASSPATH=`manglePathList $(constructCLIClientClassPath)`
# Per-user, per-host log file plus log4j/logback configuration for the YARN session.
log=$FLINK_LOG_DIR/flink-$FLINK_IDENT_STRING-yarn-session-$HOSTNAME.log
log_setting="-Dlog.file="$log" -Dlog4j.configuration=file:"$FLINK_CONF_DIR"/log4j-yarn-session.properties -Dlogback.configurationFile=file:"$FLINK_CONF_DIR"/logback-yarn.xml"
export FLINK_CONF_DIR
# Launch the YARN client, shipping the ship/ directory and the uberjar; all
# extra script arguments ($*) are forwarded to the client.
$JAVA_RUN $JVM_ARGS -classpath $CC_CLASSPATH:$HADOOP_CLASSPATH $log_setting org.apache.flink.yarn.Client -ship $bin/../ship/ -confDir $FLINK_CONF_DIR -j $FLINK_LIB_DIR/*yarn-uberjar.jar $*
|
#! /bin/bash
# SLURM batch script: one REXI finite-difference scalability benchmark on the
# LRZ mpp2 cluster — 14 MPI ranks, 14 OpenMP threads per rank, exclusive nodes.
#SBATCH -o /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res_run3/run_rexi_fd_par_m0512_t014_n0128_r0014_a1.txt
###SBATCH -e /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res_run3/run_rexi_fd_par_m0512_t014_n0128_r0014_a1.err
#SBATCH -J rexi_fd_par_m0512_t014_n0128_r0014_a1
#SBATCH --get-user-env
#SBATCH --clusters=mpp2
#SBATCH --ntasks=14
#SBATCH --cpus-per-task=14
#SBATCH --exclusive
#SBATCH --export=NONE
#SBATCH --time=03:00:00
#declare -x NUMA_BLOCK_ALLOC_VERBOSITY=1
# Pin threads compactly to cores and match OMP thread count to cpus-per-task.
declare -x KMP_AFFINITY="granularity=thread,compact,1,0"
declare -x OMP_NUM_THREADS=14
echo "OMP_NUM_THREADS=$OMP_NUM_THREADS"
echo
# Load the toolchain in a controlled order (Intel 16 compiler + Intel MPI 5.1,
# Anaconda python without MPI, gcc 5 runtime libs).
. /etc/profile.d/modules.sh
module unload gcc
module unload fftw
module unload python
module load python/2.7_anaconda_nompi
module unload intel
module load intel/16.0
module unload mpi.intel
module load mpi.intel/5.1
module load gcc/5
# Move to the SWEET working copy root and pick up its environment variables.
cd /home/hpc/pr63so/di69fol/workspace/SWEET_2015_12_26/benchmarks_performance/rexi_tests_lrz_freq_waves/2016_01_03_scalability_rexi_fd_high_res_run3
cd ../../../
. local_software/env_vars.sh
# force to use FFTW WISDOM data
declare -x SWEET_FFTW_LOAD_WISDOM_FROM_FILE="FFTW_WISDOM_nofreq_T0"
# Run the benchmark: 14 ranks, 2 per node, forwarding OMP_NUM_THREADS to each.
time -p mpiexec.hydra -genv OMP_NUM_THREADS 14 -envall -ppn 2 -n 14 ./build/rexi_fd_par_m_tno_a1 --initial-freq-x-mul=2.0 --initial-freq-y-mul=1.0 -f 1 -g 1 -H 1 -X 1 -Y 1 --compute-error 1 -t 50 -R 4 -C 0.3 -N 128 -U 0 -S 0 --use-specdiff-for-complex-array 0 --rexi-h 0.8 --timestepping-mode 1 --staggering 0 --rexi-m=512 -C -5.0
|
/*
* Copyright 2002-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.beans.factory.config;
import java.lang.reflect.Constructor;
import org.springframework.beans.BeansException;
import org.springframework.lang.Nullable;
/**
 * Extension of the {@code InstantiationAwareBeanPostProcessor} interface that adds a
 * callback for predicting the eventual type of a processed bean.
 *
 * <p>NOTE: this is a special-purpose interface, mainly intended for internal framework
 * use. In general, application-provided post-processors should simply implement the
 * plain {@code BeanPostProcessor} interface or derive from
 * {@code InstantiationAwareBeanPostProcessorAdapter}. New methods may be added to this
 * interface even in point releases.
 *
 * @author <NAME>
 * @see InstantiationAwareBeanPostProcessorAdapter
 * @since 2.0.3
 */
public interface SmartInstantiationAwareBeanPostProcessor extends InstantiationAwareBeanPostProcessor {
    /**
     * Predict the type of the bean that will ultimately be returned from this
     * processor's callbacks; returns {@code null} when no prediction can be made.
     * @param beanClass the raw class of the bean
     * @param beanName the name of the bean
     * @return the type of the bean, or {@code null} if not predictable
     * @throws org.springframework.beans.BeansException in case of errors
     */
    @Nullable
    default Class<?> predictBeanType(Class<?> beanClass, String beanName) throws BeansException {
        return null;
    }
    /**
     * Determine the candidate constructors to use for the given bean — useful when the
     * target class declares several constructors and one must be chosen.
     * Returning {@code null} lets the next post-processor's
     * determineCandidateConstructors run; otherwise the constructors returned by this
     * post-processor are used.
     * @param beanClass the raw class of the bean (never {@code null})
     * @param beanName the name of the bean
     * @return the candidate constructors, or {@code null} if none specified
     * @throws org.springframework.beans.BeansException in case of errors
     */
    @Nullable
    default Constructor<?>[] determineCandidateConstructors(Class<?> beanClass, String beanName)
            throws BeansException {
        return null;
    }
    /**
     * Obtain a reference for early access to the specified bean, typically to resolve
     * circular references. Only invoked for singleton beans.
     * @param bean the raw bean instance
     * @param beanName the name of the bean
     * @return the object to expose as bean reference
     * (typically with the passed-in bean instance as default)
     * @throws org.springframework.beans.BeansException in case of errors
     */
    default Object getEarlyBeanReference(Object bean, String beanName) throws BeansException {
        return bean;
    }
}
|
<gh_stars>10-100
package server
import (
"github.com/go-kratos/kratos/v2/log"
"github.com/go-kratos/kratos/v2/middleware/logging"
"github.com/go-kratos/kratos/v2/middleware/metrics"
"github.com/go-kratos/kratos/v2/middleware/recovery"
"github.com/go-kratos/kratos/v2/middleware/tracing"
"github.com/go-kratos/kratos/v2/middleware/validate"
"github.com/go-kratos/kratos/v2/transport/grpc"
api2 "github.com/goxiaoy/go-saas-kit/pkg/api"
"github.com/goxiaoy/go-saas-kit/pkg/authn/jwt"
"github.com/goxiaoy/go-saas-kit/pkg/authn/middleware/authentication"
"github.com/goxiaoy/go-saas-kit/pkg/conf"
"github.com/goxiaoy/go-saas-kit/pkg/server"
"github.com/goxiaoy/go-saas-kit/pkg/uow"
"github.com/goxiaoy/go-saas-kit/saas/api"
v1 "github.com/goxiaoy/go-saas-kit/saas/api/tenant/v1"
"github.com/goxiaoy/go-saas-kit/saas/private/service"
"github.com/goxiaoy/go-saas/common"
"github.com/goxiaoy/go-saas/common/http"
"github.com/goxiaoy/go-saas/kratos/saas"
uow2 "github.com/goxiaoy/uow"
)
// NewGRPCServer builds the saas-service gRPC server with the standard
// middleware chain and registers the TenantService implementation on it.
// The middleware order is significant: recovery runs outermost, then tracing,
// logging, metrics, request validation, authentication, multi-tenancy
// resolution, API defaults, and finally unit-of-work management.
func NewGRPCServer(c *conf.Services, tokenizer jwt.Tokenizer, ts common.TenantStore, uowMgr uow2.Manager, tenant *service.TenantService, mOpt *http.WebMultiTenancyOption, apiOpt *api2.Option, logger log.Logger) *grpc.Server {
	var opts = []grpc.ServerOption{
		grpc.Middleware(
			recovery.Recovery(),
			tracing.Server(),
			logging.Server(logger),
			metrics.Server(),
			validate.Validator(),
			authentication.ServerExtractAndAuth(tokenizer, logger),
			saas.Server(mOpt, nil, ts),
			api2.ServerMiddleware(apiOpt),
			uow.Uow(logger, uowMgr),
		),
	}
	// Apply service-wide gRPC settings (address, timeout, ...) from configuration.
	opts = server.PatchGrpcOpts(logger, opts, api.ServiceName, c)
	srv := grpc.NewServer(opts...)
	v1.RegisterTenantServiceServer(srv, tenant)
	return srv
}
|
def find_cluster_center(matrix):
    """Return the centroid (per-dimension mean) of a set of points.

    Generalized from the original 2-D-only implementation: points may have
    any (consistent) number of dimensions. For 2-D input the result is
    identical to before.

    Args:
        matrix: non-empty sequence of equal-length numeric sequences (points).

    Returns:
        A list of floats — the mean along each dimension.

    Raises:
        ValueError: if ``matrix`` is empty (previously a bare ZeroDivisionError).
    """
    if not matrix:
        raise ValueError("cannot compute the center of an empty point set")

    num_points = len(matrix)
    dims = len(matrix[0])
    # Accumulate coordinate sums per dimension.
    totals = [0.0] * dims
    for point in matrix:
        for d in range(dims):
            totals[d] += point[d]
    # Divide each accumulated sum by the point count to get the mean.
    return [total / num_points for total in totals]
# Demo: centroid of four 2-D points -> [4.0, 5.0]
print(find_cluster_center([[1, 2], [3, 4], [5, 6], [7, 8]]))
|
# Build a universal (device + simulator) Debug framework by merging the two
# per-platform builds with lipo and copying in the simulator Swift modules.
FRAMEWORK=$1
BUILD_DIR="."
OUTPUT="${BUILD_DIR}/Debug-Universal"

# Fail fast on a missing framework name: every later path would be malformed.
if [ -z "${FRAMEWORK}" ]; then
    echo "Usage: $0 <framework-name>" >&2
    exit 1
fi

# Abort on the first failed command rather than producing a half-built artifact.
set -e

framework=$FRAMEWORK

# Start from a clean output directory seeded with the device build.
rm -rf "${OUTPUT}"
mkdir -p "${OUTPUT}"
cp -R "${BUILD_DIR}/Debug-iphoneos/${framework}.framework" "${OUTPUT}/"

# Merge the device and simulator binaries into one fat binary.
lipo -create -output "${OUTPUT}/${framework}.framework/${framework}" "${BUILD_DIR}/Debug-iphoneos/${framework}.framework/${framework}" "${BUILD_DIR}/Debug-iphonesimulator/${framework}.framework/${framework}"

# Add the simulator's Swift module interfaces so both slices are importable.
cp -r "${BUILD_DIR}/Debug-iphonesimulator/${framework}.framework/Modules/${framework}.swiftmodule/" "${OUTPUT}/${framework}.framework/Modules/${framework}.swiftmodule"
|
<reponame>Rhobal/objectify<gh_stars>0
package com.googlecode.objectify.impl;
import com.google.cloud.datastore.KeyValue;
import com.google.cloud.datastore.ListValue;
import com.google.cloud.datastore.NullValue;
import com.google.cloud.datastore.Value;
import com.googlecode.objectify.Key;
import com.googlecode.objectify.Objectify;
import com.googlecode.objectify.ObjectifyFactory;
import com.googlecode.objectify.TxnType;
import com.googlecode.objectify.Work;
import com.googlecode.objectify.annotation.Entity;
import com.googlecode.objectify.cache.PendingFutures;
import com.googlecode.objectify.cmd.Deferred;
import com.googlecode.objectify.cmd.Deleter;
import com.googlecode.objectify.cmd.Loader;
import com.googlecode.objectify.cmd.Saver;
import com.googlecode.objectify.impl.translate.CreateContext;
import com.googlecode.objectify.impl.translate.SaveContext;
import com.googlecode.objectify.impl.translate.Translator;
import com.googlecode.objectify.impl.translate.TypeKey;
import com.googlecode.objectify.util.Closeable;
import com.googlecode.objectify.util.Values;
import lombok.Getter;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.List;
/**
* <p>Implementation of the Objectify interface. This is also suitable for subclassing; you
* can return your own subclass by overriding ObjectifyFactory.begin().</p>
*
* <p>Note we *always* use the AsyncDatastoreService
* methods that use transactions to avoid the confusion of implicit transactions.</p>
*
* @author <NAME> <<EMAIL>>
*/
public class ObjectifyImpl implements Objectify, Closeable
{
	/** The factory that produced us */
	protected final ObjectifyFactory factory;

	/** Immutable per-instance options (namespace, caching, mandatory transactions). */
	@Getter
	protected final ObjectifyOptions options;

	/** Holds the transaction state and session cache for this instance. */
	protected final Transactor transactor;

	/**
	 * Create a fresh, non-transactional Objectify with default options.
	 */
	public ObjectifyImpl(final ObjectifyFactory fact) {
		this.factory = fact;
		this.options = new ObjectifyOptions();
		this.transactor = new TransactorNo(this);
	}

	/** Create an Objectify with explicit options and a supplier-provided transactor. */
	public ObjectifyImpl(final ObjectifyFactory factory, final ObjectifyOptions options, final TransactorSupplier supplier) {
		this.factory = factory;
		this.options = options;
		this.transactor = supplier.get(this);
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#getFactory()
	 */
	public ObjectifyFactory factory() {
		return this.factory;
	}

	@Override
	public Objectify namespace(final String namespace) {
		return makeNew(options.namespace(namespace));
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#find()
	 */
	@Override
	public Loader load() {
		return new LoaderImpl(this);
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#put()
	 */
	@Override
	public Saver save() {
		return new SaverImpl(this);
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#delete()
	 */
	@Override
	public Deleter delete() {
		return new DeleterImpl(this);
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#defer()
	 */
	@Override
	public Deferred defer() {
		return new DeferredImpl(this);
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#deadline(java.lang.Double)
	 */
	@Override
	public Objectify deadline(final Double value) {
		// A no-op
		return this;
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#cache(boolean)
	 */
	@Override
	public Objectify cache(boolean value) {
		return makeNew(options.cache(value));
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#mandatoryTransactions(boolean)
	 */
	@Override
	public Objectify mandatoryTransactions(boolean value) {
		return makeNew(options.mandatoryTransactions(value));
	}

	/** Same transactor, different options */
	private ObjectifyImpl makeNew(final ObjectifyOptions opts) {
		return makeNew(opts, ofy -> transactor);
	}

	/** Same options, different transactor */
	ObjectifyImpl makeNew(final TransactorSupplier supplier) {
		return makeNew(options, supplier);
	}

	/** Can be overriden if you want to subclass the ObjectifyImpl */
	protected ObjectifyImpl makeNew(final ObjectifyOptions opts, final TransactorSupplier supplier) {
		return new ObjectifyImpl(factory, opts, supplier);
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#transactionless()
	 */
	@Deprecated
	@Override
	public Objectify transactionless() {
		return transactor.transactionless(this);
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#getTxn()
	 */
	public AsyncTransaction getTransaction() {
		return transactor.getTransaction();
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#execute(com.googlecode.objectify.TxnType, com.googlecode.objectify.Work)
	 */
	@Override
	public <R> R execute(final TxnType txnType, final Work<R> work) {
		return transactor.execute(this, txnType, work);
	}

	// Runnable overload: adapts the work to a Work<Void> and delegates.
	@Override
	public void execute(final TxnType txnType, final Runnable work) {
		execute(txnType, (Work<Void>)() -> {
			work.run();
			return null;
		});
	}

	@Override
	public <R> R transactionless(final Work<R> work) {
		return transactor.transactionless(this, work);
	}

	// Runnable overload: adapts the work to a Work<Void> and delegates.
	@Override
	public void transactionless(final Runnable work) {
		transactionless((Work<Void>)() -> {
			work.run();
			return null;
		});
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#transact(com.googlecode.objectify.Work)
	 */
	@Override
	public <R> R transact(Work<R> work) {
		return transactor.transact(this, work);
	}

	// Runnable overload: adapts the work to a Work<Void> and delegates.
	@Override
	public void transact(final Runnable work) {
		transact((Work<Void>)() -> {
			work.run();
			return null;
		});
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#transact(com.googlecode.objectify.Work)
	 */
	@Override
	public <R> R transactNew(Work<R> work) {
		return this.transactNew(Transactor.DEFAULT_TRY_LIMIT, work);
	}

	// Runnable overload: adapts the work to a Work<Void> and delegates.
	@Override
	public void transactNew(final Runnable work) {
		transactNew((Work<Void>)() -> {
			work.run();
			return null;
		});
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#transactNew(com.googlecode.objectify.Work)
	 */
	@Override
	public <R> R transactNew(int limitTries, Work<R> work) {
		return transactor.transactNew(this, limitTries, work);
	}

	// Runnable overload: adapts the work to a Work<Void> and delegates.
	@Override
	public void transactNew(int limitTries, final Runnable work) {
		transactNew(limitTries, (Work<Void>)() -> {
			work.run();
			return null;
		});
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#clear()
	 */
	@Override
	public void clear() {
		transactor.getSession().clear();
	}

	/**
	 * Low-level reader/writer appropriate for the current (possibly transactional) context.
	 */
	protected AsyncDatastoreReaderWriter asyncDatastore() {
		return transactor.asyncDatastore();
	}

	/**
	 * Use this once for one operation and then throw it away
	 * @return a fresh engine that handles fundamental datastore operations for saving and deleting
	 */
	protected WriteEngine createWriteEngine() {
		if (options.isMandatoryTransactions() && getTransaction() == null)
			throw new IllegalStateException("You have attempted save/delete outside of a transaction, but you have enabled ofy().mandatoryTransactions(true). Perhaps you wanted to start a transaction first?");

		return new WriteEngine(this, asyncDatastore(), transactor.getSession(), transactor.getDeferrer());
	}

	/**
	 * <p>Translates the value of a filter clause into something the datastore understands. Key<?> goes to native Key,
	 * entities go to native Key, java.sql.Date goes to java.util.Date, etc. It uses the same translation system
	 * that is used for standard entity fields, but does no checking to see if the value is appropriate for the field.</p>
	 *
	 * <p>Unrecognized types are returned as-is.</p>
	 *
	 * <p>A future version of this method might check for type validity.</p>
	 *
	 * @return whatever can be put into a filter clause.
	 */
	protected Value<?> makeFilterable(Object value) {
		if (value == null)
			return NullValue.of();

		// This is really quite a dilemma. We need to convert that value into something we can filter by, but we don't
		// really have a lot of information about it. We could use type information from the matched field, but there's
		// no guarantee that there is a field to check - it could be a typeless query or a query on an old property value.
		// The only real solution is to create a (non root!) translator on the fly. Even that is not straightforward,
		// because erasure wipes out any component type information in a collection. We don't know what the collection
		// contains.
		//
		// The answer: Check for collections explicitly. Create a separate translator for every item in the collection;
		// after all, it could be a heterogeneous list. This is not especially efficient but GAE only allows a handful of
		// items in a IN operation and at any rate processing will still be negligible compared to the cost of a query.

		// If this is an array, make life easier by turning it into a list first. Because of primitive
		// mismatching we can't trust Arrays.asList().
		if (value.getClass().isArray()) {
			final int len = Array.getLength(value);
			final List<Object> asList = new ArrayList<>(len);
			for (int i=0; i<len; i++)
				asList.add(Array.get(value, i));

			value = asList;
		}

		if (value instanceof Iterable) {
			final List<Value<?>> result = new ArrayList<>(50);	// hard limit is 30, but wth

			// Recurse: translate each element independently (the list may be heterogeneous).
			for (final Object obj: (Iterable<?>)value)
				result.add(makeFilterable(obj));

			return ListValue.of(result);
		} else {
			// Special case entity pojos that become keys
			if (value.getClass().isAnnotationPresent(Entity.class)) {
				return KeyValue.of(factory().keys().rawKeyOf(value));
			} else {
				// Run it through a translator
				final Translator<Object, Value<?>> translator = factory().getTranslators().get(new TypeKey<>(value.getClass()), new CreateContext(factory()), Path.root());

				// For some reason we have to force all values used as filters to be indexed
				return Values.index(translator.save(value, false, new SaveContext(), Path.root()), true);
			}
		}
	}

	/** The session cache currently in effect (transaction-scoped when in a transaction). */
	protected Session getSession() {
		return this.transactor.getSession();
	}

	/* (non-Javadoc)
	 * @see com.googlecode.objectify.Objectify#isLoaded(com.googlecode.objectify.Key)
	 */
	@Override
	public boolean isLoaded(final Key<?> key) {
		return transactor.getSession().contains(key);
	}

	@Override
	public void flush() {
		transactor.getDeferrer().flush();
	}

	/**
	 * Defer the saving of one entity. Updates the session cache with this new value.
	 */
	void deferSave(final Object entity) {
		transactor.getDeferrer().deferSave(entity);
	}

	/**
	 * Defer the deletion of one entity. Updates the session cache with this new value.
	 */
	void deferDelete(final Key<?> key) {
		transactor.getDeferrer().deferDelete(key);
	}

	/**
	 * Ends this transactional scope.
	 */
	@Override
	public void close() {
		// The order of these three operations is significant
		flush();
		PendingFutures.completeAllPendingFutures();
		factory().close(this);
	}
}
|
package cmu.xprize.comp_spelling;
import org.json.JSONObject;
import java.util.List;
import cmu.xprize.util.ILoadableObject;
import cmu.xprize.util.IScope;
import cmu.xprize.util.JSON_Helper;
/**
* Automatically generated w/ script by <NAME>.
*/
public class CSpelling_Data implements ILoadableObject{
    // json loadable fields — populated reflectively by loadJSON below.
    // Difficulty/progression level of this spelling item — TODO confirm exact semantics.
    public String level;
    // Task identifier — presumably selects the activity variant; verify against callers.
    public String task;
    // Layout identifier for the on-screen arrangement — TODO confirm.
    public String layout;
    // Image asset name shown for this item — TODO confirm.
    public String image;
    // Segments of the target word — presumably one letter/syllable per element; verify.
    public String[] word;
    // Audio asset name for this item — TODO confirm.
    public String sound;

    // Populate this object's public fields from the given JSON using the shared parser.
    @Override
    public void loadJSON(JSONObject jsonObj, IScope scope) {
        JSON_Helper.parseSelf(jsonObj, this, CClassMap.classMap, scope);
    }
}
|
#!/bin/bash
# Builds the signed macOS Taguette.app bundle with PyInstaller and packages it
# into a versioned .dmg. Requires the version number as the sole argument.

set -eux

# Validate the required version-number argument.
if [ "x${1:-}" = x ]; then
    echo "Missing version number" >&2
    exit 1
fi

VERSION="$1"

# Work from the repository root (two levels above this script).
cd "$(dirname "$0")/../.."

# Stage the PyInstaller entry point and spec file at the repo root.
cp scripts/dist/pyinstaller_entrypoint.py scripts/dist/macos/macos.spec .

poetry install

scripts/update_translations.sh

# Build the app bundle from scratch.
rm -rf build dist
pyinstaller macos.spec
rm -rf dist/taguette
cp scripts/dist/macos/taguette_console_wrapper dist/Taguette.app/Contents/MacOS/
# Bundle calibre inside the app's Resources — assumes it is installed locally.
cp -a /Applications/calibre.app dist/Taguette.app/Contents/Resources/
(cd dist/Taguette.app/Contents && patch -p0 <../../../scripts/dist/macos/plist.patch)
# Sign with the "Taguette" identity from the local keychain.
codesign --deep -s Taguette dist/Taguette.app

# Assemble the dmg contents: app, license, and an /Applications shortcut.
rm -f taguette.dmg
cp scripts/dist/LICENSE.txt dist/LICENSE.txt
ln -s /Applications dist/Applications
sleep 1
hdiutil create taguette.dmg -srcfolder dist -volname "Taguette $VERSION"
|
package org.hisp.dhis.aggregation.impl;
/*
* Copyright (c) 2004-2012, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.hisp.dhis.aggregation.AggregationService;
import org.hisp.dhis.aggregation.impl.cache.AggregationCache;
import org.hisp.dhis.aggregation.impl.dataelement.AbstractDataElementAggregation;
import org.hisp.dhis.aggregation.impl.indicator.IndicatorAggregation;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementCategoryOption;
import org.hisp.dhis.dataelement.DataElementCategoryOptionCombo;
import org.hisp.dhis.indicator.Indicator;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import static org.hisp.dhis.system.util.DateUtils.*;
import static org.hisp.dhis.system.util.MathUtils.getAverage;
import static org.hisp.dhis.system.util.MathUtils.getSum;
import static org.hisp.dhis.dataelement.DataElement.*;
/**
* @author <NAME>
* @version $Id: DefaultAggregationService.java 5116 2008-05-08 10:51:21Z larshelg $
*/
public class DefaultAggregationService
implements AggregationService
{
// -------------------------------------------------------------------------
// Dependencies
// -------------------------------------------------------------------------
private AbstractDataElementAggregation sumIntDataElementAggregation;
public void setSumIntDataElementAggregation( AbstractDataElementAggregation sumIntDataElementAggregation )
{
this.sumIntDataElementAggregation = sumIntDataElementAggregation;
}
private AbstractDataElementAggregation sumBoolDataElementAggregation;
public void setSumBoolDataElementAggregation( AbstractDataElementAggregation sumBoolDataElementAggregation )
{
this.sumBoolDataElementAggregation = sumBoolDataElementAggregation;
}
private AbstractDataElementAggregation averageIntDataElementAggregation;
public void setAverageIntDataElementAggregation( AbstractDataElementAggregation averageIntDataElementAggregation )
{
this.averageIntDataElementAggregation = averageIntDataElementAggregation;
}
private AbstractDataElementAggregation averageIntSingleValueAggregation;
public void setAverageIntSingleValueAggregation( AbstractDataElementAggregation averageIntSingleValueAggregation )
{
this.averageIntSingleValueAggregation = averageIntSingleValueAggregation;
}
private AbstractDataElementAggregation averageBoolDataElementAggregation;
public void setAverageBoolDataElementAggregation( AbstractDataElementAggregation averageBoolDataElementAggregation )
{
this.averageBoolDataElementAggregation = averageBoolDataElementAggregation;
}
private IndicatorAggregation indicatorAggregation;
public void setIndicatorAggregation( IndicatorAggregation indicatorAggregation )
{
this.indicatorAggregation = indicatorAggregation;
}
private AggregationCache aggregationCache;
public void setAggregationCache( AggregationCache aggregationCache )
{
this.aggregationCache = aggregationCache;
}
// -------------------------------------------------------------------------
// DataElement
// -------------------------------------------------------------------------
public Double getAggregatedDataValue( DataElement dataElement, DataElementCategoryOptionCombo optionCombo, Date startDate, Date endDate,
OrganisationUnit organisationUnit )
{
AbstractDataElementAggregation dataElementAggregation =
getInstance( dataElement.getType(), dataElement.getAggregationOperator(), startDate, endDate, dataElement );
return dataElementAggregation.getAggregatedValue( dataElement, optionCombo, startDate, endDate, organisationUnit );
}
/**
 * Returns the aggregated value of the given data element for the given
 * category option: the per-combo values of the option's category option
 * combos, summed or averaged according to the element's aggregation
 * operator.
 */
public Double getAggregatedDataValue( DataElement dataElement, Date startDate, Date endDate, OrganisationUnit organisationUnit, DataElementCategoryOption categoryOption )
{
    final List<Double> comboValues = new ArrayList<Double>();

    for ( DataElementCategoryOptionCombo optionCombo : categoryOption.getCategoryOptionCombos() )
    {
        comboValues.add( getAggregatedDataValue( dataElement, optionCombo, startDate, endDate, organisationUnit ) );
    }

    if ( dataElement.getAggregationOperator().equals( AGGREGATION_OPERATOR_SUM ) )
    {
        return getSum( comboValues );
    }

    return getAverage( comboValues );
}
// -------------------------------------------------------------------------
// Indicator
// -------------------------------------------------------------------------
/**
 * Returns the aggregated value of the given indicator for the given period
 * and organisation unit. Delegates to the injected IndicatorAggregation.
 */
public Double getAggregatedIndicatorValue( Indicator indicator, Date startDate, Date endDate,
    OrganisationUnit organisationUnit )
{
    return indicatorAggregation.getAggregatedIndicatorValue( indicator, startDate, endDate, organisationUnit );
}

/**
 * Returns the aggregated numerator value of the given indicator for the
 * given period and organisation unit.
 */
public Double getAggregatedNumeratorValue( Indicator indicator, Date startDate, Date endDate,
    OrganisationUnit organisationUnit )
{
    return indicatorAggregation.getAggregatedNumeratorValue( indicator, startDate, endDate, organisationUnit );
}

/**
 * Returns the aggregated denominator value of the given indicator for the
 * given period and organisation unit.
 */
public Double getAggregatedDenominatorValue( Indicator indicator, Date startDate, Date endDate,
    OrganisationUnit organisationUnit )
{
    return indicatorAggregation.getAggregatedDenominatorValue( indicator, startDate, endDate, organisationUnit );
}

/**
 * Clears the aggregation cache.
 */
public void clearCache()
{
    aggregationCache.clearCache();
}
// -------------------------------------------------------------------------
// Supportive methods
// -------------------------------------------------------------------------
/**
 * Resolves the aggregation strategy for the given data element value type
 * and aggregation operator.
 *
 * @throws IllegalArgumentException when no strategy matches the given
 *         value type / operator combination.
 */
private AbstractDataElementAggregation getInstance( String valueType, String aggregationOperator, Date startDate, Date endDate, DataElement dataElement )
{
    final boolean sum = aggregationOperator.equals( AGGREGATION_OPERATOR_SUM );
    final boolean average = aggregationOperator.equals( AGGREGATION_OPERATOR_AVERAGE );

    if ( valueType.equals( VALUE_TYPE_INT ) )
    {
        if ( sum )
        {
            return sumIntDataElementAggregation;
        }

        if ( average )
        {
            // When the element's collection frequency spans the whole
            // requested period, a single stored value suffices.
            return dataElement.getFrequencyOrder() >= getDaysInclusive( startDate, endDate ) ?
                averageIntSingleValueAggregation : averageIntDataElementAggregation;
        }
    }
    else if ( valueType.equals( VALUE_TYPE_BOOL ) )
    {
        if ( sum )
        {
            return sumBoolDataElementAggregation;
        }

        if ( average )
        {
            return averageBoolDataElementAggregation;
        }
    }

    throw new IllegalArgumentException( "Unsupported aggregation operator ("
        + aggregationOperator + ") or data element value type (" + valueType + ")" );
}
}
|
#!/bin/bash
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# bash run_standalone_train_ascend.sh
# Abort if the project root cannot be entered instead of launching training
# from the wrong directory.
cd .. || exit 1
# Run training in the background, capturing stdout and stderr in train.log.
python train.py > train.log 2>&1 &
|
<reponame>KaiVolland/geoext2<gh_stars>0
/*
* Copyright (c) 2008-2015 The Open Source Geospatial Foundation
*
* Published under the BSD license.
* See https://github.com/geoext/geoext2/blob/master/license.txt for the full
* text of the license.
*/
// Load the GeoExt classes this example uses before the page is ready.
Ext.require([
    'GeoExt.panel.Map',
    'GeoExt.slider.Zoom',
    'GeoExt.slider.Tip'
]);

// Kept global so the example can be inspected from the browser console.
var panel, slider;

Ext.onReady(function() {
    // create a map panel with an embedded slider
    panel = Ext.create('GeoExt.MapPanel', {
        title: "Map",
        renderTo: "map-container",
        height: 300,
        width: 400,
        map: {
            controls: [new OpenLayers.Control.Navigation()],
            maxResolution: 0.703125,
            // Disable animated zoom so the slider tracks zoom levels directly.
            zoomMethod: null
        },
        layers: [new OpenLayers.Layer.WMS(
            "Global Imagery",
            "http://maps.opengeo.org/geowebcache/service/wms",
            {layers: "bluemarble"}
        )],
        extent: [-5, 35, 15, 55],
        items: [{
            // Vertical slider rendered on top of the map at (x, y).
            xtype: "gx_zoomslider",
            vertical: true,
            height: 100,
            x: 10,
            y: 20,
            // Tooltip showing zoom level, resolution and scale for the
            // hovered thumb position.
            plugins: Ext.create('GeoExt.SliderTip', {
                getText: function(thumb) {
                    var slider = thumb.slider;
                    var out = '<div>Zoom Level: {0}</div>' +
                        '<div>Resolution: {1}</div>' +
                        '<div>Scale: 1 : {2}</div>';
                    return Ext.String.format(out, slider.getZoom(), slider.getResolution(), slider.getScale());
                }
            })
        }]
    });

    // create a separate slider bound to the map but displayed elsewhere
    slider = Ext.create('GeoExt.ZoomSlider', {
        map: panel.map,
        // Update the map while dragging rather than on release.
        aggressive: true,
        width: 200,
        plugins: Ext.create('GeoExt.SliderTip', {
            getText: function(thumb) {
                return Ext.String.format('<div>Zoom Level: {0}</div>', thumb.slider.getZoom());
            }
        }),
        renderTo: document.body
    });
});
|
#!/usr/bin/env bash
# Re-encode the sth-v1 raw frames into videos at 12 fps.
# Abort if the parent directory cannot be entered instead of running the
# build from the wrong location.
cd ../ || exit 1
python build_videos.py ../../data/sthv1/rawframes/ ../../data/sthv1/videos/ --fps 12 --level 1 --start-idx 1 --filename-tmpl '%05d'
echo "Encode videos"
cd sthv1/ || exit 1
|
package cfg;
import java.awt.EventQueue;
import javax.swing.JButton;
import javax.swing.JFrame;
/**
 * Small Swing entry point that opens a test window and prints the
 * application's base path (resolved via AppDefine) on startup.
 */
public class ConfigReaderMain {

    private JFrame frame;

    /**
     * Launch the application on the Swing event dispatch thread.
     */
    public static void main(String[] args) {
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    ConfigReaderMain app = new ConfigReaderMain();
                    app.frame.setVisible(true);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    }

    /**
     * Create the application.
     */
    public ConfigReaderMain() {
        initialize();
        System.out.println("我要打印当前路径啦");
        System.out.println(AppDefine.instance.getBasePath());
    }

    /**
     * Initialize the contents of the frame.
     */
    private void initialize() {
        frame = new JFrame();
        frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
        frame.setBounds(100, 100, 450, 300);
        frame.getContentPane().setLayout(null);

        JButton testButton = new JButton("测试");
        testButton.setBounds(170, 211, 93, 23);
        frame.getContentPane().add(testButton);
    }
}
|
// Auto-generated Gatsby browser-plugin registry: each entry pairs a plugin's
// gatsby-browser.js module with the options declared for it in gatsby-config.js.
module.exports = [{
    plugin: require('/Users/anjalidevakumar/portfolioweb2.0/node_modules/gatsby-plugin-google-analytics/gatsby-browser.js'),
    options: {"plugins":[],"trackingId":"UA-XXXXXXXX-X","anonymize":true},
},{
    plugin: require('/Users/anjalidevakumar/portfolioweb2.0/gatsby-browser.js'),
    options: {"plugins":[]},
}]
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.entity.webapp.jboss;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import java.io.File;
import java.net.URL;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import brooklyn.entity.basic.Entities;
import brooklyn.entity.proxying.EntitySpec;
import brooklyn.entity.webapp.AbstractWebAppFixtureIntegrationTest;
import brooklyn.entity.webapp.HttpsSslConfig;
import brooklyn.location.basic.LocalhostMachineProvisioningLocation;
import brooklyn.test.Asserts;
import brooklyn.test.HttpTestUtils;
import brooklyn.test.entity.TestApplication;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
/**
* TODO re-write this like WebAppIntegrationTest, inheriting, rather than being jboss7 specific.
*/
/**
 * Integration tests for JBoss7Server covering http-only, https-only and
 * mixed-protocol deployments plus port-offset configuration.
 *
 * TODO re-write this like WebAppIntegrationTest, inheriting, rather than being jboss7 specific.
 */
public class JBoss7ServerNonInheritingIntegrationTest {

    // URL of the hello-world.war fixture deployed by each test.
    private URL warUrl;
    // Localhost provisioning location the app is started against.
    private LocalhostMachineProvisioningLocation localhostProvisioningLocation;
    // Managed test application; torn down after every method.
    private TestApplication app;
    // Temporary keystore used by the https tests.
    private File keystoreFile;

    /**
     * Resolves the war fixture and creates a fresh managed app and a
     * temporary keystore before every test.
     */
    @BeforeMethod(alwaysRun=true)
    public void setUp() throws Exception {
        String warPath = "hello-world.war";
        warUrl = getClass().getClassLoader().getResource(warPath);

        localhostProvisioningLocation = new LocalhostMachineProvisioningLocation();
        app = TestApplication.Factory.newManagedInstanceForTests();
        keystoreFile = AbstractWebAppFixtureIntegrationTest.createTemporaryKeyStore("myname", "mypass");
    }

    /**
     * Destroys the managed app and deletes the temporary keystore.
     */
    @AfterMethod(alwaysRun=true)
    public void tearDown() throws Exception {
        if (app != null) Entities.destroyAll(app.getManagementContext());
        if (keystoreFile != null) keystoreFile.delete();
    }

    /**
     * http-only deployment: the war is reachable over http, https is not,
     * and the request/traffic sensors eventually publish values.
     */
    @Test(groups = "Integration")
    public void testHttp() throws Exception {
        final JBoss7Server server = app.createAndManageChild(EntitySpec.create(JBoss7Server.class)
            .configure("war", warUrl.toString()));

        app.start(ImmutableList.of(localhostProvisioningLocation));

        String httpUrl = "http://"+server.getAttribute(JBoss7Server.HOSTNAME)+":"+server.getAttribute(JBoss7Server.HTTP_PORT)+"/";
        String httpsUrl = "https://"+server.getAttribute(JBoss7Server.HOSTNAME)+":"+server.getAttribute(JBoss7Server.HTTPS_PORT)+"/";

        assertEquals(server.getAttribute(JBoss7Server.ROOT_URL).toLowerCase(), httpUrl.toLowerCase());

        HttpTestUtils.assertHttpStatusCodeEventuallyEquals(httpUrl, 200);
        HttpTestUtils.assertContentContainsText(httpUrl, "Hello");

        HttpTestUtils.assertUrlUnreachable(httpsUrl);

        Asserts.succeedsEventually(new Runnable() {
            public void run() {
                assertNotNull(server.getAttribute(JBoss7Server.REQUEST_COUNT));
                assertNotNull(server.getAttribute(JBoss7Server.ERROR_COUNT));
                assertNotNull(server.getAttribute(JBoss7Server.TOTAL_PROCESSING_TIME));
                assertNotNull(server.getAttribute(JBoss7Server.MAX_PROCESSING_TIME));
                assertNotNull(server.getAttribute(JBoss7Server.BYTES_RECEIVED));
                assertNotNull(server.getAttribute(JBoss7Server.BYTES_SENT));
            }});
    }

    /**
     * https-only deployment: the root URL is the https one and http is
     * unreachable. The https content assertions are commented out pending
     * an HttpTestUtils fix (see inline FIXMEs).
     */
    @Test(groups = {"Integration"})
    public void testHttps() throws Exception {
        final JBoss7Server server = app.createAndManageChild(EntitySpec.create(JBoss7Server.class)
            .configure("war", warUrl.toString())
            .configure(JBoss7Server.ENABLED_PROTOCOLS, ImmutableSet.of("https"))
            .configure(JBoss7Server.HTTPS_SSL_CONFIG, new HttpsSslConfig().keyAlias("myname").keystorePassword("<PASSWORD>").keystoreUrl(keystoreFile.getAbsolutePath())));

        app.start(ImmutableList.of(localhostProvisioningLocation));

        String httpUrl = "http://"+server.getAttribute(JBoss7Server.HOSTNAME)+":"+server.getAttribute(JBoss7Server.HTTP_PORT)+"/";
        String httpsUrl = "https://"+server.getAttribute(JBoss7Server.HOSTNAME)+":"+server.getAttribute(JBoss7Server.HTTPS_PORT)+"/";

        assertEquals(server.getAttribute(JBoss7Server.ROOT_URL).toLowerCase(), httpsUrl.toLowerCase());

        HttpTestUtils.assertUrlUnreachable(httpUrl);

        // FIXME HttpTestUtils isn't coping with https, giving
        // javax.net.ssl.SSLHandshakeException: sun.security.validator.ValidatorException: PKIX path building failed: sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target
        // Uncomment this as soon as HttpTestUtils is fixed
        // Manual inspection with breakpoint and web-browser confirmed this was working
        // HttpTestUtils.assertHttpStatusCodeEventuallyEquals(httpsUrl, 200);
        // HttpTestUtils.assertContentContainsText(httpsUrl, "Hello");

        // FIXME querying for http://localhost:9990/management/subsystem/web/connector/http/read-resource?include-runtime=true
        // gives 500 when http is disabled, but if miss out "?include-runtime=true" then it works fine.
        // So not getting these metrics!
        // TestUtils.executeUntilSucceeds(new Runnable() {
        //     public void run() {
        //         assertNotNull(server.getAttribute(JBoss7Server.REQUEST_COUNT));
        //         assertNotNull(server.getAttribute(JBoss7Server.ERROR_COUNT));
        //         assertNotNull(server.getAttribute(JBoss7Server.TOTAL_PROCESSING_TIME));
        //         assertNotNull(server.getAttribute(JBoss7Server.MAX_PROCESSING_TIME));
        //         assertNotNull(server.getAttribute(JBoss7Server.BYTES_RECEIVED));
        //         assertNotNull(server.getAttribute(JBoss7Server.BYTES_SENT));
        //     }});
    }

    /**
     * Both protocols enabled: the root URL prefers https, http serves the
     * war, and the sensors eventually publish values.
     */
    @Test(groups = {"Integration"})
    public void testHttpAndHttps() throws Exception {
        final JBoss7Server server = app.createAndManageChild(EntitySpec.create(JBoss7Server.class)
            .configure("war", warUrl.toString())
            .configure(JBoss7Server.ENABLED_PROTOCOLS, ImmutableSet.of("http", "https"))
            .configure(JBoss7Server.HTTPS_SSL_CONFIG, new HttpsSslConfig().keyAlias("myname").keystorePassword("<PASSWORD>").keystoreUrl(keystoreFile.getAbsolutePath())));

        app.start(ImmutableList.of(localhostProvisioningLocation));

        String httpUrl = "http://"+server.getAttribute(JBoss7Server.HOSTNAME)+":"+server.getAttribute(JBoss7Server.HTTP_PORT)+"/";
        String httpsUrl = "https://"+server.getAttribute(JBoss7Server.HOSTNAME)+":"+server.getAttribute(JBoss7Server.HTTPS_PORT)+"/";

        assertEquals(server.getAttribute(JBoss7Server.ROOT_URL).toLowerCase(), httpsUrl.toLowerCase());

        HttpTestUtils.assertHttpStatusCodeEventuallyEquals(httpUrl, 200);
        HttpTestUtils.assertContentContainsText(httpUrl, "Hello");

        // FIXME HttpTestUtils isn't coping with https, giving
        // javax.net.ssl.SSLHandshakeException: sun.security.validator.ValidatorException: PKIX path building failed: sun.security.provider.certpath.SunCertPathBuilderException: unable to find valid certification path to requested target
        // Uncomment this as soon as HttpTestUtils is fixed
        // Manual inspection with breakpoint and web-browser confirmed this was working
        //HttpTestUtils.assertHttpStatusCodeEventuallyEquals(httpsUrl, 200);
        //HttpTestUtils.assertContentContainsText(httpsUrl, "Hello");

        Asserts.succeedsEventually(new Runnable() {
            public void run() {
                assertNotNull(server.getAttribute(JBoss7Server.REQUEST_COUNT));
                assertNotNull(server.getAttribute(JBoss7Server.ERROR_COUNT));
                assertNotNull(server.getAttribute(JBoss7Server.TOTAL_PROCESSING_TIME));
                assertNotNull(server.getAttribute(JBoss7Server.MAX_PROCESSING_TIME));
                assertNotNull(server.getAttribute(JBoss7Server.BYTES_RECEIVED));
                assertNotNull(server.getAttribute(JBoss7Server.BYTES_SENT));
            }});
    }

    /**
     * Two servers with different port increments can run side by side and
     * both eventually publish the BYTES_SENT sensor.
     */
    @Test(groups = {"Integration"})
    public void testUsingPortOffsets() throws Exception {
        final JBoss7Server serverA = app.createAndManageChild(EntitySpec.create(JBoss7Server.class)
            .configure("portIncrement", 100));
        final JBoss7Server serverB = app.createAndManageChild(EntitySpec.create(JBoss7Server.class)
            .configure("portIncrement", 200));
        app.start(ImmutableList.of(localhostProvisioningLocation));

        Asserts.succeedsEventually(new Runnable() {
            public void run() {
                assertNotNull(serverA.getAttribute(JBoss7Server.BYTES_SENT));
                assertNotNull(serverB.getAttribute(JBoss7Server.BYTES_SENT));
            }});
    }
}
|
#######################################################################################
# Configure the parameters below.
COLMAP_FOLDER_PATH=/home/mm/ARWorkspace/colmap/
PROJECT_PATH=/data/largescene/C11_base/
SRC_PATH=$PROJECT_PATH/videos
DST_PATH=$PROJECT_PATH/images
SMALL_DST_PATH=$PROJECT_PATH/images_ds
INTERVAL=15
SHORT_SIZE=640
IMAGE_LIST_SUFFIX=''
MULTITHREAD=true
#######################################################################################

# Abort instead of running the extraction from the wrong directory.
cd $COLMAP_FOLDER_PATH/scripts/sfm_toolkits/ezxr_sfm/colmap_process_loop_folders/ || exit 1

# Build the argument list once instead of duplicating the full command for
# every suffix/multithread combination. Quoting "$IMAGE_LIST_SUFFIX" passes
# an empty argument when the suffix is unset, matching the previous
# --imageListSuffix '' behaviour.
ARGS=(
    --srcpath "$SRC_PATH"
    --dstpath "$DST_PATH"
    --smalldstpath "$SMALL_DST_PATH"
    --interval "$INTERVAL"
    --shortsize "$SHORT_SIZE"
    --imageListSuffix "$IMAGE_LIST_SUFFIX"
)
if $MULTITHREAD; then
    ARGS+=( --multithread )
fi

python videos_to_images_with_lists.py "${ARGS[@]}"
|
/**
 * Minimal separate-chaining hash table mapping String keys to Object values.
 * Keys are distributed over a fixed number of buckets by their hash code;
 * collisions within a bucket are resolved by a linear scan.
 */
class HashTable {

    private int buckets;
    private List<HashNode> nodes;

    /**
     * @param buckets number of buckets to allocate (assumed positive).
     */
    public HashTable(int buckets) {
        this.buckets = buckets;
        nodes = new ArrayList<HashNode>();
        for (int i = 0; i < buckets; i++) {
            nodes.add(new HashNode());
        }
    }

    /**
     * Associates the value with the key. If the key is already present its
     * value is replaced. (Previously a duplicate entry was appended, so the
     * stale first value kept being returned by get and the bucket grew
     * without bound.)
     */
    public void add(String key, Object value) {
        nodes.get(bucketFor(key)).add(key, value);
    }

    /**
     * Returns the value mapped to the key, or null if the key is absent.
     */
    public Object get(String key) {
        return nodes.get(bucketFor(key)).get(key);
    }

    // Maps a key to a bucket index in [0, buckets). The modulo is applied
    // before Math.abs, so the result's magnitude is always < buckets and
    // taking the absolute value is safe even for Integer.MIN_VALUE hashes.
    private int bucketFor(String key) {
        return Math.abs(key.hashCode() % buckets);
    }
}

/**
 * One bucket: a list of key/value pairs searched linearly.
 */
class HashNode {

    private List<KeyValuePair> data;

    public HashNode() {
        data = new ArrayList<KeyValuePair>();
    }

    /**
     * Adds the pair, replacing the value of an existing entry with the
     * same key instead of appending a duplicate.
     */
    public void add(String key, Object value) {
        for (KeyValuePair pair : data) {
            if (pair.key.equals(key)) {
                pair.value = value;
                return;
            }
        }
        data.add(new KeyValuePair(key, value));
    }

    /**
     * Returns the value stored under the key, or null when not found.
     */
    public Object get(String key) {
        for (KeyValuePair pair : data) {
            if (pair.key.equals(key)) {
                return pair.value;
            }
        }
        return null;
    }
}

/**
 * Simple mutable key/value holder used by HashNode.
 */
class KeyValuePair {
    public String key;
    public Object value;

    public KeyValuePair(String key, Object value) {
        this.key = key;
        this.value = value;
    }
}
|
# Configure host firewall rules unless explicitly disabled via the
# node['cloudless-box']['firewall'] attribute.
if node['cloudless-box']['firewall'] != false
  include_recipe 'iptables::default'

  # Enable inbound HTTP and SSH; each rule's template lives under
  # templates/firewall/<rule>.erb. The previous "#{rule}" was a redundant
  # interpolation of an already-String value.
  %w{http ssh}.each do |rule|
    iptables_rule rule do
      source "firewall/#{rule}.erb"
      action :enable
    end
  end
end
|
# Build the prices crate and copy the debug binary into the shared
# executables directory. Abort if the crate directory is missing so a
# stale binary is never copied.
cd prices || exit 1
cargo build
cp target/debug/prices ../executables/prices
|
# The JSON constant is referenced below; require it explicitly so this file
# does not depend on another file having loaded the stdlib first (require is
# idempotent, so this is safe even if it is already loaded elsewhere).
require 'json'

module Leafy
  module Coder
    # Default serialization backend used by Leafy coders.
    Default = JSON
  end
end
|
<gh_stars>0
//
// SAMControllerTool.h
// SamosWallet
//
// Created by zys on 2018/8/29.
// Copyright © 2018年 zys. All rights reserved.
//
/**
 Project-wide helper for view-controller related operations
 (root selection, scheme-based navigation, current-VC lookup).
 */
#import <Foundation/Foundation.h>

@interface SAMControllerTool : NSObject

/**
 Choose and install the root view controller:
 1. On first install, show the blue wallet-management page (wallet password alert).
 2. After creating or importing a wallet, show the guide page on first display.
 3. When a password is set and a wallet was created or imported, show the tab bar controller.
 */
+ (void)chooseRootVC;

/**
 Navigate to a view controller via a URL scheme.

 @param scheme destination scheme string
 */
+ (void)chooseVCWithScheme:(NSString *)scheme;

/**
 Get the currently displayed view controller.

 @return the view controller currently on screen
 */
+ (UIViewController *)currentVC;

@end
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.