text stringlengths 1 1.05M |
|---|
<filename>src/components/Cards/Photo/PhotoCard.styled.js
import styled from "styled-components"
import { default as component } from "./PhotoCard"
const PhotoCard = styled(component)`
max-width: 45%;
height: auto;
transition: opacity 0.4s;
transition: max-width 0.4s;
@media (max-width: 600px) {
max-width: 100%;
.details {
margin-bottom: 100%;
}
}
@media (min-width: ${props => props.theme.desktop}) {
&:hover {
max-width: 50%;
}
.details {
position: relative;
top: -100px;
transition 0.4s;
}
&:hover {
.details {
top: 0px;
}
}
}
.image {
z-index: 1;
}
`
export default PhotoCard
|
"use strict";
const expect = require("chai").expect;
const camelCase = require("../camel-case");
const unCamelCase = require("../un-camel-case");
const data = {
borderTopLeftRadius: "border-top-left-radius",
backgroundImage: "background-image",
xwebkitAnimation: "-xwebkit-animation",
webkitAnimation: "-webkit-animation",
epubAnimation: "-epub-animation",
mozAnimation: "-moz-animation",
msAnimation: "-ms-animation",
OAnimation: "-o-animation",
XAnimation: "-x-animation",
webkitApp: "-webkit-app",
onChange: "on-change",
OnChange: "-on-change",
overflowWrap: "overflow-wrap",
overflowX: "overflow-x",
zIndex: "z-index",
"::selection": "::selection",
"::mozSelection": "::-moz-selection",
"::mozSelection,::selection": "::-moz-selection,::selection",
"--margin-top": "--margin-top",
"margin--top": "margin--top",
"height: webkitCalc(2vh-20px);": "height: -webkit-calc(2vh-20px);",
"calc(2vh-20px)": "calc(2vh-20px)",
"calc(2vh--20px)": "calc(2vh--20px)",
};
const testCases = Object.keys(data).map(prop => {
return {
camel: prop,
unCamel: data[prop],
};
});
const symbols = Array.from("@*:;\n,(){} ");
describe("camelCase", () => {
testCases.forEach(testCase => {
it(`${testCase.unCamel} => ${testCase.camel}`, () => {
expect(camelCase(testCase.unCamel)).to.equal(testCase.camel);
});
});
describe("symbols", () => {
symbols.forEach(symbol => {
it(JSON.stringify(symbol), () => {
expect(camelCase(testCases.map(testCase => testCase.unCamel).join(symbol))).to.equal(testCases.map(testCase => testCase.camel).join(symbol));
});
});
});
});
describe("unCamelCase", () => {
testCases.forEach(testCase => {
it(`${testCase.camel} => ${testCase.unCamel}`, () => {
expect(unCamelCase(testCase.camel)).to.equal(testCase.unCamel);
});
});
describe("symbols", () => {
symbols.forEach(symbol => {
it(JSON.stringify(symbol), () => {
expect(unCamelCase(testCases.map(testCase => testCase.camel).join(symbol))).to.equal(testCases.map(testCase => testCase.unCamel).join(symbol));
});
});
});
});
|
<reponame>cityofaustin/transportation-data-utils
"""
Python client for logging job results via a postgrest interface.
This class logs the outcome of scripted tasks in a pre-configured database whose API
is made available via postgrest (https://postgrest.com/).
The CONFIG and instance parameters must match the job database schema.
"""
import arrow
import requests
# Mapping from logical field roles to the column names used by the job
# database schema; Job.__init__ copies these onto instance attributes.
CONFIG = {
    "destination_field": "destination",
    "end_date_field": "end_date",
    "id_field": "id",
    "message_field": "message",
    "name_field": "name",
    "records_processed_field": "records_processed",
    "source_field": "source",
    "start_date_field": "start_date",
    "status_field": "status",
}
class Job(object):
    """
    Client for the job-control API served via PostgREST.

    Each instance represents one named job: ``start()`` inserts an
    'in_progress' row, ``result()`` closes it out, ``most_recent()``
    queries past runs and ``delete()`` removes all rows for the job name.
    """

    def __init__(self, auth=None, destination=None, name=None, url=None, source=None):
        # API bearer token, job identity, and the PostgREST endpoint URL.
        self.auth = auth
        self.destination = destination
        self.name = name
        self.url = url
        self.source = source
        # Column names, taken from the module-level CONFIG schema mapping.
        self.destination_field = CONFIG["destination_field"]
        self.end_date_field = CONFIG["end_date_field"]
        self.id_field = CONFIG["id_field"]
        self.name_field = CONFIG["name_field"]
        self.message_field = CONFIG["message_field"]
        self.records_processed_field = CONFIG["records_processed_field"]
        self.source_field = CONFIG["source_field"]
        self.start_date_field = CONFIG["start_date_field"]
        self.status_field = CONFIG["status_field"]
        # Most recent record returned by the API (set by start()/result()).
        self.data = None

    def most_recent(self, status="success"):
        """Return the *start* date (unix timestamp) of the most recently
        ended run of this job with the given status, or None when no
        matching run exists.

        NOTE: rows are ordered by end date, but the value returned is the
        run's start date (the original docstring claimed "end date", which
        did not match the code).
        """
        url = f"{self.url}?{self.name_field}=eq.{self.name}&{self.status_field}=eq.{status}&order={self.end_date_field}.desc&limit=1"
        res = self._query("SELECT", url)
        try:
            return arrow.get(res[0][self.start_date_field]).timestamp
        except (IndexError, KeyError):
            # No rows returned, or the row lacks a start date.
            return None

    def start(self):
        """Insert a new 'in_progress' row for this job; cache and return it."""
        data = {
            self.name_field: self.name,
            self.start_date_field: arrow.now().format(),
            self.end_date_field: None,
            self.status_field: "in_progress",
            self.source_field: self.source,
            self.destination_field: self.destination,
        }
        self.data = self._query("INSERT", self.url, data=data)[0]
        return self.data

    def result(self, _result, message=None, records_processed=0):
        """Close out the running job with a 'success' or 'error' status.

        Raises:
            Exception: if _result is not 'success' or 'error'.
        """
        if _result not in ["success", "error"]:
            raise Exception("Unknown result specified.")
        data = {
            self.id_field: self.data[self.id_field],
            self.end_date_field: arrow.now().format(),
            self.status_field: _result,
            self.message_field: message,
            self.records_processed_field: records_processed,
        }
        self.data = self._query("UPDATE", self.url, data=data)[0]
        return self.data

    def delete(self):
        """Interactively delete ALL job entries with this job's name.

        Raises:
            Exception: if the user does not confirm with 'Yes'.
        """
        print(
            f"""
WARNING: You are about to delete all jobs with name {self.name}.
"""
        )
        answer = input("Type 'Yes' to continue: ")
        if answer.upper() == "YES":
            url = f"{self.url}?{self.name_field}=eq.{self.name}"
            return self._query("DELETE", url)
        else:
            raise Exception("Delete aborted.")

    def _query(self, method, url, data=None):
        """
        Private method to execute API calls.

        Returns the response dict, which (if successful) is an array
        representation of the affected records (due to the header param
        return=representation).

        Raises:
            Exception: for an unknown method.
            requests.HTTPError: for non-2xx responses.
        """
        headers = {
            "Authorization": f"Bearer {self.auth}",
            "Content-Type": "application/json",
            "Prefer": "return=representation",  # return entire record json in response
        }
        if method.upper() == "SELECT":
            res = requests.get(url, headers=headers)
        elif method.upper() == "INSERT":
            res = requests.post(url, headers=headers, json=data)
        elif method.upper() == "UPDATE":
            # require ID match to prevent unintentional batch update
            _id = data.pop(self.id_field)
            url = f"{url}?id=eq.{_id}"
            res = requests.patch(url, headers=headers, json=data)
        elif method.upper() == "DELETE":
            # BUG FIX: was `requests.delete(self.url, ...)`, which ignored the
            # name-filtered url built by delete() and would have deleted EVERY
            # row in the job table.
            res = requests.delete(url, headers=headers)
        else:
            raise Exception("Unknown method requested.")
        res.raise_for_status()
        return res.json()
# Tests
if __name__ == "__main__":
    import _setpath
    from config.secrets import *

    # BUG FIX: the original passed "test_job" etc. positionally, so the first
    # positional argument landed in `auth` while `auth=` was also given as a
    # keyword -> TypeError ("multiple values for argument 'auth'").  All
    # arguments are now passed by keyword, matching Job.__init__'s signature.
    job = Job(
        name="test_job",
        url=JOB_DB_API_URL,
        source="test_source",
        destination="test_dest",
        auth=JOB_DB_API_TOKEN,
    )
    most_recent = job.most_recent()
    print(f"most recent: {most_recent}")
    print(job.start())
    print(job.result("success"))

    job = Job(
        name="test_job",
        url=JOB_DB_API_URL,
        source="test_source",
        destination="test_dest",
        auth=JOB_DB_API_TOKEN,
    )
    most_recent = job.most_recent()
    print(f"most recent: {most_recent}")
    print(job.start())
    print(job.result("error", message="Something went wrong!"))

    job = Job(
        name="test_job",
        url=JOB_DB_API_URL,
        source="test_source",
        destination="test_dest",
        auth=JOB_DB_API_TOKEN,
    )
    job.delete()
#!/usr/bin/env node
"use strict";

const { minifyFile } = require("../lib/index.js");

// Minify every file path passed on the command line, in parallel; exit
// non-zero if any of them fails.
const files = process.argv.slice(2);
Promise.all(files.map((file) => minifyFile(file))).catch((error) => {
  console.error(error);
  process.exit(1);
});
|
# This is a little functionality module that I'm going to call:
### The Columnizer!!! ###
# The whole point here is to take a list and display it in a nicely ordered
# column format. Based upon Kuros' code from the original Slither base.

def columnize(player, list, columns, sorted = True, padding = 2):
    """Write the items of ``list`` to ``player`` in aligned columns.

    Args:
        player: object exposing writePlain()/writeWithPrompt() output methods.
        list: list of strings to display (sorted in place when ``sorted``).
        columns: number of columns per row.
        sorted: sort the list alphabetically first (default True).
        padding: spaces appended after the longest item (default 2).
    """
    # Start out with a blank line for some nice whitespace padding.
    player.writePlain("\r\n")
    # Nothing to show for an empty list.  (The original abused
    # ``assert list[0] != 0`` inside a bare try/except for this check.)
    if not list:
        return
    # Display the list alphabetically if requested.
    if sorted:
        # NOTE: sorts the caller's list in place, as the original did.
        list.sort()
    # Column width: longest item plus padding.  BUG FIX: the original only
    # grew the width when len(item) exceeded the previous width *including*
    # padding, so a near-longest item appearing later was under-padded and
    # columns could fall out of alignment.
    width = max(len(item) for item in list) + padding
    # 1-indexed position within the current row (easier than fudging the
    # zero-indexed equivalent against the column count).
    column = 1
    for item in list:
        if column == columns:
            # Reached the column limit: finish the row with a line break.
            player.writePlain(item + '\r\n')
            column = 1
        else:
            player.writePlain(item)
            player.writePlain(" " * (width - len(item)))
            column += 1
    player.writeWithPrompt("\r\n")
<filename>public/js/search.js
$(function () {
  // Highlight the wrapper of whichever input currently has focus.
  $('.form-holder').delegate("input", "focus", function () {
    $('.form-holder').removeClass("active");
    $(this).parent().addClass("active");
  });
});

$(document).ready(function () {
  const Toast = Swal.mixin({
    toast: true,
    position: 'top-end',
    showConfirmButton: false,
    timer: 3000
  });

  $("#register").click(function (e) {
    e.preventDefault();
    // When users click the register button, send a POST with the entered
    // data; on success show the client's info, otherwise show an error.
    $.post("/clients/getClient", { cc: $("#cc").val() })
      .done(function (res) {
        if (res) {
          Toast.fire({
            type: 'success',
            title: 'Se ha encontrado el usuario'
          });
          const divContainer = document.getElementById("products");
          console.log(res);
          const toShow = res[0].products;
          console.log(toShow);
          $('#name').val(res[0].name);
          $('#date').val(res[0].createdAt);
          divContainer.innerHTML = "";
          for (const product in res[0].products) {
            divContainer.innerHTML += '<input type="text" class="form-control" style="margin-left: auto;margin-right: auto;" value="' + toShow[product] + '" disabled></input>';
          }
        } else {
          Toast.fire({
            type: 'error',
            title: 'Ha ocurrido un error...'
          });
        }
      })
      // BUG FIX: the original had a dangling `error: (error) => {...}` object
      // property floating after the .done() call — a syntax error that kept
      // the whole file from parsing.  The handler belongs on .fail().
      .fail(function (error) {
        console.log(JSON.stringify(error));
      });
  });
});
|
export class MicroClassVideo {
title: string;
description: string;
videoSrc: string;
constructor(private params? : any) {
if (params) {
this.title = params['title'];
this.description = params['description'];
this.videoSrc = params['videoSrc'];
}
}
} |
#!/bin/bash
set -euo pipefail

# Convert every OpenStreetMap PBF extract in /data/openstreetmap to the
# 0sv (polylines) format, concatenated into /data/polylines/extract.0sv.

# ensure data subdirectory exists
mkdir -p /data/polylines/

# enumerate a list of PBF files (nullglob: an empty match yields an empty array)
shopt -s nullglob
PBF_FILES=(/data/openstreetmap/*.pbf)

# ensure there is at least one PBF file in the osm directory
if [[ ${#PBF_FILES[@]} -eq 0 ]]; then
  # BUG FIX: the original used `2>&1 echo ...`, which redirects the command's
  # (empty) stderr to stdout; `>&2` sends the message to stderr as intended.
  echo 'no *.pbf files found in /data/openstreetmap directory' >&2
  exit 1
fi

# truncate polylines file
echo '' > /data/polylines/extract.0sv

# iterate over all PBF files in the osm directory
for PBF_FILE in "${PBF_FILES[@]}"; do
  # give a warning if the filesize is over 1GB
  # the missinglink/pbf library is memory-bound and cannot safely handle very large extracts
  find "${PBF_FILE}" -maxdepth 1 -size +1G | while read -r file; do
    {
      echo '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
      echo "${PBF_FILE} is very large."
      echo 'You will likely experience memory issues working with large extracts like this.'
      echo 'We strongly recommend using Valhalla to produce extracts for large PBF extracts.'
      echo 'see: https://github.com/pelias/polylines#download!data'
      echo '!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!'
    } >&2
  done

  # convert pbf file to 0sv (polylines) format, appending results to polyline file
  echo "converting ${PBF_FILE} to /data/polylines/extract.0sv"
  pbf streets "${PBF_FILE}" >> /data/polylines/extract.0sv
done

# debugging info
echo 'wrote polylines extract'
ls -lah /data/polylines/extract.0sv
|
#!/bin/bash
#
# Generates the welcome messages for online courses.
#
# Usage: script.sh alumnos modelo datos arg1 ... argn
# The extra arguments are relative paths to files attached to the message.
#
# alumnos - username,password,firstname,lastname,email,course1,group1,enrolperiod1
# modelo  - Text for the message body; contains placeholder fields that are
#           substituted with each student's data.
# datos   - nomeprofesor,nomecurso,urlcurso (teacher name, course name, course URL)
#
# IMPORTANT: Thunderbird must be closed so the messages are created one after
# another.  Otherwise they are all created at the same moment, which can make
# them hard to manage.
#
# Three temporary files are created:
#   replace.tmp - each line holds one sed substitution for the base template
#   datos.tmp   - the datos file with its URL escaped for use with sed
#   body.tmp    - the final text that goes in the message body
#
if [ $# -lt 3 ] ; then
    echo "Uso: script.sh participantes.csv modelo.txt datos.txt [arg1 ...]"
    exit 1
fi

# Close any running Thunderbird instance to avoid simultaneous message creation.
killall thunderbird

alumnos=$1
modelo=$2
datos=$3
shift 3

# Build the comma-separated string of absolute attachment paths.
for arg in "$@"
do
    adxuntos=$adxuntos$(pwd)/$arg,
done
# Strip the trailing comma from the string.
adxuntos="${adxuntos%?}"

# Walk the student list passed as argument, skipping the header line.
sed 1d $alumnos | while read linea
do
    # Escape the slashes in the course URL so the later sed commands work,
    # and drop the header line with the field names.
    sed 's#\/#\\\/#g' $datos | sed 1d - > datos.tmp
    # Generate replace.tmp with every substitution to apply to the base template.
    echo "s/Nomeusuario/$(echo $linea | cut -d',' -f1)/g" > replace.tmp
    echo "s/Contrasinalusuario/$(echo $linea | cut -d',' -f2)/g" >> replace.tmp
    echo "s/Nomealumno/$(echo $linea | cut -d',' -f3)/g" >> replace.tmp
    echo "s/Nomeprofesor/$(cat datos.tmp | cut -d',' -f1)/g" >> replace.tmp
    echo "s/Nomecurso/$(cat datos.tmp | cut -d',' -f2)/g" >> replace.tmp
    echo "s/URLcurso/$(cat datos.tmp | cut -d',' -f3)/g" >> replace.tmp
    # Create the personalised text for the body of the welcome message.
    sed -f replace.tmp< $modelo > body.tmp
    # Collect the data needed to compose the message in Thunderbird.
    emailalumno=$(echo $linea | cut -d',' -f5)
    nomecurso=$(cat datos.tmp | cut -d',' -f2)
    # Create the new message in Thunderbird.
    thunderbird -compose "to='$emailalumno',subject='Benvida ao curso $nomecurso',body='$(cat $(pwd)/body.tmp)',attachment='$adxuntos'"
done

rm datos.tmp replace.tmp body.tmp
|
# Create the groups that users are sorted into below.
groupadd family
groupadd guest
groupadd star

# We don't need home folders (-M); -G adds the user to the named group.
useradd -MG family alice
useradd -MG family cooper
useradd -MG guest baxter
useradd -MG star django

# Home folder is already created, so -M skips creating a new one
useradd -M supinfo

# Because of earlier ssh setup /home/supinfo already exists; hand it over.
chown -R supinfo:supinfo /home/supinfo
|
<gh_stars>0
'use strict';

const TestBase = require( '../class.base' );

// Test fixture component: exercises the injected logger at several levels
// during construction, init and dinit, and pulls in a sibling component
// (class.b) so its logging is exercised too.
class TestLogA extends TestBase {
    constructor( deps ) {
        // Register with the base class under the name 'TestLogA'.
        super( deps, 'TestLogA' );
        // Resolve the shared logger from the dependency container and log once.
        this._logger = deps.get( 'logger' );
        this._logger( 'info', 'TestLogA constructor', { meta: 'data' } );
        // Resolving class.b here makes it a dependency of this component.
        this._logb = deps.get( './test/lib/log/class.b' );
    }

    // Log at debug/warn/warning levels, then defer to the base init.
    init( done ) {
        this._logger( 'debug', 'TestLogA init', { meta: 'data' } );
        this._logger( 'warn', 'TestLogA init warn' );
        this._logger( 'warning', 'TestLogA init warning' );
        super.init( done );
    }

    // Log once at debug level, then defer to the base dinit.
    dinit( done ) {
        this._logger( 'debug', 'TestLogA dinit', { meta: 'data' } );
        super.dinit( done );
    }
}

module.exports = TestLogA;
|
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# network interface on which to limit traffic
IF="eth0"
# total capacity of the network interface in question
LINKCEIL="1gbit"
# limit outbound Schleems protocol traffic to this rate
LIMIT="160kbit"
# defines the IPv4 address space for which you wish to disable rate limiting
LOCALNET_V4="192.168.0.0/16"
# defines the IPv6 address space for which you wish to disable rate limiting
LOCALNET_V6="fe80::/10"

# delete existing rules (harmlessly errors when no qdisc is installed yet)
tc qdisc del dev ${IF} root

# add root class; unmatched traffic defaults to class 1:10 (unlimited)
tc qdisc add dev ${IF} root handle 1: htb default 10
# add parent class at the full link rate
tc class add dev ${IF} parent 1: classid 1:1 htb rate ${LINKCEIL} ceil ${LINKCEIL}
# add our two classes: 1:10 unlimited, 1:11 capped at ${LIMIT}
tc class add dev ${IF} parent 1:1 classid 1:10 htb rate ${LINKCEIL} ceil ${LINKCEIL} prio 0
tc class add dev ${IF} parent 1:1 classid 1:11 htb rate ${LIMIT} ceil ${LIMIT} prio 1
# add handles to our classes so packets marked with <x> go into the class with "... handle <x> fw ..."
tc filter add dev ${IF} parent 1: protocol ip prio 1 handle 1 fw classid 1:10
tc filter add dev ${IF} parent 1: protocol ip prio 2 handle 2 fw classid 1:11
if [ ! -z "${LOCALNET_V6}" ] ; then
    # v6 cannot have the same priority value as v4
    tc filter add dev ${IF} parent 1: protocol ipv6 prio 3 handle 1 fw classid 1:10
    tc filter add dev ${IF} parent 1: protocol ipv6 prio 4 handle 2 fw classid 1:11
fi

# delete any existing iptables rules
# disabled for now
#ret=0
#while [ $ret -eq 0 ]; do
#    iptables -t mangle -D OUTPUT 1
#    ret=$?
#done

# limit outgoing traffic to and from port 4579, but not when dealing with a
# host on the local network (defined by $LOCALNET_V4 and $LOCALNET_V6):
#  --set-mark marks packets matching these criteria with the number "2" (v4)
#  --set-mark marks packets matching these criteria with the number "4" (v6)
# marked packets are picked up by the tc filter with "handle 2" above, which
# sends them into the 1:11 class, and that class is limited to ${LIMIT}
iptables -t mangle -A OUTPUT -p tcp -m tcp --dport 4579 ! -d ${LOCALNET_V4} -j MARK --set-mark 0x2
iptables -t mangle -A OUTPUT -p tcp -m tcp --sport 4579 ! -d ${LOCALNET_V4} -j MARK --set-mark 0x2
if [ ! -z "${LOCALNET_V6}" ] ; then
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --dport 4579 ! -d ${LOCALNET_V6} -j MARK --set-mark 0x4
    ip6tables -t mangle -A OUTPUT -p tcp -m tcp --sport 4579 ! -d ${LOCALNET_V6} -j MARK --set-mark 0x4
fi
|
<reponame>tiancihe/X6
import { Point } from '../../struct/point'
import { Viewbox } from '../../vector/container/viewbox'
import { Morpher } from '../../animating/morpher/morpher'
import { MorphableBox } from '../../animating/morpher/box'
import { SVGAnimator } from '../svg'

// Animator for viewbox-capable SVG containers (svg/symbol/pattern/marker):
// adds animated `zoom` and `viewbox` transitions on top of SVGAnimator.
export class SVGViewboxAnimator<
  TSVGElement extends
    | SVGSVGElement
    | SVGSymbolElement
    | SVGPatternElement
    | SVGMarkerElement,
  TOwner extends Viewbox<TSVGElement> = Viewbox<TSVGElement>,
> extends SVGAnimator<TSVGElement, TOwner> {
  /**
   * Animate the element's zoom level towards `level`, keeping `point` fixed
   * as the zoom origin.  If a 'zoom' animation is already running, retarget
   * it instead of queuing a second one.
   */
  zoom(level: number, point: Point.PointLike) {
    if (this.retarget('zoom', level, point)) {
      return this
    }
    // Mutable copy of the origin so a later retarget can move it in place.
    const origin = { x: point.x, y: point.y }
    const morpher = new Morpher<[number], number, number>(this.stepper).to(
      level,
    )
    this.queue<number, Point.PointLike>(
      // on start: seed the morpher from the element's current zoom
      (animator) => {
        morpher.from(animator.element().zoom())
      },
      // on each step: apply the interpolated zoom about the origin
      (animator, pos) => {
        animator.element().zoom(morpher.at(pos), origin)
        return morpher.done()
      },
      // on retarget: update both the target level and the zoom origin
      (animator, newLevel, newPoint) => {
        origin.x = newPoint.x
        origin.y = newPoint.y
        morpher.to(newLevel)
      },
    )
    // Remember the morpher so retarget('zoom', ...) can find it later.
    this.remember('zoom', morpher)
    return this
  }

  /** Animate the element's viewbox towards the given rectangle. */
  viewbox(x: number, y: number, width: number, height: number) {
    return this.queueObject('viewbox', new MorphableBox(x, y, width, height))
  }
}
|
class Car:
    """A toy car whose speed can be started, stopped and increased."""

    def __init__(self):
        # Current speed in km/h; zero means the car is parked.
        self.speed = 0

    def start(self):
        """Begin moving at the minimum speed of 1 km/h."""
        self.speed = 1
        print("Car started!")

    def stop(self):
        """Bring the car to a halt, resetting the speed to zero."""
        self.speed = 0
        print("Car stopped!")

    def accelerate(self, speed):
        """Raise the current speed by ``speed`` km/h and report the result."""
        self.speed = self.speed + speed
        print("Car accelerated to {} km/h".format(self.speed))
package relay
import (
"bufio"
"errors"
"net"
"net/http"
)
// modes.go contains specific modification structs or decorators over
// standard Go interfaces for useful bits.

// ResponseWriter provides a clean interface decorated over the standard
// http.ResponseWriter: it adds flushing, hijacking and bookkeeping of the
// response status code, body size and whether a header has been written.
type ResponseWriter interface {
	http.ResponseWriter
	http.Flusher
	http.Hijacker
	// http.CloseNotifier
	Status() int                    // status code recorded, 0 until written
	Size() int                      // number of body bytes written so far
	Written() bool                  // true once a status code was recorded
	WritePayload(int, []byte) error // write body bytes (int code is ignored here)
}
// responseWriter is the concrete ResponseWriter implementation: it wraps a
// plain http.ResponseWriter and records the status code and body size.
type responseWriter struct {
	w      http.ResponseWriter
	status int
	size   int
}

// NewResponseWriter wraps w in a status- and size-tracking ResponseWriter.
func NewResponseWriter(w http.ResponseWriter) ResponseWriter {
	return &responseWriter{w: w}
}
// ErrNotHijackable is returned when a response writer can not be hijacked.
var ErrNotHijackable = errors.New("ResponseWriter cant be Hijacked")

// Hijack delegates to the wrapped writer when it implements http.Hijacker;
// otherwise it fails with ErrNotHijackable.
func (rw *responseWriter) Hijack() (net.Conn, *bufio.ReadWriter, error) {
	if hw, ok := rw.w.(http.Hijacker); ok {
		return hw.Hijack()
	}
	return nil, nil, ErrNotHijackable
}

// WritePayload writes the payload bytes to the wrapped writer; the int
// argument is ignored by this implementation (and size is NOT updated —
// unlike Write; confirm whether that is intentional).
func (rw *responseWriter) WritePayload(c int, p []byte) error {
	_, err := rw.w.Write(p)
	return err
}

// WriteHeader records and writes the status code for the http response.
// It is a no-op once a status has already been recorded.
func (rw *responseWriter) WriteHeader(c int) {
	if rw.Written() {
		return
	}
	rw.status = c
	rw.w.WriteHeader(c)
}

// Header returns the response header map of the wrapped writer.
func (rw *responseWriter) Header() http.Header {
	return rw.w.Header()
}

// Write forwards the supplied data to the wrapped writer, recording a
// default status of 200 OK on the first write and accumulating the size.
func (rw *responseWriter) Write(b []byte) (int, error) {
	if !rw.Written() {
		rw.status = http.StatusOK
	}
	n, err := rw.w.Write(b)
	rw.size += n
	return n, err
}

// Status returns the recorded status code of the response (0 until a
// header or body write has occurred).
func (rw *responseWriter) Status() int {
	return rw.status
}

// Flush calls the wrapped writer's Flush when it implements http.Flusher.
func (rw *responseWriter) Flush() {
	if fw, ok := rw.w.(http.Flusher); ok {
		fw.Flush()
	}
}

// CloseNotify returns the wrapped writer's close-notification channel.
// NOTE: the type assertion panics if the wrapped writer is not an
// http.CloseNotifier.
func (rw *responseWriter) CloseNotify() <-chan bool {
	return rw.w.(http.CloseNotifier).CloseNotify()
}

// Written reports whether a status code has been recorded yet.
func (rw *responseWriter) Written() bool {
	return rw.status != 0
}

// Size returns the number of body bytes written via Write.
func (rw *responseWriter) Size() int {
	return rw.size
}
|
package com.abubusoft.filmfinder.view.adapter;

/**
 * Adapter for film list views.  Currently an empty placeholder — no
 * behaviour has been implemented yet.
 */
public class FilmAdapter {
}
|
"""Const variables."""
DEFAULT_CONF_FILE_PATH = (
"~/.troupial.yaml",
"~/.troupial.yml",
"~/.config/troupial/conf.yaml",
"~/.config/troupial/conf.yml",
)
DEFAULT_CONFIG_DATA = {
"theme": "default",
"key_bindings": {
"c-c": "exit",
"c-d": "exit",
"c-right": "layout.focus_next",
"c-left": "layout.focus_previous",
},
"window": {
"vertical-border": "|",
"horizontal-border": "-",
"tab-position": "top",
},
}
|
package com.qa.demo.persistence.repos;

import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;

import com.qa.demo.persistence.domain.Shelter;

/**
 * Spring Data JPA repository for {@link Shelter} entities keyed by Long.
 * All standard CRUD operations are inherited from {@link JpaRepository}.
 */
@Repository
public interface ShelterRepo extends JpaRepository<Shelter, Long> {
    // CRUD functionality is provided entirely by JpaRepository.
}
|
<filename>app.test.js
const request = require('supertest');
const { app, productBuilder: build } = require('./app');

// Integration tests: exercise the HTTP layer end to end.
// `async` lets each test await the response before asserting.
describe('GET requests', () => {
  test('GET product/read endpoint, expect 200', async () => {
    const response = await request(app).get('/product/read');
    expect(response.statusCode).toBe(200);
  });

  test('GET bad endpoint, expect 404', async () => {
    const response = await request(app).get('/badEndPoint');
    expect(response.statusCode).toBe(404);
  });
});

// Integration test — only verifies the request/response round trip; it
// does not check that the record actually landed in the DB.
describe('CREATE request', () => {
  test('CREATE product test', async () => {
    const payload = {
      name: '<NAME>',
      description: 'test desc',
      price: 0,
    };
    const response = await request(app).post('/product/create').send(payload);
    expect(response.statusCode).toBe(201);
  });
});

// Unit test for the pure product-object builder.
describe('Unit Tests', () => {
  test('product object builder', () => {
    const expected = { name: 'a name', description: 'a description', price: 42 };
    expect(build('a name', 'a description', 42)).toMatchObject(expected);
  });
});
#!/bin/bash
# Kill every running Android emulator via its console port using netcat.

# List the serials of all attached adb devices/emulators.
function attachedDevices() {
    local devices=$($ANDROID_HOME/platform-tools/adb devices | awk 'NR>1 {print $1}')
    echo $devices
}

# Cut the console port out of each emulator serial ("emulator-5554" -> "5554").
# Relies on bash dynamic scoping: reads $devices declared local in main().
function connectedPorts() {
    local ports="$( cut -d '-' -f 2- <<< "$devices" )"
    echo $ports
}

# Read the emulator console auth token from the user's home directory.
function ncToken() {
    local authToken=$(cat ~/.emulator_console_auth_token)
    echo $authToken
}

# Authenticate against one emulator console and issue its kill command.
# $1 = auth token, $2 = console port.
function killDevice() {
    local authCmd="auth $1"
    local killCmd="kill"
    nc localhost $2 <<END
$authCmd
$killCmd
END
}

function main() {
    local devices=$(attachedDevices)
    if [ ! "$devices" ]; then
        echo "No devices attached, exiting..."
        exit
    fi
    local ports=$(connectedPorts)
    echo $ports
    local authToken=$(ncToken)
    # BUG FIX: the original looped `for port in ports`, iterating over the
    # literal word "ports" instead of the collected port list.
    for port in $ports; do
        if [ ! "$authToken" ]; then
            # Connect once so the emulator regenerates the auth token file...
            echo "quit" | nc localhost $port
            # ...then re-read it.  BUG FIX: the original called a
            # nonexistent `token` function here instead of ncToken.
            authToken=$(ncToken)
        fi
        killDevice $authToken $port
    done
}

main
|
# Copyright 2018 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash
# Generates one Kubernetes Job manifest per (batch size, train steps)
# combination for the resnet experiment.
# NOTE(review): this shebang sits after a license header in the original
# file; a shebang is only honoured on the very first line of a script.

EXPERIMENT_ID='resnet-1'
BATCH_SIZES=(4 8 16 32 64 128)
TRAIN_STEPS=(10000 20000 30000 40000)

# Recreate the manifest output directory.  Quoted (robustness) and -p so
# mkdir cannot fail if the directory somehow already exists.
rm -rf "${EXPERIMENT_ID}"
mkdir -p "${EXPERIMENT_ID}"

for BATCH_SIZE in "${BATCH_SIZES[@]}"
do
  for TRAIN_STEP in "${TRAIN_STEPS[@]}"
  do
    JOB_ID=${EXPERIMENT_ID}-${BATCH_SIZE}-${TRAIN_STEP}
    MANIFEST=${EXPERIMENT_ID}/${JOB_ID}.yaml
    # Interpolated section: job metadata labels carry the experiment params.
    cat > "${MANIFEST}" <<EOF
apiVersion: batch/v1
kind: Job
metadata:
  name: ${JOB_ID}
  labels:
    experiment-id: '${EXPERIMENT_ID}'
    batch-size: '${BATCH_SIZE}'
    train-steps: '${TRAIN_STEP}'
spec:
  template:
    metadata:
      labels:
        experiment-id: '${EXPERIMENT_ID}'
        batch-size: '${BATCH_SIZE}'
        train-steps: '${TRAIN_STEP}'
EOF
    # Literal section (quoted EOF): the $(...) expressions here are
    # Kubernetes env-var expansions resolved in-cluster from the labels
    # above — NOT shell substitutions.
    cat >> "${MANIFEST}" <<'EOF'
    spec:
      restartPolicy: Never
      containers:
      - name: resnet-gpu
        image: gcr.io/vishnuk-cloud/tf-models-gpu:1.0
        command:
        - python
        - /tensorflow_models/models/official/resnet/resnet_main.py
        - --use_tpu=False
        - --tpu=
        - --precision=float32
        - --data_dir=gs://gke-k8s-gcp-next-demo/imagenet
        - --model_dir=gs://gke-k8s-gcp-next-demo/models/$(EXPERIMENT_ID)/$(BATCH_SIZE)/
        - --train_batch_size=$(BATCH_SIZE)
        - --train_steps=$(TRAIN_STEPS)
        env:
        - name: EXPERIMENT_ID
          valueFrom:
            fieldRef:
              fieldPath: metadata.labels['experiment-id']
        - name: BATCH_SIZE
          valueFrom:
            fieldRef:
              fieldPath: metadata.labels['batch-size']
        - name: TRAIN_STEPS
          valueFrom:
            fieldRef:
              fieldPath: metadata.labels['train-steps']
        resources:
          limits:
            nvidia.com/gpu: 1
EOF
  done
done
#!/usr/bin/env bash
# Switch between laptop/external display layouts by name ($1).
#
# Most configurations were five near-identical xrandr invocations: internal
# 4K panel as primary plus a 1080p external scaled 2x2.  They are factored
# into two helpers; `hdmi2` and `hdmi-2-right` were byte-identical and are
# now a single case branch.

# Extend the desktop: $1 = primary 4K output, $2 = 1080p output placed to
# its right (scaled 2x2 so it appears as a 4K-sized surface).
extend_right() {
  xrandr \
    --output "$1" \
    --primary \
    --mode 3840x2160 \
    --pos 0x0 \
    --rotate normal \
    --panning 3840x2160+0+0 \
    --output "$2" \
    --mode 1920x1080 \
    --pos 3840x0 \
    --rotate normal \
    --scale 2x2 \
    --right-of "$1" \
    --panning 3840x2160+3840+0
}

# Same as extend_right, but places the scaled 1080p output ($2) to the left
# of the primary ($1); panning offsets are swapped accordingly.
extend_left() {
  xrandr \
    --output "$1" \
    --primary \
    --mode 3840x2160 \
    --pos 0x0 \
    --rotate normal \
    --panning 3840x2160+3840+0 \
    --output "$2" \
    --mode 1920x1080 \
    --pos 3840x0 \
    --rotate normal \
    --scale 2x2 \
    --left-of "$1" \
    --panning 3840x2160+0+0
}

case $1 in
  reset )
    # Kept as in the original; NOTE(review): with no property flags this
    # xrandr call does not change the mode — confirm whether --auto was meant.
    xrandr --output eDP1
    ;;
  hdmi1 )
    extend_right eDP-1 HDMI-1
    ;;
  hdmi2 | hdmi-2-right )
    extend_right eDP-1 HDMI-2
    ;;
  hdmi-2-left )
    extend_left eDP-1 HDMI-2
    ;;
  dp3 )
    extend_right eDP1 DP3
    ;;
  dp-4-dp-1 )
    extend_right DP-4 DP-1
    ;;
  hdmi-1-2 )
    extend_right eDP-1-1 HDMI-1-2
    ;;
  nvdp5dp2 )
    # NVIDIA proprietary path: compose the layout via MetaModes instead.
    nvidia-settings --assign \
      CurrentMetaMode="DPY-5: nvidia-auto-select @3840x2160 +0+0 {ViewPortIn=3840x2160, ViewPortOut=3840x2160+0+0}, DPY-2: nvidia-auto-select @3840x2160 +3840+0 {ViewPortIn=3840x2160, ViewPortOut=1920x1080+0+0, ForceFullCompositionPipeline=On}"
    ;;
  dock-reset )
    # Laptop panel only; every dock output explicitly switched off.
    xrandr \
      --output eDP1 --auto --panning 3840x2160+0+0 \
      --output DP2-2 --off \
      --output DP2-3 --off \
      --output DP1 --off \
      --output DP2 --off \
      --output DP2-1 --off \
      --output DP3 --off \
      --output HDMI1 --off \
      --output HDMI2 --off \
      --output HDMI3 --off \
      --output VIRTUAL1 --off
    ;;
  dock )
    # Two 1080p dock monitors side by side above the laptop panel.
    xrandr \
      --output eDP1 --mode 3840x2160 \
      --pos 0x2160 --rotate normal --primary \
      --output DP2-2 --mode 1920x1080 \
      --pos 0x0 --rotate normal --above eDP1 \
      --output DP2-3 --mode 1920x1080 \
      --pos 1920x0 --rotate normal --above eDP1
    ;;
  dock2 )
    # Three-wide row: DP2-2 (scaled) | laptop panel | DP2-3 (scaled).
    xrandr \
      --output DP2-2 --mode 1920x1080 --crtc 1 \
      --pos 0x0 --rotate normal \
      --scale 2x2 --left-of eDP1 --panning 3840x2160+0+0 \
      --output eDP1 --mode 3840x2160 --crtc 0 \
      --pos 3840x0 --rotate normal \
      --primary --panning 3840x2160+3840+0 \
      --output DP2-3 --mode 1920x1080 --crtc 2 \
      --pos 7680x0 --rotate normal \
      --scale 2x2 --right-of eDP1 --panning 3840x2160+7680+0 \
      --output DP1 --off \
      --output DP2 --off \
      --output DP2-1 --off \
      --output DP3 --off \
      --output HDMI1 --off \
      --output HDMI2 --off \
      --output HDMI3 --off \
      --output VIRTUAL1 --off
    ;;
  dock3 )
    # DP2-2 (scaled) to the left of the laptop panel; everything else off.
    xrandr \
      --output DP2-2 --mode 1920x1080 --crtc 1 \
      --pos 0x0 --rotate normal \
      --scale 2x2 --left-of eDP1 --panning 3840x2160+0+0 \
      --output eDP1 --mode 3840x2160 --crtc 0 \
      --pos 3840x0 --rotate normal \
      --primary --panning 3840x2160+3840+0 \
      --output DP2-3 --off \
      --output DP1 --off \
      --output DP2 --off \
      --output DP2-1 --off \
      --output DP3 --off \
      --output HDMI1 --off \
      --output HDMI2 --off \
      --output HDMI3 --off \
      --output VIRTUAL1 --off
    ;;
  * )
    echo "Unknown display configuration"
    ;;
esac
|
<gh_stars>0
// AngularJS module backing the "about" view.
angular.module('app.about', [])
  .controller('AboutController', ['$scope', function aboutController($scope) {
    'use strict';
    // Blurb rendered by the about template.
    $scope.something = 'We do a bit of this and and a bit of that.';
  }]);
|
package com.rox.logic.gate.binary;

import com.rox.logic.gate.type.AuditableLogicGate;

/**
 * Binary AND gate: the output is true only when every input is true.
 *
 * @author rossdrew
 */
public class And extends AuditableLogicGate {
    /**
     * Logical conjunction over all inputs.
     *
     * @param values the input states
     * @return true iff every value is true (vacuously true for zero inputs)
     */
    @Override
    protected boolean performTransformation(boolean... values) {
        for (boolean v : values){
            if (!v){
                return false;
            }
        }
        return true;
    }

    /** No post-set bookkeeping is needed for AND; intentionally empty. */
    @Override
    protected void performSetInputPostOperations() {
    }

    /** @return the canonical identifier "AND" */
    public String getStringIdentifier() {
        return "AND";
    }
}
|
<reponame>yanyunchangfeng/NiceFish-React
import * as React from 'react';
import {NavLink} from 'react-router-dom'
function Exception404(){
return (
<div className="container mt-16px">
<div className="row no-gutters align-items-center">
<div className="exception-404 col-7"></div>
<div className="antd-pro-components-exception-index-content col-4 font-size-24 text-secondary">
<h1 className="font-size-48">404</h1>
<div>抱歉,你访问的页面不存在</div>
<div className="mt-16px">
<NavLink to="/" activeClassName="">
<button type="button" className="btn btn-primary">
<span>返回首页</span>
</button>
</NavLink>
</div>
</div>
</div>
</div>
)
}
export default Exception404 |
# File: S (Python 2.4)
from pandac.PandaModules import TextNode
from direct.gui.DirectGui import *
from direct.directnotify import DirectNotifyGlobal
from otp.otpgui import OTPDialog
from pirates.piratesbase import PLocalizer, PiratesGlobals
from pirates.piratesgui import PiratesGuiGlobals
from pirates.piratesgui import GuiButton
from pirates.piratesgui import CheckButton

class SkipTutorialFrame(DirectFrame):
    # Parchment-style dialog asking the player whether to skip the tutorial.
    # The player's choice is delivered to `callback` as a single boolean and
    # the frame then destroys itself.
    notify = DirectNotifyGlobal.directNotify.newCategory('SkipTutorialFrame')

    def __init__(self, callback):
        # `callback` receives True (skip tutorial) or False (play it).
        # Models are loaded only to harvest textures, then released below.
        topGui = loader.loadModel('models/gui/toplevel_gui')
        lookoutGui = loader.loadModel('models/gui/lookout_gui')
        # Parchment background (decompiled float literals kept as-is).
        DirectFrame.__init__(self, relief = None, image = topGui.find('**/pir_t_gui_gen_parchment'), image_scale = (0.33000000000000002, 0, 0.44))
        self.initialiseoptions(SkipTutorialFrame)
        # Title and offer text.
        self.title = DirectLabel(parent = self, relief = None, text = PLocalizer.SkipTutorialTitle, text_scale = PiratesGuiGlobals.TextScaleTitleSmall, text_fg = (0.59999999999999998, 0.0, 0.0, 1.0), text_font = PiratesGlobals.getPirateOutlineFont(), text_align = TextNode.ACenter, pos = (0.02, 0, 0.13))
        self.message = DirectLabel(parent = self, relief = None, text = PLocalizer.SkipTutorialOffer, text_scale = PiratesGuiGlobals.TextScaleLarge, text_fg = PiratesGuiGlobals.TextFG0, text_align = TextNode.ACenter, text_shadow = PiratesGuiGlobals.TextShadow, text_wordwrap = 14, pos = (0.02, 0, 0.050000000000000003))
        self.callback = callback
        # NOTE(review): the "No" answer is wired to self.checkButton and the
        # "Yes" answer to self.cancelButton -- the attribute names look
        # swapped relative to their labels; confirm before renaming anything.
        self.checkButton = GuiButton.GuiButton(parent = self, relief = None, text = PLocalizer.SkipTutorialNo, text_scale = PiratesGuiGlobals.TextScaleLarge, text_pos = (0.11, -0.01), text0_fg = PiratesGuiGlobals.TextFG0, text_shadow = PiratesGuiGlobals.TextShadow, image = (lookoutGui.find('**/lookout_submit'), lookoutGui.find('**/lookout_submit_down'), lookoutGui.find('**/lookout_submit_over'), lookoutGui.find('**/lookout_submit')), image_scale = 0.17000000000000001, image_color = (0.5, 0.0, 0.0, 1.0), pos = (-0.17000000000000001, 0, -0.10000000000000001), command = self._SkipTutorialFrame__handleNo)
        self.cancelButton = GuiButton.GuiButton(parent = self, relief = None, text = PLocalizer.SkipTutorialYes, text_scale = PiratesGuiGlobals.TextScaleLarge, text_pos = (0.089999999999999997, -0.01), text0_fg = PiratesGuiGlobals.TextFG0, text_shadow = PiratesGuiGlobals.TextShadow, image = (lookoutGui.find('**/lookout_close_window'), lookoutGui.find('**/lookout_close_window_down'), lookoutGui.find('**/lookout_close_window_over'), lookoutGui.find('**/lookout_close_window')), image_scale = 0.17000000000000001, image_color = (0.5, 0.0, 0.0, 1.0), pos = (0.070000000000000007, 0, -0.10000000000000001), command = self._SkipTutorialFrame__handleYes)
        # Release the temporary model nodes; the GUI keeps its own references.
        topGui.removeNode()
        lookoutGui.removeNode()

    def destroy(self):
        DirectFrame.destroy(self)

    def _SkipTutorialFrame__handleYes(self):
        # Player chose to skip the tutorial.
        self.callback(True)
        self.destroy()

    def _SkipTutorialFrame__handleNo(self):
        # Player chose to play the tutorial.
        self.callback(False)
        self.destroy()
|
import json
import requests


def parse_key_values(text):
    """Turn lines of whitespace-separated text into a dict.

    For every line with at least two words, the first word becomes the key
    and the second the value; later duplicates of a key overwrite earlier
    ones.  (Extracted from inline code that shadowed the builtin ``dict``.)
    """
    parsed = {}
    for line in text.split('\n'):
        words = line.split()
        if len(words) > 1:
            parsed[words[0]] = words[1]
    return parsed


# Get the URL for the website
url = 'https://example.com/blogpage'
r = requests.get(url)

# Report (but, as before, do not abort on) HTTP errors.
if r.status_code != 200:
    print('Error:', r.status_code)

# Parse the page content into a key/value mapping and emit it as JSON.
page_content = r.text
data = json.dumps(parse_key_values(page_content))
print(data)
<filename>dbSchema/src/main/java/sword/langbook3/android/db/IdPutInterface.java
package sword.langbook3.android.db;

import sword.database.DbSettableQueryBuilder;

/**
 * Implemented by identifier types that can write their own value into a
 * database query under construction.
 */
public interface IdPutInterface {
    /**
     * Store this identifier's value into the given builder.
     *
     * @param columnIndex index of the column the value belongs to
     * @param builder     query builder that receives the value
     */
    void put(int columnIndex, DbSettableQueryBuilder builder);
}
|
#!/bin/sh
# Interactively query the Politics & War API for one city's information.
echo "Enter city id: "
read city_id
echo "Enter your api key: "
read api_key
echo "Getting city information. Please hold....."
curl --get "https://politicsandwar.com/api/city/id=${city_id}&key=${api_key}"
#!/bin/bash
# Build the "run_it" scheme and run its tests via xctool.
xctool -scheme run_it build test
|
<reponame>favid-inc/app-customer<filename>src/assets/images/index.ts
import { ImageSource } from './type';

// Re-export the image typing helpers for consumers of this barrel module.
export { ImageSource, RemoteImage } from './type';

// Background image for profile screen variant 7.
export const imageProfile7Bg: ImageSource = {
  imageSource: require('./source/image-background-profile-7.jpg'),
};

// Full-screen image shown while the app boots.
export const splashImage: ImageSource = {
  imageSource: require('./source/splash.png'),
};

// Favid brand logo.
export const favidImage: ImageSource = {
  imageSource: require('./source/favid-logo.png'),
};
|
# Build and install all modules with the "daily" profile, skipping tests and javadoc.
mvn clean install -Dmaven.test.skip=true -Dmaven.javadoc.skip=true -Pdaily
|
import logging


class OnlineStoreLogger:
    """Thin wrapper around a named stdlib logger for the online store.

    ``log_event`` attaches a fresh file handler per call (so the current
    format is always applied) and now detaches AND closes it again, fixing
    a file-descriptor leak in the original.
    """

    def __init__(self):
        self.logger = logging.getLogger('OnlineStoreLogger')
        self.logger.setLevel(logging.DEBUG)
        self.log_format = logging.Formatter('%(asctime)s %(levelname)s: %(message)s')

    def configure_logger(self, log_level, log_format):
        """Set the minimum level (by name, case-insensitive) and the format.

        Unknown level names fall back to DEBUG, as before.
        """
        levels = {
            'debug': logging.DEBUG,
            'info': logging.INFO,
            'warning': logging.WARNING,
            'error': logging.ERROR,
            'critical': logging.CRITICAL,
        }
        self.logger.setLevel(levels.get(log_level.lower(), logging.DEBUG))
        self.log_format = logging.Formatter(log_format)

    def log_event(self, event_type, message, timestamp):
        """Append one record to ``online_store.log``.

        ``timestamp`` is accepted for API compatibility but unused: the
        formatter's %(asctime)s supplies the time.  Unknown event types are
        silently ignored, matching the original behaviour.
        """
        file_handler = logging.FileHandler('online_store.log')
        file_handler.setFormatter(self.log_format)
        self.logger.addHandler(file_handler)
        try:
            log_method = {
                'debug': self.logger.debug,
                'info': self.logger.info,
                'warning': self.logger.warning,
                'error': self.logger.error,
                'critical': self.logger.critical,
            }.get(event_type.lower())
            if log_method is not None:
                log_method(message)
        finally:
            # Always detach and close the handler so the underlying file
            # descriptor is released (the original never closed it).
            self.logger.removeHandler(file_handler)
            file_handler.close()


# Example usage
logger = OnlineStoreLogger()
logger.configure_logger('info', '%(asctime)s %(levelname)s: %(message)s')
logger.log_event('info', 'User logged in', '2022-01-15 10:30:00')
<filename>src/components/hero-tagline.js
import React from "react"
const HeroTagLine = ({ content }) => (
<div className="c-hero__tagline">{content}</div>
)
export default HeroTagLine |
def optimize_array(arr):
    """Snap every value to either 50 or 100.

    Values at or below 50 become 50; values above 50 become 100.  The
    original implementation silently DROPPED elements exactly equal to 50
    (neither branch matched), shrinking the result; they now map to 50.
    Output order and length always match the input.
    """
    return [50 if x <= 50 else 100 for x in arr]


arr = [45, 60, 32, 84]
output = optimize_array(arr)
print(output)  # [50, 100, 50, 100]
<reponame>skibq/vue-tables-2
'use strict';
module.exports = function (h) {
var _this = this;
return function (perpageValues, cls, id) {
cls = cls + ' VueTables__select-per-page';
return perpageValues.length > 1 ? h(
'el-select',
{ 'class': cls,
attrs: { value: _this.limit,
size: 'mini'
},
on: {
'change': _this.setLimit.bind(_this)
}
},
[perpageValues]
) : '';
};
}; |
package com.atjl.kafka.core;

import com.atjl.kafka.core.thread.BaseThread;
import com.atjl.util.common.CheckUtil;
import com.atjl.kafka.core.thread.FetchDataThread;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

/**
 * Thread context registry: keeps lists of worker threads grouped by type so
 * other components can look them up later.
 *
 * @author jasonliu
 * @since 1.0
 */
public class KafkaThreadContext {
    private static Logger LOG = LoggerFactory.getLogger(KafkaThreadContext.class);

    private static final Map<String, List<BaseThread>> threadContex = new HashMap<>();

    public static final String FETCH_THREAD = "FETCH";
    public static final String PROCESS_THREAD = "PROCESS";
    public static final String TIMING_THREAD = "TIMING";

    /**
     * Register a thread under the given type.  Intended to be used during
     * initialisation only.
     *
     * @param type     group key (FETCH_THREAD, PROCESS_THREAD, TIMING_THREAD)
     * @param runnable thread to register
     */
    public static void addThread(String type, BaseThread runnable) {
        threadContex.computeIfAbsent(type, key -> new LinkedList<>()).add(runnable);
        if (LOG.isDebugEnabled()) {
            LOG.debug("thread {} added ", runnable);
        }
    }

    /** Returns the registered threads for the given type (null when none). */
    public static List<BaseThread> getThreadList(String type) {
        return threadContex.get(type);
    }

    /**
     * Returns every registered fetch thread, downcast to FetchDataThread.
     * CheckUtil.checkExistNull guards against an empty/absent registration.
     */
    public static List<FetchDataThread> getFetchDataThreadList() {
        List<BaseThread> registered = threadContex.get(FETCH_THREAD);
        CheckUtil.checkExistNull(registered);
        List<FetchDataThread> fetchThreads = new LinkedList<>();
        for (BaseThread thread : registered) {
            fetchThreads.add((FetchDataThread) thread);
        }
        return fetchThreads;
    }
}
|
import requests
import json

# Text whose related content we want recommendations for.
text = "Gravity is a force of attraction between two masses, which depends on the distance between them."

# Recommendation endpoint.
# NOTE(review): "omsIdType" is 'url' while raw text is sent in "text" --
# confirm the expected payload against the API documentation.
url = 'https://api.ai.stocktwits.com/recommend/similar'

payload = {
    "omsIdType": 'url',
    "text": text
}

# Request similar content and print the top three recommendations.
req = requests.post(url, json=payload)
recommendations = json.loads(req.text)

for rec in recommendations[:3]:
    print(rec['title'], rec['url'])
const proc = require("../../process_option");
/* eslint-disable-next-line */
const { isRegExp } = require("util");
const { isTagNode } = require("../../knife/tag_utils");
const lintClassStyle = require("../class-style").lint;
// Lint-rule descriptor for "id-style": validates that id attribute values
// follow a configured naming convention (string preset or RegExp).
module.exports = {
  name: "id-style",
  on: ["dom"],
  need: "dom",

  // Accept a string preset or a RegExp; reject anything else.
  validateConfig(format) {
    if (typeof format === "string" || isRegExp(format) === true) {
      return format;
    }
    throw new Error(`Configuration for rule "${this.name}" is invalid: Expected string or RegExp got ${typeof format}`);
  },

  options: [
    // REMOVE: For the v1
    // Need to duplicate validateConfig to make it works with the old and the new Config system ><
    {
      need: "dom",
      validateConfig(format) {
        if (typeof format === "string" || isRegExp(format) === true) {
          return format;
        }
        throw new Error(`Configuration for rule "${this.name}" is invalid: Expected string or RegExp got ${typeof format}`);
      }
    },
    {
      // Shared style option applied to both class and id attributes.
      name: "id-class-style",
      validateConfig(option) {
        if (typeof option !== "string" && isRegExp(option) === false) {
          throw new Error(`Configuration for rule "${this.name}" is invalid: Expected string|regexp got ${typeof option}`);
        }
        if (["none", "lowercase", "underscore", "dash", "camel", "bem"].indexOf(option) === -1 && isRegExp(option) === false) {
          throw new Error(`Configuration for rule "${this.name}" is invalid: "${option}" is not accepted. Accepted values are "none", "lowercase", "underscore", "dash", "camel" and "bem".`);
        }
        return option;
      },
      rules: ["class-style", "id-style"],
      // Lints ids AND classes with the shared style, but yields to the
      // dedicated "id-style"/"id-class-style" rules when those are active.
      lint(node, opts, { report, rules }) {
        if (rules["id-style"] || rules["id-class-style"]) {
          return [];
        }
        const ignore = opts["id-class-ignore-regex"];
        lint(node, opts["id-class-style"], ignore, report);
        lintClassStyle(node, opts, { report });
      }
    },
    {
      // Regex of class/id values that are exempt from the style check.
      name: "id-class-ignore-regex",
      validateConfig(options) {
        if ((typeof options === "string" && options !== "") || isRegExp(options) === true) {
          return options;
        }
        if (typeof options === "string") {
          throw new Error(`Configuration for rule "${this.name}" is invalid: You provide an empty string value`);
        }
        throw new Error(`Configuration for rule "${this.name}" is invalid: Expected string or RegExp got ${typeof options}`);
      },
      rules: [] // 'class', 'id-style'
    }
  ]
};
// Entry point used by the linter: resolve the effective format option and
// delegate to the local `lint` helper.
module.exports.lint = function(node, opts, { report, rules }) {
  // Only tag nodes can carry an id attribute.
  if (isTagNode(node) === false) {
    return;
  }
  // With the new config system (`rules` provided) only the dedicated
  // "id-style" option applies; the legacy path also falls back to the
  // shared "id-class-style" option.
  const format = rules
    ? opts["id-style"]
    : (opts["id-style"] || opts["id-class-style"]);
  const ignore = opts["id-class-ignore-regex"];
  return lint(node, format, ignore, report);
};
/**
 * Report every `id` attribute on `node` whose value violates `format`.
 *
 * @param {Object} node            DOM node whose attributes are checked
 * @param {string|RegExp} format   naming convention for id values
 * @param {RegExp} [ignore]        values matching this regex are skipped
 * @param {Function} report        issue reporter (receives E011 entries)
 */
function lint(node, format, ignore, report) {
  let attributes = Object.values(node.attribs);
  // TODO: Remove after `raw-ignore-text` refacto
  attributes = attributes.filter(attribute => /^¤+$/.test(attribute.rawName) === false);
  // TODO: Remove after `raw-ignore-text` refacto
  attributes = attributes.filter(attribute => attribute.rawName === "id");
  attributes = attributes.filter(attribute => /^¤+$/.test(attribute.value) === false);
  if (ignore) {
    attributes = attributes.filter(attribute => ignore.test(attribute.value) === false);
  }
  // Compile the format once: it is loop-invariant, and the original
  // recompiled it for every attribute.
  const regex = proc.regex(format);
  attributes.forEach(attribute => {
    const id = attribute.value;
    // Defensive reset in case the configured regex carries /g or /y,
    // whose `test` advances lastIndex statefully.
    regex.lastIndex = 0;
    if (regex.test(id) === false) {
      report({
        code: "E011",
        position: attribute.valueLineCol,
        meta: {
          data: {
            attribute: "id",
            format: format,
            value: id
          }
        }
      });
    }
  });
}
|
<reponame>rbreu/tr8n
# Adds a string `application` column to tr8n_language_cases so language
# cases can be scoped per application.
class AddApplicationToLanguageCases < ActiveRecord::Migration
  def self.up
    add_column :tr8n_language_cases, :application, :string
  end

  def self.down
    remove_column :tr8n_language_cases, :application
  end
end
|
#include "stdafx.h"

// Global lookup table of byte signatures used by the patch routines below.
Sigs *pSigs = new Sigs();

namespace GameFunctions
{
    // Overwrite the recoil routine with the OFF/ON signature bytes (10 bytes).
    void PatchRecoil(bool bNoRecoil)
    {
        if (bNoRecoil)
            Utilss::MemoryEdit(reinterpret_cast< void* >( OFFSET_PATCHRECOIL ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->RECOIL_OFF ) ), 10 );
        else
            Utilss::MemoryEdit(reinterpret_cast< void* >( OFFSET_PATCHRECOIL ), reinterpret_cast< BYTE* >( pSigs->GetSignature ( pSigs->RECOIL_ON ) ), 10 );
    }

    // Patch both the health and armor damage routines (6 bytes each).
    void PatchDamage(bool bNoDamage)
    {
        if (bNoDamage)
        {
            Utilss::MemoryEdit(reinterpret_cast< void* >( OFFSET_PATCHHEALTH ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->DAMAGE_HEALTH_OFF ) ), 6 );
            Utilss::MemoryEdit(reinterpret_cast< void* >( OFFSET_PATCHARMOR ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->DAMAGE_ARMOR_OFF ) ), 6 );
        }
        else
        {
            Utilss::MemoryEdit(reinterpret_cast< void* >( OFFSET_PATCHHEALTH ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->DAMAGE_HEALTH_ON ) ), 6 );
            Utilss::MemoryEdit(reinterpret_cast< void* >( OFFSET_PATCHARMOR ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->DAMAGE_ARMOR_ON ) ), 6 );
        }
    }

    // Patch the fire-rate check (2 bytes).
    // NOTE(review): enabling writes RAPID_FIRE_OFF while the similar
    // AutomaticGuns/UnlimitedAmmo helpers write *_ON when enabling --
    // confirm the signature naming is intentional here.
    void PatchRapidFire(bool bRapidFire)
    {
        if(bRapidFire)
            Utilss::MemoryEdit( reinterpret_cast< void* >( OFFSET_RAPIDFIRE ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->RAPID_FIRE_OFF ) ), 2 );
        else
            Utilss::MemoryEdit( reinterpret_cast< void* >( OFFSET_RAPIDFIRE ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->RAPID_FIRE_ON ) ), 2 );
    }

    // Patch the scope overlay code (3 bytes).
    void NoScope( bool bNoScope )
    {
        if( bNoScope )
            Utilss::MemoryEdit( reinterpret_cast< void* > ( OFFSET_NOSCOPE ), reinterpret_cast< BYTE* > ( pSigs->GetSignature( pSigs->NO_SCOPE_OFF ) ), 3 );
        else
            Utilss::MemoryEdit( reinterpret_cast< void* > ( OFFSET_NOSCOPE ), reinterpret_cast< BYTE* > ( pSigs->GetSignature( pSigs->NO_SCOPE_ON ) ), 3 );
    }

    // Patch the semi-auto check so every weapon fires full-auto (2 bytes).
    void AutomaticGuns( bool bAutomatic )
    {
        if ( bAutomatic )
            Utilss::MemoryEdit( reinterpret_cast< void* >( OFFSET_AUTOMATICGUN ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->AUTOGUNS_ON ) ), 2 );
        else
            Utilss::MemoryEdit( reinterpret_cast< void* >( OFFSET_AUTOMATICGUN ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->AUTOGUNS_OFF ) ), 2 );
    }

    // Patch the ammunition decrement (2 bytes).
    void UnlimitedAmmo( bool bUnlimitedAmmo )
    {
        if (bUnlimitedAmmo)
            Utilss::MemoryEdit( reinterpret_cast< void* >( OFFSET_AMMUNITIONS ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->UNLIMITEDAMMO_ON ) ), 2 );
        else
            Utilss::MemoryEdit( reinterpret_cast< void* >( OFFSET_AMMUNITIONS ), reinterpret_cast< BYTE* >( pSigs->GetSignature( pSigs->UNLIMITEDAMMO_OFF) ), 2 );
    }

    // Print one line on the in-game HUD via the game's own hudoutf routine.
    void AddHudLine(char* pText)
    {
        using thudoutf = void(__cdecl *)(char* pText);
        thudoutf ohudoutf = reinterpret_cast< thudoutf >(OFFSET_HUDOUTF);
        ohudoutf(pText);
    }

    // True when the game-mode value read from OFFSET_GAMEMODE is one of the
    // team-based modes, i.e. the local player can have teammates.
    bool GotTeamMates()
    {
        DWORD dwGameMode = *reinterpret_cast< DWORD* >(OFFSET_GAMEMODE);
        return (
            dwGameMode == GMODE_BOTTEAMONESHOTONKILL ||
            dwGameMode == GMODE_TEAMONESHOTONEKILL ||
            dwGameMode == GMODE_BOTTEAMDEATHMATCH ||
            dwGameMode == GMODE_TEAMDEATHMATCH ||
            dwGameMode == GMODE_TEAMSURVIVOR ||
            dwGameMode == GMODE_TEAMLSS ||
            dwGameMode == GMODE_CTF ||
            dwGameMode == GMODE_TEAMKEEPTHEFLAG ||
            dwGameMode == GMODE_HUNTTHEFLAG ||
            dwGameMode == GMODE_TEAMPF ||
            dwGameMode == GMODE_BOTTEAMSURVIVOR ||
            dwGameMode == GMODE_BOTTEAMONESHOTONKILL
        );
    }

    // Project a world-space position to screen coordinates using the game's
    // model-view-projection matrix.  Returns false when the point is behind
    // or too close to the camera (w < 0.01).  pvOut->z receives w (depth).
    bool WorldToScreen(Vec3 vPos, Vec3 *pvOut)
    {
        ScreenSettings *pScreenSettings = ScreenSettings::GetInstance();
        if (!Utilss::IsValidPtr(pScreenSettings))
            return false;
        glmatrixf *mvpmatrix = reinterpret_cast< glmatrixf* >(OFFSET_MVPMATRIX);
        // Half-extents: the screen centre, used to map clip space to pixels.
        float mX = (float)pScreenSettings->m_Width / 2.0F;
        float mY = (float)pScreenSettings->m_Height / 2.0F;
        float x = mvpmatrix->transformx(vPos);
        float y = mvpmatrix->transformy(vPos);
        float z = mvpmatrix->transformz(vPos);
        float w = mvpmatrix->transformw(vPos);
        if (w < 0.01F)
            return false;
        pvOut->x = mX + (mX * x / w);
        pvOut->y = mY - (mY * y / w);
        pvOut->z = w;
        return true;
    }

    // Call the engine's text renderer directly.  Custom calling convention:
    // seven arguments pushed on the stack, `g` passed in EAX, caller cleans
    // up 28 bytes (add esp, 0x1C == 7 * 4).
    void EngineDrawString(char *pText, int x, int y, int r, int g, int b, int pUnknown, int pUnknown2)
    {
        __asm
        {
            push pUnknown2;
            push pUnknown;
            push b;
            push r;
            push y;
            push x;
            push pText;
            mov eax, g;
            mov ecx, OFFSET_ENGINE_DRAWTEXT;
            call ecx;
            add esp, 0x1C;
        }
    }

    // Line-of-sight test between two points using the game's own trace.
    // NOTE(review): the result travels back implicitly in EAX from the asm
    // call (MSVC-specific); there is deliberately no C++ return statement.
    bool IsVisible(Vec3 vFrom, Vec3 vTo)
    {
        __asm
        {
            push vTo.z;
            push vTo.y;
            push vTo.x;
            push vFrom.z;
            push vFrom.y;
            push vFrom.x;
            xor cl, cl; //Tracer
            xor eax, eax;
            mov ebx, OFFSET_ISVISIBLE;
            call ebx;
            add esp, 0x18;
        }
    }

    // printf-style wrapper around EngineDrawString, drawn at 0.3x GL scale
    // (scale is restored afterwards).
    // NOTE(review): EngineDrawString is declared above with 8 parameters but
    // invoked here with 6 -- presumably default arguments exist in a header;
    // confirm this compiles as-is.
    void DrawString(int x, int y, int r, int g, int b, char *pText, ...)
    {
        va_list va_alist;
        char buf[256];
        va_start(va_alist, pText);
        _vsnprintf_s(buf, sizeof(buf), pText, va_alist);
        va_end(va_alist);
        float extraScale = 0.3F;
        glScalef(extraScale, extraScale, 1);
        EngineDrawString(buf, x / extraScale, y / extraScale, r, g, b);
        glScalef(1 / extraScale, 1 / extraScale, 1);
    }
}
|
$(document).ready(function () {
  // Smooth-scroll back to the top when the back-to-top button is clicked.
  $("#back2Top").click(function (event) {
    event.preventDefault();
    $("html, body").animate({ scrollTop: 0 }, "slow");
    return false;
  });

  // Toggle the navbar's "scrolled" class once the page is scrolled past
  // the navbar's own height.
  $(document).scroll(function () {
    var $nav = $("#mainNavbar");
    $nav.toggleClass("scrolled", $(this).scrollTop() > $nav.height());
  });

  // Add minus icon for collapse element which is open by default
  $(".collapse.show").each(function () {
    $(this).prev(".card-header").find(".fa").addClass("fa-minus").removeClass("fa-plus");
  });

  // Toggle plus minus icon on show hide of collapse element
  $(".collapse").on('show.bs.collapse', function () {
    $(this).prev(".card-header").find(".fa").removeClass("fa-plus").addClass("fa-minus");
  }).on('hide.bs.collapse', function () {
    $(this).prev(".card-header").find(".fa").removeClass("fa-minus").addClass("fa-plus");
  });

  // Mirror the range slider's value into the #demo element.
  // Guarded: on pages without the slider the original threw a TypeError
  // (null.value) and aborted the rest of this handler.
  var slider = document.getElementById("myRange");
  var output = document.getElementById("demo");
  if (slider && output) {
    output.innerHTML = slider.value;
    slider.oninput = function () {
      output.innerHTML = this.value;
    };
  }

  // Sort the product grid by ascending price (on the nested product card's
  // data-price) and re-append, which moves the nodes in place.
  // NOTE(review): defined but never invoked in this file -- presumably
  // wired up elsewhere; confirm before removing.
  function sortProductsPriceAscending() {
    var gridItems = $('.price_box');
    gridItems.sort(function (a, b) {
      return $('.product-card', a).data("price") - $('.product-card', b).data("price");
    });
    $(".price_cart").append(gridItems);
  }
});
/** Shows the W3.CSS sidebar together with its page overlay. */
function w3_open() {
  var sidebar = document.getElementById("mySidebar");
  var overlay = document.getElementById("myOverlay");
  sidebar.style.display = "block";
  overlay.style.display = "block";
}
/** Hides the W3.CSS sidebar together with its page overlay. */
function w3_close() {
  var sidebar = document.getElementById("mySidebar");
  var overlay = document.getElementById("myOverlay");
  sidebar.style.display = "none";
  overlay.style.display = "none";
}
# Delete the data from 'Orders' table
# WARNING: no WHERE clause -- this removes every row in the table.
DELETE FROM Orders;
#!/usr/bin/env bash
# ----------------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ----------------------------------------------------------------------------
# Directory holding this script, resolved to an absolute path.
BUILDTOOLS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null && pwd)"
# The script expects to be run from the PLC4X source root.
PLC4X_ROOT_DIR=.
# BUNDLE_DIR is results of maven release:perform's creation of release candidate
BUNDLE_DIR=${PLC4X_ROOT_DIR}/target/checkout/target

# Canonical ASF locations for PLC4X sources and release artifacts.
PLC4X_ASF_GIT_URL=https://git-wip-us.apache.org/repos/asf/plc4x.git
PLC4X_ASF_DIST_URL=https://downloads.apache.org/plc4x
PLC4X_ASF_DIST_DYN_URL=https://www.apache.org/dyn/closer.cgi/plc4x
PLC4X_ASF_SVN_RELEASE_URL=https://dist.apache.org/repos/dist/release/plc4x
PLC4X_ASF_SVN_RC_URL=https://dist.apache.org/repos/dist/dev/plc4x

# Usage string, filled in by setUsage() and printed by usage().
USAGE=
# Properties file recording the version currently being released.
RELEASE_PROP_FILE=${PLC4X_ROOT_DIR}/plc4x.release.properties
# Print an optional error message and abort the script.
function die() { # [$* msgs]
  [ $# -gt 0 ] && echo "Error: $*"
  exit 1
}

# Record the usage string later printed by usage().
function setUsage() { # $1: usage string
  USAGE=$1
}

# Print an optional error, then the usage string, and abort.
function usage() { # [$*: msgs]
  [ $# -gt 0 ] && echo "Error: $*"
  echo "Usage: ${USAGE}"
  exit 1
}

# Print usage and exit when the first argument asks for help.
function handleHelp() { # usage: handleHelp "$@"
  if [ "$1" == "-?" ] || [ "$1" == "--help" ]; then
    usage
  fi
}

# Fail with usage when the next positional argument is missing or an option.
function requireArg() { # usage: requireArgs "$@"
  if [ $# -lt 1 ] || [[ $1 =~ ^- ]]; then
    usage "missing argument"
  fi
}
# Fail with usage when unexpected extra arguments remain.
function noExtraArgs() { # usage: noExtraArgs "$@"
  [ $# = 0 ] || usage "extra arguments"
}

# Print the absolute form of a relative or absolute path.
function getAbsPath() { # $1: rel-or-abs-path
  echo "$(cd "$(dirname "$1")" >> /dev/null || exit $?; pwd)/$(basename "$1")"
}

# Interactive yes/no prompt; succeeds on y/Y, fails on n/N, reprompts otherwise.
# shellcheck disable=SC2046
function confirm () { # [$1: question]
  while true; do
    # call with a prompt string or use a default
    /bin/echo -n "${1:-Are you sure?}"
    read -r -p " [y/n] " response
    case $response in
      [yY]) return $(true) ;;
      [nN]) return $(false) ;;
      *) echo "illegal response '$response'" ;;
    esac
  done
}
# Abort: this script has been replaced by the maven build tooling.
function dieSuperceeded { # no args
  die "This tool is superceeded with the new maven build tooling. See src/site/asciidoc/releasing.adoc."
}

# Die unless PLC4X_ROOT_DIR is a git checkout of the source tree.
function checkPLC4XSourceRootGitDie { # no args; dies if !ok
  [ -d "${PLC4X_ROOT_DIR}/.git" ] || die "Not an PLC4X source root git directory \"${PLC4X_ROOT_DIR}\""
}

# Warn (and return 1) unless the checkout directory name starts with mgmt-plc4x.
function checkUsingMgmtCloneWarn() { # no args; warns if plc4x root isn't a mgmt clone
  CLONE_DIR="$(cd ${PLC4X_ROOT_DIR} > /dev/null || exit $?; pwd)"
  CLONE_DIRNAME="$(basename "${CLONE_DIR}")"
  if [ ! "$(echo ${CLONE_DIRNAME} | grep -o -E '^mgmt-plc4x')" ]; then
    echo "Warning: the PLC4X root dir \"${PLC4X_ROOT_DIR}\" is not a release mgmt clone!"
    return 1
  else
    return 0
  fi
}

# Succeed iff the release-candidate bundle directory exists.
function checkBundleDir() { # no args returns true/false (0/1)
  if [ -d ${BUNDLE_DIR} ]; then
    return 0
  else
    return 1
  fi
}
# Succeed iff $1 is an X.Y.Z version with 1-2 digits per component.
function checkVerNum() { # $1: X.Y.Z returns true/false (0/1)
  [[ $1 =~ ^[0-9]{1,2}\.[0-9]{1,2}\.[0-9]{1,2}$ ]] && return 0
  return 1
}

# Die unless $1 is a valid X.Y.Z version.
function checkVerNumDie() { # $1: X.Y.Z dies if not ok
  checkVerNum "$1" || die "Not a X.Y.Z version number \"$1\""
}

# Succeed iff $1 is a 1-2 digit release-candidate number.
function checkRcNum() { # $1: rc-num returns true/false (0/1)
  [[ $1 =~ ^[0-9]{1,2}$ ]] && return 0
  return 1
}

# Die unless $1 is a valid release-candidate number.
function checkRcNumDie() { # $1: rc-num dies if not ok
  checkRcNum "$1" || die "Not a release candidate number \"$1\""
}
# (Re)create the release properties file recording the release version.
function createReleaseProperties { # X.Y.Z
  VER="$1"
  checkVerNumDie "${VER}"
  echo "releaseNum=${VER}" > ${RELEASE_PROP_FILE}
}
# Print the value of <property-name> from the release properties file.
function getReleaseProperty { # <property-name>
  PN=$1
  # Anchor the match to "^name=": the original unanchored grep returned the
  # first line merely CONTAINING the name (e.g. "release" matched
  # "releaseNum"), yielding the wrong value.
  PNVAL=$(grep "^${PN}=" ${RELEASE_PROP_FILE})
  VAL=$(echo "${PNVAL}" | sed -e "s/^${PN}=//")
  echo "${VAL}"
}
# Print the release version, read from the release properties file by
# default, or parsed from a generated source bundle's filename ("bundle").
function getPLC4XVer() { # [$1 == "bundle"]
  MSG="getPLC4XVer(): unknown mode \"$1\""
  VER=""
  if [ "$1" == "" ]; then
    VER=$(getReleaseProperty releaseNum)
    MSG="Unable to identify the release version id from ${RELEASE_PROP_FILE}"
  elif [ "$1" == "gradle" ]; then
    die "'getPLC4XVer() gradle' is no longer supported"
  elif [ "$1" == "bundle" ]; then
    # Get the X.Y.Z version from a build generated bundle's name.
    BUNDLE=$(echo "${BUNDLE_DIR}"/apache-plc4x-*-source-release.tar.gz)
    # Use [0-9] instead of \d: GNU grep's ERE does not support \d, so the
    # original pattern silently matched nothing except under BSD grep.
    VER=$(echo "${BUNDLE}" | grep -o -E '[0-9]+\.[0-9]+\.[0-9]+')
    MSG="Unable to identify the version id from bundle ${BUNDLE}"
  fi
  [ "${VER}" ] || die "${MSG}"
  echo "$VER"
}
# Print the X.Y prefix of an X.Y.Z version.
function getMajMinVerNum() { # $1: X.Y.Z returns X.Y
  VER=$1; shift
  checkVerNumDie "${VER}"
  MAJ_MIN_VER=$(echo "${VER}" | sed -e 's/\.[0-9][0-9]*$//')
  echo "${MAJ_MIN_VER}"
}

# Print the release branch name (release/X.Y) for a version.
function getReleaseBranch() { # $1: X.Y.Z version
  MAJ_MIN_NUM=$(getMajMinVerNum "$1")
  echo "release/${MAJ_MIN_NUM}"
}

# Print the git tag for a release: "X.Y.Z" or "X.Y.Z-RCn" when $2 is given.
function getReleaseTag() { # $1: X.Y.Z [$2: rc-num]
  VER=$1; shift
  checkVerNumDie "${VER}"
  RC_SFX=""
  if [ $# -gt 0 ] && [ "$1" != "" ]; then
    RC_SFX="-RC$1"
  fi
  echo "${VER}${RC_SFX}"
}

# Print the human-readable tag comment, e.g. "Apache PLC4X 0.4.0 RC1".
function getReleaseTagComment() { # $1: X.Y.Z [$2: rc-num]
  VER=$1; shift
  checkVerNumDie "${VER}"
  RC_SFX=""
  if [ $# -gt 0 ] && [ "$1" != "" ]; then
    RC_SFX=" RC$1"
  fi
  echo "Apache PLC4X ${VER}${RC_SFX}"
}
|
#!/bin/bash
# Install Nginx via yum when missing, then start and verify the service.

# Check if Nginx is installed
if ! command -v nginx &> /dev/null; then
    echo "Nginx is not currently installed on Linux, begins installation."
    # Install Nginx using yum package manager
    sudo yum install -y nginx
    # Create a user for Nginx without a home directory
    # NOTE(review): the yum nginx package usually creates this user itself,
    # in which case useradd fails (harmlessly) -- confirm it is still needed.
    sudo useradd --no-create-home nginx
    # Enable and start the Nginx service
    sudo systemctl enable nginx
    sudo systemctl start nginx
    # Check if Nginx is running
    if sudo systemctl is-active --quiet nginx; then
        echo "Nginx is running"
    else
        echo "Failed to start Nginx"
    fi
else
    echo "Nginx already installed"
fi
def find_smallest_greater_than(arr, target):
    """Return the smallest element of ``arr`` strictly greater than ``target``.

    Returns None when no element qualifies.  The original returned the
    FIRST element greater than target, which is only the smallest when the
    input is sorted ascending; this version is correct for any ordering.
    """
    return min((num for num in arr if num > target), default=None)
# This shell script executes the Slurm jobs for computing reconstructions.
# One job per scattering-type (none / time / time-frequency) and wavelet
# (gammatone / morlet) combination, all at J=16.
sbatch tsp19_name=speech_sc=none_J=16_wav=gammatone.sbatch
sbatch tsp19_name=speech_sc=none_J=16_wav=morlet.sbatch
sbatch tsp19_name=speech_sc=time_J=16_wav=gammatone.sbatch
sbatch tsp19_name=speech_sc=time_J=16_wav=morlet.sbatch
sbatch tsp19_name=speech_sc=time-frequency_J=16_wav=gammatone.sbatch
sbatch tsp19_name=speech_sc=time-frequency_J=16_wav=morlet.sbatch
|
<filename>spec/fixtures/rulesets.rb
# A ruleset exercising every supported condition type (:if, :unless, :each).
VALID_RULESET = <<-CODE
scoring_rules do |rule|
  rule.add_points 10, :if => lambda {self.age >= 18}
  rule.remove_points 5, :if => :can_remove?
  rule.add_points 5, :unless => lambda {self.is_new_user?}
  rule.add_points 1, :each => :followers
end
CODE

# Invalid fixture: scoring_rules called without a block.
INVALID_RULESET_EMPTY = <<-CODE
scoring_rules
CODE

# Invalid fixture: a single rule carrying more than one condition.
INVALID_RULESET_MANY_CONDITIONS = <<-CODE
scoring_rules do |rule|
  rule.add_points 10, :if => lambda {self.age >= 18}, :unless => lambda {true}
  rule.remove_points 5, :if => :can_remove?
end
CODE

# Invalid fixture: a rule with no condition at all.
INVALID_RULESET_NO_CONDITIONS = <<-CODE
scoring_rules do |rule|
  rule.add_points 10
  rule.remove_points 5, :if => :can_remove?
end
CODE

# Invalid fixture: :crazy is not a recognised condition key.
INVALID_RULESET_NON_EXISTENT_CONDITIONS = <<-CODE
scoring_rules do |rule|
  rule.add_points 10, :crazy => lambda {true}
  rule.remove_points 5, :if => :can_remove?
end
CODE
// Verifies title handling: default fallback, suffix appending, and
// function-valued titles.
Tinytest.add("Title", function(test) {
  var config = {
    options: {
      title: "Default Title",
      suffix: "Suffix"
    }
  }
  Meta.config(config);
  // An empty title falls back to the configured default (no suffix).
  Meta.setTitle("");
  test.equal(Meta.getTitle(), config.options.title, "Is default title working ?");
  // A non-empty title gets " | <suffix>" appended.
  Meta.setTitle("asd");
  test.equal(Meta.getTitle(), "asd | " + config.options.suffix, "Is suffix working ?");
  // Titles may also be supplied lazily as a function.
  Meta.setTitle(function() {
    return "test"
  });
  test.equal(Meta.getTitle(), "test | " + config.options.suffix, "can title be function ?");
});
// Tinytest.add("Tags", function(test) {
// var meta = {
// "og:title": "Titlee"
// }
// Meta.set("og:title", meta["og:title"]);
// test.equal(Meta.hash()["og:title"], meta["og:title"], "Can i add meta tag");
// Meta.unset("og:title");
// test.isUndefined(Meta.hash()["og:title"], "Can i remove meta tag");
// });
// DOM integration tests: each step mutates Meta state, flushes Tracker,
// then defers its assertions so the DOM has been updated first.
// Fix: removed two leftover `debugger;` statements that froze test runs
// whenever devtools were open.
testAsyncMulti("HTML", [
  function(test, expect) {
    Meta.setTitle("test");
    Tracker.flush();
    Meteor.defer(expect(function() {
      test.equal(document.title, Meta.getTitle(), "is title set on DOM ?");
    }));
  },
  function(test, expect) {
    // Function-valued titles must also reach the DOM.
    Meta.setTitle(function() {
      return "My Title"
    });
    Tracker.flush();
    Meteor.defer(expect(function() {
      test.equal(document.title, Meta.getTitle(), "is title set on DOM ?");
    }));
  },
  function(test, expect) {
    // Setting and unsetting individual meta tags.
    var title = "Open Graph Title";
    Meta.set("og:title", title);
    Meta.set("removed", "Will be removed");
    Meta.unset("removed");
    Tracker.flush();
    Meteor.defer(expect(function() {
      test.equal($("meta[property='og:title']").attr("content"), title, "is og:title set on DOM ?");
      test.isUndefined($("meta[property='removed']").attr("content"), "can remove a tag on DOM ?");
    }));
  },
  function(test, expect) {
    // Bulk set with mixed attribute names, then unset one of them.
    Meta.set([
      {
        name: "name",
        property: "apple-mobile-web-app-capable",
        content: "yes"
      },
      {
        name: "property",
        property: "og:locale",
        content: "en_GB"
      },
      {
        name: "attrName",
        property: "tag3",
        content: "attrContent"
      }
    ]);
    Meta.unset("tag3");
    Tracker.flush();
    Meteor.defer(expect(function() {
      test.equal($("meta[name='apple-mobile-web-app-capable']").attr("content"), "yes", "is apple-mobile-web-app-capable set on DOM?");
      test.equal($("meta[property='og:locale']").attr("content"), "en_GB", "is og:locale set to en_GB on DOM?");
      test.isUndefined($("meta[attrName='tag3']").attr("content"), "can remove tag3 on DOM ?");
    }));
  }
]);
#!/bin/bash
#
# This script is used for building the muParser html page from
# html templates.
#
# Remove any previously generated pages.
rm -rf ../*.html
#
# add navigation bar to all html templates starting with mup_*
#
for file in mup_*
do
echo processing $file
# Splice the template's content into navigation.html after $PLACEHOLDER.
cat navigation.html | sed "/\$PLACEHOLDER/r $file" > ../$file
done

# create index.html
# NOTE(review): this copy is immediately overwritten by the redirect on the
# next line; presumably kept as a fallback -- confirm whether it can go.
cp ../mup_intro.html ../index.html
cat ../mup_intro.html | sed "/\$STAT_COUNTER/r stat_counter.html" > ../index.html
/**
* Copyright (c) 2016-2019 人人开源 All rights reserved.
* <p>
* https://www.renren.io
* <p>
* 版权所有,侵权必究!
*/
package io.renren.modules.sys.service.impl;
import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
import com.baomidou.mybatisplus.core.conditions.update.UpdateWrapper;
import com.baomidou.mybatisplus.core.metadata.IPage;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import io.renren.common.exception.RRException;
import io.renren.common.utils.Constant;
import io.renren.common.utils.PageUtils;
import io.renren.common.utils.Query;
import io.renren.modules.arct.entity.AuthorEntity;
import io.renren.modules.arct.service.AuthorService;
import io.renren.modules.sys.dao.SysUserDao;
import io.renren.modules.sys.entity.SysUserEntity;
import io.renren.modules.sys.entity.SysUserRoleEntity;
import io.renren.modules.sys.service.SysRoleService;
import io.renren.modules.sys.service.SysUserRoleService;
import io.renren.modules.sys.service.SysUserService;
import org.apache.commons.lang.RandomStringUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.shiro.crypto.hash.Sha256Hash;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mail.javamail.JavaMailSenderImpl;
import org.springframework.mail.javamail.MimeMessageHelper;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.mail.MessagingException;
import javax.mail.internet.MimeMessage;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Map;
/**
* 系统用户
*
* @author Mark <EMAIL>
*/
@Service("sysUserService")
public class SysUserServiceImpl extends ServiceImpl<SysUserDao, SysUserEntity> implements SysUserService {
@Autowired
private SysUserRoleService sysUserRoleService;
@Autowired
private SysRoleService sysRoleService;
@Autowired
private JavaMailSenderImpl mailSender;
@Autowired
private AuthorService authorService;
@Autowired
private SysUserService sysUserService;
    /**
     * Self-service registration: sends an activation mail, creates the user
     * (status 0 = inactive), assigns role id 2, and creates a matching
     * author profile with zeroed counters and a default avatar.
     */
    @Override
    public void registerUser(String username, String password, String email) {
        sendActiveMail(email, username);
        SysUserEntity user = new SysUserEntity();
        AuthorEntity authorEntity = new AuthorEntity();
        user.setUsername(username);
        user.setPassword(password);
        user.setEmail(email);
        user.setStatus(0);
        // NOTE(review): deriving the new user id as user-count + 1 is
        // race-prone and assumes ids are contiguous -- confirm intent.
        long currentUser = sysUserService.list().toArray().length + 1;
        SysUserRoleEntity sysUserRoleEntity = new SysUserRoleEntity();
        sysUserRoleEntity.setUserId(currentUser);
        sysUserRoleEntity.setRoleId(2L);
        authorEntity.setAuthorName(username);
        authorEntity.setStatus(1);
        authorEntity.setAvatar("//img1.sycdn.imooc.com/5dafce1a00013fd501400140-160-160.jpg");
        authorEntity.setFansCount(0L);
        authorEntity.setFollowCount(0L);
        authorEntity.setIntegralCount(0L);
        authorService.save(authorEntity);
        // saveUser hashes the password with a fresh salt before persisting.
        saveUser(user);
        sysUserRoleService.save(sysUserRoleEntity);
    }
    /**
     * Paged user query.  Optional filters from {@code params}: fuzzy
     * username match and exact creator id; each is applied only when set.
     */
    @Override
    public PageUtils queryPage(Map<String, Object> params) {
        String username = (String) params.get("username");
        Long createUserId = (Long) params.get("createUserId");
        IPage<SysUserEntity> page = this.page(
                new Query<SysUserEntity>().getPage(params),
                new QueryWrapper<SysUserEntity>()
                        .like(StringUtils.isNotBlank(username), "username", username)
                        .eq(createUserId != null, "create_user_id", createUserId)
        );
        return new PageUtils(page);
    }
@Override
public List<String> queryAllPerms(Long userId) {
return baseMapper.queryAllPerms(userId);
}
@Override
public List<Long> queryAllMenuId(Long userId) {
return baseMapper.queryAllMenuId(userId);
}
@Override
public SysUserEntity queryByUserName(String username) {
return baseMapper.queryByUserName(username);
}
@Override
@Transactional
public void saveUser(SysUserEntity user) {
user.setCreateTime(new Date());
//sha256加密
String salt = RandomStringUtils.randomAlphanumeric(20);
user.setPassword(new Sha256Hash(user.getPassword(), salt).toHex());
user.setSalt(salt);
this.save(user);
//检查角色是否越权
checkRole(user);
//保存用户与角色关系
sysUserRoleService.saveOrUpdate(user.getUserId(), user.getRoleIdList());
}
@Override
@Transactional
public void update(SysUserEntity user) {
if (StringUtils.isBlank(user.getPassword())) {
user.setPassword(null);
} else {
user.setPassword(new Sha256Hash(user.getPassword(), user.getSalt()).toHex());
}
this.updateById(user);
//检查角色是否越权
checkRole(user);
//保存用户与角色关系
sysUserRoleService.saveOrUpdate(user.getUserId(), user.getRoleIdList());
}
@Override
public void deleteBatch(Long[] userId) {
this.removeByIds(Arrays.asList(userId));
}
@Override
public boolean updatePassword(Long userId, String password, String newPassword) {
SysUserEntity userEntity = new SysUserEntity();
userEntity.setPassword(<PASSWORD>);
return this.update(userEntity,
new QueryWrapper<SysUserEntity>().eq("user_id", userId).eq("password", password));
}
@Override
public void activeUser(SysUserEntity user) {
UpdateWrapper<SysUserEntity> wrapper = new UpdateWrapper<>();
wrapper.eq("username", user.getUsername()).set("status", 1);
update(wrapper);
}
/**
* 检查角色是否越权
*/
private void checkRole(SysUserEntity user) {
if (user.getRoleIdList() == null || user.getRoleIdList().size() == 0) {
return;
}
//如果不是超级管理员,则需要判断用户的角色是否自己创建
if (user.getCreateUserId() == Constant.SUPER_ADMIN) {
return;
}
//查询用户创建的角色列表
List<Long> roleIdList = sysRoleService.queryRoleIdList(user.getCreateUserId());
//判断是否越权
if (!roleIdList.containsAll(user.getRoleIdList())) {
throw new RRException("新增用户所选角色,不是本人创建");
}
}
/**
* mimeMessage,表示这是一个复杂的邮件
* messageHelper,使用它为mimeMessage添加相应的信息
*
*/
@Async
public void sendActiveMail(String email, String username) {
MimeMessage mimeMessage = mailSender.createMimeMessage();
MimeMessageHelper messageHelper = new MimeMessageHelper(mimeMessage);
try {
messageHelper.setFrom("<EMAIL>");
messageHelper.setTo(email);
messageHelper.setSubject("注册激活");
messageHelper.setText("<a href = 'https://159.75.101.5/renren-fast/sys/user/active/" + username + "'>点击完成用户注册</a>", true);
} catch (MessagingException e) {
e.printStackTrace();
}
mailSender.send(mimeMessage);
}
} |
#!/bin/bash
# Run the unit tests on every connected Android and iOS device, then parse
# the generated report files for failures (HTML summary appended to mail.log).
AUTO_TEST_PATH=$(pwd)
IOS=0
ANDROID=0
# Prepare GTEST
AUTO_TEST_SRC_PATH="../../"
cd "${AUTO_TEST_SRC_PATH}"
if [ ! -d "./gtest" ]
then
    make gtest-bootstrap
fi
cd "${AUTO_TEST_PATH}"
# To find whether we have android devices
echo "please set the environment variable as:"
echo 'export ANDROID_HOME="path of android sdk"'
echo 'export ANDROID_NDK_HOME="path of android ndk"'
ANDROID_SDK_PATH=${ANDROID_HOME}
ANDROID_NDK_PATH=${ANDROID_NDK_HOME}
# -z replaces the old "#${VAR}" = "#" idiom: same check, clearer intent
if [ -z "${ANDROID_SDK_PATH}" ]
then
    echo "Please set ANDROID_HOME with the path of Android SDK"
    exit 1
fi
if [ -z "${ANDROID_NDK_PATH}" ]
then
    echo "Please set ANDROID_NDK_HOME with the path of Android NDK"
    exit 1
fi
# prepare devices
ADB=${ANDROID_SDK_PATH}/platform-tools/adb
# get devices (lines ending in "\tdevice" in `adb devices` output)
devices=$("$ADB" devices | awk -F" " '/\tdevice/{print $1}')
if [ -z "$devices" ];then
    echo "Can not find any android devices!"
else
    echo "Start to run the unittest on android devices"
    ANDROID=1
    cd ./android
    bash run_AutoTest_android.sh >/dev/null 2>&1
    if [ $? -ne 0 ];then
        echo "There is something wrong happened when running unittest on android devices, please check"
    else
        echo "Finished running the unittest on android devices successfully"
    fi
    cd "${AUTO_TEST_PATH}"
fi
# To find whether we have ios devices
DEVICES=$(system_profiler SPUSBDataType | sed -n -e '/iPad/,/Serial/p' -e '/iPhone/,/Serial/p' | grep "Serial Number:" | awk -F ": " '{print $2}')
if [ -z "${DEVICES}" ]
then
    echo "Can not find any ios devices!"
else
    echo "Start to run the unittest on ios devices"
    IOS=1
    cd ./ios
    bash run_AutoTest_ios.sh >/dev/null 2>&1
    if [ $? -ne 0 ];then
        echo "There is something wrong happened when running unittest on ios devices, please check"
    else
        # BUG FIX: this branch previously reported "android devices"
        echo "Finished running the unittest on ios devices successfully"
    fi
    cd "${AUTO_TEST_PATH}"
fi
# Parse the unit test result files to find whether there were failures
if [ "${ANDROID}" = "1" ];then
    echo "
<style>
.env {
background-color: #95B9C7;
font: 30px bold;
}</style>" >> mail.log
    # BUG FIX: the unescaped inner quotes around "env" used to terminate the
    # shell string, emitting class=env (unquoted HTML attribute)
    echo "<br><font class=\"env\">Run unit test on android devices</font>" >> mail.log
    bash run_ParseUTxml.sh ./android/report
    ret=$?
    if [ ${ret} -eq 0 ];then
        echo "Unit test run on the android devices has no failed case"
    elif [ ${ret} -eq 2 ];then
        echo "Unit test has failed cases, please check"
    elif [ ${ret} -eq 1 ];then
        echo "Unit test run failed"
    fi
fi
if [ "${IOS}" = "1" ];then
    echo "<br><font class=\"env\">Run unit test on ios devices with armv7 & arm64</font>" >> mail.log
    bash run_ParseUTxml.sh ./ios/report
    ret=$?
    if [ ${ret} -eq 0 ];then
        echo "Unit test run on the ios devices has no failed case"
    elif [ ${ret} -eq 2 ];then
        echo "Unit test has failed cases, please check"
    elif [ ${ret} -eq 1 ];then
        echo "Unit test run failed"
    fi
fi
// Appears to be a decompiler/IL2CPP dump stub: method bodies were stripped,
// and the trailing hex ranges are the native code addresses of each method
// in the original binary — confirm against the dump tool before editing.
public class ObjectController
{
    // Unity-style lifecycle hooks (empty in the dump).
    private void Start() {} // 0x00857BE0-0x00857BF0
    private void Update() {} // 0x00857BF0-0x00857C00
    public void ResetMoveInterpolateParam() {} // 0x00857E90-0x00857EA0
}
// In the build.gradle file:
// (FIX: the original used '#' comments, which are not valid Groovy syntax
// and would break the Gradle build; Groovy uses '//'.)
apply plugin: 'com.android.application'
android {
    compileSdkVersion 29
    buildToolsVersion "29.0.3"
    defaultConfig {
        applicationId "com.example.appname"
        minSdkVersion 21
        targetSdkVersion 29
        versionCode 1
        versionName "1.0"
    }
    // add build types and product flavors here
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
        }
    }
}
dependencies {
    implementation 'androidx.appcompat:appcompat:1.2.0'
    implementation 'com.google.android.material:material:1.2.1'
}
// Then run `./gradlew assembleRelease` to compile the application
import { BreezeBorderRadius, BreezeRoundedSides } from "../components/types";
/**
 * Maps the requested rounded sides to the matching Tailwind `rounded-*`
 * utility classes for the given radius.
 *
 * @param roundedSides  which corners/sides to round
 * @param roundedRadius Tailwind radius suffix (default "xl"); "none" short-
 *                      circuits everything to a single "rounded-none"
 * @returns deduplicated list of Tailwind class names
 */
export const getRoundedTWClasses = (
	roundedSides: BreezeRoundedSides[],
	roundedRadius: BreezeBorderRadius = "xl"
) => {
	// Shared accumulator: each map iteration below pushes into this array,
	// so later iterations see (and re-purge) classes produced by earlier
	// ones. The final dedupe makes the repeated purge results harmless.
	const _classes: string[] = [];
	const classes = roundedSides.map((rs, index) => {
		if (roundedRadius === "none") {
			// With radius "none" every non-"none" side entry is redundant;
			// warn so the caller can clean up its props.
			roundedSides[index] !== "none" &&
				console.warn(
					`The \`${rs}\` type is not being used, you can remove it.`
				);
			return "rounded-none";
		} else {
			const _roundedRadius = `-${roundedRadius}`;
			switch (rs) {
				case "none":
					_classes.push("rounded-none");
					break;
				case "allCorners":
					_classes.push(`rounded${_roundedRadius}`);
					break;
				case "top":
					_classes.push(`rounded-t${_roundedRadius}`);
					break;
				case "bottom":
					_classes.push(`rounded-b${_roundedRadius}`);
					break;
				case "left":
					_classes.push(`rounded-l${_roundedRadius}`);
					break;
				case "right":
					_classes.push(`rounded-r${_roundedRadius}`);
					break;
				case "topRight":
					_classes.push(`rounded-tr${_roundedRadius}`);
					break;
				case "bottomRight":
					_classes.push(`rounded-br${_roundedRadius}`);
					break;
				case "topLeft":
					_classes.push(`rounded-tl${_roundedRadius}`);
					break;
				case "bottomLeft":
					_classes.push(`rounded-bl${_roundedRadius}`);
					break;
			}
			// Drop side-specific classes once a base `rounded-<r>` class is
			// present (see purgeUnnecessaryClasses).
			const purged = purgeUnnecessaryClasses(_classes);
			return purged;
		}
	});
	// `classes` mixes strings and string[]; flatten, then dedupe via Set.
	const uniqueClasses = [...new Set(classes.flat())];
	return uniqueClasses;
};
/**
 * Keeps only the "base" rounded classes (full-element `rounded-<r>` plus
 * `rounded-none`) from the input. If none of the input classes is a base
 * class, the input is returned unchanged — side-specific classes are only
 * "unnecessary" when a base class already covers the whole element.
 */
const purgeUnnecessaryClasses = (classes: string[]) => {
	const baseRoundedClasses = new Set([
		"rounded-none",
		"rounded-xs",
		"rounded-sm",
		"rounded-md",
		"rounded-lg",
		"rounded-xl",
		"rounded-2xl",
		"rounded-3xl",
	]);
	const kept = classes.filter((cls) => baseRoundedClasses.has(cls));
	return kept.length > 0 ? kept : classes;
};
|
use Illuminate\Database\Eloquent\Model;
/**
 * Eloquent model for the `companies` table.
 */
class Company extends Model
{
    /** @var string explicit table name backing this model */
    protected $table = 'companies';

    // Define the many-to-many relationship with the User model
    // (pivot table `users_companies`, keyed by company_id / user_id).
    public function users()
    {
        return $this->belongsToMany('App\User', 'users_companies', 'company_id', 'user_id');
    }

    // Implement the scope method to filter companies based on a specific
    // user's ID. Usage: Company::forUser($userId)->get().
    // The explicit select('companies.*') keeps pivot columns out of the
    // hydrated models after the join.
    public function scopeForUser($query, $userId)
    {
        return $query->select('companies.*')
            ->join('users_companies', 'users_companies.company_id', '=', 'companies.id')
            ->where('users_companies.user_id', '=', $userId);
    }
}
#!/usr/bin/env bash
# Emit the fixed path consumed by this script's caller (one line, newline-terminated).
printf '%s\n' "/tmp/afile"
<gh_stars>1-10
package builder
import (
	"fmt"
	"io"
	"io/ioutil"
	"math/rand"
	"os"
	"path/filepath"
	"strconv"
	"sync"
	"time"

	"github.com/Sirupsen/logrus"
	"github.com/docker/docker/client"
	"github.com/rikvdh/ci/lib/buildcfg"
	"github.com/rikvdh/ci/lib/config"
	"github.com/rikvdh/ci/models"
)
// runningJobs tracks the number of in-flight builds; initialized in Run().
var runningJobs *jobCounter

// buildDir is the absolute root for per-job build directories and log files;
// set in Run() from the configured BuildDir.
var buildDir string
// seedOnce guarantees the PRNG is seeded exactly once per process.
var seedOnce sync.Once

// randomString returns a random lowercase-alphanumeric string of length
// strlen, used for per-job build directory names.
//
// FIX: the PRNG was previously re-seeded from the clock on every call, so
// two calls within the clock's granularity produced identical strings (and
// reseeding defeats the generator's statistical properties). Seed once.
func randomString(strlen int) string {
	seedOnce.Do(func() {
		rand.Seed(time.Now().UTC().UnixNano())
	})
	const chars = "abcdefghijklmnopqrstuvwxyz0123456789"
	result := make([]byte, strlen)
	for i := 0; i < strlen; i++ {
		result[i] = chars[rand.Intn(len(chars))]
	}
	return string(result)
}
// GetLogFromPos retrieves the log for a job starting at byte offset pos.
// Returns "" (after logging a warning) on any I/O problem; callers cannot
// distinguish "no log" from "empty log".
func GetLogFromPos(job *models.Job, pos int64) string {
	logfile := buildDir + "/" + strconv.Itoa(int(job.ID)) + ".log"
	f, err := os.Open(logfile)
	if err != nil {
		logrus.Warnf("Error opening logfile: %s, %v", logfile, err)
		return ""
	}
	defer f.Close()
	// FIX: the Seek error was silently discarded; a failed seek would have
	// returned the log from the wrong position.
	if _, err := f.Seek(pos, io.SeekStart); err != nil {
		logrus.Warnf("Error seeking logfile: %s, %v", logfile, err)
		return ""
	}
	d, err := ioutil.ReadAll(f)
	if err == nil {
		return string(d)
	}
	logrus.Warnf("Error on readall: %s, %v", logfile, err)
	return ""
}
// GetLog retrieves the complete log for a job (from offset 0).
func GetLog(job *models.Job) string {
	return GetLogFromPos(job, 0)
}
// startJob launches one build: clone the repository, read its build
// configuration, fetch the build image and start the container. Progress
// and errors go to f (the job's log file). On success it hands the job off
// to a waitForJob goroutine and bumps the running-job counter; on any error
// it records StatusError on the job and returns.
func startJob(f io.Writer, job models.Job) {
	fmt.Fprintf(f, "starting build job %d\n", job.ID)
	// Each job builds in its own random subdirectory of buildDir.
	job.BuildDir = buildDir + "/" + randomString(16)
	job.Start = time.Now()
	// We keep the status to new, because the container doesnt exist yet
	job.SetStatus(models.StatusNew)
	if err := cloneRepo(f, job.Build.URI, job.Branch.Name, job.Reference, job.BuildDir); err != nil {
		job.SetStatus(models.StatusError, fmt.Sprintf("cloning repository failed: %v", err))
		return
	}
	job.StoreMeta(getTag(job.BuildDir), getLastCommitMessage(job.BuildDir))
	fmt.Fprintf(f, "reading configuration\n")
	cfg := buildcfg.Read(job.BuildDir, job.Build.URI)
	cli := getClient()
	if err := fetchImage(f, cli, &cfg); err != nil {
		job.SetStatus(models.StatusError, fmt.Sprintf("fetch image failed: %v", err))
		return
	}
	fmt.Fprintf(f, "starting container...\n")
	containerID, err := startContainer(cli, &cfg, job.BuildDir)
	if err != nil {
		job.SetStatus(models.StatusError, fmt.Sprintf("starting container failed: %v", err))
		return
	}
	fmt.Fprintf(f, "container started, ID: %s\n", containerID)
	job.Container = containerID
	job.SetStatus(models.StatusBusy)
	// NOTE: &job points at this function's local copy; the goroutine owns it
	// from here on.
	go waitForJob(f, cli, &job, &cfg)
	runningJobs.Increment()
}
// waitForJob blocks until the job's container exits, records the outcome on
// the job (error / failed-with-code / artifacts handled), removes the build
// directory and releases the running-job slot and docker client.
// cfg may be nil when re-attached via retakeRunningJobs.
func waitForJob(f io.Writer, cli *client.Client, job *models.Job, cfg *buildcfg.Config) {
	logrus.Infof("Wait for job %d", job.ID)
	// NOTE(review): job is already a *models.Job, so &job passes a
	// **models.Job to First — confirm this is what the ORM expects here.
	models.Handle().First(&job, job.ID)
	code, err := readContainer(f, cli, job.Container)
	if err != nil {
		job.SetStatus(models.StatusError, err.Error())
	} else if code != 0 {
		job.SetStatus(models.StatusFailed, fmt.Sprintf("build failed with code: %d", code))
	} else {
		// Exit code 0: success — collect artifacts per the build config.
		handleArtifacts(f, job, cfg)
	}
	os.RemoveAll(job.BuildDir)
	runningJobs.Decrement()
	cli.Close()
}
// GetEventChannel returns the job counter's event channel, polling until
// Run() has initialized runningJobs (100ms busy-wait; callers may therefore
// block briefly at startup).
func GetEventChannel() <-chan uint {
	logrus.Debugf("getting event channel")
	for {
		if runningJobs != nil {
			break
		}
		time.Sleep(time.Millisecond * 100)
	}
	return runningJobs.GetEventChannel()
}
// retakeRunningJobs re-attaches to jobs that were StatusBusy when the
// process last stopped: it reopens each job's log file for append and spawns
// a waitForJob goroutine against the still-running container.
func retakeRunningJobs() {
	var jobs []models.Job
	models.Handle().Preload("Branch").Preload("Build").Where("status = ?", models.StatusBusy).Find(&jobs)
	for _, job := range jobs {
		logrus.Infof("Retake job %d", job.ID)
		f, err := os.OpenFile(buildDir+"/"+strconv.Itoa(int(job.ID))+".log", os.O_APPEND|os.O_WRONLY, 0644)
		if err != nil {
			job.SetStatus(models.StatusError, fmt.Sprintf("reopening logfile failed: %v", err))
			continue
		}
		// NOTE(review): defer inside the loop closes every file only when
		// this function returns, while the waitForJob goroutines may still
		// be writing to f afterwards — confirm intended file ownership.
		defer f.Close()
		cli := getClient()
		// cfg is nil on re-attach: artifact handling is skipped in waitForJob.
		go waitForJob(f, cli, &job, nil)
		runningJobs.Increment()
	}
}
// Run is the build-runner, it starts containers and runs up to 5 parallel builds.
// It initializes the job counter and build directory, re-attaches to jobs
// that survived a restart, then loops forever picking up StatusNew jobs.
func Run() {
	runningJobs = newJobCounter()
	buildDir, _ = filepath.Abs(config.Get().BuildDir)
	if _, err := os.Stat(buildDir); os.IsNotExist(err) {
		os.Mkdir(buildDir, 0755)
	}
	retakeRunningJobs()
	for {
		if runningJobs.CanStartJob() {
			var job models.Job
			models.Handle().Preload("Branch").Preload("Build").Where("status = ?", models.StatusNew).First(&job)
			if job.ID > 0 {
				f, err := os.OpenFile(buildDir+"/"+strconv.Itoa(int(job.ID))+".log", os.O_CREATE|os.O_WRONLY, 0644)
				if err != nil {
					job.SetStatus(models.StatusError, fmt.Sprintf("creating logfile failed: %v", err))
					continue
				}
				// NOTE(review): Run never returns, so these deferred Closes
				// never execute — log file descriptors accumulate; confirm
				// whether waitForJob should own/close f instead.
				defer f.Close()
				startJob(f, job)
			} else {
				// No pending job: back off before polling the DB again.
				time.Sleep(time.Second * 5)
			}
		} else {
			// NOTE(review): runningJobs is a *jobCounter formatted with %d —
			// this prints the struct/pointer, not a count; confirm intended.
			logrus.Infof("Job ratelimiter: %d/%d", runningJobs, config.Get().ConcurrentBuilds)
			time.Sleep(time.Second * 5)
		}
	}
}
|
<filename>node_modules/react-icons-kit/linea/ecommerce_megaphone.js
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ecommerce_megaphone = void 0;
var ecommerce_megaphone = {
"viewBox": "0 0 64 64",
"children": [{
"name": "polygon",
"attribs": {
"fill": "none",
"stroke": "#000000",
"stroke-width": "2",
"stroke-miterlimit": "10",
"points": "1,29 1,35 7,41 45.5,41 61,53 63,53 63,29 \r\n\t63,5 61,5 45.5,17 7,17 1,23 "
},
"children": []
}, {
"name": "polyline",
"attribs": {
"fill": "none",
"stroke": "#000000",
"stroke-width": "2",
"stroke-miterlimit": "10",
"points": "19,41 12,59 18,59 28,41 "
},
"children": []
}, {
"name": "line",
"attribs": {
"fill": "none",
"stroke": "#000000",
"stroke-width": "2",
"stroke-miterlimit": "10",
"x1": "43",
"y1": "17",
"x2": "43",
"y2": "41"
},
"children": []
}, {
"name": "line",
"attribs": {
"fill": "none",
"stroke": "#000000",
"stroke-width": "2",
"stroke-miterlimit": "10",
"x1": "36",
"y1": "41",
"x2": "36",
"y2": "17"
},
"children": []
}]
};
exports.ecommerce_megaphone = ecommerce_megaphone; |
#!/bin/bash
# AppImage-style entry point: resolve the AppDir from this script's real
# location, point the dynamic loader and PATH at the bundled libraries and
# binaries, then hand off to the bundled Python to run electrum-mona.
set -e
APPDIR="$(dirname "$(readlink -e "$0")")"
# Prepend bundled libs; keep any pre-existing LD_LIBRARY_PATH at the end
# (the ${VAR+...} expansion avoids a trailing ':' when it was unset).
export LD_LIBRARY_PATH="${APPDIR}/usr/lib/:${APPDIR}/usr/lib/x86_64-linux-gnu${LD_LIBRARY_PATH+:$LD_LIBRARY_PATH}"
export PATH="${APPDIR}/usr/bin:${PATH}"
export LDFLAGS="-L${APPDIR}/usr/lib/x86_64-linux-gnu -L${APPDIR}/usr/lib"
# exec replaces this shell so signals reach the Python process directly;
# -s keeps the user site-packages out of sys.path.
exec "${APPDIR}/usr/bin/python3.6" -s "${APPDIR}/usr/bin/electrum-mona" "$@"
|
import React from 'react';
interface IProps {
	children: string;
}

/**
 * Section heading for the stats pages: renders its text child as a large,
 * bold, tightly-tracked <h2>.
 */
export const StatsHeader = ({ children }: IProps): JSX.Element => (
	<h2 className="text-2xl sm:text-3xl font-bold tracking-tighter mb-4">
		{children}
	</h2>
);
|
/**
 * Assembles the landing page markup: optional link section, background
 * (image style + looping video), a form and an "about" section.
 *
 * @param bool   $enlace          whether to render the link section at all
 * @param string $pre_enlace      link text
 * @param string $enlace_url      link href
 * @param string $imagen_fondo    background image URL (CSS background-image)
 * @param string $video_fondo_mp4 background video URL (mp4 source)
 * @return string the assembled HTML
 *
 * NOTE(review): all parameters are interpolated into HTML unescaped —
 * confirm they only ever come from trusted, hard-coded call sites.
 */
function generateLandingPage($enlace, $pre_enlace, $enlace_url, $imagen_fondo, $video_fondo_mp4) {
    // Empty string when the link is disabled keeps the section div present
    // but contentless.
    $enlaceHtml = $enlace ? "<a href='$enlace_url'>$pre_enlace</a>" : "";
    $backgroundStyle = "style='background-image: url($imagen_fondo);'";
    $videoSource = "<source src='$video_fondo_mp4' type='video/mp4'>";
    $landingPageHtml = "
    <div class='enlace-section'>
        $enlaceHtml
    </div>
    <div class='background-section' $backgroundStyle>
        <video autoplay muted loop>
            $videoSource
        </video>
    </div>
    <div class='form-section'>
        " . generateForm1() . "
    </div>
    <div class='about-section'>
        " . generateAboutSection('Advantages AeroLand Brings') . "
    </div>
    ";
    return $landingPageHtml;
}
/**
 * Builds the markup for the landing page's primary form.
 * TODO: the real form generation is not implemented yet — a placeholder
 * element is returned.
 *
 * @return string form HTML
 */
function generateForm1() {
    $formHtml = "<form>...</form>";
    return $formHtml;
}
/**
 * Builds the "about" section markup with the given title.
 * TODO: full section content still to be implemented.
 *
 * @param string $title section heading text (interpolated unescaped)
 * @return string section HTML
 */
function generateAboutSection($title) {
    $sectionHtml = "<div class='about'>" . $title . "</div>";
    return $sectionHtml;
}
// Example usage: hero landing page with a "Questions?" link, a hero
// background image and a looping background video.
$html = generateLandingPage(true, 'Questions?', '#', 'assets/images/hero/aeroland-payment-hero-bg.jpg', 'hero_bg/video/hero1.mp4');
echo $html;
<reponame>Winens/coffee_kernel<gh_stars>1-10
//
// Created by winens on 2/18/22.
//
#include <mm/heap.h>
#include <stdint.h>
#include <stddef.h>
#include <stdio.h>
/* Global kernel heap instance. NOTE(review): never allocated in this file —
 * confirm it is set up elsewhere before any HEAP_* function runs. */
struct _HEAP *_heap;

/* Resets the global heap's block list to empty (no memory regions attached). */
void HEAP_Exec(){
_heap->_block = 0;
}
/*
 * Attaches the memory region at `address` (`size` bytes) to `heap`, managed
 * in `bsize`-byte allocation blocks. A byte-per-block map is stored directly
 * after the region header; the map's own blocks are pre-marked used (value 5)
 * so the allocator never hands them out.
 */
void HEAP_Add_Block(struct _HEAP *heap, uintptr_t address, uint32_t size, uint32_t bsize){
    struct _HEAP_BLOCK *__block;
    uint32_t bcnt;
    uint8_t *bm;

    __block = (struct _HEAP_BLOCK*)address;
    __block->_size = size - sizeof(struct _HEAP_BLOCK);
    __block->_bsize = bsize;
    /* push onto the heap's singly-linked block-region list */
    __block->_next = heap->_block;
    heap->_block = __block;

    /* BUG FIX: the block count is the managed size divided by the block
     * size; the original computed _bsize / _size, which truncates to zero
     * for any region larger than one block, leaving the map uninitialised. */
    bcnt = __block->_size / __block->_bsize;
    bm = (uint8_t*)&__block[1];
    /* mark every block free */
    for(uint32_t q = 0; q < bcnt; ++q){
        bm[q] = 0;
    }
    /* number of blocks consumed by the map itself, rounded up */
    bcnt = (bcnt / bsize) * bsize < bcnt ? bcnt / bsize + 1 : bcnt / bsize;
    for(uint32_t q = 0; q < bcnt; ++q){
        bm[q] = 5;
    }
    /* _lfb: last-found-block search hint; _used: blocks currently in use */
    __block->_lfb = bcnt - 1;
    __block->_used = bcnt;
}
/* Returns the next candidate allocation id after q, skipping w and 0
 * (0 marks a free block, and w is presumably the neighbouring allocation's
 * id that must differ — confirm against the allocator's id scheme).
 * Relies on uint8_t wrap-around when q is near 255. */
static uint8_t HEAP_Get_NID(uint8_t q, uint8_t w){
uint8_t r;
for(r = q + 1; r == w || r == 0; ++r);
return r;
}
/*
 * Allocates `size` bytes from `heap`.
 *
 * FIXME: not implemented yet. The original body was empty, which is
 * undefined behavior for a function returning non-void; return NULL
 * explicitly ("allocation failed") until the bitmap search is written.
 */
void *HEAP_Alloc(struct _HEAP *heap, uint32_t size){
    (void)heap;
    (void)size;
    return NULL;
}
|
from .view_model import ViewModel
from flaskr.util import is_empty_string
class LoginForm(ViewModel):
    """View model for the login form: holds the submitted credentials and
    collects per-field validation errors via the ViewModel base class."""

    def __init__(self, *, username, password, loginform_id):
        super().__init__()
        self.loginform_id = loginform_id
        self.username = username
        self.password = password

    def validate(self):
        """Check that both credentials are present and non-empty.

        Appends one error dict per invalid field, then records and returns
        the overall validity flag.
        """
        # TODO: validate against a regular expression
        # (fixed comment typo; also removed the un-Pythonic trailing
        # semicolons from the statements below)
        if not self.username or is_empty_string(self.username):
            self._append_error({'username': 'Invalid or absent username'})
        if not self.password or is_empty_string(self.password):
            self._append_error({'password': 'Invalid or absent password'})
        self._is_valid = len(self._errors) == 0
        return self._is_valid
|
#pragma once
#include <GL/GL.h>
#ifdef __cplusplus
extern "C" {
#endif // __cplusplus
// Describes one shader stage: its GL type (e.g. GL_VERTEX_SHADER), its
// source (file path or GLSL text depending on LoadType) and the shader
// handle — presumably filled in by LoadShaders; confirm.
struct ShaderInfo {
GLenum type;
const char* source;     // path or GLSL text, per LoadType
GLuint shader;          // output: compiled shader object
};

// Selects how ShaderInfo::source is interpreted by LoadShaders.
enum LoadType {
LoadFromFile,    // source is a path to a shader file
LoadFromString,  // source is the GLSL text itself
};
/**
 * @brief load shader and return gl program
 *
 * NOTE(review): declared inside an extern "C" block but uses a C++-only
 * default argument — confirm C callers are not expected.
 */
GLuint LoadShaders(ShaderInfo* info, LoadType loadType = LoadFromString);
#ifdef __cplusplus
};
#endif // __cplusplus
|
import {
IBasicDataFeed,
} from 'shared/types/charting_library';
import { ICurrencyPair } from './markets';
import { Omit } from '../app';
/** One OHLCV candle as used internally; ts is presumably a unix timestamp — confirm units (s vs ms). */
export interface IChartItem {
	ts: number;
	open: number;
	close: number;
	high: number;
	low: number;
	volume: number;
}

/** IChartItem reshaped for the TradingView charting library, which expects a `time` field instead of `ts`. */
export interface ITVChartCandle extends Omit<IChartItem, 'ts'> {
	time: number;
}

/** Order-book depth history; inner arrays are presumably [price, amount] pairs — confirm against the API. */
export interface IDepthHistory {
	asks: number[][];
	bids: number[][];
}

/** Candle payload as delivered by the server; start/end are string-typed timestamps for the candle interval. */
export interface IServerCandle {
	open: number;
	close: number;
	high: number;
	low: number;
	volume: number;
	instrument: string;
	start: string;
	end: string;
}

/** TradingView datafeed extended with the ability to switch the active currency pair. */
export interface IDataFeed extends IBasicDataFeed {
	setCurrentCurrencyPair(currentCurrencyPair: ICurrencyPair): void;
}
|
<reponame>ceumicrodata/respect-trade-similarity
import pandas as pd
from itertools import product
import glob
# Normalise every "dirty" trade-complementarity index CSV into a complete
# DECLARANT x PARTNER panel and write it to ../output.
file_list = glob.glob('../temp/index/dirty/*')
for db in file_list:
    data = pd.read_csv(db)
    # cut those rows and columns which are not needed
    # (first 19 columns, drop the two header-ish rows, then reset both the
    # row index and — via the double transpose — the column labels)
    data = data.iloc[:,:19].drop(index=[0,1]).reset_index(drop=True).T.reset_index(drop=True).T
    # rename columns: country pair plus one TCI column per year 2001-2017
    cols = ["DECLARANT","PARTNER"] + [ "TCI_"+str(x) for x in range(2001,2018)]
    data.columns = cols
    # create final dataframe
    # create an empty DF with all the possible country combinations
    new_df = pd.DataFrame(list(product(data.DECLARANT.unique(), data.PARTNER.unique())), columns=["DECLARANT","PARTNER"])
    # drop self-pairs (a country with itself)
    new_df = new_df.loc[new_df["DECLARANT"]!=new_df["PARTNER"]]
    # merge the empty dataframe with the data; pairs missing from the input
    # end up as NaN rows in the output
    merged = pd.merge(new_df,data, how="left",on=["DECLARANT","PARTNER"]) #.drop("Unnamed: 0",axis=1)
    # NOTE(review): db[-14:] assumes the source file name is exactly 14
    # characters — confirm, or consider os.path.basename(db) instead.
    merged.to_csv("../output/TC_"+db[-14:])
|
<filename>e2e/commands_test.go
package e2e
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
"testing"
"github.com/docker/app/internal"
"github.com/docker/app/internal/yaml"
"gotest.tools/assert"
is "gotest.tools/assert/cmp"
"gotest.tools/fs"
"gotest.tools/golden"
"gotest.tools/icmd"
"gotest.tools/skip"
)
// TestRenderTemplates renders every app under testdata/templates with its
// matching renderer enabled via DOCKERAPP_RENDERERS (experimental mode only).
func TestRenderTemplates(t *testing.T) {
	skip.If(t, !hasExperimental, "experimental mode needed for this test")
	appsPath := filepath.Join("testdata", "templates")
	apps, err := ioutil.ReadDir(appsPath)
	assert.NilError(t, err, "unable to get apps")
	for _, app := range apps {
		appPath := filepath.Join(appsPath, app.Name())
		// Skip apps whose renderer was not compiled into this binary.
		if !checkRenderers(app.Name(), renderers) {
			t.Log("Required renderer not enabled")
			continue
		}
		t.Run(app.Name(), testRenderApp(appPath, "DOCKERAPP_RENDERERS="+app.Name()))
	}
}
// TestRender runs one render sub-test per application directory found
// under testdata/render.
func TestRender(t *testing.T) {
	root := filepath.Join("testdata", "render")
	entries, err := ioutil.ReadDir(root)
	assert.NilError(t, err, "unable to get apps")
	for _, entry := range entries {
		name := entry.Name()
		t.Run(name, testRenderApp(filepath.Join(root, name)))
	}
}
// testRenderApp returns a sub-test that renders the app at appPath with the
// parameters from parameters-0.yml plus the overrides listed in env.yml
// (passed as -s key=value), and compares stdout to the app's expected.txt.
func testRenderApp(appPath string, env ...string) func(*testing.T) {
	return func(t *testing.T) {
		envParameters := map[string]string{}
		data, err := ioutil.ReadFile(filepath.Join(appPath, "env.yml"))
		assert.NilError(t, err)
		assert.NilError(t, yaml.Unmarshal(data, &envParameters))
		args := []string{dockerApp, "render", filepath.Join(appPath, "my.dockerapp"),
			"-f", filepath.Join(appPath, "parameters-0.yml"),
		}
		for k, v := range envParameters {
			args = append(args, "-s", fmt.Sprintf("%s=%s", k, v))
		}
		result := icmd.RunCmd(icmd.Cmd{
			Command: args,
			Env:     env,
		}).Assert(t, icmd.Success)
		assert.Assert(t, is.Equal(readFile(t, filepath.Join(appPath, "expected.txt")), result.Stdout()), "rendering mismatch")
	}
}
// TestRenderFormatters renders the simple sample app with each supported
// output formatter and compares the result against its golden file.
func TestRenderFormatters(t *testing.T) {
	app := filepath.Join("testdata", "simple", "simple.dockerapp")
	cases := []struct {
		formatter string
		golden    string
	}{
		{"json", "expected-json-render.golden"},
		{"yaml", "expected-yaml-render.golden"},
	}
	for _, c := range cases {
		out := icmd.RunCommand(dockerApp, "render", "--formatter", c.formatter, app).Assert(t, icmd.Success)
		golden.Assert(t, out.Stdout(), c.golden)
	}
}
// TestInit exercises `docker-app init` in both directory and single-file
// modes: it scaffolds an app from a compose file and a .env, checks the
// generated files on disk, validates the metadata, then re-checks that
// inspect/render work on the single-file variant.
func TestInit(t *testing.T) {
	composeData := `version: "3.2"
services:
  nginx:
    image: nginx:latest
    command: nginx $NGINX_ARGS ${NGINX_DRY_RUN}
`
	meta := `# Version of the application
version: 0.1.0
# Name of the application
name: app-test
# A short description of the application
description: my cool app
# List of application maintainers with name and email for each
maintainers:
- name: bob
  email:
- name: joe
  email: <EMAIL>
`
	envData := "# some comment\nNGINX_DRY_RUN=-t"
	tmpDir := fs.NewDir(t, "app_input",
		fs.WithFile(internal.ComposeFileName, composeData),
		fs.WithFile(".env", envData),
	)
	defer tmpDir.Remove()
	testAppName := "app-test"
	dirName := internal.DirNameFromAppName(testAppName)
	cmd := icmd.Cmd{Dir: tmpDir.Path()}
	cmd.Command = []string{dockerApp,
		"init", testAppName,
		"-c", tmpDir.Join(internal.ComposeFileName),
		"-d", "my cool app",
		"-m", "bob",
		"-m", "joe:<EMAIL>"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	manifest := fs.Expected(
		t,
		fs.WithMode(0755),
		fs.WithFile(internal.MetadataFileName, meta, fs.WithMode(0644)), // too many variables, cheating
		fs.WithFile(internal.ComposeFileName, composeData, fs.WithMode(0644)),
		fs.WithFile(internal.ParametersFileName, "NGINX_ARGS: FILL ME\nNGINX_DRY_RUN: -t\n", fs.WithMode(0644)),
	)
	assert.Assert(t, fs.Equal(tmpDir.Join(dirName), manifest))
	// validate metadata with JSON Schema
	cmd.Command = []string{dockerApp, "validate", testAppName}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	// test single-file init
	cmd.Command = []string{dockerApp,
		"init", "tac",
		"-c", tmpDir.Join(internal.ComposeFileName),
		"-d", "my cool app",
		"-m", "bob",
		"-m", "joe:<EMAIL>",
		"-s",
	}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	appData, err := ioutil.ReadFile(tmpDir.Join("tac.dockerapp"))
	assert.NilError(t, err)
	golden.Assert(t, string(appData), "init-singlefile.dockerapp")
	// Check various commands work on single-file app package
	cmd.Command = []string{dockerApp, "inspect", "tac"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	cmd.Command = []string{dockerApp, "render", "tac"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
}
// TestDetectApp checks app auto-detection: inspect succeeds when run from a
// directory containing exactly one app (or from inside the app itself), and
// fails with a clear error when several apps are candidates.
func TestDetectApp(t *testing.T) {
	// cwd = e2e
	dir := fs.NewDir(t, "detect-app-binary",
		fs.WithDir("attachments.dockerapp", fs.FromDir("testdata/attachments.dockerapp")),
		fs.WithDir("render",
			fs.WithDir("app1.dockerapp", fs.FromDir("testdata/render/envvariables/my.dockerapp")),
			fs.WithDir("app2.dockerapp", fs.FromDir("testdata/render/envvariables/my.dockerapp")),
		),
	)
	defer dir.Remove()
	// Single app next to cwd: detected implicitly.
	icmd.RunCmd(icmd.Cmd{
		Command: []string{dockerApp, "inspect"},
		Dir:     dir.Path(),
	}).Assert(t, icmd.Success)
	// From inside the app directory, with and without an explicit ".".
	icmd.RunCmd(icmd.Cmd{
		Command: []string{dockerApp, "inspect"},
		Dir:     dir.Join("attachments.dockerapp"),
	}).Assert(t, icmd.Success)
	icmd.RunCmd(icmd.Cmd{
		Command: []string{dockerApp, "inspect", "."},
		Dir:     dir.Join("attachments.dockerapp"),
	}).Assert(t, icmd.Success)
	// Two sibling apps: detection must refuse to guess.
	result := icmd.RunCmd(icmd.Cmd{
		Command: []string{dockerApp, "inspect"},
		Dir:     dir.Join("render"),
	})
	result.Assert(t, icmd.Expected{
		ExitCode: 1,
		Err:      "Error: multiple applications found in current directory, specify the application name on the command line",
	})
}
// TestSplitMerge round-trips an app through merge (directory -> single file)
// and split, verifying inspect output stays identical, including the
// in-place variants of both commands.
func TestSplitMerge(t *testing.T) {
	tmpDir := fs.NewDir(t, "split_merge")
	defer tmpDir.Remove()
	icmd.RunCommand(dockerApp, "merge", "testdata/render/envvariables/my.dockerapp", "-o", tmpDir.Join("remerged.dockerapp")).Assert(t, icmd.Success)
	cmd := icmd.Cmd{Dir: tmpDir.Path()}
	// test that inspect works on single-file
	cmd.Command = []string{dockerApp, "inspect", "remerged"}
	result := icmd.RunCmd(cmd).Assert(t, icmd.Success)
	golden.Assert(t, result.Combined(), "envvariables-inspect.golden")
	// split it
	cmd.Command = []string{dockerApp, "split", "remerged", "-o", "split.dockerapp"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	cmd.Command = []string{dockerApp, "inspect", "remerged"}
	result = icmd.RunCmd(cmd).Assert(t, icmd.Success)
	golden.Assert(t, result.Combined(), "envvariables-inspect.golden")
	// test inplace
	cmd.Command = []string{dockerApp, "merge", "split"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	cmd.Command = []string{dockerApp, "split", "split"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
}
// TestBundle builds a CNAB bundle for the simple sample app against a
// docker-in-docker build context, then verifies the produced bundle.json,
// the invocation image, and the app files embedded in that image.
func TestBundle(t *testing.T) {
	tmpDir := fs.NewDir(t, t.Name())
	defer tmpDir.Remove()
	// Using a custom DOCKER_CONFIG to store contexts in a temporary directory
	cmd := icmd.Cmd{Env: append(os.Environ(), "DOCKER_CONFIG="+tmpDir.Path())}
	// Running a docker in docker to bundle the application
	dind := NewContainer("docker:18.09-dind", 2375)
	dind.Start(t)
	defer dind.Stop(t)
	// Create a build context
	cmd.Command = []string{dockerCli, "context", "create", "build-context", "--docker", fmt.Sprintf(`"host=tcp://%s"`, dind.GetAddress(t))}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	// The dind doesn't have the cnab-app-base image so we save it in order to load it later
	cmd.Command = []string{dockerCli, "save", fmt.Sprintf("docker/cnab-app-base:%s", internal.Version), "-o", tmpDir.Join("cnab-app-base.tar.gz")}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	cmd.Env = append(cmd.Env, "DOCKER_CONTEXT=build-context")
	cmd.Command = []string{dockerCli, "load", "-i", tmpDir.Join("cnab-app-base.tar.gz")}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	// Bundle the docker application package to a CNAB bundle, using the build-context.
	cmd.Command = []string{dockerApp, "bundle", filepath.Join("testdata", "simple", "simple.dockerapp"), "--out", tmpDir.Join("bundle.json")}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	// Check the resulting CNAB bundle.json
	golden.Assert(t, string(golden.Get(t, tmpDir.Join("bundle.json"))), "simple-bundle.json.golden")
	// List the images on the build context daemon and checks the invocation image is there
	cmd.Command = []string{dockerCli, "image", "ls", "--format", "{{.Repository}}:{{.Tag}}"}
	icmd.RunCmd(cmd).Assert(t, icmd.Expected{ExitCode: 0, Out: "simple:1.1.0-beta1-invoc"})
	// Copy all the files from the invocation image and check them
	cmd.Command = []string{dockerCli, "create", "--name", "invocation", "simple:1.1.0-beta1-invoc"}
	id := strings.TrimSpace(icmd.RunCmd(cmd).Assert(t, icmd.Success).Stdout())
	cmd.Command = []string{dockerCli, "cp", "invocation:/cnab/app/simple.dockerapp", tmpDir.Join("simple.dockerapp")}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	cmd.Command = []string{dockerCli, "rm", "--force", id}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	appDir := filepath.Join("testdata", "simple", "simple.dockerapp")
	// The extracted app must match the source app file-for-file.
	manifest := fs.Expected(
		t,
		fs.WithMode(0755),
		fs.WithFile(internal.MetadataFileName, readFile(t, filepath.Join(appDir, internal.MetadataFileName)), fs.WithMode(0644)),
		fs.WithFile(internal.ComposeFileName, readFile(t, filepath.Join(appDir, internal.ComposeFileName)), fs.WithMode(0644)),
		fs.WithFile(internal.ParametersFileName, readFile(t, filepath.Join(appDir, internal.ParametersFileName)), fs.WithMode(0644)),
	)
	assert.Assert(t, fs.Equal(tmpDir.Join("simple.dockerapp"), manifest))
}
// TestDockerAppLifecycle walks a full install / status / upgrade / uninstall
// cycle of the simple sample app against a throwaway docker-in-docker swarm.
func TestDockerAppLifecycle(t *testing.T) {
	tmpDir := fs.NewDir(t, t.Name())
	defer tmpDir.Remove()
	cmd := icmd.Cmd{
		Env: append(os.Environ(),
			fmt.Sprintf("DUFFLE_HOME=%s", tmpDir.Path()),
			fmt.Sprintf("DOCKER_CONFIG=%s", tmpDir.Path()),
			"DOCKER_TARGET_CONTEXT=swarm-target-context",
		),
	}
	// Running a swarm using docker in docker to install the application
	// and run the invocation image
	swarm := NewContainer("docker:18.09-dind", 2375)
	swarm.Start(t)
	defer swarm.Stop(t)
	// The dind doesn't have the cnab-app-base image so we save it in order to load it later
	icmd.RunCommand(dockerCli, "save", fmt.Sprintf("docker/cnab-app-base:%s", internal.Version), "-o", tmpDir.Join("cnab-app-base.tar.gz")).Assert(t, icmd.Success)
	// We need two contexts:
	// - one for `docker` so that it connects to the dind swarm created before
	// - the target context for the invocation image to install within the swarm
	cmd.Command = []string{dockerCli, "context", "create", "swarm-context", "--docker", fmt.Sprintf(`"host=tcp://%s"`, swarm.GetAddress(t)), "--default-stack-orchestrator", "swarm"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	// When creating a context on a Windows host we cannot use
	// the unix socket but it's needed inside the invocation image.
	// The workaround is to create a context with an empty host.
	// This host will default to the unix socket inside the
	// invocation image
	cmd.Command = []string{dockerCli, "context", "create", "swarm-target-context", "--docker", "host=", "--default-stack-orchestrator", "swarm"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	// Initialize the swarm
	cmd.Env = append(cmd.Env, "DOCKER_CONTEXT=swarm-context")
	cmd.Command = []string{dockerCli, "swarm", "init"}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	// Load the needed base cnab image into the swarm docker engine
	cmd.Command = []string{dockerCli, "load", "-i", tmpDir.Join("cnab-app-base.tar.gz")}
	icmd.RunCmd(cmd).Assert(t, icmd.Success)
	// Install a Docker Application Package
	cmd.Command = []string{dockerApp, "install", "testdata/simple/simple.dockerapp", "--name", t.Name()}
	checkContains(t, icmd.RunCmd(cmd).Assert(t, icmd.Success).Combined(),
		[]string{
			fmt.Sprintf("Creating network %s_back", t.Name()),
			fmt.Sprintf("Creating network %s_front", t.Name()),
			fmt.Sprintf("Creating service %s_db", t.Name()),
			fmt.Sprintf("Creating service %s_api", t.Name()),
			fmt.Sprintf("Creating service %s_web", t.Name()),
		})
	// Query the application status (expected lines are regexes)
	cmd.Command = []string{dockerApp, "status", t.Name()}
	checkContains(t, icmd.RunCmd(cmd).Assert(t, icmd.Success).Combined(),
		[]string{
			fmt.Sprintf("[[:alnum:]]+ %s_db replicated [0-1]/1 postgres:9.3", t.Name()),
			fmt.Sprintf(`[[:alnum:]]+ %s_web replicated [0-1]/1 nginx:latest \*:8082->80/tcp`, t.Name()),
			fmt.Sprintf("[[:alnum:]]+ %s_api replicated [0-1]/1 python:3.6", t.Name()),
		})
	// Upgrade the application, changing the port
	cmd.Command = []string{dockerApp, "upgrade", t.Name(), "--set", "web_port=8081"}
	checkContains(t, icmd.RunCmd(cmd).Assert(t, icmd.Success).Combined(),
		[]string{
			fmt.Sprintf("Updating service %s_db", t.Name()),
			fmt.Sprintf("Updating service %s_api", t.Name()),
			fmt.Sprintf("Updating service %s_web", t.Name()),
		})
	// Query the application status again, the port should have change
	cmd.Command = []string{dockerApp, "status", t.Name()}
	icmd.RunCmd(cmd).Assert(t, icmd.Expected{ExitCode: 0, Out: "8081"})
	// Uninstall the application
	cmd.Command = []string{dockerApp, "uninstall", t.Name()}
	checkContains(t, icmd.RunCmd(cmd).Assert(t, icmd.Success).Combined(),
		[]string{
			fmt.Sprintf("Removing service %s_api", t.Name()),
			fmt.Sprintf("Removing service %s_db", t.Name()),
			fmt.Sprintf("Removing service %s_web", t.Name()),
			fmt.Sprintf("Removing network %s_front", t.Name()),
			fmt.Sprintf("Removing network %s_back", t.Name()),
		})
}
// checkContains asserts that every entry of expectedLines, treated as a
// regular expression, matches somewhere in the combined command output.
func checkContains(t *testing.T, combined string, expectedLines []string) {
	for _, pattern := range expectedLines {
		matched := regexp.MustCompile(pattern).MatchString(combined)
		assert.Assert(t, matched, pattern, combined)
	}
}
|
import React from 'react';
import ReactDOM from 'react-dom';
import './style.less';
import './style2.css';
class Yjw extends React.Component{
componentDidMount() {
console.log('------yjw-------');
}
render() {
return (<div>
<div className='content'>Hello YJW.</div>
<div className="content2">hah</div>
</div>);
}
}
// Mount the demo component into the page's #root element.
ReactDOM.render(
<Yjw />,
document.getElementById('root')
);
package org.dimdev.rift.mixin.hook.client;
import net.minecraft.client.renderer.block.model.ModelBakery;
import net.minecraft.util.ResourceLocation;
import org.dimdev.rift.listener.client.TextureAdder;
import org.dimdev.riftloader.RiftLoader;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import java.util.Set;
// Mixin into Minecraft's ModelBakery: merges extra builtin texture locations
// contributed by Rift mods into the vanilla set before models are baked.
@Mixin(ModelBakery.class)
public class MixinModelBakery {
// Shadowed reference to the vanilla builtin-texture set of ModelBakery.
@Shadow @Final private static Set<ResourceLocation> LOCATIONS_BUILTIN_TEXTURES;
// Static initializer runs once when the mixed-in class loads: collect the
// builtin textures from every registered TextureAdder listener.
static {
for (TextureAdder textureAdder : RiftLoader.instance.getListeners(TextureAdder.class)) {
LOCATIONS_BUILTIN_TEXTURES.addAll(textureAdder.getBuiltinTextures());
}
}
}
|
#!/usr/bin/env bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
set -euo pipefail

# Work from the directory containing this script.
MY_DIR="$(cd "$(dirname "$0")" && pwd)"
pushd "${MY_DIR}" &>/dev/null || exit 1
echo
echo "Working in ${MY_DIR} folder"
echo
if [[ -f /.dockerenv ]]; then
    # This script can be run both - in container and outside of it.
    # Here we are inside the container which means that we should (when the host is Linux)
    # fix permissions of the _build and _api folders via sudo.
    # Those files are mounted from the host via docs folder and we might not have permissions to
    # write to those directories (and remove the _api folder).
    # We know we have sudo capabilities inside the container.
    echo "Creating the _build and _api folders in case they do not exist"
    sudo mkdir -pv _build
    sudo mkdir -pv _api
    echo "Created the _build and _api folders in case they do not exist"
else
    # We are outside the container so we simply make sure that the directories exist
    echo "Creating the _build and _api folders in case they do not exist"
    mkdir -pv _build
    mkdir -pv _api
    # Fix: this completion message previously repeated "Creating" instead of
    # "Created" (copy/paste slip; the container branch above had it right).
    echo "Created the _build and _api folders in case they do not exist"
fi
echo "Removing content of the _build and _api folders"
rm -rf _build/*
rm -rf _api/*
echo "Removed content of the _build and _api folders"
# Count documentation lines that use "literalinclude" on example DAGs;
# example DAGs should be included via the "exampleinclude" directive instead.
# grep exits non-zero when nothing matches, so relax -e around it.
set +e
# shellcheck disable=SC2063
NUM_INCORRECT_USE_LITERALINCLUDE=$(grep -inR --include \*.rst 'literalinclude::.\+example_dags' . | \
    tee /dev/tty |
    wc -l |\
    tr -d '[:space:]')
set -e
echo
echo "Checking for presence of literalinclude in example DAGs"
echo
if [[ "${NUM_INCORRECT_USE_LITERALINCLUDE}" -ne "0" ]]; then
    echo
    echo "Unexpected problems found in the documentation. "
    # Fix: grammar ("an exampleinclude") and pluralization of the count.
    echo "You should use an exampleinclude directive to include example DAGs."
    echo "Currently, ${NUM_INCORRECT_USE_LITERALINCLUDE} problem(s) found."
    echo
    exit 1
else
    echo
    echo "No literalincludes in example DAGs found"
    echo
fi
echo
echo "Checking the status of the operators-and-hooks-ref.rst file."
echo
# Dotted module names of all modules that declare themselves deprecated
# (path under ../airflow converted to a dotted module path).
mapfile -t DEPRECATED_MODULES < <(grep -R -i -l 'This module is deprecated.' ../airflow --include '*.py' | \
    cut -d "/" -f 2- | \
    sort | \
    uniq | \
    cut -d "." -f 1 | \
    sed "s#/#.#g")
# Modules knowingly absent from the reference file.
IGNORED_MISSING_MODULES=('airflow.gcp.hooks.base')
# Every operator/sensor/hook module under airflow, minus deprecated/ignored.
mapfile -t ALL_MODULES < <(find ../airflow/{,gcp/,contrib/,provider/*/*/}{operators,sensors,hooks} -name "*.py" | \
    grep -v "__init__" | \
    grep -v "__pycache__" | \
    cut -d "/" -f 2- | \
    cut -d "." -f 1 | \
    sed "s#/#.#g" | \
    sort | \
    uniq | \
    grep -vf <(printf '%s\n' "${DEPRECATED_MODULES[@]}") |\
    grep -vf <(printf '%s\n' "${IGNORED_MISSING_MODULES[@]}"))
# Modules currently referenced (via :mod:`...`) in the rst file.
# shellcheck disable=SC2002
mapfile -t CURRENT_MODULES < <(cat operators-and-hooks-ref.rst | \
    grep ":mod:" | \
    cut -d '`' -f 2 | \
    sort | \
    uniq | \
    grep -v "__pycache__")
# Modules present in the codebase but missing from the rst file.
mapfile -t MISSING_MODULES < \
    <(\
    comm -2 -3 \
        <(printf '%s\n' "${ALL_MODULES[@]}" | sort ) \
        <(printf '%s\n' "${CURRENT_MODULES[@]}" | sort)
    )
if [[ "${#MISSING_MODULES[@]}" -ne "0" ]]; then
    echo
    echo "Unexpected problems found in the documentation."
    echo "You should try to keep the list of operators and hooks up to date."
    echo
    echo "Missing modules:"
    printf '%s\n' "${MISSING_MODULES[@]}"
    echo
    echo "Please add this module to operators-and-hooks-ref.rst file."
    echo
    exit 1
else
    echo
    echo "The operators-and-hooks-ref.rst file seems to be in good condition."
    echo
fi
# Build the docs and capture Sphinx's "build succeeded, N warnings." line.
SUCCEED_LINE=$(make html |\
    tee /dev/tty |\
    grep 'build succeeded' |\
    head -1)
# Extract the warning count from that line (empty if no warnings reported).
NUM_CURRENT_WARNINGS=$(echo "${SUCCEED_LINE}" |\
    sed -E 's/build succeeded, ([0-9]+) warnings?\./\1/g')
if [[ -f /.dockerenv ]]; then
    # We are inside the container which means that we should fix back the permissions of the
    # _build and _api folder files, so that they can be accessed by the host user
    # The _api folder should be deleted by then but just in case we should change the ownership
    echo "Changing ownership of docs/_build folder back to ${HOST_USER_ID}:${HOST_GROUP_ID}"
    sudo chown "${HOST_USER_ID}":"${HOST_GROUP_ID}" _build
    if [[ -d _api ]]; then
        sudo chown "${HOST_USER_ID}":"${HOST_GROUP_ID}" _api
    fi
    echo "Changed ownership of docs/_build folder back to ${HOST_USER_ID}:${HOST_GROUP_ID}"
fi
# Treat any Sphinx warning as a failure.
if echo "${SUCCEED_LINE}" | grep -q "warning"; then
    echo
    echo "Unexpected problems found in the documentation. "
    echo "Currently, ${NUM_CURRENT_WARNINGS} warnings found. "
    echo
    exit 1
fi
popd &>/dev/null || exit 1
|
import java.util.concurrent.ForkJoinPool;
public class Problem
{
public static void schedule(Runnable func, long delay)
{
long doNotRunBefore = System.currentTimeMillis() + delay;
ForkJoinPool.commonPool().execute(() -> {
do
{
try
{
Thread.sleep(doNotRunBefore - System.currentTimeMillis());
} catch (Exception e) {}
}
while (System.currentTimeMillis() < doNotRunBefore);
func.run();
});
}
} |
#!/bin/bash
#SBATCH --account=def-dkulic
#SBATCH --cpus-per-task=2 #Maximum of CPU cores per GPU request: 6 on Cedar, 16 on Graham.
#SBATCH --mem=8000M # memory per node
#SBATCH --time=0-01:00 # time (DD-HH:MM)
#SBATCH --output=./draw_interest_area_ffmpge_job_script_output/Camera1_Sep_19_1500_1600_Prescribed_Behavior_%N-%j.out # %N for node name, %j for jobID

## Main processing command
# Overlay the interest-area PNG (second input) on top of the raw video (first
# input) at position 0:0, copy the audio stream unchanged (-c:a copy), and
# write the annotated video to the ffmpeg output directory.
ffmpeg -i /project/6001934/lingheng/ROM_Video_Process/ROM_raw_videos/Sep_19/Camera1_Sep_19_1500_1600_Prescribed_Behavior.mp4 -i /project/6001934/lingheng/ROM_Video_Process/Openpose_Video_Analysis_Code/camera1_transparent_img.png -filter_complex "[0:v][1:v] overlay=0:0" -c:a copy /project/6001934/lingheng/ROM_Video_Process/ROM_raw_videos_with_interst_area_ffmpeg_new3/Sep_19/Camera1_Sep_19_1500_1600_Prescribed_Behavior_ffmpeg_with_interest_area.mp4
#!/bin/bash
##
## Copyright 2019 International Business Machines
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
#
# Simple tests for example snap actions.
#
verbose=0
snap_card=0
iteration=1
FUNC="./actions/hdl_example/sw/snap_example_nvme"
CONF="./software/tools/snap_nvme_init"
# Run snap_example_nvme against one card/drive pair: first in polling mode
# over several block counts, then in IRQ mode. Exits the whole script on the
# first failing invocation.
# NOTE(review): the name "test" shadows the shell builtin `test`; the rest of
# the script only uses `[ ... ]`, so behavior is unaffected, but renaming
# would be safer if a direct `test` call is ever added — confirm.
function test () # $1 = card, $2 = drive
{
    local card=$1
    local drive=$2

    echo "Testing in Polling mode"
    for b in 1 32 128 512 ; do
        cmd="$FUNC -C $card -d $drive -b $b -v"
        eval ${cmd}
        if [ $? -ne 0 ]; then
            echo "cmd: $cmd"
            echo "failed"
            exit 1
        fi
    done
    echo "Testing in IRQ mode"
    for b in 512 1024 ; do
        echo "Using IRQ mode"
        cmd="$FUNC -C $card -d $drive -b $b -i"
        eval ${cmd}
        if [ $? -ne 0 ]; then
            echo "cmd: $cmd"
            echo "failed"
            exit 1
        fi
    done
}
# Print the help text describing the supported command line options.
function usage() {
    cat <<EOF
SNAP Example Action 10140000 NVME drive 0 and 1 Test
Usage:
 $PROGRAM
 [-C <card>] Snap Card to be used for the test (default 0)
 [-t <trace_level>]
 [-i <iteration>]
EOF
}
#
# main starts here
#
PROGRAM=$0

# Parse command line options.
while getopts "C:t:i:h" opt; do
    case $opt in
    C)
        snap_card=$OPTARG;
        ;;
    t)
        SNAP_TRACE=$OPTARG;
        ;;
    i)
        iteration=$OPTARG;
        ;;
    h)
        usage;
        exit 0;
        ;;
    \?)
        echo "Invalid option: -$OPTARG" >&2
        ;;
    esac
done

# Get Card Name.
# Fix: use $(...) instead of backticks and quote expansions so an empty or
# multi-word tool answer cannot break the [ -z ] tests below.
echo -n "Detect Card[$snap_card] .... "
CARD=$(./software/tools/snap_maint -C "$snap_card" -m 4)
if [ -z "$CARD" ]; then
    echo "ERROR: Invalid Card."
    exit 1
fi
case $CARD in
    "AD8K5" )
        echo "-> AlphaData $CARD Card"
        ;;
    "S121B" )
        echo "-> Semptian $CARD Card"
        ;;
    "ADKU3" )
        echo "-> AlphaData $CARD Card"
        ;;
    "N250S" )
        echo "-> Nallatech $CARD Card"
        ;;
    "N250SP" )
        echo "-> Nallatech $CARD Card"
        ;;
esac;

# Get if NVME is enabled; skip (not fail) the test when it is not.
NVME=$(./software/tools/snap_maint -C "$snap_card" -m 2)
if [ -z "$NVME" ]; then
    echo "Skip Test: No NVME configured for $CARD[$snap_card]"
    exit 0
fi

# Initialize both NVMe drives once before the test iterations.
echo "Configure NVME for drive 0 and 1 for $CARD[$snap_card]"
cmd="$CONF --card $snap_card -v"
eval ${cmd}
if [ $? -ne 0 ]; then
    echo "cmd: $cmd"
    echo "failed"
    exit 1
fi

# Exercise drive 0 and drive 1 on every iteration.
# (Replaces the non-standard `for ((...)) { ... }` brace-body loop with the
# portable do/done form; iteration behavior is identical.)
for ((iter = 1; iter <= iteration; iter++)); do
    drive=0
    echo "Iteration $iter of $iteration Drive SSD$drive"
    test $snap_card $drive
    drive=1
    echo "Iteration $iter of $iteration Drive SSD$drive"
    test $snap_card $drive
done

exit 0
|
// Custom "is a URL" assertion plugin built on assertAbout.
var aboutTypeOf = assertAbout.createPlugin("url");
aboutTypeOf.defineAssertion("is", function(about, topic, type) {
  // NOTE(review): `type` is accepted but never used — confirm whether the
  // assertion was meant to branch on it.
  about.string(topic)
    .contains("http")
    .greaterThan(6)
    // NOTE(review): this parenthesized function is invoked as a CALL on the
    // value returned by .greaterThan(6), i.e. `greaterThan(6)(fn)`. Verify
    // this is the intended fluent API and not a missing method name
    // (e.g. `.async(...)` / `.then(...)`).
    (function(resolve, reject) {
      request(topic)
        .then(function() {
          // NOTE(review): `duration` is not defined anywhere in this scope —
          // presumably injected by the surrounding framework; confirm.
          about.number(duration)
            .lowerThan(500)
            .assert().then(resolve)
        })
        .catch(reject);
    })
    .assert()
});
#!/bin/sh
# NOTE(review): the shebang says /bin/sh but the script uses bash-only
# features (`function`, `[[ ]]`, arrays). Xcode usually runs these phases
# under bash, but /bin/bash would be the safer shebang — confirm upstream.
set -e
set -u
set -o pipefail

# Print "<script>:<line>: error: ..." for any command that fails (ERR trap).
function on_error {
  echo "$(realpath -mq "${0}"):$1: error: Unexpected failure"
}
trap 'on_error $LINENO' ERR

if [ -z ${UNLOCALIZED_RESOURCES_FOLDER_PATH+x} ]; then
  # If UNLOCALIZED_RESOURCES_FOLDER_PATH is not set, then there's nowhere for us to copy
  # resources to, so exit 0 (signalling the script phase was successful).
  exit 0
fi

mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"

# Manifest of plain resources to rsync at the end; truncated up front.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"

XCASSET_FILES=()

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")

# Translate TARGETED_DEVICE_FAMILY into ibtool/actool --target-device flags.
case "${TARGETED_DEVICE_FAMILY:-}" in
  1,2)
    TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
    ;;
  1)
    TARGET_DEVICE_ARGS="--target-device iphone"
    ;;
  2)
    TARGET_DEVICE_ARGS="--target-device ipad"
    ;;
  3)
    TARGET_DEVICE_ARGS="--target-device tv"
    ;;
  4)
    TARGET_DEVICE_ARGS="--target-device watch"
    ;;
  *)
    TARGET_DEVICE_ARGS="--target-device mac"
    ;;
esac
# Dispatch a single Pod resource by extension: compile storyboards/xibs with
# ibtool, Core Data models with momc/mapc, rsync frameworks, queue xcassets
# for a single actool run, and append everything else to the rsync manifest.
install_resource()
{
  if [[ "$1" = /* ]] ; then
    RESOURCE_PATH="$1"
  else
    RESOURCE_PATH="${PODS_ROOT}/$1"
  fi
  if [[ ! -e "$RESOURCE_PATH" ]] ; then
    cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
    exit 1
  fi
  case $RESOURCE_PATH in
    *.storyboard)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.xib)
      echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}" || true
      ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
      ;;
    *.framework)
      echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}" || true
      rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
      ;;
    *.xcdatamodel)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
      ;;
    *.xcdatamodeld)
      echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\"" || true
      xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
      ;;
    *.xcmappingmodel)
      echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\"" || true
      xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
      ;;
    *.xcassets)
      # xcassets are collected and compiled in one actool run at the end.
      ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
      XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
      ;;
    *)
      echo "$RESOURCE_PATH" || true
      echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
      ;;
  esac
}
# Register the Parse resource bundle for both build configurations.
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/Parse/Parse.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_resource "${PODS_CONFIGURATION_BUILD_DIR}/Parse/Parse.bundle"
fi

# Copy all queued plain resources into the product (and the install dir when
# archiving), then drop the manifest.
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
  mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"

if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "${XCASSET_FILES:-}" ]
then
  # Find all other xcassets (this unfortunately includes those of path pods and other targets).
  OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
  while read line; do
    # Fix: the glob must stay OUTSIDE the quotes for [[ != ]] pattern
    # matching; the previous "${PODS_ROOT}*" was compared as a literal string
    # (including the '*'), so Pod-owned catalogs were never filtered out.
    if [[ $line != "${PODS_ROOT}"* ]]; then
      XCASSET_FILES+=("$line")
    fi
  done <<<"$OTHER_XCASSETS"

  if [ -z ${ASSETCATALOG_COMPILER_APPICON_NAME+x} ]; then
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
  else
    printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}" --app-icon "${ASSETCATALOG_COMPILER_APPICON_NAME}" --output-partial-info-plist "${TARGET_TEMP_DIR}/assetcatalog_generated_info_cocoapods.plist"
  fi
fi
#!/bin/bash

# Scaffolding migration: appends the route and the message keys for the
# generated UseDifferentService page to the app's config files.
echo ""
echo "Applying migration UseDifferentService"

echo "Adding routes to conf/app.routes"
# Blank separator line, then the GET route for the new controller.
echo "" >> ../conf/app.routes
echo "GET        /:mrn/useDifferentService                       controllers.UseDifferentServiceController.onPageLoad(mrn: MovementReferenceNumber)" >> ../conf/app.routes

echo "Adding messages to conf.messages"
# Placeholder title/heading message keys for the new page.
echo "" >> ../conf/messages.en
echo "useDifferentService.title = useDifferentService" >> ../conf/messages.en
echo "useDifferentService.heading = useDifferentService" >> ../conf/messages.en

echo "Migration UseDifferentService completed"
/**
 * Thrown when a cloud storage operation cannot be completed,
 * e.g. when an uploaded file exceeds the storage limit.
 */
class CloudStorageException extends \Exception
{
}
/**
 * Minimal cloud storage facade that enforces a per-file size limit.
 */
class CloudStorage
{
    // Maximum accepted file size, in MB.
    private const MAX_STORAGE_LIMIT = 100;

    /**
     * Upload a file, rejecting anything larger than the storage limit.
     *
     * @param string    $fileName name of the file (currently informational only)
     * @param int|float $fileSize file size in MB
     * @return string success message
     * @throws CloudStorageException when $fileSize exceeds MAX_STORAGE_LIMIT
     */
    public function uploadFile($fileName, $fileSize)
    {
        if ($fileSize > self::MAX_STORAGE_LIMIT) {
            throw new CloudStorageException("File size exceeds the maximum storage limit");
        }
        // Guard clause above makes the former `else` branch unnecessary.
        return "File uploaded successfully";
    }
}
// Usage
// Demonstrates the failure path: 120 MB exceeds the 100 MB limit, so
// uploadFile throws and the catch block prints the error message.
$cloudStorage = new CloudStorage();
try {
    echo $cloudStorage->uploadFile("example.txt", 120); // Throws CloudStorageException
} catch (CloudStorageException $e) {
    echo "Error: " . $e->getMessage();
}
// Output: Error: File size exceeds the maximum storage limit
package mastermind.views.console;
import mastermind.controllers.Controller;
import mastermind.views.MessageView;
import santaTecla.utils.WithConsoleView;
// Console view that renders the secret combination as a row of masked
// placeholders (one per combination slot).
class SecretCombinationView extends WithConsoleView {

    private Controller controller;

    SecretCombinationView(Controller controller) {
        super();
        this.controller = controller;
    }

    // Write one SECRET placeholder per slot, then terminate the line.
    void writeln() {
        for (int position = 0; position < this.controller.getWidth(); position++) {
            this.console.write(MessageView.SECRET.getMessage());
        }
        this.console.writeln();
    }
}
|
#!/usr/bin/env bash
# Uninstall the xorg-libxdmcp app via the shared koopa uninstaller,
# forwarding any extra command line arguments.
koopa_uninstall_xorg_libxdmcp() {
    koopa_uninstall_app \
        --name='xorg-libxdmcp' \
        "$@"
}
|
<gh_stars>0
// pages/webViewList/lookInformdetail/lookInformdetail.js
const api = require('../../../config/api.js');
Page({
  /**
   * Initial page data: the web-view URL, filled in by onLoad.
   */
  data: {
    url: ''
  },

  /**
   * Lifecycle: page load.
   * Builds the detail-page URL from the incoming options, appending the
   * stored access token / account id when the user is logged in.
   */
  onLoad: function (options) {
    console.log('页面传入的' + JSON.stringify(options), 'lookInformdetail');
    let url = api.webViewUrl + 'lookInfoVideo?type=xcx'
    // Fix: the logged-in branch previously appended the videoId
    // unconditionally, producing a literal "videoId=undefined" parameter when
    // options.videoId was absent; append it only when present (matching the
    // logged-out branch).
    if (options.videoId) {
      url = url + '&videoId=' + options.videoId
    }
    if (wx.getStorageSync("accesstoken")) {
      url = url + '&accesstoken=' + wx.getStorageSync("accesstoken") + '&accountId=' + wx.getStorageSync("accountId")
    }
    url = url + '&infoId=' + options.infoId
    this.setData({
      url: url
    }, () => {
      console.log('访问的' + this.data.url, 'lookInfoVideo');
    })
  },

  /** Lifecycle: first render finished. */
  onReady: function () {
  },

  /** Lifecycle: page shown. */
  onShow: function () {
  },

  /** Lifecycle: page hidden. */
  onHide: function () {
  },

  /** Lifecycle: page unloaded. */
  onUnload: function () {
  },

  /** Pull-down refresh handler (unused). */
  onPullDownRefresh: function () {
  },

  /** Reached-bottom handler (unused). */
  onReachBottom: function () {
  },

  /** Share configuration for the top-right share button. */
  onShareAppMessage: function () {
    return {
      title: '加盟好餐饮,就找餐盟严选!',
    }
  }
})
VERSION='master' #Repository version
REPO='clarkwinkelmann/flarum-ext-who-read' #Repository name
LOCALE='resources/locale' #Locale folder
YAML1='en.yml' #Original yaml file
YAML2='clarkwinkelmann-who-read.yml' #Translated yaml file

TEMP_DIR=`mktemp -d`
WORK_DIR=`pwd`

GREEN='\033[0;32m'
RED='\033[0;31m'
NC='\033[0m'

# same-yaml must be installed and executable.
if ! [ -x "$(command -v same-yaml)" ]; then
  echo 'Error: same-yaml is not installed.' >&2
  exit 1
fi

# Abort if the temp dir could not be created.
if [[ ! "$TEMP_DIR" || ! -d "$TEMP_DIR" ]]; then
  exit 1
fi

function cleanup {
  rm -rf "$TEMP_DIR"
}
# Fix: register the cleanup trap as soon as the temp dir exists, so it also
# runs when any later step fails (it was previously installed only at the
# very end of the script).
trap cleanup EXIT

cd "$TEMP_DIR"

# Download the reference (English) locale file for the pinned version.
curl -s -L "https://raw.githubusercontent.com/$REPO/$VERSION/$LOCALE/$YAML1" > $YAML1

RC=0

echo "Testing $YAML1 against $YAML2:"
# Fix: use the exit status of same-yaml as the pass/fail signal. The previous
# check compared the two constant file NAMES ($YAML1 = $YAML2), which are
# always different, so the script reported "passed" unconditionally.
if same-yaml --ref "$YAML1" --tra "$WORK_DIR/locale/$YAML2"
then
  printf "${GREEN}✓ passed${NC}\n"
else
  RC=1
  printf "${RED}⨉ failed${NC}\n"
fi

exit $RC
#!/usr/bin/env bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Build the Python PIP installation package for TensorFlow and install
# the package.
#
# Usage:
# pip_new.sh
#
# Required step(s):
# Run configure.py prior to running this script.
#
# Required environment variable(s):
# CONTAINER_TYPE: (CPU | GPU)
# OS_TYPE: (UBUNTU | MACOS)
# TF_PYTHON_VERSION: (python2 | python2.7 | python3.5 | python3.7)
#
# Optional environment variables. If provided, overwrites any default values.
# TF_BUILD_FLAGS: Bazel build flags.
# e.g. TF_BUILD_FLAGS="--config=opt"
# TF_TEST_FLAGS: Bazel test flags.
# e.g. TF_TEST_FLAGS="--verbose_failures=true \
# --build_tests_only --test_output=errors"
# TF_TEST_FILTER_TAGS: Filtering tags for bazel tests. More specifically,
# input tags for `--test_filter_tags` flag.
# e.g. TF_TEST_FILTER_TAGS="no_pip,-nomac,no_oss"
# TF_TEST_TARGETS: Bazel test targets.
# e.g. TF_TEST_TARGETS="//tensorflow/contrib/... \
# //tensorflow/... \
# //tensorflow/python/..."
# TF_PIP_TESTS: PIP tests to run. If NOT specified, skips all tests.
# e.g. TF_PIP_TESTS="test_pip_virtualenv_clean \
# test_pip_virtualenv_clean \
# test_pip_virtualenv_oss_serial"
# IS_NIGHTLY: Nightly run flag.
# e.g. IS_NIGHTLY=1 # nightly runs
# e.g. IS_NIGHTLY=0 # non-nightly runs
# TF_PROJECT_NAME: Name of the project. This string will be pass onto
# the wheel file name. For nightly builds, it will be
# overwritten to 'tf_nightly'. For gpu builds, '_gpu'
# will be appended.
# e.g. TF_PROJECT_NAME="tensorflow"
# e.g. TF_PROJECT_NAME="tf_nightly_gpu"
# TF_PIP_TEST_ROOT: Root directory for building and testing pip pkgs.
# e.g. TF_PIP_TEST_ROOT="pip_test"
#
# To-be-deprecated variable(s).
# GIT_TAG_OVERRIDE: Values for `--git_tag_override`. This flag gets passed
# in as `--action_env` for bazel build and tests.
# TF_BUILD_INSTALL_EXTRA_PIP_PACKAGES
# Additonal pip packages to be installed.
# Caveat: pip version needs to be checked prior.
# set bash options
set -e
set -x
###########################################################################
# General helper function(s)
###########################################################################
# Strip leading and trailing whitespaces
# Write $1 to stdout with leading and trailing whitespace removed
# (note: `echo -e` also interprets backslash escapes in the input).
str_strip () {
  echo -e "$1" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
}
# Convert string to all lower case
# Convert the first argument to all lower case; die when no argument given.
lowercase() {
  [[ -n "${1}" ]] || die "Nothing to convert to lowercase. No argument given."
  tr '[:upper:]' '[:lower:]' <<< "${1}"
}
# Validate the required global variables; die with a message otherwise.
check_global_vars() {
  # Check container type
  if ! [[ ${CONTAINER_TYPE} == "cpu" ]] && \
     ! [[ ${CONTAINER_TYPE} == "rocm" ]] && \
     ! [[ ${CONTAINER_TYPE} == "gpu" ]]; then
    die "Error: Provided CONTAINER_TYPE \"${CONTAINER_TYPE}\" "\
        "is not supported."
  fi
  # Check OS type
  if ! [[ ${OS_TYPE} == "ubuntu" ]] && \
     ! [[ ${OS_TYPE} == "macos" ]]; then
    # Fix: there was no space between `die` and its message, so bash tried to
    # execute a command literally named `dieError: ...` instead of dying.
    die "Error: Provided OS_TYPE \"${OS_TYPE}\" is not supported."
  fi
}
# Prepend each argument to the comma-separated BAZEL_TEST_FILTER_TAGS list,
# skipping tags that are already present anywhere in the list.
add_test_filter_tag() {
  EMPTY=""
  while true; do
    # NOTE(review): "${1:$EMPTY}" is a substring expansion with offset 0,
    # i.e. effectively just "$1" — presumably a guard against unset
    # parameters; confirm the intent before simplifying.
    FILTER="${1:$EMPTY}"
    if ! [[ $BAZEL_TEST_FILTER_TAGS == *"${FILTER}"* ]]; then
      BAZEL_TEST_FILTER_TAGS="${FILTER},${BAZEL_TEST_FILTER_TAGS}"
    fi
    shift
    if [[ -z "${1}" ]]; then
      break
    fi
  done
}
# Remove each argument from the comma-separated BAZEL_TEST_FILTER_TAGS list
# (handles both leading and embedded positions via the two sed expressions).
remove_test_filter_tag() {
  EMPTY=""
  while true; do
    # NOTE(review): "${1:$EMPTY}" expands with offset 0, i.e. just "$1";
    # see add_test_filter_tag for the same idiom.
    FILTER="${1:$EMPTY}"
    BAZEL_TEST_FILTER_TAGS="$(echo ${BAZEL_TEST_FILTER_TAGS} | sed -e 's/^'${FILTER}',//g' -e 's/,'${FILTER}'//g')"
    shift
    if [[ -z "${1}" ]]; then
      break
    fi
  done
}
# Clean up bazel build & test flags with proper configuration.
# Normalize the bazel build/test flag strings: append the git-tag override
# action_env when needed and strip stray whitespace.
update_bazel_flags() {
  # Add git tag override flag if necessary.
  GIT_TAG_STR=" --action_env=GIT_TAG_OVERRIDE"
  if [[ -z "${GIT_TAG_OVERRIDE}" ]] && \
     ! [[ ${BAZEL_BUILD_FLAGS} = *${GIT_TAG_STR}* ]]; then
    BAZEL_BUILD_FLAGS+="${GIT_TAG_STR}"
  fi
  # Clean up whitespaces
  BAZEL_BUILD_FLAGS=$(str_strip "${BAZEL_BUILD_FLAGS}")
  BAZEL_TEST_FLAGS=$(str_strip "${BAZEL_TEST_FLAGS}")
  # Cleaned bazel flags
  # Fix: use `echo -e` so the embedded "\n" prints as a newline rather than
  # as a literal backslash-n.
  echo -e "Bazel build flags (cleaned):\n" "${BAZEL_BUILD_FLAGS}"
  echo -e "Bazel test flags (cleaned):\n" "${BAZEL_TEST_FLAGS}"
}
# Adjust BAZEL_TEST_FILTER_TAGS for PIP-package validation and for the
# current operating system.
update_test_filter_tags() {
  # Add test filter tags
  # This script is for validating built PIP packages. Add pip tags.
  add_test_filter_tag -no_pip -nopip
  # MacOS filter tags
  if [[ ${OS_TYPE} == "macos" ]]; then
    remove_test_filter_tag nomac no_mac
    add_test_filter_tag -nomac -no_mac
  fi
  echo "Final test filter tags: ${BAZEL_TEST_FILTER_TAGS}"
}
# Check currently running python and pip version
# Verify that the python/pip binaries currently on PATH match the version the
# user requested via TF_PYTHON_VERSION; die on a mismatch.
check_python_pip_version() {
  # Check if only the major version of python is provided by the user.
  MAJOR_VER_ONLY=0
  if [[ ${#PYTHON_VER} -lt 9 ]]; then
    # User only provided major version (e.g. 'python2' instead of 'python2.7')
    MAJOR_VER_ONLY=1
  fi

  # Retrieve only the version number of the user requested python.
  PYTHON_VER_REQUESTED=${PYTHON_VER:6:3}
  echo "PYTHON_VER_REQUESTED: ${PYTHON_VER_REQUESTED}"

  # Retrieve only the version numbers of the python & pip in use currently.
  PYTHON_VER_IN_USE=$(python --version 2>&1)
  PYTHON_VER_IN_USE=${PYTHON_VER_IN_USE:7:3}
  PIP_VER_IN_USE=$(pip --version)
  PIP_VER_IN_USE=${PIP_VER_IN_USE:${#PIP_VER_IN_USE}-4:3}

  # If only major versions are applied, drop minor versions.
  if [[ $MAJOR_VER_ONLY == 1 ]]; then
    PYTHON_VER_IN_USE=${PYTHON_VER_IN_USE:0:1}
    PIP_VER_IN_USE=${PIP_VER_IN_USE:0:1}
  fi

  # Check if all versions match.
  echo -e "User requested python version: '${PYTHON_VER_REQUESTED}'\n" \
    "Detected python version in use: '${PYTHON_VER_IN_USE}'\n"\
    "Detected pip version in use: '${PIP_VER_IN_USE}'"
  if ! [[ $PYTHON_VER_REQUESTED == $PYTHON_VER_IN_USE ]]; then
    die "Error: Mismatch in python versions detected."
  else
    # Fix: this branch was written as the Python-style token `else:`, which
    # bash executed as an unknown command, aborting the script under `set -e`
    # precisely when the versions DID match.
    echo "Python and PIP versions in use match the requested."
  fi
}
###########################################################################
# Setup: directories, local/global variables
###########################################################################
# Script directory and source necessary files.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
source "${SCRIPT_DIR}/builds_common.sh"

# Required global variables
# Checks on values for these vars are done in "Build TF PIP Package" section.
CONTAINER_TYPE=$(lowercase "${CONTAINER_TYPE}")
OS_TYPE=$(lowercase "${OS_TYPE}")
PYTHON_VER=$(lowercase "${TF_PYTHON_VERSION}")

# Python bin path
if [[ -z "$PYTHON_BIN_PATH" ]]; then
  die "Error: PYTHON_BIN_PATH was not provided. Did you run configure?"
fi

# Default values for optional global variables in case they are not user
# defined.
DEFAULT_BAZEL_BUILD_FLAGS='--config=opt'
DEFAULT_BAZEL_TEST_FLAGS='--test_output=errors --verbose_failures=true'
DEFAULT_BAZEL_TEST_FILTERS='-no_oss,-oss_serial'
DEFAULT_BAZEL_TEST_TARGETS='//tensorflow/python/... -//tensorflow/core/... -//tensorflow/compiler/... '
DEFAULT_PIP_TESTS="" # Do not run any tests by default
DEFAULT_IS_NIGHTLY=0 # Not nightly by default
DEFAULT_PROJECT_NAME="tensorflow"
DEFAULT_PIP_TEST_ROOT="pip_test"

# Take in optional global variables
# (the :- expansions fall back to the defaults above when the TF_* vars are
# unset or empty).
BAZEL_BUILD_FLAGS=${TF_BUILD_FLAGS:-$DEFAULT_BAZEL_BUILD_FLAGS}
BAZEL_TEST_FLAGS=${TF_TEST_FLAGS:-$DEFAULT_BAZEL_TEST_FLAGS}
BAZEL_TEST_TARGETS=${TF_TEST_TARGETS:-$DEFAULT_BAZEL_TEST_TARGETS}
BAZEL_TEST_FILTER_TAGS=${TF_TEST_FILTER_TAGS:-$DEFAULT_BAZEL_TEST_FILTERS}
PIP_TESTS=${TF_PIP_TESTS:-$DEFAULT_PIP_TESTS}
IS_NIGHTLY=${IS_NIGHTLY:-$DEFAULT_IS_NIGHTLY}
PROJECT_NAME=${TF_PROJECT_NAME:-$DEFAULT_PROJECT_NAME}
PIP_TEST_ROOT=${TF_PIP_TEST_ROOT:-$DEFAULT_PIP_TEST_ROOT}

# Local variables
PIP_WHL_DIR="${KOKORO_ARTIFACTS_DIR}/tensorflow/${PIP_TEST_ROOT}/whl"
mkdir -p "${PIP_WHL_DIR}"
PIP_WHL_DIR=$(realpath "${PIP_WHL_DIR}") # Get absolute path
WHL_PATH=""
# Determine the major.minor versions of python being used (e.g., 2.7).
# Useful for determining the directory of the local pip installation.
PY_MAJOR_MINOR_VER=$(${PYTHON_BIN_PATH} -c "print(__import__('sys').version)" 2>&1 | awk '{ print $1 }' | head -n 1 | cut -c1-3)
if [[ -z "${PY_MAJOR_MINOR_VER}" ]]; then
  die "ERROR: Unable to determine the major.minor version of Python."
fi
echo "Python binary path to be used in PIP install: ${PYTHON_BIN_PATH} "\
"(Major.Minor version: ${PY_MAJOR_MINOR_VER})"
PYTHON_BIN_PATH_INIT=${PYTHON_BIN_PATH}
PIP_BIN_PATH="$(which pip${PY_MAJOR_MINOR_VER})"

# PIP packages
INSTALL_EXTRA_PIP_PACKAGES=${TF_BUILD_INSTALL_EXTRA_PIP_PACKAGES}

###########################################################################
# Build TF PIP Package
###########################################################################

# First remove any already existing binaries for a clean start and test.
if [[ -d ${PIP_TEST_ROOT} ]]; then
  echo "Test root directory ${PIP_TEST_ROOT} already exists. Deleting it."
  sudo rm -rf ${PIP_TEST_ROOT}
fi

# Check that global variables are properly set.
check_global_vars

# Check if in a virtualenv and exit if yes.
IN_VENV=$(python -c 'import sys; print("1" if hasattr(sys, "real_prefix") else "0")')
if [[ "$IN_VENV" == "1" ]]; then
  echo "It appears that we are already in a virtualenv. Deactivating..."
  deactivate || source deactivate || die "FAILED: Unable to deactivate from existing virtualenv."
fi

# Configure python. Obtain the path to python binary.
source tools/python_bin_path.sh
# Assume PYTHON_BIN_PATH is exported by the script above.
if [[ -z "$PYTHON_BIN_PATH" ]]; then
  die "PYTHON_BIN_PATH was not provided. Did you run configure?"
fi

# Bazel build the file.
PIP_BUILD_TARGET="//tensorflow/tools/pip_package:build_pip_package"
# Clean bazel cache.
bazel clean
# Clean up and update bazel flags
update_bazel_flags
# Build. This outputs the file `build_pip_package`.
bazel build ${BAZEL_BUILD_FLAGS} ${PIP_BUILD_TARGET} || \
die "Error: Bazel build failed for target: '${PIP_BUILD_TARGET}'"
###########################################################################
# Test function(s)
###########################################################################
# Validate the built wheel inside a brand-new ("clean") virtualenv; returns
# 0 when a basic TensorFlow op succeeds after installation, 1 otherwise.
test_pip_virtualenv_clean() {
  # Create a clean directory.
  CLEAN_VENV_DIR="${PIP_TEST_ROOT}/venv_clean"
  # activate virtual environment and install tensorflow with PIP.
  create_activate_virtualenv --clean "${CLEAN_VENV_DIR}"
  # Install TF with pip
  install_tensorflow_pip "${WHL_PATH}"
  # cd to a temporary directory to avoid picking up Python files in the source
  # tree.
  TMP_DIR=$(mktemp -d)
  pushd "${TMP_DIR}"
  # Run a quick check on tensorflow installation.
  RET_VAL=$(python -c "import tensorflow as tf; t1=tf.constant([1,2,3,4]); t2=tf.constant([5,6,7,8]); print(tf.add(t1,t2).shape)")
  # Deactivate virtualenv.
  deactivate || source deactivate || die "FAILED: Unable to deactivate from existing virtualenv."
  # Return to original directory. Remove temp dirs.
  popd
  sudo rm -rf "${TMP_DIR}" "${CLEAN_VENV_DIR}"
  # Check result to see if tensorflow is properly installed.
  # Adding two length-4 vectors must report shape (4,).
  if [[ ${RET_VAL} == *'(4,)'* ]]; then
    echo "PIP test on clean virtualenv PASSED."
    return 0
  else
    echo "PIP test on clean virtualenv FAILED."
    return 1
  fi
}
# Installs the wheel into a virtualenv (with system site packages), runs the
# smoke test, installs any extra pip packages requested via
# INSTALL_EXTRA_PIP_PACKAGES, then runs the bazel test suite.
# Returns 0 only when all of those steps succeed.
test_pip_virtualenv_non_clean() {
  # Create virtualenv directory for install test
  VENV_DIR="${PIP_TEST_ROOT}/venv"
  # Activate virtualenv
  create_activate_virtualenv "${VENV_DIR}"
  # Install TF with pip
  install_tensorflow_pip "${WHL_PATH}"
  # cd to a temporary directory to avoid picking up Python files in the source
  # tree.
  TMP_DIR=$(mktemp -d)
  pushd "${TMP_DIR}"
  # Run a quick check on tensorflow installation.
  RET_VAL=$(python -c "import tensorflow as tf; t1=tf.constant([1,2,3,4]); t2=tf.constant([5,6,7,8]); print(tf.add(t1,t2).shape)")
  # Return to original directory. Remove temp dirs.
  popd
  sudo rm -rf "${TMP_DIR}"
  # Check result to see if tensorflow is properly installed.
  if ! [[ ${RET_VAL} == *'(4,)'* ]]; then
    echo "PIP test on virtualenv (non-clean) FAILED"
    return 1
  fi
  # Install extra pip packages, if specified.
  for PACKAGE in ${INSTALL_EXTRA_PIP_PACKAGES}; do
    echo "Installing extra pip package required by test-on-install: ${PACKAGE}"
    ${PIP_BIN_PATH} install ${PACKAGE}
    if [[ $? != 0 ]]; then
      echo "${PIP_BIN_PATH} install ${PACKAGE} FAILED."
      deactivate || source deactivate || die "FAILED: Unable to deactivate from existing virtualenv."
      return 1
    fi
  done
  # Run bazel test.
  run_test_with_bazel
  RESULT=$?
  # Deactivate from virtualenv.
  deactivate || source deactivate || die "FAILED: Unable to deactivate from existing virtualenv."
  sudo rm -rf "${VENV_DIR}"
  if [[ $RESULT -ne 0 ]]; then
    echo "PIP test on virtualenv (non-clean) FAILED."
    return 1
  else
    echo "PIP test on virtualenv (non-clean) PASSED."
    return 0
  fi
}
# Runs the oss_serial-tagged bazel tests inside the shared test virtualenv.
# Returns 0 when the bazel run succeeded, 1 otherwise.
test_pip_virtualenv_oss_serial() {
  # Create virtualenv directory
  VENV_DIR="${PIP_TEST_ROOT}/venv"
  create_activate_virtualenv "${VENV_DIR}"
  run_test_with_bazel --oss_serial
  RESULT=$?
  # deactivate virtualenv
  deactivate || source deactivate || die "FAILED: Unable to deactivate from existing virtualenv."
  # Report success first; fall through to the failure path otherwise.
  if [[ ${RESULT} -eq 0 ]]; then
    echo "PIP test on virtualenv (oss-serial) PASSED."
    return 0
  fi
  echo "PIP test on virtualenv (oss-serial) FAILED."
  return 1
}
###########################################################################
# Test helper function(s)
###########################################################################
# Creates (recreating if necessary) a virtualenv at the given directory and
# activates it in the current shell, then reruns ./configure so bazel uses
# the venv's python.
#
# Usage: create_activate_virtualenv [--clean] <virtualenv_dir>
#   --clean: build a fully isolated virtualenv (no --system-site-packages) so
#            the wheel is tested without globally installed packages.
create_activate_virtualenv() {
  VIRTUALENV_FLAGS="--system-site-packages"
  if [[ "${1}" == "--clean" ]]; then
    # BUGFIX: previously --clean only consumed the argument but the env was
    # still created with --system-site-packages, so the "clean" install test
    # was not actually isolated from globally installed packages.
    VIRTUALENV_FLAGS=""
    shift
  fi
  VIRTUALENV_DIR="${1}"
  if [[ -d "${VIRTUALENV_DIR}" ]]; then
    if sudo rm -rf "${VIRTUALENV_DIR}"
    then
      echo "Removed existing virtualenv directory: ${VIRTUALENV_DIR}"
    else
      die "Failed to remove existing virtualenv directory: ${VIRTUALENV_DIR}"
    fi
  fi
  if mkdir -p "${VIRTUALENV_DIR}"
  then
    echo "Created virtualenv directory: ${VIRTUALENV_DIR}"
  else
    die "FAILED to create virtualenv directory: ${VIRTUALENV_DIR}"
  fi
  # Use the virtualenv from the default python version (i.e., python-virtualenv)
  # to create the virtualenv directory for testing. Use the -p flag to specify
  # the python version inside the to-be-created virtualenv directory.
  ${PYTHON_BIN_PATH_INIT} -m virtualenv -p ${PYTHON_BIN_PATH_INIT} ${VIRTUALENV_FLAGS} ${VIRTUALENV_DIR} || \
    die "FAILED: Unable to create virtualenv"
  source "${VIRTUALENV_DIR}/bin/activate" || \
    die "FAILED: Unable to activate virtualenv in ${VIRTUALENV_DIR}"
  # Update .tf_configure.bazelrc with venv python path for bazel test.
  PYTHON_BIN_PATH="$(which python)"
  yes "" | ./configure
}
# Installs the wheel at $1 into the currently active virtualenv, pinning
# pip and setuptools to versions known to work with the test suite.
install_tensorflow_pip() {
  if [[ -z "${1}" ]]; then
    die "Please provide a proper wheel file path."
  fi
  # Set path to pip.
  PIP_BIN_PATH="$(which pip${PY_MAJOR_MINOR_VER})"
  # Print python and pip bin paths
  echo "PYTHON_BIN_PATH to be used to install the .whl: ${PYTHON_BIN_PATH}"
  echo "PIP_BIN_PATH to be used to install the .whl: ${PIP_BIN_PATH}"
  # Upgrade pip so it supports tags such as cp27mu, manylinux1 etc.
  echo "Upgrade pip in virtualenv"
  # NOTE: pip install --upgrade pip leads to a documented TLS issue for
  # some versions in python
  curl https://bootstrap.pypa.io/get-pip.py | ${PYTHON_BIN_PATH} || \
    die "Error: pip install (get-pip.py) FAILED"
  # Check that requested python version matches configured one.
  check_python_pip_version
  # Force upgrade of setuptools. This must happen before the pip install of the
  # WHL_PATH, which pulls in absl-py, which uses install_requires notation
  # introduced in setuptools >=20.5. The default version of setuptools is 5.5.1,
  # which is too old for absl-py.
  ${PIP_BIN_PATH} install --upgrade setuptools==39.1.0 || \
    die "Error: setuptools install, upgrade FAILED"
  # Force tensorflow reinstallation. Otherwise it may not get installed from
  # last build if it had the same version number as previous build.
  PIP_FLAGS="--upgrade --force-reinstall"
  ${PIP_BIN_PATH} install -v ${PIP_FLAGS} ${WHL_PATH} || \
    die "pip install (forcing to reinstall tensorflow) FAILED"
  echo "Successfully installed pip package ${WHL_PATH}"
  # Force downgrade of setuptools. This must happen after the pip install of the
  # WHL_PATH, which ends up upgrading to the latest version of setuptools.
  # NOTE(review): the sentence below says versions >= 39.1.0 fail, yet exactly
  # 39.1.0 is installed — presumably "> 39.1.0" was meant; confirm.
  # Versions of setuptools >= 39.1.0 will cause tests to fail like this:
  # ImportError: cannot import name py31compat
  ${PIP_BIN_PATH} install --upgrade setuptools==39.1.0 || \
    die "Error: setuptools install, upgrade FAILED"
}
# Runs the bazel test suite against the *installed* pip package by symlinking
# the tensorflow tree under bazel_pip/ and rewriting test targets to that
# prefix. Pass --oss_serial to run only oss_serial-tagged tests, one at a time.
run_test_with_bazel() {
  IS_OSS_SERIAL=0
  if [[ "${1}" == "--oss_serial" ]]; then
    IS_OSS_SERIAL=1
  fi
  # Default to 4 GPUs per worker unless the environment overrides it.
  TF_GPU_COUNT=${TF_GPU_COUNT:-4}
  # PIP tests should have a "different" path. Different than the one we place
  # virtualenv, because we are deleting and recreating it here.
  PIP_TEST_PREFIX=bazel_pip
  TEST_ROOT=$(pwd)/${PIP_TEST_PREFIX}
  sudo rm -rf $TEST_ROOT
  mkdir -p $TEST_ROOT
  ln -s $(pwd)/tensorflow $TEST_ROOT/tensorflow
  if [[ "${IS_OSS_SERIAL}" == "1" ]]; then
    remove_test_filter_tag -no_oss
    add_test_filter_tag oss_serial
  else
    add_test_filter_tag -oss_serial
  fi
  # Clean the bazel cache
  bazel clean
  # Clean up flags before running bazel commands
  update_bazel_flags
  # Clean up and update test filter tags
  update_test_filter_tags
  # Figure out how many concurrent tests we can run and do run the tests.
  BAZEL_PARALLEL_TEST_FLAGS=""
  if [[ $CONTAINER_TYPE == "gpu" ]]; then
    # Number of test threads is the number of GPU cards available.
    if [[ $OS_TYPE == "macos" ]]; then
      BAZEL_PARALLEL_TEST_FLAGS="--local_test_jobs=1"
    else
      BAZEL_PARALLEL_TEST_FLAGS="--local_test_jobs=${TF_GPU_COUNT} \
--run_under=//tensorflow/tools/ci_build/gpu_build:parallel_gpu_execute"
    fi
  else
    # Number of test threads is the number of physical CPUs.
    if [[ $OS_TYPE == "macos" ]]; then
      BAZEL_PARALLEL_TEST_FLAGS="--local_test_jobs=$(sysctl -n hw.ncpu)"
    else
      BAZEL_PARALLEL_TEST_FLAGS="--local_test_jobs=$(grep -c ^processor /proc/cpuinfo)"
    fi
  fi
  # Serial runs never execute more than one test at a time.
  if [[ ${IS_OSS_SERIAL} == 1 ]]; then
    BAZEL_PARALLEL_TEST_FLAGS="--local_test_jobs=1"
  fi
  # Rewrite each //path target to //bazel_pip/path so tests resolve against
  # the symlinked tree (and therefore the installed package).
  TEST_TARGETS_SYMLINK=""
  for TARGET in ${BAZEL_TEST_TARGETS[@]}; do
    TARGET_NEW=$(echo ${TARGET} | sed -e "s/\/\//\/\/${PIP_TEST_PREFIX}\//g")
    TEST_TARGETS_SYMLINK+="${TARGET_NEW} "
  done
  echo "Test targets (symlink): ${TEST_TARGETS_SYMLINK}"
  # Run the test.
  bazel test --build_tests_only ${BAZEL_TEST_FLAGS} ${BAZEL_PARALLEL_TEST_FLAGS} --test_tag_filters=${BAZEL_TEST_FILTER_TAGS} -k -- ${TEST_TARGETS_SYMLINK}
  unlink ${TEST_ROOT}/tensorflow
}
# Runs every test named in ${PIP_TESTS} and reports a pass/fail tally.
# Returns 0 iff all requested tests passed (0 also when no test requested).
run_all_tests() {
  if [[ -z "${PIP_TESTS}" ]]; then
    echo "No test was specified to run. Skipping all tests."
    return 0
  fi
  FAIL_COUNTER=0
  PASS_COUNTER=0
  for TEST in ${PIP_TESTS[@]}; do
    # Run tests.
    case "${TEST}" in
    "test_pip_virtualenv_clean")
      test_pip_virtualenv_clean
      ;;
    "test_pip_virtualenv_non_clean")
      test_pip_virtualenv_non_clean
      ;;
    "test_pip_virtualenv_oss_serial")
      test_pip_virtualenv_oss_serial
      ;;
    *)
      die "No matching test ${TEST} was found. Stopping test."
      ;;
    esac
    # Check and update the results.
    RETVAL=$?
    # Update results counter
    if [ ${RETVAL} -eq 0 ]; then
      echo "Test (${TEST}) PASSED. (PASS COUNTER: ${PASS_COUNTER})"
      PASS_COUNTER=$(($PASS_COUNTER+1))
    else
      echo "Test (${TEST}) FAILED. (FAIL COUNTER: ${FAIL_COUNTER})"
      FAIL_COUNTER=$(($FAIL_COUNTER+1))
    fi
  done
  # Terminate the tally line so the status line below starts on its own line.
  printf "${PASS_COUNTER} PASSED | ${FAIL_COUNTER} FAILED\n"
  if [[ "${FAIL_COUNTER}" == "0" ]]; then
    printf "PIP tests ${COLOR_GREEN}PASSED${COLOR_NC}\n"
    return 0
  else
    # BUGFIX: this branch was written as `else:` (a Python-ism). Bash does not
    # recognize `else:` as the keyword, so the failure branch never executed
    # and the function reported success even when tests failed.
    printf "PIP tests ${COLOR_RED}FAILED${COLOR_NC}\n"
    return 1
  fi
}
###########################################################################
# Build TF PIP Wheel file
###########################################################################
# Update the build flags for building whl.
# Flags: GPU, OS, tf_nightly, project name
GPU_FLAG=""
NIGHTLY_FLAG=""
# TF Nightly flag
if [[ "$IS_NIGHTLY" == 1 ]]; then
  # If 'nightly' is not specified in the project name already, then add.
  if ! [[ $PROJECT_NAME == *"nightly"* ]]; then
    echo "WARNING: IS_NIGHTLY=${IS_NIGHTLY} but requested project name \
(PROJECT_NAME=${PROJECT_NAME}) does not include 'nightly' string. \
Renaming it to 'tf_nightly'."
    PROJECT_NAME="tf_nightly"
  fi
  NIGHTLY_FLAG="--nightly_flag"
fi
# CPU / GPU flag
if [[ ${CONTAINER_TYPE} == "gpu" ]]; then
  GPU_FLAG="--gpu"
  if ! [[ $PROJECT_NAME == *"gpu"* ]]; then
    echo "WARNING: GPU is specified but requested project name (PROJECT_NAME=${PROJECT_NAME}) \
does not include 'gpu'. Appending '_gpu' to the project name."
    PROJECT_NAME="${PROJECT_NAME}_gpu"
  fi
fi
# Invoke the bazel-built packager to produce the wheel in ${PIP_WHL_DIR}.
./bazel-bin/tensorflow/tools/pip_package/build_pip_package ${PIP_WHL_DIR} ${GPU_FLAG} ${NIGHTLY_FLAG} "--project_name" ${PROJECT_NAME} || die "build_pip_package FAILED"
# Strip the dot, e.g. "2.7" -> "27"; a bare "2" is normalized to "27".
PY_DOTLESS_MAJOR_MINOR_VER=$(echo $PY_MAJOR_MINOR_VER | tr -d '.')
if [[ $PY_DOTLESS_MAJOR_MINOR_VER == "2" ]]; then
  PY_DOTLESS_MAJOR_MINOR_VER="27"
fi
# Set wheel path and verify that there is only one .whl file in the path.
WHL_PATH=$(ls "${PIP_WHL_DIR}"/"${PROJECT_NAME}"-*"${PY_DOTLESS_MAJOR_MINOR_VER}"*"${PY_DOTLESS_MAJOR_MINOR_VER}"*.whl)
if [[ $(echo "${WHL_PATH}" | wc -w) -ne 1 ]]; then
  # NOTE(review): this only prints the error and continues with an ambiguous
  # WHL_PATH — confirm whether this should `die` instead.
  echo "ERROR: Failed to find exactly one built TensorFlow .whl file in "\
"directory: ${PIP_WHL_DIR}"
fi
WHL_DIR=$(dirname "${WHL_PATH}")
WHL_BASE_NAME=$(basename "${WHL_PATH}")
# Expected post-audit wheel name: s/linux/manylinux1/ in the filename.
AUDITED_WHL_NAME="${WHL_DIR}"/$(echo "${WHL_BASE_NAME//linux/manylinux1}")
# Print the size of the wheel file.
# NOTE(review): `ls -l | awk '{print $5}'` prints the raw byte count.
echo "Size of the PIP wheel file built: $(ls -l ${WHL_PATH} | awk '{print $5}')"
# Run tests (if any is specified).
run_all_tests
for WHL_PATH in $(ls ${PIP_WHL_DIR}/${PROJECT_NAME}*.whl); do
  if [[ "${TF_NEED_CUDA}" -eq "1" ]]; then
    # Copy and rename for gpu manylinux as we do not want auditwheel to package in libcudart.so
    WHL_PATH=${AUDITED_WHL_NAME}
    cp "${WHL_DIR}"/"${WHL_BASE_NAME}" "${WHL_PATH}"
    echo "Copied manylinux1 wheel file at ${WHL_PATH}"
  else
    if [[ ${OS_TYPE} == "ubuntu" ]]; then
      # Repair the wheels for cpu manylinux1
      echo "auditwheel repairing ${WHL_PATH}"
      pip show auditwheel
      # If in virtualenv, re-pin auditwheel to version 1.5.0
      if [ $(python -c 'import sys; print ("1" if hasattr(sys, "real_prefix") else "0")') == "1" ]; then
        pip install auditwheel==1.5.0
      fi
      auditwheel --version
      auditwheel repair -w "${WHL_DIR}" "${WHL_PATH}"
      if [[ -f ${AUDITED_WHL_NAME} ]]; then
        WHL_PATH=${AUDITED_WHL_NAME}
        echo "Repaired manylinux1 wheel file at: ${WHL_PATH}"
      else
        die "WARNING: Cannot find repaired wheel."
      fi
    fi
  fi
done
echo "EOF: Successfully ran pip_new.sh"
|
#include <chrono>
#include <iostream>
#include "clear_rendertarget.h"
// Renders a frame that clears the render target and saves it as a PNG.
// Exits non-zero when rendering fails.
int main(int argc, char *argv[])
{
    try
    {
        // Use an automatic (stack) object: the original `new`-ed the renderer
        // and never deleted it, leaking it; a stack object is destroyed even
        // when Clear()/Save() throws.
        cg::ClearRenderTarget render(1920, 1080);
        render.Clear();
        render.Save("results/clear_rendertarget.png");
    }
    catch (std::exception &e)
    {
        std::cerr << e.what() << std::endl;
        return 1; // Surface the failure to the caller instead of exiting 0.
    }
    return 0;
}
#!/usr/bin/env sh
# dev-scp
# minify-js.sh
# Minify JavaScript files via the Closure Compiler web service.
# Required args:
# source: path to the file to minify.

# global variables
SOURCE=${1:-""}
# Derive the output name by replacing the extension with .min.js.
# (The original wrapped this in a useless $(echo ...), which also word-split
# paths containing spaces; parameter expansion alone is sufficient.)
OUTPUT="${SOURCE%.*}.min.js"
if [ -f "${SOURCE}" ]; then
    # Quote all expansions so paths with spaces survive intact.
    curl -s -d compilation_level=SIMPLE_OPTIMIZATIONS -d output_format=text -d output_info=compiled_code --data-urlencode "js_code@${SOURCE}" http://closure-compiler.appspot.com/compile > "${OUTPUT}"
else
    # The original exited silently with status 0; report the problem and fail.
    echo "minify-js.sh: source file '${SOURCE}' not found" >&2
    exit 1
fi
# -*- coding:utf-8 -*-
import os,sys
import math
sys.path.insert(0, '../util')
from caffe_extractor import CaffeExtractor
from distance import get_distance
import numpy as np
import argparse
# Populated from CLI arguments in __main__: path to the LFW pairs file and
# the directory containing the LFW images.
LFW_PAIRS = None
LFW_IMG_DIR = None
def parse_line(line):
    """Parse one LFW pairs-file line.

    Returns None for lines with fewer than three fields, otherwise a
    5-tuple (is_same_person, name1, id1, name2, id2).
    """
    fields = line.split()
    if len(fields) < 3:
        # Header or malformed line.
        return None
    if len(fields) == 3:
        # "name id1 id2" -> positive pair (same person, two photo ids).
        name, first_id, second_id = fields
        return True, name, first_id, name, second_id
    # "name1 id1 name2 id2" -> negative pair (two different people).
    return False, fields[0], fields[1], fields[2], fields[3]
def find_threshold_sort(pos_list, neg_list):
    """Pick a distance threshold by walking the sorted distance lists.

    Entries are [distance, ...] lists. Positives are scanned in ascending
    and negatives in descending distance order; the first crossover point
    (a positive farther than the paired negative) fixes the threshold and
    the count of correctly ordered pairs. Returns (precision, threshold).
    """
    ranked_pos = sorted(pos_list, key=lambda item: item[0])
    ranked_neg = sorted(neg_list, key=lambda item: item[0], reverse=True)
    pos_count = len(ranked_pos)
    neg_count = len(ranked_neg)
    correct = 0
    threshold = 0
    for idx in range(min(pos_count, neg_count)):
        pos_dist = ranked_pos[idx][0]
        neg_dist = ranked_neg[idx][0]
        if pos_dist > neg_dist:
            correct = idx
            threshold = (pos_dist + neg_dist) / 2
            break
    precision = (correct * 2.0) / (pos_count + neg_count)
    return precision, threshold
def get_accuracy(pos_list, neg_list, threshold):
    """Accuracy of classifying pairs by a distance threshold.

    A positive pair counts as correct when its distance is strictly below
    `threshold`; a negative pair when its distance is strictly above it.
    """
    hits = sum(1 for entry in pos_list if entry[0] < threshold)
    hits += sum(1 for entry in neg_list if entry[0] > threshold)
    return float(hits) / (len(pos_list) + len(neg_list))
def best_threshold(pos_list, neg_list, thrNum = 10000):
    """Grid-search the threshold in [-1, 1] that maximizes pair accuracy.

    Evaluates 2*thrNum + 1 evenly spaced candidates and returns
    (best_accuracy, best_threshold); earlier candidates win ties.
    """
    best_acc = 0
    best_t = 0
    for candidate in np.linspace(-1, 1, thrNum * 2 + 1):
        acc = get_accuracy(pos_list, neg_list, candidate)
        if acc > best_acc:
            best_acc = acc
            best_t = candidate
    return best_acc, best_t
def test_kfold(pos_list, neg_list, k = 10):
    """K-fold cross-validation of threshold selection.

    For each fold i, a threshold is chosen on the other folds
    (find_threshold_sort) and accuracy is measured on fold i.
    Returns (mean_accuracy, mean_threshold) over the k folds.
    Assumes pos_list and neg_list have the same length.
    """
    # BUGFIX: use floor division. Under Python 3 the original '/' produced
    # float fold sizes and float fold indices, so `fi != i` was effectively
    # always true and every pair landed in the validation split. '//' keeps
    # the original Python 2 semantics and fixes Python 3.
    fold_size = len(pos_list) // k
    sum_acc = 0
    sum_thresh = 0
    sum_n = 0
    for i in range(k):
        val_pos = []
        val_neg = []
        test_pos = []
        test_neg = []
        for j in range(len(pos_list)):
            # Fold index of pair j; pairs beyond k*fold_size always validate.
            fi = j // fold_size
            if fi != i:
                val_pos.append(pos_list[j])
                val_neg.append(neg_list[j])
            else:
                test_pos.append(pos_list[j])
                test_neg.append(neg_list[j])
        precision, threshold = find_threshold_sort(val_pos, val_neg)
        accuracy = get_accuracy(test_pos, test_neg, threshold)
        sum_acc += accuracy
        sum_thresh += threshold
        sum_n += 1
        # verbose
        print('precision:%.4f threshold:%f' % (accuracy, threshold))
    return sum_acc / sum_n, sum_thresh / sum_n
def parse_pair_file(pair_path, prefix, feat_extractor,dist_func):
    """Read LFW pairs, extract features for each image, and evaluate.

    pair_path: pairs file parsed line-by-line with parse_line().
    prefix: image root; paths are built as prefix/name/name_%04d.jpg.
    feat_extractor: object with .extract(img_path) -> feature vector.
    dist_func: distance between two feature vectors.
    Returns (precision, threshold, pos_list, neg_list) from test_kfold().
    """
    pair_list = []
    # parse pairs
    with open(pair_path, 'r') as f:
        for line in f.readlines():
            pair = parse_line(line)
            if pair is not None:
                pair_list.append(pair)
                # print(pair)
    print('#pairs:%d' % len(pair_list))
    # compute feature
    pos_list = []
    neg_list = []
    count = 0
    features = []
    for pair in pair_list:
        count += 1
        img_path1 = '%s/%s/%s_%04d.jpg' % (prefix, pair[1], pair[1], int(pair[2]))
        img_path2 = '%s/%s/%s_%04d.jpg' % (prefix, pair[3], pair[3], int(pair[4]))
        # skip invalid pairs
        if not os.path.exists(img_path1) or not os.path.exists(img_path2):
            continue
        feat1 = feat_extractor.extract(img_path1)
        feat2 = feat_extractor.extract(img_path2)
        dist = dist_func(feat1, feat2)
        # Progress log every 100 pairs, with the feature L2 norms.
        if count % 100 == 1:
            print('%4d dist:%.4f %s |1|:%.4f |2|:%.4f' % (count, dist, pair[0],
                np.sqrt(np.sum(np.square(feat1))), np.sqrt(np.sum(np.square(feat2)))))
        # pair[0] is True for same-person pairs (see parse_line).
        if pair[0]:
            pos_list.append([dist, feat1, feat2, img_path1, img_path2])
        else:
            neg_list.append([dist, feat1, feat2, img_path1, img_path2])
        features.append(feat1)
        features.append(feat2)
    # find best threshold()
    #precision, threshold = best_threshold(pos_list, neg_list, 10000)
    #return precision, threshold, pos_list, neg_list
    precision, threshold = test_kfold(pos_list, neg_list)
    return precision, threshold, pos_list, neg_list
def test_loss(extractor, weight, dist_type):
    """Evaluate `extractor` on the LFW pairs and write the result to a file
    next to the weight file; skips evaluation when a result file whose name
    starts with '<weight_basename>.<dist_type>' already exists."""
    dist_func = get_distance(dist_type)
    global LFW_PAIRS
    global LFW_IMG_DIR
    # NOTE(review): `dir` shadows the builtin; kept as-is.
    dir, path = os.path.split(weight)
    fnames = os.listdir(dir)
    fpattern = '%s.%s' % (path,dist_type)
    existed = False
    for fname in fnames:
        if fname.startswith(fpattern):
            existed = True
            print('skip:%s ' % (weight))
            return
    print('test:%s ' % (weight))
    # test
    precision, threshold, pos_list, neg_list = parse_pair_file(LFW_PAIRS, LFW_IMG_DIR, extractor,dist_func)
    print('precision on lfw:%.4f threshold:%f ' % (precision, threshold))
    # Result filename encodes the distance type and the precision percentage.
    filename = '.%s.%.2f.txt' % (dist_type, precision*100)
    # write result
    with open(weight+filename,'w') as f:
        f.write( 'precision on lfw:%.4f threshold:%f ' % (precision, threshold) )
def test_model(model, weight,dist_type='cosine',do_mirror=False, feat_layer='fc5'):
    """Build a CaffeExtractor for (model prototxt, weight file) and run the
    LFW evaluation with the given distance type."""
    extractor = CaffeExtractor(model, weight,do_mirror=do_mirror, featLayer=feat_layer )
    test_loss(extractor, weight, dist_type)
    #test_loss(extractor, weight, 'SSD')
def test_dir(model_dir,dist_type='cosine',do_mirror=False):
    """Evaluate every .caffemodel in `model_dir`, each against the shared
    deploy.prototxt in the same directory."""
    filenames = os.listdir(model_dir)
    for filename in filenames:
        ext = os.path.splitext(filename)[1]
        if ext != '.caffemodel':
            continue
        # test the model
        model_path = os.path.join(model_dir, filename)
        test_model(model_dir+'/deploy.prototxt',model_path,dist_type,do_mirror)
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("--lfw_dir", help="lfw image dir")
    parser.add_argument("--lfw_pair", help="lfw pair file")
    parser.add_argument("--model", help="model prototxt OR dir")
    parser.add_argument("--weights", help="model weights")
    parser.add_argument("--feat_layer", help="fc5")
    parser.add_argument("-t", "--dist_type", default='cosine', help="distance measure ['cosine', 'L2', 'SSD']")
    # NOTE(review): a value passed on the command line arrives as a string
    # (e.g. "False"), which is truthy — confirm whether a type/action is needed.
    parser.add_argument("-f", "--do_mirror", default=False,help="mirror image and concatinate features")
    args = parser.parse_args()
    print(args)
    LFW_PAIRS = args.lfw_pair
    LFW_IMG_DIR = args.lfw_dir
    # Single-weight mode when --weights is given; otherwise scan a directory.
    if args.weights:
        test_model(args.model,args.weights,args.dist_type,args.do_mirror)
    else:
        test_dir(args.model,args.dist_type,args.do_mirror)
|
<reponame>wjimenez5271/migration-tools
package app
import (
"flag"
"io/ioutil"
"os"
"path/filepath"
"testing"
"github.com/docker/libcompose/project"
"github.com/urfave/cli"
)
// TestProjectFactoryProjectNameIsNormalized verifies that the project name
// supplied via the --project-name CLI flag is normalized (lowercased, with
// non-alphanumeric characters stripped) by ProjectFactory.Create.
func TestProjectFactoryProjectNameIsNormalized(t *testing.T) {
	projects := []struct {
		name     string
		expected string
	}{
		{
			name:     "example",
			expected: "example",
		},
		{
			name:     "example-test",
			expected: "exampletest",
		},
		{
			name:     "aW3Ird_Project_with_$$",
			expected: "aw3irdprojectwith",
		},
	}
	tmpDir, err := ioutil.TempDir("", "project-factory-test")
	if err != nil {
		t.Fatal(err)
	}
	composeFile := filepath.Join(tmpDir, "docker-compose.yml")
	// NOTE(review): the WriteFile error is ignored; a failure here would only
	// surface later as a confusing factory error.
	ioutil.WriteFile(composeFile, []byte(`hello:
  image: busybox`), 0700)
	for _, projectCase := range projects {
		globalSet := flag.NewFlagSet("test", 0)
		// Set the project-name flag
		globalSet.String("project-name", projectCase.name, "doc")
		// Set the compose file flag
		globalSet.Var(&cli.StringSlice{composeFile}, "file", "doc")
		c := cli.NewContext(nil, globalSet, nil)
		factory := &ProjectFactory{}
		p, err := factory.Create(c)
		if err != nil {
			t.Fatal(err)
		}
		if p.(*project.Project).Name != projectCase.expected {
			t.Fatalf("expected %s, got %s", projectCase.expected, p.(*project.Project).Name)
		}
	}
}
// TestProjectFactoryFileArgMayContainMultipleFiles verifies that the --file
// flag accepts repeated values as well as OS-path-list-separated bundles,
// and that compose-file defaults (docker-compose.yml plus the override file)
// are picked up when no file is requested.
func TestProjectFactoryFileArgMayContainMultipleFiles(t *testing.T) {
	sep := string(os.PathListSeparator)
	fileCases := []struct {
		requested []string
		available []string
		expected  []string
	}{
		{
			requested: []string{},
			available: []string{"docker-compose.yml"},
			expected:  []string{"docker-compose.yml"},
		},
		{
			requested: []string{},
			available: []string{"docker-compose.yml", "docker-compose.override.yml"},
			expected:  []string{"docker-compose.yml", "docker-compose.override.yml"},
		},
		{
			requested: []string{"one.yml"},
			available: []string{"one.yml"},
			expected:  []string{"one.yml"},
		},
		{
			requested: []string{"one.yml"},
			available: []string{"docker-compose.yml", "one.yml"},
			expected:  []string{"one.yml"},
		},
		{
			requested: []string{"one.yml", "two.yml", "three.yml"},
			available: []string{"one.yml", "two.yml", "three.yml"},
			expected:  []string{"one.yml", "two.yml", "three.yml"},
		},
		{
			requested: []string{"one.yml" + sep + "two.yml" + sep + "three.yml"},
			available: []string{"one.yml", "two.yml", "three.yml"},
			expected:  []string{"one.yml", "two.yml", "three.yml"},
		},
		{
			requested: []string{"one.yml" + sep + "two.yml", "three.yml" + sep + "four.yml"},
			available: []string{"one.yml", "two.yml", "three.yml", "four.yml"},
			expected:  []string{"one.yml", "two.yml", "three.yml", "four.yml"},
		},
		{
			requested: []string{"one.yml", "two.yml" + sep + "three.yml"},
			available: []string{"one.yml", "two.yml", "three.yml"},
			expected:  []string{"one.yml", "two.yml", "three.yml"},
		},
	}
	for _, fileCase := range fileCases {
		tmpDir, err := ioutil.TempDir("", "project-factory-test")
		if err != nil {
			t.Fatal(err)
		}
		// NOTE(review): defer inside the loop delays cleanup until the whole
		// test returns, so one temp dir per case accumulates; harmless here.
		defer os.RemoveAll(tmpDir)
		if err = os.Chdir(tmpDir); err != nil {
			t.Fatal(err)
		}
		for _, file := range fileCase.available {
			ioutil.WriteFile(file, []byte(`hello:
  image: busybox`), 0700)
		}
		globalSet := flag.NewFlagSet("test", 0)
		// Set the project-name flag
		globalSet.String("project-name", "example", "doc")
		// Set the compose file flag
		fcr := cli.StringSlice(fileCase.requested)
		globalSet.Var(&fcr, "file", "doc")
		c := cli.NewContext(nil, globalSet, nil)
		factory := &ProjectFactory{}
		p, err := factory.Create(c)
		if err != nil {
			t.Fatal(err)
		}
		for i, v := range p.(*project.Project).Files {
			if v != fileCase.expected[i] {
				t.Fatalf("requested %s, available %s, expected %s, got %s",
					fileCase.requested, fileCase.available, fileCase.expected, p.(*project.Project).Files)
			}
		}
	}
}
|
#!/bin/sh
# Build Docker Image
# Builds the `infogail` image from the Dockerfile in the current directory.
docker build -t infogail -f Dockerfile .
|
#!/bin/bash
# Prepare the avopix dataset: process up to 705328 mirrored 1024px jpg images
# into 32 shards via prepare_data.py.
python prepare_data.py --task avopix --images-dir images/avopix_mirrored_1024/ --format jpg --ratio 1 --shards-num 32 --max-images 705328
<reponame>lotosbin/binbin-reader-electron<gh_stars>1-10
/**
* Created by liubinbin on 02/09/2016.
*/
/**
 * Node-style callback: invoked with an error value and a result value.
 */
export interface ICallback<TError, TResult>{
    (error: TError, result: TResult) : void
}
|
-- ***************************************************************************
-- File: 13_26.sql
--
-- Developed By TUSC
--
-- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant
-- that this source code is error-free. If any errors are
-- found in this source code, please report them to TUSC at
-- (630)960-2909 ext 1011 or <EMAIL>.
-- ***************************************************************************
SPOOL 13_26.lis
-- Suppress column headers, feedback and pagination so the spool file
-- contains only the generated statements.
SET HEADING OFF
SET FEEDBACK OFF
SET PAGESIZE 0
-- NOTE(review): this second SPOOL closes 13_26.lis before anything is written
-- to it; only 13_26.log receives the generated GRANTs. Confirm whether the
-- first SPOOL (and the duplicate SPOOL OFF below) is intentional.
SPOOL 13_26.log
-- Generate one GRANT EXECUTE statement per stored PL/SQL object owned by the
-- current user, targeting the role/user supplied via &user_role.
SELECT 'GRANT EXECUTE ON ' || object_name || ' TO ' ||
       UPPER('&user_role') || ';'
FROM user_objects
WHERE object_type IN ('PACKAGE','PROCEDURE','FUNCTION')
ORDER BY object_type, object_name;
SPOOL OFF
SPOOL OFF
<filename>src/main/java/io/github/rcarlosdasilva/weixin/model/notification/NotificationMeta.java<gh_stars>1-10
package io.github.rcarlosdasilva.weixin.model.notification;
import java.util.Date;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import io.github.rcarlosdasilva.weixin.common.dictionary.NotificationInfoType;
import io.github.rcarlosdasilva.weixin.common.dictionary.NotificationMessageType;
/**
 * Metadata common to WeChat notification pushes, deserialized from the
 * notification XML via the XStream aliases declared on each field.
 */
public class NotificationMeta {
  @XStreamAlias("AppId")
  private String appId;
  @XStreamAlias("ToUserName")
  private String toUser;
  @XStreamAlias("FromUserName")
  private String fromUser;
  @XStreamAlias("CreateTime")
  private long time;
  @XStreamAlias("MsgType")
  private String messageType;
  @XStreamAlias("InfoType")
  private String infoType;
  @XStreamAlias("Encrypt")
  private String ciphertext;

  /**
   * appid.
   * <p>
   * For official-account platform notifications this is the official
   * account's appid; for open-platform notifications it is the third-party
   * platform's appid.
   *
   * @return appid
   */
  public String getAppId() {
    return appId;
  }

  public void setAppId(String appId) {
    this.appId = appId;
  }

  /**
   * The developer's WeChat account (ToUserName).
   *
   * @return to user
   */
  public String getToUser() {
    return toUser;
  }

  /**
   * The sender's account, an OpenID (FromUserName).
   *
   * @return from user
   */
  public String getFromUser() {
    return fromUser;
  }

  /**
   * Message creation time, sent as an integer (CreateTime).
   * <p>
   * NOTE(review): {@code new Date(long)} expects milliseconds; confirm
   * whether CreateTime arrives in seconds or milliseconds.
   *
   * @return time
   */
  public Date getTime() {
    return new Date(time);
  }

  /**
   * Message type (MsgType).
   *
   * @return {@link NotificationMessageType}
   */
  public NotificationMessageType getMessageType() {
    return NotificationMessageType.byValue(messageType);
  }

  /**
   * Info type (InfoType). The original javadoc here was a copy-paste of
   * {@code getMessageType}'s and referenced the wrong type.
   *
   * @return {@link NotificationInfoType}
   */
  public NotificationInfoType getInfoType() {
    return NotificationInfoType.byValue(infoType);
  }

  /**
   * The encrypted payload (Encrypt element).
   *
   * @return ciphertext
   */
  public String getCiphertext() {
    return ciphertext;
  }
}
|
# In-memory advertisement store: maps ad_id -> ad details.
ads_db = {}
def load_data(data):
    # Replace the entire in-memory advertisement store with `data`.
    global ads_db
    ads_db = data
def add_advertisement(ad_id, ad_details):
    # Insert or overwrite the advertisement stored under `ad_id`.
    global ads_db
    ads_db[ad_id] = ad_details
def get_advertisement(ad_id):
    """Return the stored details for `ad_id`, or the literal string
    'Advertisement not found' when the id is unknown (kept for backward
    compatibility with existing callers)."""
    global ads_db
    return ads_db.get(ad_id, "Advertisement not found")
#!/bin/sh
# Catkin-generated helper: builds and installs the rbx1_nav Python package via
# distutils, honoring an optional DESTDIR staging root.
if [ -n "$DESTDIR" ] ; then
    case $DESTDIR in
    /*) # ok
        ;;
    *)
        /bin/echo "DESTDIR argument must be absolute... "
        /bin/echo "otherwise python's distutils will bork things."
        exit 1
    esac
    DESTDIR_ARG="--root=$DESTDIR"
fi
# Print each command before executing it, for traceable build logs.
echo_and_run() { echo "+ $@" ; "$@" ; }
echo_and_run cd "/home/xtark/ros_ws/src/third_packages/rbx1/rbx1_nav"
# ensure that Python install destination exists
echo_and_run mkdir -p "$DESTDIR/home/xtark/ros_ws/install/lib/python2.7/dist-packages"
# Note that PYTHONPATH is pulled from the environment to support installing
# into one location when some dependencies were installed in another
# location, #123.
echo_and_run /usr/bin/env \
    PYTHONPATH="/home/xtark/ros_ws/install/lib/python2.7/dist-packages:/home/xtark/ros_ws/build/lib/python2.7/dist-packages:$PYTHONPATH" \
    CATKIN_BINARY_DIR="/home/xtark/ros_ws/build" \
    "/usr/bin/python" \
    "/home/xtark/ros_ws/src/third_packages/rbx1/rbx1_nav/setup.py" \
    build --build-base "/home/xtark/ros_ws/build/third_packages/rbx1/rbx1_nav" \
    install \
    $DESTDIR_ARG \
    --install-layout=deb --prefix="/home/xtark/ros_ws/install" --install-scripts="/home/xtark/ros_ws/install/bin"
def replace_math_symbols(expression: str) -> str:
    """Spell out a fixed set of Unicode math symbols in `expression`.

    Each occurrence of the arrow, division, multiplication, summation and
    lambda symbols is replaced with its English name; all other characters
    are left untouched.
    """
    symbol_names = (
        ("→", "right"),
        ("÷", "division"),
        ("×", "multiplication"),
        ("∑", "sum"),
        ("λ", "lambda"),
    )
    result = expression
    for symbol, name in symbol_names:
        result = result.replace(symbol, name)
    return result
import * as React from 'react';
import { Route as RouteType, NotebookHandle as NotebookHandleType, Notebook as NotebookType } from "../../types";
import Home, { Props as HomeProps } from "../Home";
import Notebook, { Props as NotebookProps } from "../Notebook";
/** Props consumed by the route-switching component below. */
interface Props {
  /** Which top-level screen to render. */
  route: RouteType;
  notebooks?: NotebookHandleType[];
  notebook?: NotebookType;
  homeurl?: string;
}
/**
 * Top-level router: renders the screen matching `props.route`.
 * Throws on unknown routes so bad navigation state fails loudly.
 */
export default function(props: Props) {
  const { route } = props;
  // NOTE(review): Notebook is cast to `any` — presumably to loosen its prop
  // typing for the spread below; confirm whether the cast is still needed.
  const NB = Notebook as any;
  switch (route) {
    // The `break` statements after each `return` in the original were
    // unreachable and have been removed.
    case "home": return <Home {...(props as HomeProps)} />;
    case "notebook": return <NB {...(props as NotebookProps)} />;
    default: throw new Error("Unknown route:" + route);
  }
}
<html>
<head>
<script>
// Submits the form programmatically; wired to the button's onclick below.
function submitForm() {
  document.getElementById("myForm").submit();
}
</script>
</head>
<body>
<!-- action="" submits back to the current URL -->
<form id="myForm" action="">
<input type="text" name="field1" />
<input type="text" name="field2" />
<input type="button" onclick="submitForm()" value="Submit" />
</form>
</body>
</html>
'''
Contains functions to help with logging.
'''
def get_fn_name(fn) -> str:
    """Return ``fn.__name__``, or the string ``'None'`` when the object has
    no name attribute.

    The string ``'None'`` (not the ``None`` object) is preserved for
    backward compatibility with existing callers/log formats.
    """
    try:
        return fn.__name__
    except AttributeError:
        # Only swallow the "no __name__" case; the original bare `except`
        # also hid unrelated errors such as KeyboardInterrupt.
        return 'None'
from tempest.lib import decorators
from tempest import test
from tempest import config
import logging
import time

CONF = config.CONF
# BUGFIX: the original referenced an undefined name `log`, which raised a
# NameError at import time. Standard-library logging keeps the module
# import-safe without adding a dependency.
# NOTE(review): upstream tempest code typically uses oslo_log — confirm
# whether `from oslo_log import log` was intended here.
LOG = logging.getLogger(__name__)
def log_test_execution_time(func):
    """Decorator that logs the wall-clock runtime of `func` via LOG.info."""
    import functools

    # functools.wraps preserves the wrapped test's __name__/__doc__, which the
    # original wrapper discarded (relevant for test reporting/introspection).
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.time()
        result = func(*args, **kwargs)
        end_time = time.time()
        total_time = end_time - start_time
        LOG.info("Execution time of {}: {} seconds".format(func.__name__, total_time))
        return result
    return wrapper
class SampleTestClass(test.BaseTestCase):
    """Example tempest test case demonstrating the timing decorator."""

    # NOTE(review): decorators.idempotent_id conventionally takes a UUID
    # string; 'random_id' looks like a placeholder — confirm before merge.
    @decorators.idempotent_id('random_id')
    @log_test_execution_time
    def sample_test_method(self):
        # Your test method implementation goes here
        pass
#!/usr/bin/env bash
#
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Make sure all shell scripts:
# a.) explicitly opt out of locale dependence using "export LC_ALL=C", or
# b.) explicitly opt in to locale dependence using the annotation below.
export LC_ALL=C
EXIT_CODE=0
# Scan all tracked *.sh files except the vendored subtrees.
for SHELL_SCRIPT in $(git ls-files -- "*.sh" | grep -vE "src/(secp256k1|univalue)/"); do
  # Scripts carrying the opt-in annotation are exempt.
  if grep -q "# This script is intentionally locale dependent by not setting \"export LC_ALL=C\"" "${SHELL_SCRIPT}"; then
    continue
  fi
  # First line that is neither a comment nor blank must be the LC_ALL export.
  FIRST_NON_COMMENT_LINE=$(grep -vE '^(#.*|)$' "${SHELL_SCRIPT}" | head -1)
  if [[ ${FIRST_NON_COMMENT_LINE} != "export LC_ALL=C" ]]; then
    echo "Missing \"export LC_ALL=C\" (to avoid locale dependence) as first non-comment non-empty line in ${SHELL_SCRIPT}"
    EXIT_CODE=1
  fi
done
exit ${EXIT_CODE}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.