text stringlengths 1 1.05M |
|---|
<gh_stars>0
import React, { useState } from 'react';
import { Text, View, Button } from 'react-native';
import { useDispatch } from 'react-redux'
import styles from './ItemStyle';
import { EditModal } from '../editModal/EditModal';
import { setCity, weatherRequest } from '../../../action/index'
export const Item = ({ date, data }) => {
const dispatch = useDispatch();
const [isModal, setIsModal] = useState(false);
return (
<View style={styles.row} >
<Text style={styles.parametrs}>{ `Date: ${date.getDate()}.${date.getMonth()}.${date.getFullYear() }`}</Text>
<Text style={styles.parametrs}>{ `Time: ${date.getHours()} : ${date.getMinutes() }`}</Text>
<Text style={styles.parametrs}>{ `latitude:${ data.coordinates.lat } longitude:${ data.coordinates.lng }` }</Text>
<Text style={styles.parametrs}>{ `City: ${data.city}` }</Text>
<Button
title="Repeat this request"
onPress={() => {
dispatch(setCity(data.city));
dispatch(weatherRequest());
}}
/>
<Button
title="Check this request(History)"
onPress={() => {
setIsModal(true)
}}
/>
<EditModal visible={isModal} setVisible={setIsModal} data={data}/>
</View>
)
} |
<reponame>bumped-inc/atrium-java
/*
* MX API
* The MX Atrium API supports over 48,000 data connections to thousands of financial institutions. It provides secure access to your users' accounts and transactions with industry-leading cleansing, categorization, and classification. Atrium is designed according to resource-oriented REST architecture and responds with JSON bodies and HTTP response codes. Use Atrium's development environment, vestibule.mx.com, to quickly get up and running. The development environment limits are 100 users, 25 members per user, and access to the top 15 institutions. Contact MX to purchase production access.
*
* OpenAPI spec version: 0.1
*
*/
package com.mx.model;
import java.util.Objects;
import java.util.Arrays;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import com.mx.model.CredentialOption;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
/**
* CredentialResponse
*/
/**
 * CredentialResponse
 *
 * <p>Gson-backed model for a single credential field (e.g. LOGIN or PASSWORD)
 * returned by the Atrium API. Setters for {@code fieldName}, {@code guid},
 * {@code label} and {@code type} were added for consistency with
 * {@code options}, which already had mutators.</p>
 */
public class CredentialResponse {
  @SerializedName("field_name")
  private String fieldName = null;

  @SerializedName("guid")
  private String guid = null;

  @SerializedName("label")
  private String label = null;

  @SerializedName("options")
  private List<CredentialOption> options = null;

  @SerializedName("type")
  private String type = null;

  /**
   * Get fieldName
   * @return fieldName
   **/
  @ApiModelProperty(example = "LOGIN", value = "")
  public String getFieldName() {
    return fieldName;
  }

  /**
   * Sets fieldName.
   * @param fieldName the field name
   */
  public void setFieldName(String fieldName) {
    this.fieldName = fieldName;
  }

  /**
   * Get guid
   * @return guid
   **/
  @ApiModelProperty(example = "CRD-1ec152cd-e628-e81a-e852-d1e7104624da", value = "")
  public String getGuid() {
    return guid;
  }

  /**
   * Sets guid.
   * @param guid the credential guid
   */
  public void setGuid(String guid) {
    this.guid = guid;
  }

  /**
   * Get label
   * @return label
   **/
  @ApiModelProperty(example = "Username", value = "")
  public String getLabel() {
    return label;
  }

  /**
   * Sets label.
   * @param label the display label
   */
  public void setLabel(String label) {
    this.label = label;
  }

  /**
   * Fluent setter for options.
   * @param options the option list
   * @return this, for chaining
   */
  public CredentialResponse options(List<CredentialOption> options) {
    this.options = options;
    return this;
  }

  /**
   * Appends a single option, lazily creating the list on first use.
   * @param optionsItem the option to add
   * @return this, for chaining
   */
  public CredentialResponse addOptionsItem(CredentialOption optionsItem) {
    if (this.options == null) {
      this.options = new ArrayList<CredentialOption>();
    }
    this.options.add(optionsItem);
    return this;
  }

  /**
   * Get options
   * @return options
   **/
  @ApiModelProperty(value = "")
  public List<CredentialOption> getOptions() {
    return options;
  }

  public void setOptions(List<CredentialOption> options) {
    this.options = options;
  }

  /**
   * Get type
   * @return type
   **/
  @ApiModelProperty(example = "LOGIN", value = "")
  public String getType() {
    return type;
  }

  /**
   * Sets type.
   * @param type the field type
   */
  public void setType(String type) {
    this.type = type;
  }

  @Override
  public boolean equals(java.lang.Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    CredentialResponse credentialResponse = (CredentialResponse) o;
    return Objects.equals(this.fieldName, credentialResponse.fieldName) &&
        Objects.equals(this.guid, credentialResponse.guid) &&
        Objects.equals(this.label, credentialResponse.label) &&
        Objects.equals(this.options, credentialResponse.options) &&
        Objects.equals(this.type, credentialResponse.type);
  }

  @Override
  public int hashCode() {
    return Objects.hash(fieldName, guid, label, options, type);
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class CredentialResponse {\n");
    sb.append("    fieldName: ").append(toIndentedString(fieldName)).append("\n");
    sb.append("    guid: ").append(toIndentedString(guid)).append("\n");
    sb.append("    label: ").append(toIndentedString(label)).append("\n");
    sb.append("    options: ").append(toIndentedString(options)).append("\n");
    sb.append("    type: ").append(toIndentedString(type)).append("\n");
    sb.append("}");
    return sb.toString();
  }

  /**
   * Convert the given object to string with each line indented by 4 spaces
   * (except the first line).
   */
  private String toIndentedString(java.lang.Object o) {
    if (o == null) {
      return "null";
    }
    return o.toString().replace("\n", "\n    ");
  }
}
|
package com.blackti.oauth.dto;

import java.io.Serializable;

import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.NoArgsConstructor;

/**
 * DTO carrying an authorization value.
 *
 * <p>Equality is based solely on {@code autorizacao} (the only
 * {@code @EqualsAndHashCode.Include}-annotated field).</p>
 */
@Data
@AllArgsConstructor
@NoArgsConstructor
@EqualsAndHashCode(onlyExplicitlyIncluded = true)
public class AutorizacaoDTO implements Serializable {

    // BUG FIX: serialVersionUID was declared although the class did not
    // implement Serializable, making the field dead/misleading.
    private static final long serialVersionUID = 1L;

    private Long id;

    /** Authorization value; the sole field participating in equals/hashCode. */
    @EqualsAndHashCode.Include
    private String autorizacao;
}
|
<filename>Methods/Non-Static/Assignment.java
/**
 * Demonstrates static field initialization order with pre-increment.
 *
 * BUG FIXES vs. the original: a stray {@code static} keyword was removed,
 * {@code m1()} was moved inside the class, and its missing {@code int}
 * return type was added — the original did not compile.
 */
class Assignment
{
    // Static initializers run top-to-bottom:
    // i = 39
    static int i = 39;
    // j = 39 + 40 + 20 = 99 (i becomes 40)
    static int j = i + ++i + 20;
    // l = 41 + 99 + 100 = 240 (i becomes 41, j becomes 100)
    static int l = ++i + j + ++j;
    // k = m1() = 10 (prints "From m1" during class initialization)
    static int k = m1();

    public static void main(String[] args)
    {
        System.out.println("Hello World!");
    }

    /** Prints a message and returns 10; used to initialize {@code k}. */
    static int m1()
    {
        System.out.println("From m1");
        return 10;
    }
}
import Vue from "vue";
import Vuex from "vuex";

Vue.use(Vuex);

// Root Vuex store: tracks the authenticated user and their permission set.
export default new Vuex.Store({
    state: {
        // Current user payload, or false when nobody is logged in.
        user: false,
        // Permission payload for the current user.
        Permission: null
    },
    getters: {
        // Expose the stored permission payload to consumers.
        getUserPermission(state) {
            return state.Permission;
        }
    },
    mutations: {
        // Replace the current user payload.
        setUpdateUser(state, data) {
            state.user = data;
        },
        // Replace the current permission payload.
        setUpdatePermission(state, data) {
            state.Permission = data;
        }
    }
});
#!/bin/bash
# Launch the monocular EuRoC example on the MH_05_difficult image sequence.
# NOTE(review): "SEQUENCE.txt" looks like a placeholder timestamp file name —
# confirm the intended timestamps file before running.
BIN=./Examples/Monocular/mono_euroc
VOCAB=Vocabulary/ORBvoc.txt
CONFIG=Examples/Monocular/EuRoC.yaml
IMAGE_DIR=/root/DATA/euroc/MH_05_difficult/mav0/cam0/data
TIMESTAMPS=Examples/Monocular/EuRoC_TimeStamps/SEQUENCE.txt

"$BIN" "$VOCAB" "$CONFIG" "$IMAGE_DIR" "$TIMESTAMPS"
|
#!/bin/bash
# Install dependencies and run the pixel-matching script.
# BUG FIX: without fail-fast, a failed `cd /notebooks` silently ran the
# script from the wrong directory.
set -e
pip3 install requests
cd /notebooks
python3 opencv-match-pixels.py
<gh_stars>10-100
package io.opensphere.develop.util.launch;
/** Represents operating system info. */
public enum OsInfo
{
    /** Windows launcher configuration. */
    WINDOWS("Windows", "win32"),

    /** Linux launcher configuration. */
    LINUX("Linux", "linux"),

    /** MacOS launcher configuration. */
    MACOS("Macos", "macosx");

    /** The extension applied to the generated launcher. */
    private final String extension;

    /** The directory in which the launcher will run. */
    private final String directory;

    /**
     * Constructor.
     *
     * @param extension the launcher extension
     * @param directory the launcher run directory
     */
    OsInfo(String extension, String directory)
    {
        this.extension = extension;
        this.directory = directory;
    }

    /**
     * Gets the extension.
     *
     * @return the extension
     */
    public String getExtension()
    {
        return extension;
    }

    /**
     * Gets the directory.
     *
     * @return the directory
     */
    public String getDirectory()
    {
        return directory;
    }
}
|
import { useState} from 'react';
import { useForm, SubmitHandler } from 'react-hook-form';
import Header from '../components/Header';
interface IFormInput {
_id: string | number;
name: string;
email: string;
password: string;
}
// Login page.
// FIXES: renamed `login` -> `Login` (JSX tooling treats lowercase names as
// host elements / breaks Fast Refresh heuristics) and removed the unused
// `submitted` state. NOTE(review): the form has no onSubmit handler yet and
// react-hook-form is imported but unused at file level — presumably wiring
// is still to come; confirm before removing those imports.
function Login() {
  return (
    <div>
      <Header />
      {/* design ref
      https://ordinarycoders.com/blog/article/tailwindcss-custom-forms */}
      <div className="w-full max-w-xs">
        <form className="bg-white shadow-md rounded px-8 pt-6 pb-8 mb-4">
          <div className="mb-4">
            <label className="block text-gray-700 text-sm font-bold mb-2" htmlFor="username">
              Username
            </label>
            <input className="shadow appearance-none border rounded w-full py-2 px-3 text-gray-700 leading-tight focus:outline-none focus:shadow-outline" id="username" type="text" placeholder="Username"></input>
          </div>
          <div className="mb-6">
            <label className="block text-gray-700 text-sm font-bold mb-2" htmlFor="password">
              Password
            </label>
            <input className="shadow appearance-none border border-red-500 rounded w-full py-2 px-3 text-gray-700 mb-3 leading-tight focus:outline-none focus:shadow-outline" id="password" type="password" ></input>
            <p className="text-red-500 text-xs italic">Please choose a password.</p>
          </div>
          <div className="flex items-center justify-between">
            <button className="bg-blue-500 hover:bg-blue-700 text-white font-bold py-2 px-4 rounded focus:outline-none focus:shadow-outline" type="button">
              Sign In
            </button>
            <a className="inline-block align-baseline font-bold text-sm text-blue-500 hover:text-blue-800" href="#">
              Forgot Password?
            </a>
          </div>
        </form>
        <p className="text-center text-gray-500 text-xs">
          ©2022 Prome. All rights reserved.
        </p>
      </div>
    </div>
  )
}
export default Login;
import { Component, ViewChild, OnInit, AfterViewInit, OnDestroy, Output, EventEmitter } from '@angular/core';
import { MatSidenav } from '@angular/material';
import { SidenavService } from './shared/services/sidenav.service';
import { NgcCookieConsentService } from 'ngx-cookieconsent';
import { Subscription, Observable } from 'rxjs';
import { BrowserHistoryService } from './shared/services/browser-history.service';
import { ColormapStoreService } from './shared/stores/colormap-store.service';
import { Router, NavigationStart, NavigationEnd } from '@angular/router';
import { filter } from 'rxjs/operators';
import { LoadingIndicatorService } from './shared/services/loading-indicator.service';
// Root shell component: hosts the sidenav, wires global services (cookie
// consent, browser history, colormap persistence) and drives the route/map
// loading indicators.
@Component({
  selector: 'app-root',
  templateUrl: './app.component.html',
  styleUrls: ['./app.component.scss']
})
export class AppComponent implements OnInit, AfterViewInit, OnDestroy {
  // True while a route transition is in flight (drives the route spinner).
  routeLoading = false;
  // True while the loading indicator service reports map work.
  mapLoading = false;
  // All subscriptions are added here and torn down together in ngOnDestroy.
  private subscriptions = new Subscription();
  // Open/closed state of the sidenav, consumed by the template.
  sidenavState$: Observable<boolean>;
  @ViewChild('sidenav', { static: false }) public sidenav: MatSidenav;
  constructor(
    private ccService: NgcCookieConsentService,
    private sidenavService: SidenavService,
    private browserHistory: BrowserHistoryService,
    private colormapStore: ColormapStoreService,
    private loadingIndicator: LoadingIndicatorService,
    private router: Router,
  ) {
    this.sidenavState$ = this.sidenavService.sidenavState$;
    // Restore a previously persisted colormap from localStorage, if any.
    if (localStorage.getItem('colormap')) {
      this.colormapStore.colormap = JSON.parse(localStorage.getItem('colormap'));
    }
    // Call the BrowserHistoryService so that it starts creating a route history
    let browserHistoryInit = this.browserHistory.currentUrl;
    browserHistoryInit = 'thanks!';
  }
  ngOnInit() {
    // Subscribe to keep the cookie-consent popup stream active.
    this.subscriptions.add(this.ccService.popupOpen$.subscribe());
    // Mirror the loading indicator into mapLoading.
    this.subscriptions.add(this.loadingIndicator.loading$.subscribe(
      (loading: boolean) => this.mapLoading = loading,
    ));
    // Toggle the route spinner around navigation start/end events.
    this.subscriptions.add(this.router.events.pipe(
      filter((e) => e instanceof NavigationStart || e instanceof NavigationEnd)
    ).subscribe(
      (event) => {
        if (event instanceof NavigationStart && this.mapLoading === false) {
          if (!this.browserHistory.currentUrl) {
            this.routeLoading = true;
          } else {
            // Navigating within the drive-time view keeps the spinner off.
            if (this.browserHistory.currentUrl.includes('drive-time') && event.url.includes('drive-time')) {
              this.routeLoading = false;
            } else {
              this.routeLoading = true;
            }
          }
        }
        // Delay clearing so the spinner does not flicker on fast navigations.
        if (event instanceof NavigationEnd) { setTimeout(() => this.routeLoading = false, 1200); }
      })
    );
  }
  ngAfterViewInit() {
    // Hand the template's sidenav instance to the shared service.
    this.sidenavService.setSidenav(this.sidenav);
  }
  ngOnDestroy() {
    this.subscriptions.unsubscribe();
  }
  // Template event hook: open or close the sidenav on request.
  onSidenavAction(actionEvent: 'open' | 'close') {
    if (actionEvent === 'open') {
      this.sidenavService.open();
    } else if (actionEvent === 'close') {
      this.sidenavService.close();
    }
  }
  // Keyboard handler: ESC closes the sidenav.
  escape(event) {
    this.sidenavService.close();
  }
  // Keyboard handler: 'f' opens the sidenav.
  fKey(event) {
    this.sidenavService.open();
  }
}
|
from typing import List, Optional, Tuple
from distil.primitives.column_parser import ColumnParserPrimitive
from common_primitives.construct_predictions import ConstructPredictionsPrimitive
from common_primitives.dataset_to_dataframe import DatasetToDataFramePrimitive
from common_primitives.denormalize import DenormalizePrimitive
from common_primitives.extract_columns_semantic_types import (
ExtractColumnsBySemanticTypesPrimitive,
)
from common_primitives.add_semantic_types import AddSemanticTypesPrimitive
from d3m import utils
from d3m.metadata.base import ArgumentType
from d3m.metadata.pipeline import Pipeline, PrimitiveStep, Resolver
from distil.primitives.satellite_image_loader import (
DataFrameSatelliteImageLoaderPrimitive,
)
from d3m.primitives.similarity_modeling.iterative_labeling import ImageRetrieval
from d3m.primitives.remote_sensing.remote_sensing_pretrained import (
RemoteSensingPretrained,
)
def create_pipeline(
    metric: str,
    min_meta: bool = False,
    batch_size: int = 256,
    gem_p: int = 1,
    reduce_dimension: int = 32,
    n_jobs: int = -1,
    resolver: Optional[Resolver] = None,
) -> Tuple[Pipeline, List]:
    """Build the D3M satellite-image retrieval pipeline.

    BUG FIX: the return annotation previously claimed a bare ``Pipeline``
    although the function returns a ``(pipeline, [])`` tuple.

    Args:
        metric: scoring metric name (unused in this builder — TODO confirm).
        min_meta: minimal-metadata flag (unused in this builder — TODO confirm).
        batch_size: batch size for the pretrained remote-sensing featurizer.
        gem_p: GeM pooling parameter for the image-retrieval primitive.
        reduce_dimension: target dimensionality for the retrieval step.
        n_jobs: worker count for image loading (-1 = all cores).
        resolver: optional primitive resolver passed to every step.

    Returns:
        Tuple of the assembled pipeline and an (empty) auxiliary list.
    """
    input_val = "steps.{}.produce"
    # create the basic pipeline
    image_pipeline = Pipeline()
    image_pipeline.add_input(name="inputs")
    image_pipeline.add_input(name="annotations")
    # step 0 - denormalize dataframe (N.B.: injects semantic type information)
    step = PrimitiveStep(
        primitive_description=DenormalizePrimitive.metadata.query(), resolver=resolver
    )
    step.add_argument(
        name="inputs", argument_type=ArgumentType.CONTAINER, data_reference="inputs.0"
    )
    step.add_output("produce")
    image_pipeline.add_step(step)
    previous_step = 0
    # step 1 - extract dataframe from dataset
    step = PrimitiveStep(
        primitive_description=DatasetToDataFramePrimitive.metadata.query(),
        resolver=resolver,
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(previous_step),
    )
    step.add_output("produce")
    image_pipeline.add_step(step)
    previous_step += 1
    df_step = previous_step
    # step 2 - read images
    step = PrimitiveStep(
        primitive_description=DataFrameSatelliteImageLoaderPrimitive.metadata.query(),
        resolver=resolver,
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(previous_step),
    )
    step.add_output("produce")
    step.add_hyperparameter("return_result", ArgumentType.VALUE, "replace")
    step.add_hyperparameter("n_jobs", ArgumentType.VALUE, n_jobs)
    image_pipeline.add_step(step)
    previous_step += 1
    image_step = previous_step
    # step 3 - parse columns
    step = PrimitiveStep(
        primitive_description=ColumnParserPrimitive.metadata.query(), resolver=resolver
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(previous_step),
    )
    step.add_output("produce")
    semantic_types = (
        "http://schema.org/Integer",
        "http://schema.org/Float",
        "https://metadata.datadrivendiscovery.org/types/FloatVector",
    )
    step.add_hyperparameter("parsing_semantics", ArgumentType.VALUE, semantic_types)
    image_pipeline.add_step(step)
    previous_step += 1
    parse_step = previous_step
    # step 4 - extract image / primary-key columns as attributes
    step = PrimitiveStep(
        primitive_description=ExtractColumnsBySemanticTypesPrimitive.metadata.query(),
        resolver=resolver,
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(previous_step),
    )
    step.add_output("produce")
    step.add_hyperparameter(
        "semantic_types",
        ArgumentType.VALUE,
        (
            "http://schema.org/ImageObject",
            "https://metadata.datadrivendiscovery.org/types/PrimaryMultiKey",
        ),
    )
    image_pipeline.add_step(step)
    previous_step += 1
    attributes_step = previous_step
    # step 5 - featurize imagery
    step = PrimitiveStep(
        primitive_description=RemoteSensingPretrained.metadata.query(),
        resolver=resolver,
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(attributes_step),
    )
    step.add_output("produce")
    step.add_hyperparameter("batch_size", ArgumentType.VALUE, batch_size)
    image_pipeline.add_step(step)
    previous_step += 1
    remote_step = previous_step
    # step 6 - extract the annotations dataframe (second pipeline input)
    step = PrimitiveStep(
        primitive_description=DatasetToDataFramePrimitive.metadata.query(),
        resolver=resolver,
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference="inputs.1",
    )
    step.add_output("produce")
    image_pipeline.add_step(step)
    previous_step += 1
    # step 7 - iterative-labeling image retrieval over the features
    step = PrimitiveStep(
        primitive_description=ImageRetrieval.metadata.query(),
        resolver=resolver,
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(remote_step),
    )
    step.add_argument(
        name="outputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(previous_step),
    )
    step.add_output("produce")
    step.add_hyperparameter(name="gem_p", argument_type=ArgumentType.VALUE, data=gem_p)
    step.add_hyperparameter(
        name="reduce_dimension", argument_type=ArgumentType.VALUE, data=reduce_dimension
    )
    image_pipeline.add_step(step)
    previous_step += 1
    # mark column 1 of the retrieval output as predicted target + score
    step = PrimitiveStep(
        primitive_description=AddSemanticTypesPrimitive.metadata.query(),
        resolver=resolver,
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(previous_step),
    )
    step.add_output("produce")
    step.add_hyperparameter("columns", ArgumentType.VALUE, [1])
    step.add_hyperparameter(
        "semantic_types",
        ArgumentType.VALUE,
        [
            "https://metadata.datadrivendiscovery.org/types/PredictedTarget",
            "https://metadata.datadrivendiscovery.org/types/Score",
        ],
    )
    image_pipeline.add_step(step)
    previous_step += 1
    # step 8 - convert predictions to expected format
    step = PrimitiveStep(
        primitive_description=ConstructPredictionsPrimitive.metadata.query(),
        resolver=resolver,
    )
    step.add_argument(
        name="inputs",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(previous_step),
    )
    step.add_argument(
        name="reference",
        argument_type=ArgumentType.CONTAINER,
        data_reference=input_val.format(parse_step),
    )
    step.add_output("produce")
    step.add_hyperparameter("use_columns", ArgumentType.VALUE, [0, 1])
    image_pipeline.add_step(step)
    previous_step += 1
    image_pipeline.add_output(
        name="output", data_reference=input_val.format(previous_step)
    )
    return image_pipeline, []
|
#!/bin/bash
# Battery monitor (cron job): reads /sys/class/power_supply/BAT0 and warns /
# suspends at decreasing capacity thresholds while discharging.
# The exported vars let notify-send/aplay reach the user session from cron.
export XDG_RUNTIME_DIR="/run/user/1000"
export DISPLAY=:0

BAT=$(/usr/bin/cat /sys/class/power_supply/BAT0/capacity)
STAT=$(/usr/bin/cat /sys/class/power_supply/BAT0/status)

# BUG FIX: expansions are now quoted and `=` (POSIX) is used instead of `==`,
# so an empty/odd sysfs read fails the comparison instead of breaking `[`.
# NOTE(review): notify-send -t takes milliseconds, so "-t 2" expires almost
# instantly — confirm whether 2000 was intended.
if [ "$BAT" -lt 15 ] && [ "$STAT" = "Discharging" ]
then
    /usr/bin/notify-send -t 2 "Battery low!!"
    /usr/bin/aplay /home/necros/scripts/crons/batteryStatusKeyboard/anyonethere.wav >> /dev/null 2>&1
fi
if [ "$BAT" -lt 10 ] && [ "$STAT" = "Discharging" ]
then
    /usr/bin/notify-send -t 2 "Battery really low!!"
    /usr/bin/aplay /home/necros/scripts/crons/batteryStatusKeyboard/hello.wav >> /dev/null 2>&1
fi
if [ "$BAT" -lt 5 ] && [ "$STAT" = "Discharging" ]
then
    /usr/bin/notify-send "Suspending!!"
    /usr/bin/systemctl suspend
    /usr/bin/aplay /home/necros/scripts/crons/batteryStatusKeyboard/shutdown.wav >> /dev/null 2>&1
fi
|
#!/bin/bash
# Update, build and launch the Discord bot as multiple shards.
export LC_ALL="en_US.UTF-8"
# Number of shards to launch.
shards=2
# Resolve the script's own directory, then work from its parent.
curdir="$(dirname "$(readlink -f "$0")")"
cd "$(dirname "$curdir")"
echo "Workdir: $(pwd)"
echo "Fetch updates..."
git pull --rebase --autostash
echo "Setup virtual environment..."
if [ ! -d venv ]; then
echo "Creating new venv"
python3 -m venv venv
fi
echo "Install requirements..."
./venv/bin/python -m pip install -U pip wheel
./venv/bin/python -m pip install -r requirements.txt
./venv/bin/python -m pip install -r requirements-dev.txt
echo "Stopping previous instances..."
/bin/bash scripts/stop.sh
echo "Starting $shards shards..."
# Each iteration backgrounds one shard (trailing &); shard ids are 0-based.
for i in $(seq $shards)
do
echo "Starting shard $((i-1)) of $shards shards..." &&
./venv/bin/python . --bot discord --shard-id $((i-1)) --shard-count $shards &&
echo "Stopped shard $((i-1)) of $shards shards." &
done
# Block until every backgrounded shard exits.
wait
echo "Force stop remaining instances..."
/bin/bash scripts/stop.sh
|
<filename>src/kingdom/Animal.java
package kingdom;
/**
 * Base type for entries in the animal kingdom hierarchy.
 *
 * <p>Each instance is assigned a sequential, zero-based id in construction
 * order. NOTE(review): id assignment is not synchronized — confirm
 * single-threaded construction.</p>
 */
public abstract class Animal {
    /** Next id to hand out; incremented once per constructed Animal. */
    private static int maxId = 0;

    /** Auto-assigned, zero-based creation index. */
    public int id;

    private String name;
    private int yearDiscovered;

    /**
     * Creates an animal and assigns it the next sequential id.
     *
     * @param name           common name of the animal
     * @param yearDiscovered year the animal was first described
     */
    public Animal(String name, int yearDiscovered) {
        this.id = maxId++;
        this.name = name;
        this.yearDiscovered = yearDiscovered;
    }

    /** @return the common name */
    public String getName() {
        return name;
    }

    /** @return the discovery year */
    public int getYearDiscovered() {
        return yearDiscovered;
    }

    /** @return a description of the eating behavior */
    public String eat() {
        return "Eats";
    }

    /** @return a description of the movement behavior */
    public String move() {
        return "Moves";
    }

    /** @return a description of the breathing behavior */
    public String breathe() {
        return "Breathes";
    }

    /** @return a description of the reproduction behavior */
    public String reproduce() {
        return "Reproduces";
    }
}
package br.indie.fiscal4j.nfe400.transformers;
import br.indie.fiscal4j.nfe400.classes.NFFinalidade;
import org.simpleframework.xml.transform.Transform;
/**
 * SimpleXML {@link Transform} that maps {@link NFFinalidade} to and from its
 * textual code during (de)serialization.
 */
public class NFFinalidadeTransformer implements Transform<NFFinalidade> {
    // Deserialization: resolve the enum constant from its code.
    @Override
    public NFFinalidade read(final String codigo) {
        return NFFinalidade.valueOfCodigo(codigo);
    }
    // Serialization: emit the enum constant's code.
    @Override
    public String write(final NFFinalidade tipo) {
        return tipo.getCodigo();
    }
}
<reponame>todaylg/LG-Blog
"use strict";
// Express + MongoDB blog server: serves the built SPA from ../dist,
// auto-loads Mongoose models, seeds the database on first run, and mounts
// the API routes.
var express = require('express');
var path = require('path');
var http = require('http');
var fs = require('fs');
var history = require('connect-history-api-fallback');
var favicon = require('serve-favicon');
var bodyParser = require('body-parser');
var cookieParser = require('cookie-parser');
var mongoose = require('mongoose');
var init = require('./init.json');
var settings = require('./settings');

var app = express();
var port = process.env.PORT || 3001;
var dbUrl = 'mongodb://localhost/lgBlog'; // database address

// Connect to the database.
mongoose.connect(dbUrl);
var db = mongoose.connection;
// Use native promises with mongoose.
mongoose.Promise = global.Promise;

app.use(history());
app.set('port', port);
app.use(express.static(path.join(__dirname, '../dist')));//entry
app.use(favicon(path.join(__dirname, '../dist', 'favicon.ico')));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended: false}));

// models loading
var models_path = path.join(__dirname, 'models'); // directory holding the models
// Recursively require every .js file under `path` so each model registers
// itself with mongoose.
var walk = function(path) {
    fs
        .readdirSync(path)
        .forEach(function(file) {
            var newPath = path + '/' + file;
            var stat = fs.statSync(newPath);
            // Plain .js file: load it.
            if (stat.isFile()) {
                if (/(.*)\.(js)/.test(file)) {
                    require(newPath);
                }
            // Directory: recurse.
            } else if (stat.isDirectory()) {
                walk(newPath);
            }
        });
};
walk(models_path);

// Seed the database from init.json the first time it is opened.
var initialize = function () {
    mongoose.model("User").find(null, function (err, doc) {
        if (err) {
            console.log(err)
        } else if (!doc.length) {
            console.log('Database opens for the first time...')
            // BUG FIX: the map callback must RETURN the save() promise,
            // otherwise Promise.all resolved immediately and success was
            // logged before (or regardless of) the actual writes.
            Promise.all(init.map(item => {
                let Models = mongoose.model(item.type);
                return Models(item).save();
            }))
            .then(() => console.log('Initialize successfully.'))
            .catch(() => console.log('Something went wrong during initializing.'))
        }
    })
}

db.on('error', function () {
    console.log('Database connection error.')
})
db.once('open', function () {
    console.log('The database has connected.')
    initialize()
})

var routes = require('./routes/routes');
routes(app);

var server = http.createServer(app);
server.listen(app.get('port'));
server.on('listening', function(){
    console.log('----------listening on port: ' + app.get('port') +'----------------------');
});
server.on('error', function(error){
    // BUG FIX: `bind` was referenced but never defined, so the error path
    // itself threw a ReferenceError; describe the port explicitly.
    var bind = 'Port ' + app.get('port') + ' ';
    switch (error.code) {
        case 'EACCES':
            console.error(bind + '需要权限许可');
            process.exit(1);
            break;
        case 'EADDRINUSE':
            console.error(bind + '端口已被占用');
            process.exit(1);
            break;
        default:
            throw error;
    }
});
|
def longest_common_prefix(string_set):
    """Return the longest common prefix shared by every string in string_set.

    Args:
        string_set: list of strings; may be empty.

    Returns:
        The longest common prefix, "" when the list is empty or the strings
        share no prefix. A single-element list returns that string.
    """
    # BUG FIX: the original indexed string_set[0] and string_set[1]
    # unconditionally, raising IndexError for lists with fewer than 2 items.
    if not string_set:
        return ""
    if len(string_set) == 1:
        return string_set[0]
    # Seed with the common prefix of the first two strings.
    s1 = string_set[0]
    s2 = string_set[1]
    LCprefix = ""
    for i in range(min(len(s1), len(s2))):
        if s1[i] == s2[i]:
            LCprefix += s1[i]
        else:
            break
    # Shrink the prefix against every remaining string.
    for j in range(2, len(string_set)):
        curr_string = string_set[j]
        curr_length = len(curr_string)
        if LCprefix == "":
            break
        for k in range(len(LCprefix)):
            if k == curr_length or LCprefix[k] != curr_string[k]:
                LCprefix = LCprefix[:k]
                break
    return LCprefix
if __name__ == "__main__":
    # BUG FIX: guard the demo so it no longer runs on import.
    string_set = ["codeburst", "codeforces", "codesprint", "codejams"]
    print(longest_common_prefix(string_set))
    # Output: code
<reponame>vany152/FilesHash
// Copyright 2020 <NAME>
// Distributed under the Boost Software License, Version 1.0.
// https://www.boost.org/LICENSE_1_0.txt
#include <boost/describe/members.hpp>
#include <boost/describe/class.hpp>
#include <boost/core/lightweight_test.hpp>
// Minimal aggregate with one public member function, one const member
// function, and one static member function; used to exercise the
// describe_members filters below.
struct X
{
    void f() {}
    int g() const { return 1; }
    static void h() {}
};
// Register X's member functions with Boost.Describe.
BOOST_DESCRIBE_STRUCT(X, (), (f, g, h))
// Boost.Describe needs C++14; otherwise compile an empty main with a notice.
#if !defined(BOOST_DESCRIBE_CXX14)
#include <boost/config/pragma_message.hpp>
BOOST_PRAGMA_MESSAGE("Skipping test because C++14 is not available")
int main() {}
#else
#include <boost/mp11.hpp>
int main()
{
    using namespace boost::describe;
    using namespace boost::mp11;
    // X declares no protected member functions.
    {
        using L = describe_members<X, mod_protected | mod_function>;
        BOOST_TEST_EQ( mp_size<L>::value, 0 );
    }
    // X declares no private member functions.
    {
        using L = describe_members<X, mod_private | mod_function>;
        BOOST_TEST_EQ( mp_size<L>::value, 0 );
    }
    // The two public non-static member functions: f and g.
    {
        using L = describe_members<X, mod_public | mod_function>;
        BOOST_TEST_EQ( mp_size<L>::value, 2 );
        using D1 = mp_at_c<L, 0>;
        using D2 = mp_at_c<L, 1>;
        BOOST_TEST( D1::pointer == &X::f );
        BOOST_TEST_CSTR_EQ( D1::name, "f" );
        BOOST_TEST_EQ( D1::modifiers, mod_public | mod_function );
        BOOST_TEST( D2::pointer == &X::g );
        BOOST_TEST_CSTR_EQ( D2::name, "g" );
        BOOST_TEST_EQ( D2::modifiers, mod_public | mod_function );
    }
    // The single public static member function: h.
    {
        using L = describe_members<X, mod_public | mod_static | mod_function>;
        BOOST_TEST_EQ( mp_size<L>::value, 1 );
        using D1 = mp_at_c<L, 0>;
        BOOST_TEST( D1::pointer == &X::h );
        BOOST_TEST_CSTR_EQ( D1::name, "h" );
        BOOST_TEST_EQ( D1::modifiers, mod_public | mod_static | mod_function );
    }
    return boost::report_errors();
}
#endif // !defined(BOOST_DESCRIBE_CXX14)
|
# Matches letter-digit-letter, optional space, digit-letter-digit — the shape
# of a Canadian postal code (e.g. "K1A 0B1"); presumably that is the intent —
# verify against callers. NOTE(review): requires `import re` (not visible in
# this chunk) and only matches uppercase letters.
pattern = re.compile(r'[A-Z][0-9][A-Z]\s?[0-9][A-Z][0-9]')
import random
def custom_choice(data):
    """Return a uniformly random element of *data* (like random.choice).

    Raises ValueError for an empty sequence (randrange on an empty range).
    """
    # Pick a random index within the sequence bounds.
    idx = random.randrange(0, len(data))
    return data[idx]
# Test the custom_choice function
# Draw 7 random numbers in [1, 49] (range(1, 8) yields 7 iterations).
lottoMax = [random.randrange(1, 50, 1) for _ in range(1, 8)]
print(lottoMax)
print(custom_choice(lottoMax)) # Output: Random element from lottoMax
<filename>2d/src/main/java/de/bitbrain/braingdx/ui/AnimationDrawable.java
package de.bitbrain.braingdx.ui;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.graphics.g2d.Batch;
import com.badlogic.gdx.graphics.g2d.Sprite;
import com.badlogic.gdx.graphics.g2d.TextureRegion;
import com.badlogic.gdx.scenes.scene2d.utils.BaseDrawable;
import de.bitbrain.braingdx.graphics.animation.AnimationCache;
import de.bitbrain.braingdx.graphics.animation.AnimationConfig;
import de.bitbrain.braingdx.graphics.animation.AnimationSpriteSheet;
/**
 * Scene2D {@link BaseDrawable} that renders frames from an
 * {@link AnimationSpriteSheet}, advancing its animation state time on every
 * draw call.
 */
public class AnimationDrawable extends BaseDrawable {
   public static final String DEFAULT_FRAME_ID = "default.frame.id";
   private final AnimationCache animationCache;
   private final Sprite sprite;
   // Tint applied to the frames; alpha is adjustable via setAlpha().
   private Color color = Color.WHITE.cpy();
   // Accumulated time used to select the current key frame.
   private float stateTime;
   public AnimationDrawable(AnimationSpriteSheet spriteSheet, AnimationConfig config) {
      this.animationCache = new AnimationCache(spriteSheet, config);
      this.sprite = new Sprite();
   }
   /** Sets the opacity used when drawing (0 = transparent, 1 = opaque). */
   public void setAlpha(float alpha) {
      color.a = alpha;
   }
   @Override
   public void draw(Batch batch, float x, float y, float width, float height) {
      // Advance the animation by the frame delta, then draw the key frame.
      stateTime += Gdx.graphics.getDeltaTime();
      batch.setColor(color);
      drawRegion(
            batch,
            animationCache.getAnimation(DEFAULT_FRAME_ID).getKeyFrame(stateTime),
            x,
            y,
            width,
            height
      );
      // Restore the batch tint so later draws are unaffected.
      batch.setColor(Color.WHITE);
   }
   // Draws a single region via the reusable sprite, applying tint and bounds.
   private void drawRegion(Batch batch, TextureRegion region, float x, float y, float width, float height) {
      sprite.setRegion(region);
      sprite.setOrigin(x + width / 2f, y + height / 2f);
      sprite.setColor(color);
      sprite.setBounds(
            x,
            y,
            width,
            height
      );
      sprite.setScale(1f, 1f);
      sprite.draw(batch);
   }
}
|
#!/bin/bash
## Release script, inspired from:
## https://gist.github.com/hal313/490e4aeaa591eeca14d2570ecb660f67
## BUG FIX: shebang changed from /bin/sh — the script uses the bash-only
## [[ ]] test, which breaks under a POSIX sh.

## Bail on error
set -e

## Parse the command line arguments
while [[ $# -gt 0 ]]; do
    key="$1"
    case $key in
        -d|--skip-dirty-workspace-check)
        SKIP_DIRTY_WORKSPACE_CHECK=true
        shift
        ;;
        -p|--skip-push)
        SKIP_PUSH=true
        shift
        ;;
        -h|--help)
        echo `basename $0`;
        echo ' -d | --skip-dirt-workspace-check    Skips the dirty workspace check'
        echo ' -p | --skip-push                    Does not push branches and tags'
        exit
        ;;
        *) # Unknown command line option
        (>&2 echo "Unknown command line option: \"$1\"; try running \"`basename $0` --help\"")
        exit 1
        ;;
    esac
done

## Check the workspace status unless the user opts out
if [ `git status --porcelain | wc -l` -ne "0" -a "${SKIP_DIRTY_WORKSPACE_CHECK}" != true ]; then
    (>&2 echo "Workspace is dirty, cannot complete a release")
    exit 1
fi

## Get the next version (bump the patch component of package.json's version)
NEXT_VERSION=$(node -p -e "let currentVersion = require('./package.json').version, parts = currentVersion.split('.'); parts[2] = Number.parseInt(parts[2])+1; parts.join('.');")

## Pre-checks for release
##
## Check for existing branch
if [ `git branch -l | grep release/${NEXT_VERSION} | wc -l` -ne "0" ]; then
    (>&2 echo "Release branch release/${NEXT_VERSION} exists, cannot complete a release")
    ## BUG FIX: previously this printed the error but fell through and
    ## released anyway.
    exit 1
fi
## Check for existing tag
if [ `git tag | grep ${NEXT_VERSION} | wc -l` -ne "0" ]; then
    (>&2 echo "Tag ${NEXT_VERSION} exists, cannot complete a release")
    ## BUG FIX: same fall-through as the branch check above.
    exit 1
fi

## Create a new branch
git checkout -b release/${NEXT_VERSION}

## Build, test and commit the dist and site
npm run dist
npm test
npm run site
git add dist/ docs/
git commit --allow-empty -m 'Generated artifacts'

## Bump the patch version (and do not commit the changes)
npm version --no-git-tag-version patch
git commit -a -m 'Version bump'

## Update the changelog
npx auto-changelog -p
git add CHANGELOG.md
git commit -m 'Updated changelog'

## Merge into master
git checkout master
git pull origin master
git merge --no-ff -m "Merge branch 'release/${NEXT_VERSION}' into 'master'" release/${NEXT_VERSION}

## Tag and delete the release branch
git tag -a -m 'Tagged for release' ${NEXT_VERSION}
git branch -d release/${NEXT_VERSION}

## Merge down to develop
git checkout develop
git merge --no-ff -m "Merge branch 'master' into 'develop'" master

## Push the dist to CI (for deploy)
if [ "${SKIP_PUSH}" != "true" ]; then
    git push --all && git push --tags
fi
|
import hashlib
def encrypt_pass(password):
    """Return the hex-encoded SHA-256 digest of *password*.

    NOTE(review): despite the name this hashes, not encrypts, and a bare
    unsalted SHA-256 is unsuitable for password storage (prefer
    bcrypt/scrypt/argon2). Kept as-is to stay compatible with any stored
    hashes — confirm before changing.
    """
    return hashlib.sha256(password.encode()).hexdigest()
<reponame>flammenmensch/nestjs-demo
import { Controller, Get, Query, UsePipes, ValidationPipe } from "@nestjs/common";
import { AppService } from './app.service';
import { IsNotEmpty } from "class-validator";
// Query DTO for GET /api: validation requires a non-empty `value` string.
class CompressQuery {
  @IsNotEmpty()
  value: string;
}
@Controller()
export class AppController {
  constructor(private readonly appService: AppService) {}
  // GET /api?value=... — validates the query (whitelist strips unknown
  // fields) and returns the compressed form of `value`.
  @Get('/api')
  @UsePipes(
    new ValidationPipe({
      whitelist: true
    })
  )
  async handleCompress(@Query() query: CompressQuery): Promise<string> {
    return Promise.resolve(this.appService.compress(query.value));
  }
}
|
#!/bin/bash
# Run the trigger test suite.
# BUG FIX: without fail-fast, an early test failure (or a failed cd) let the
# script continue and report a misleading exit status.
set -e
# Test struct commando
python /tests/test_structured_commando.py
# Test gnng
/tests/test_gnng.sh
# Run unit tests
TEST_ROOT=/etc/trigger/tests
TEST_DATA=$TEST_ROOT/data
export TRIGGER_SETTINGS="${TEST_DATA}/settings.py"
cd /etc/trigger
py.test -vv --strict
|
def foo(x):
    """Return -1 for negative x, otherwise 0.

    (The original spelled out ``x == 0`` and the else branch separately,
    but both returned 0; collapsed here with behavior unchanged.)
    """
    return -1 if x < 0 else 0
<gh_stars>1-10
/*
* Copyright 2016 Martin.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dk.netdesign.common.osgi.config.test;
import dk.netdesign.common.osgi.config.osgi.ManagedPropertiesServiceFactory;
import dk.netdesign.common.osgi.config.osgi.service.ManagedPropertiesService;
import dk.netdesign.common.osgi.config.service.PropertyAccess;
import dk.netdesign.common.osgi.config.test.properties.AutoFilteringListTypes;
import dk.netdesign.common.osgi.config.test.properties.ChangingConfig;
import dk.netdesign.common.osgi.config.test.properties.FilteringConfig;
import dk.netdesign.common.osgi.config.test.properties.WrapperTypes;
import org.junit.*;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Configuration;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.karaf.options.KarafDistributionOption;
import org.ops4j.pax.exam.options.MavenArtifactUrlReference;
import org.ops4j.pax.exam.options.MavenUrlReference;
import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy;
import org.ops4j.pax.exam.spi.reactors.PerClass;
import org.osgi.framework.BundleContext;
import org.osgi.service.cm.ConfigurationAdmin;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.io.File;
import java.net.URL;
import java.util.Arrays;
import java.util.Dictionary;
import java.util.Hashtable;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.ops4j.pax.exam.CoreOptions.maven;
import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.*;
/**
*
* @author Martin
*/
@RunWith(PaxExam.class)
@ExamReactorStrategy(PerClass.class)
public class PropertiesDSServiceTest {

    @Inject
    private BundleContext context;

    @Inject
    private ManagedPropertiesService factory;

    private static final Logger LOGGER = LoggerFactory.getLogger(PropertiesDSServiceTest.class);

    /**
     * Builds the Pax Exam container configuration: the Karaf distribution,
     * the standard feature repository, the ManagedProperties feature
     * repository, and replacement configuration files used by the tests.
     */
    @Configuration
    public Option[] config() throws Exception {
        MavenArtifactUrlReference karafUrl = maven()
                .groupId("org.apache.karaf")
                .artifactId("apache-karaf")
                .versionAsInProject()
                .type("tar.gz");
        MavenUrlReference karafStandardRepo = maven()
                .groupId("org.apache.karaf.features")
                .artifactId("standard")
                .classifier("features")
                .type("xml")
                .versionAsInProject();
        MavenUrlReference managedPropertiesRepo = maven()
                .groupId("dk.netdesign")
                .artifactId("managedproperties-feature")
                .classifier("features")
                .type("xml")
                .versionAsInProject();
        return new Option[]{
            // KarafDistributionOption.debugConfiguration("5005", true),
            karafDistributionConfiguration()
                    .frameworkUrl(karafUrl)
                    .unpackDirectory(new File("exam"))
                    .useDeployFolder(false),
            keepRuntimeFolder(),
            features(karafStandardRepo, "scr"),
            features(managedPropertiesRepo, "ManagedProperties", "ManagedPropertiesTestResources"),
            replaceConfigurationFile("etc/org.ops4j.pax.logging.cfg", new File(this.getClass().getClassLoader().getResource("dk/netdesign/common/osgi/config/test/org.ops4j.pax.logging.cfg").toURI())),
            replaceConfigurationFile("etc/org.ops4j.pax.url.mvn.cfg", new File(this.getClass().getClassLoader().getResource("dk/netdesign/common/osgi/config/test/org.ops4j.pax.url.mvn.cfg").toURI())),
            replaceConfigurationFile("etc/WrapperTypes.cfg", new File(this.getClass().getClassLoader().getResource("dk/netdesign/common/osgi/config/test/WrapperTypes.cfg").toURI())),
        };
    }

    public PropertiesDSServiceTest() {
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    @Before
    public void setUp() {
    }

    @After
    public void tearDown() {
    }

    /**
     * Registers the WrapperTypes configuration and verifies every wrapper-typed
     * property is populated from etc/WrapperTypes.cfg, unregistering afterwards.
     */
    @Test
    public void testCanInjectService() throws Exception {
        WrapperTypes types = null;
        try {
            types = factory.register(WrapperTypes.class, context);
            // valueOf(...) replaces the boxing constructors (new Double(...) etc.),
            // which are deprecated since Java 9; the compared values are unchanged.
            assertEquals(Double.valueOf(55.12), types.getDouble());
            assertEquals(Float.valueOf(22.22f), types.getFloat());
            assertEquals(Integer.valueOf(42), types.getInt());
            assertEquals(true, types.getBoolean());
            assertEquals(Byte.valueOf((byte) 1), types.getByte());
            assertEquals(Long.valueOf(100L), types.getLong());
            assertEquals(Short.valueOf((short) 3), types.getShort());
        } finally {
            if (types != null) {
                PropertyAccess.actions(types).unregisterProperties();
            }
        }
    }
}
|
#!/usr/bin/env bash
# Usage ./homeworks.sh > homeworks.log 2>&1 &
# or ./homeworks.sh stop | reset
case "$1" in
reset)
    # Wipe state and student checkouts...
    rm homeworks.json*
    rm homeworks.log*
    rm -rf ../students
    # ...then fall through (bash 4 ';&') into the "stop" branch below.
    ;&
stop)
    # Kill any running homeworks.js process (the /awk/ guard skips this awk itself).
    ps -ef | awk '/awk/{next}/homeworks.js/{system("kill -9 "$2)}'
    ;;
*)
    # Default: run the server in a restart loop while it exits cleanly (rc 0).
    rc=0
    while [ $rc = 0 ]; do
        echo "Homeworks Server"
        # Timestamped backups of state and log before each (re)start.
        SUF=$(date +"%Y%m%d%H%M%S")
        cp "homeworks.json" "homeworks.json_"${SUF}
        if [ -f "homeworks.log" ];then
            cp "homeworks.log" "homeworks.log_"${SUF}
        fi
        node homeworks.js $*
        rc=$?
    done
    # Non-zero exit: notify via Slack webhook (token read from slack.txt).
    curl -X POST --data-urlencode "payload={\"channel\": \"@benno.staebler\", \"username\": \"HomeworksServer\", \"text\": \"HomeworksServer wurde unerwartet beendet!\", \"icon_emoji\": \":ghost:\"}" https://hooks.slack.com/services/$(cat slack.txt)
    ;;
esac
|
<filename>linked-list/linked-list.js
function LinkedList() {
this.head = new Node();
this.tail = new Node();
this.number_of_elements = 0;
this.count = function() {
return this.number_of_elements;
}
this.head.next = this.tail;
this.tail.previous = this.head;
this.pop = function() {
this.head = this.head.next;
this.number_of_elements--;
return this.head.value;
}
this.push = function(value) {
this.head.value = value;
head = new Node();
head.next = this.head;
this.head.previous = head;
this.head = head;
this.number_of_elements++;
}
this.shift = function() {
this.tail = this.tail.previous;
this.number_of_elements--;
return this.tail.value;
}
this.unshift = function(value) {
this.tail.value = value;
tail = new Node();
tail.previous = this.tail;
this.tail.next = tail;
this.tail = tail;
this.number_of_elements++;
}
this.delete = function(value) {
node = head.next;
while (node.value != value) {
node = node.next;
if (!node) {
return -1;
}
}
node.previous.next = node.next;
node.next.previous = node.previous;
this.number_of_elements--;
}
}
// A single list node; `value` is null for the sentinel nodes at both ends.
function Node() {
    this.previous = null;
    this.next = null;
    this.value = null;
}
module.exports = LinkedList;
|
class DataObject:
    """Simple container for id/name/lang/owner_id with dict round-tripping."""

    def __init__(self):
        # All fields start unset; populate via deserialize() or direct assignment.
        self.id = None
        self.name = None
        self.lang = None
        self.owner_id = None

    def serialize(self):
        """Return the object's fields as a plain dict."""
        return dict(
            id=self.id,
            name=self.name,
            lang=self.lang,
            owner_id=self.owner_id,
        )

    def deserialize(self, params):
        """Populate fields from ``params``; missing keys become None."""
        for key in ('id', 'name', 'lang', 'owner_id'):
            setattr(self, key, params.get(key))
Two different optimization algorithms that can be applied to a given problem are hill climbing and simulated annealing. Hill climbing is a local search algorithm that iteratively moves to better neighboring solutions or configurations, with the aim of finding a maximum or minimum value. Simulated annealing is a stochastic optimization algorithm that occasionally accepts worse solutions during the search, allowing it to escape local optima or basins of attraction.
import {mocks} from '../helpers/mocks.js';
import {prebidMessenger} from '../../src/messaging.js';
// Unit tests for prebidMessenger: cross-frame messaging helper that posts to
// the parent window and filters responses by publisher origin.
describe('prebidMessenger',() => {
    let win;
    beforeEach(() => {
        win = Object.assign(mocks.createFakeWindow(), {
            parent: {
                postMessage: sinon.spy()
            }
        });
    })
    describe('when publisher URL is unavailable', () => {
        let sendMessage;
        beforeEach(() => {
            sendMessage = prebidMessenger(null, win);
        });
        it('should throw', () => {
            expect(() => sendMessage('test')).to.throw();
        })
    });
    describe('when publisher URL is available', () => {
        const URL = 'https://www.publisher.com/page.html';
        const ORIGIN = 'https://www.publisher.com'
        let sendMessage;
        let callback, handler;
        beforeEach(() => {
            // Capture the message handler so tests can invoke it directly.
            win.addEventListener = function (_, h) {
                handler = h;
            }
            win.removeEventListener = sinon.spy();
            sendMessage = prebidMessenger(URL, win);
            callback = sinon.spy();
        })
        it('should use origin for postMessage', () => {
            sendMessage('test');
            sinon.assert.calledWith(win.parent.postMessage, JSON.stringify('test'), ORIGIN);
        });
        // BUG FIX: test description typo "mach" -> "match".
        it('should not run callback on response if origin does not match', ()=> {
            sendMessage('test', callback);
            handler({origin: 'different'});
            expect(callback.called).to.be.false;
        });
        it('should run callback on response if origin does match', () => {
            sendMessage('test', callback);
            const ev = {origin: ORIGIN, data: 'stuff'};
            handler(ev);
            sinon.assert.calledWith(callback, ev);
        });
        it('should remove window listener when canceled', () => {
            sendMessage('test', callback)();
            expect(win.removeEventListener.called).to.be.true;
        })
    });
})
|
package protobuf
import "github.com/christianalexander/kvdb/stores"
// RecordToProto converts a stores.Record into its protobuf representation.
func RecordToProto(r stores.Record) *Record {
	converted := Record{}
	converted.Kind = recordKindToProto(r.Kind)
	converted.Key = r.Key
	converted.Value = r.Value
	return &converted
}
// recordKindToProto maps a store record kind onto the protobuf enum.
// Any kind other than delete (including unknown values) maps to Record_SET,
// matching the original switch's default.
func recordKindToProto(k stores.RecordKind) Record_RecordKind {
	if k == stores.RecordKindDelete {
		return Record_DEL
	}
	return Record_SET
}
// recordKindFromProto maps the protobuf enum back to a store record kind.
// Anything other than Record_DEL (including unknown values) maps to
// RecordKindSet, matching the original switch's default.
func recordKindFromProto(k Record_RecordKind) stores.RecordKind {
	if k == Record_DEL {
		return stores.RecordKindDelete
	}
	return stores.RecordKindSet
}
// ToRecord converts the protobuf Record back into a stores.Record.
func (r Record) ToRecord() *stores.Record {
	converted := stores.Record{
		Kind:  recordKindFromProto(r.Kind),
		Key:   r.Key,
		Value: r.Value,
	}
	return &converted
}
|
#!/bin/bash
# wait for Postgres to start
# Returns 0 once a psycopg2 connection to host "db" succeeds, non-zero otherwise.
function postgres_ready() {
python << END
import sys
import psycopg2
try:
    conn = psycopg2.connect(dbname="postgres", user="postgres", password="postgres", host="db")
except psycopg2.OperationalError:
    sys.exit(-1)
sys.exit(0)
END
}
# Poll once per second until the database accepts connections.
until postgres_ready; do
  >&2 echo "Postgres is unavailable - sleeping"
  sleep 1
done
# Start app
>&2 echo "Postgres is up - executing command"
./start.sh
|
// Declares the mcGooglePlace AngularJS module (no dependencies).
// The IIFE keeps 'use strict' scoped to this file.
(function() {
    'use strict';
    angular.module('mcGooglePlace', []);
})();
|
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from dateutil.parser import parse as date_parse
from collections import OrderedDict
# Possible evaluation outcomes for an Event.
EVALUATION_POSITIVE = True
EVALUATION_NEGATIVE = False

# Human-readable labels for the date filters.
THIS_MONTH_FILTER = 'This Month'
THIS_WEEK_FILTER = 'This Week'
TODAY_FILTER = 'Today'
ALL_FILTER = 'All'

# Maps filter label -> name of the Event classmethod that returns how many days
# to look back. ALL_FILTER maps to '' so getattr falls back to a lambda
# returning None, which disables date filtering (see Event.filter).
FILTERS = OrderedDict([
    (TODAY_FILTER, 'get_num_days_today'),
    (THIS_WEEK_FILTER, 'get_num_days_week'),
    (THIS_MONTH_FILTER, 'get_num_days_month'),
    (ALL_FILTER, '')
])
class Event(object):
"""An event"""
def __init__(self, date, evaluation=EVALUATION_POSITIVE):
super(Event, self).__init__()
self.date = date
self.iso_date = date.isoformat()
self.evaluation = evaluation
def __unicode__(self):
eval_text = 'Positive' if self.evaluation else 'Negative'
return "{}: {}".format(self.iso_date, eval_text)
def __str__(self):
return self.__unicode__()
def __repr__(self):
return self.__unicode__()
def save(self, store):
print "saving %s" % unicode(self)
store.put(self.iso_date, evaluation=self.evaluation)
@classmethod
def get_events(cls, store):
events = []
for key in store:
evaluation = store.get(key).get('evaluation', True)
event = cls(
date=date_parse(key),
evaluation=evaluation
)
events.append(event)
return events
@classmethod
def reset_store(cls, store):
for key in store:
store.delete(key)
return []
@classmethod
def get_rate(cls, events):
total_events = len(events)
positives = len(filter(lambda e: e.evaluation, events))
positive_perc = (positives * 100) / float(total_events)
negative_perc = 100 - positive_perc
return positive_perc, negative_perc
@classmethod
def filter(cls, store, filter_by):
get_num_days_method = getattr(cls, FILTERS.get(filter_by), lambda: None)
num_days = get_num_days_method()
events = cls.filter_by_num_days(store, num_days)
return events
@classmethod
def get_num_days_week(cls):
today_week_number = datetime.today().weekday()
return today_week_number
@classmethod
def get_num_days_month(cls):
today_month_day = datetime.today().day
return today_month_day - 1
@classmethod
def get_num_days_today(cls):
return 0
@classmethod
def filter_by_num_days(cls, store, num_days):
events = cls.get_events(store)
if num_days is None:
return events
today = datetime.now().date()
initial_date = today - timedelta(days=num_days)
filtered_events = []
for event in events:
delta = event.date.date() - initial_date
if delta.days <= num_days and delta.days >= 0:
filtered_events.append(event)
return filtered_events
@classmethod
def remove_event(cls, store, event):
return store.delete(event.iso_date)
|
import styled from 'styled-components'
// Base decorative leaf image: absolutely positioned, layered above the page
// content (z-index 3). The variants below only adjust size and placement.
export const Leaves = styled.img`
  position: absolute;
  z-index: 3;
`
// Large leaf anchored toward the right edge; shifts left-of-center on mobile.
export const RightToMidLeaf = styled(Leaves)`
  height: 250px;
  right: -5%;
  bottom: -12%;
  @media (max-width: 600px) {
    height: 150px;
    bottom: -8%;
    left: 60%;
  }
`
// Leaf centered horizontally, hanging below the container.
export const MiddleLeaf = styled(Leaves)`
  height: 250px;
  left: 40%;
  bottom: -20%;
  @media (max-width: 600px) {
    height: 150px;
    bottom: -10%;
    left: 60%;
  }
`
// Smaller leaf flush against the right edge.
export const RightLeaf = styled(Leaves)`
  height: 200px;
  right: 0;
  bottom: -20%;
  @media (max-width: 600px) {
    height: 150px;
    bottom: -10%;
  }
`
// Bottom-left leaf; scales with viewport width below 2000px.
export const LeftLeaf = styled(Leaves)`
  height: 250px;
  left: -30px;
  bottom: -5%;
  @media (max-width: 2000px) {
    height: 16vw;
    left: -5%;
    bottom: -5%;
  }
  @media (max-width: 600px) {
    height: 100px;
    bottom: 0;
    left: -8%;
  }
`
// Top-left leaf; sized by width rather than height.
export const LeftTopLeaf = styled(Leaves)`
  width: 250px;
  left: -30px;
  top: -5%;
  @media (max-width: 2000px) {
    width: 20vw;
    left: -5%;
    bottom: -5%;
  }
  @media (max-width: 600px) {
    width: 150px;
    bottom: 0;
    left: -8%;
  }
`
|
package net.blay09.mods.cookingforblockheads.tile;
import net.blay09.mods.cookingforblockheads.api.kitchen.IKitchenStorageProvider;
import net.blay09.mods.cookingforblockheads.container.ContainerWithInventory;
import net.blay09.mods.cookingforblockheads.container.inventory.InventoryLarge;
import net.blay09.mods.cookingforblockheads.container.inventory.InventoryNormal;
import net.minecraft.entity.item.EntityItem;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.inventory.IInventory;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.nbt.NBTTagList;
import net.minecraft.network.NetworkManager;
import net.minecraft.network.Packet;
import net.minecraft.network.play.server.S35PacketUpdateTileEntity;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.AxisAlignedBB;
import net.minecraft.world.World;
import net.minecraftforge.common.util.Constants;
import java.util.List;
import java.util.Random;
/**
 * Base tile entity for Cooking for Blockheads kitchen blocks that carry an
 * inventory. Handles NBT (de)serialization of the inventory contents, a
 * door-opening animation driven by the number of players viewing the
 * container, a color and "flipped" rendering flag, and client sync via
 * description packets.
 *
 * NOTE(review): written against Minecraft 1.7.x Forge APIs
 * (worldObj/xCoord, S35PacketUpdateTileEntity).
 */
public class BaseKitchenTileWithInventory extends TileEntity implements IInventory, IKitchenStorageProvider {

    // Shared RNG used when scattering inventory contents on block break.
    protected static final Random random = new Random();

    // Name passed to the backing InventoryNormal instances.
    private final String inventoryName;
    // Inventory owned and persisted by this tile.
    protected InventoryNormal internalInventory;
    // Inventory exposed through the IInventory methods; initialized to the
    // internal inventory (protected, so subclasses may reassign it).
    protected IInventory sharedInventory;
    // Color value, synced to clients via block event id 2.
    protected int color;
    // Mirrored-model rendering flag, persisted in NBT as "IsFlipped".
    protected boolean isFlipped;
    // Previous/current door animation progress in [0, 1], for interpolation.
    protected float prevDoorAngle;
    protected float doorAngle;
    // Players currently viewing this container; drives the door animation.
    protected int numPlayersUsing;
    protected int tickCounter;
    // Dummy item entity used for rendering; created when the world is set.
    private EntityItem renderItem;

    public BaseKitchenTileWithInventory(String inventoryName) {
        this.inventoryName = inventoryName;
        internalInventory = new InventoryNormal(this.inventoryName);
        sharedInventory = internalInventory;
    }

    @Override
    public void setWorldObj(World world) {
        super.setWorldObj(world);
        // hoverStart 0 keeps the rendered item from bobbing out of phase.
        renderItem = new EntityItem(world, 0, 0, 0);
        renderItem.hoverStart = 0f;
    }

    // Block events received on the client: id 1 = viewer count, id 2 = color.
    @Override
    public boolean receiveClientEvent(int id, int value) {
        if (id == 1) {
            numPlayersUsing = value;
            return true;
        } else if (id == 2) {
            color = value;
            return true;
        }
        return super.receiveClientEvent(id, value);
    }

    // Restores inventory contents, color and flipped flag from NBT.
    @Override
    public void readFromNBT(NBTTagCompound tagCompound) {
        super.readFromNBT(tagCompound);
        internalInventory = new InventoryNormal(this.inventoryName);
        NBTTagList tagList = tagCompound.getTagList("Items", Constants.NBT.TAG_COMPOUND);
        for (int i = 0; i < tagList.tagCount(); i++) {
            NBTTagCompound itemCompound = tagList.getCompoundTagAt(i);
            internalInventory.setInventorySlotContents(itemCompound.getByte("Slot"), ItemStack.loadItemStackFromNBT(itemCompound));
        }
        color = tagCompound.getByte("Color");
        isFlipped = tagCompound.getBoolean("IsFlipped");
    }

    // Writes non-empty inventory slots, color and flipped flag to NBT.
    @Override
    public void writeToNBT(NBTTagCompound tagCompound) {
        super.writeToNBT(tagCompound);
        NBTTagList tagList = new NBTTagList();
        for (int i = 0; i < internalInventory.getSizeInventory(); i++) {
            ItemStack itemStack = internalInventory.getStackInSlot(i);
            if (itemStack != null) {
                NBTTagCompound itemCompound = new NBTTagCompound();
                itemCompound.setByte("Slot", (byte) i);
                itemStack.writeToNBT(itemCompound);
                tagList.appendTag(itemCompound);
            }
        }
        tagCompound.setTag("Items", tagList);
        tagCompound.setByte("Color", (byte) color);
        tagCompound.setBoolean("IsFlipped", isFlipped);
    }

    // Periodically (staggered by block position) recounts nearby players that
    // actually have this inventory open, so a missed closeInventory() call
    // cannot leave numPlayersUsing stuck above zero (door stuck open).
    protected void fixBrokenContainerClosedCall() {
        // Because Mojang people thought it would be more sane to check chest watchers every few ticks instead of fixing the actual issue.
        if (!worldObj.isRemote && numPlayersUsing != 0 && (tickCounter + xCoord + yCoord + zCoord) % 200 == 0) {
            numPlayersUsing = 0;
            float range = 5.0F;
            for (EntityPlayer entityPlayer : (List<EntityPlayer>) worldObj.getEntitiesWithinAABB(EntityPlayer.class, AxisAlignedBB.getBoundingBox((float) xCoord - range, (float) yCoord - range, (float) zCoord - range, (float) xCoord + 1 + range, (float) yCoord + 1 + range, (float) zCoord + 1 + range))) {
                if (entityPlayer.openContainer instanceof ContainerWithInventory) {
                    IInventory inventory = ((ContainerWithInventory) entityPlayer.openContainer).getContainerInventory();
                    if (inventory == this || (inventory instanceof InventoryLarge && ((InventoryLarge) inventory).containsInventory(this))) {
                        numPlayersUsing++;
                    }
                }
            }
        }
    }

    // Advances the door animation each tick: opens at 0.2/tick while viewed,
    // closes at 0.1/tick otherwise; prevDoorAngle enables render interpolation.
    @Override
    public void updateEntity() {
        super.updateEntity();
        tickCounter++;
        fixBrokenContainerClosedCall();
        prevDoorAngle = doorAngle;
        if (numPlayersUsing > 0) {
            final float doorSpeed = 0.2f;
            doorAngle = Math.min(1f, doorAngle + doorSpeed);
        } else {
            final float doorSpeed = 0.1f;
            doorAngle = Math.max(0f, doorAngle - doorSpeed);
        }
    }

    // Viewer-count changes are broadcast as block event id 1 (see receiveClientEvent).
    @Override
    public void openInventory() {
        numPlayersUsing++;
        worldObj.addBlockEvent(xCoord, yCoord, zCoord, getBlockType(), 1, numPlayersUsing);
    }

    @Override
    public void closeInventory() {
        numPlayersUsing--;
        worldObj.addBlockEvent(xCoord, yCoord, zCoord, getBlockType(), 1, numPlayersUsing);
    }

    @Override
    public boolean isItemValidForSlot(int i, ItemStack itemStack) {
        return true;
    }

    // IInventory methods below delegate to sharedInventory.
    @Override
    public int getSizeInventory() {
        return sharedInventory.getSizeInventory();
    }

    @Override
    public ItemStack getStackInSlot(int i) {
        return sharedInventory.getStackInSlot(i);
    }

    @Override
    public ItemStack decrStackSize(int i, int amount) {
        return sharedInventory.decrStackSize(i, amount);
    }

    @Override
    public ItemStack getStackInSlotOnClosing(int i) {
        return sharedInventory.getStackInSlotOnClosing(i);
    }

    @Override
    public void setInventorySlotContents(int i, ItemStack itemStack) {
        sharedInventory.setInventorySlotContents(i, itemStack);
    }

    @Override
    public String getInventoryName() {
        return sharedInventory.getInventoryName();
    }

    @Override
    public boolean hasCustomInventoryName() {
        return sharedInventory.hasCustomInventoryName();
    }

    @Override
    public int getInventoryStackLimit() {
        return sharedInventory.getInventoryStackLimit();
    }

    // Also schedules a client-side block update so changes render promptly.
    @Override
    public void markDirty() {
        super.markDirty();
        if (hasWorldObj()) {
            worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
        }
    }

    public EntityItem getRenderItem() {
        return renderItem;
    }

    // Applies a full NBT snapshot received from the server.
    @Override
    public void onDataPacket(NetworkManager net, S35PacketUpdateTileEntity pkt) {
        super.onDataPacket(net, pkt);
        NBTTagCompound tagCompound = pkt.func_148857_g();
        readFromNBT(tagCompound);
    }

    // Sends a full NBT snapshot to clients.
    @Override
    public Packet getDescriptionPacket() {
        NBTTagCompound tagCompound = new NBTTagCompound();
        writeToNBT(tagCompound);
        return new S35PacketUpdateTileEntity(xCoord, yCoord, zCoord, 0, tagCompound);
    }

    // Drops the internal inventory into the world, splitting each stack into
    // random chunks of 10-30 items with a small random velocity.
    public void breakBlock() {
        for (int i = 0; i < internalInventory.getSizeInventory(); i++) {
            ItemStack itemStack = internalInventory.getStackInSlot(i);
            if (itemStack != null) {
                float offsetX = random.nextFloat() * 0.8f + 0.1f;
                float offsetY = random.nextFloat() * 0.8f + 0.1f;
                EntityItem entityItem;
                for (float offsetZ = random.nextFloat() * 0.8f + 0.1f; itemStack.stackSize > 0; worldObj.spawnEntityInWorld(entityItem)) {
                    int stackSize = random.nextInt(21) + 10;
                    if (stackSize > itemStack.stackSize) {
                        stackSize = itemStack.stackSize;
                    }
                    itemStack.stackSize -= stackSize;
                    entityItem = new EntityItem(worldObj, (double) ((float) xCoord + offsetX), (double) ((float) yCoord + offsetY), (double) ((float) zCoord + offsetZ), new ItemStack(itemStack.getItem(), stackSize, itemStack.getItemDamage()));
                    float f3 = 0.05F;
                    entityItem.motionX = (double) ((float) random.nextGaussian() * f3);
                    entityItem.motionY = (double) ((float) random.nextGaussian() * f3 + 0.2F);
                    entityItem.motionZ = (double) ((float) random.nextGaussian() * f3);
                    if (itemStack.hasTagCompound()) {
                        entityItem.getEntityItem().setTagCompound((NBTTagCompound) itemStack.getTagCompound().copy());
                    }
                }
            }
        }
    }

    // Color changes are synced to clients via block event id 2.
    public void setColor(int color) {
        this.color = color;
        markDirty();
        worldObj.addBlockEvent(xCoord, yCoord, zCoord, getBlockType(), 2, color);
    }

    public int getColor() {
        return color;
    }

    @Override
    public boolean isUseableByPlayer(EntityPlayer entityPlayer) {
        return true;
    }

    public float getDoorAngle() {
        return doorAngle;
    }

    public float getPrevDoorAngle() {
        return prevDoorAngle;
    }

    @Override
    public IInventory getInventory() {
        return this;
    }

    public boolean isFlipped() {
        return isFlipped;
    }

    public void setFlipped(boolean isFlipped) {
        this.isFlipped = isFlipped;
    }

    public InventoryNormal getInternalInventory() {
        return internalInventory;
    }
}
|
<reponame>scoundrel-tech/scoundrel
package tech.scoundrel.field
/**
 * Abstraction over a named field of value type V belonging to an owner of
 * type R.
 */
trait Field[V, R] {
  def name: String
  def owner: R
}
|
<filename>source/Oliviann.Web.WebForms/tb_scripts.debug.js
// Reads the custom "exMaxLen" expando attribute from a form field via direct
// property access, since getAttribute won't work with Firefox for it.
function GetMaxLength(targetField) {
    return targetField.exMaxLen;
}
// Keypress handler: blocks further printable input once the field holds
// "exMaxLen" characters, and trims any overflow already present.
// Returns true/false and mirrors that into sourceEvent.returnValue (legacy IE).
function LimitInput(targetField, sourceEvent)
{
    var isPermittedKeystroke,
        enteredKeystroke,
        maximumFieldLength,
        currentFieldLength,
        inputAllowed = true,
        selectionLength = parseInt(GetSelectionLength(targetField));
    // No limit attribute -> allow everything.
    if (GetMaxLength(targetField) == null)
    {
        sourceEvent.returnValue = inputAllowed;
        return (inputAllowed);
    }
    // Get the current and maximum field length
    currentFieldLength = parseInt(targetField.value.length);
    maximumFieldLength = parseInt(GetMaxLength(targetField));
    // Allow non-printing, arrow and delete keys
    // (window.event present -> legacy IE, keyCode; otherwise standard `which`).
    enteredKeystroke = window.event ? sourceEvent.keyCode : sourceEvent.which;
    isPermittedKeystroke = ((enteredKeystroke < 32) || (enteredKeystroke >= 33 && enteredKeystroke <= 40) || (enteredKeystroke === 46));
    // Decide whether the keystroke is allowed to proceed; selected text will be
    // replaced by the keystroke, so it frees up that many characters.
    if (!isPermittedKeystroke)
    {
        if ((currentFieldLength - selectionLength) >= maximumFieldLength)
        {
            inputAllowed = false;
        }
    }
    // Force a trim of the textarea contents if necessary
    if (currentFieldLength > maximumFieldLength)
    {
        targetField.value = targetField.value.substring(0, maximumFieldLength);
    }
    sourceEvent.returnValue = inputAllowed;
    return (inputAllowed);
}
// Paste handler: rejects the paste when the result would exceed "exMaxLen".
// NOTE(review): window.clipboardData is IE-only; other browsers would need
// sourceEvent.clipboardData — confirm the intended browser support.
function LimitPaste(targetField, sourceEvent)
{
    var clipboardText,
        resultantLength,
        maximumFieldLength,
        currentFieldLength,
        pasteAllowed = true,
        selectionLength = GetSelectionLength(targetField);
    // No limit attribute -> allow everything.
    if (GetMaxLength(targetField) == null)
    {
        sourceEvent.returnValue = pasteAllowed;
        return (pasteAllowed);
    }
    // Get the current and maximum field length
    currentFieldLength = parseInt(targetField.value.length);
    maximumFieldLength = parseInt(GetMaxLength(targetField));
    clipboardText = window.clipboardData.getData("Text");
    // Pasting replaces the current selection, hence the subtraction.
    resultantLength = currentFieldLength + clipboardText.length - selectionLength;
    if (resultantLength > maximumFieldLength)
    {
        pasteAllowed = false;
    }
    sourceEvent.returnValue = pasteAllowed;
    return (pasteAllowed);
}
// Returns the number of currently selected characters in the given field,
// falling back to the legacy IE document.selection API when the standard
// selectionStart/selectionEnd properties are unavailable.
function GetSelectionLength(targetField) {
    return targetField.selectionStart === undefined
        ? document.selection.createRange().text.length
        : targetField.selectionEnd - targetField.selectionStart;
}
#!/bin/bash
# Fetch the SSH host key of host $1 (listening on port 2222) and rewrite the
# known_hosts line so it can be installed for the in-cluster gitlab-shell:
# strip the ":2222" suffix and the surrounding brackets, then replace the host
# with the cluster-internal service DNS name.
# BUG FIX: "$1" is now quoted to avoid word splitting/globbing; the four sed
# processes are merged into one with -e expressions (same substitutions).
ssh-keyscan -p 2222 "$1" 2>&1 | sed -e 's/:2222//g' -e 's/\[//g' -e 's/\]//g' -e "s/$1/gitlab-gitlab-shell.gitlab.svc.cluster.local/g"
<gh_stars>1-10
package elasta.orm.entity.impl;
import elasta.orm.entity.DependencyInfo;
import elasta.orm.entity.EntityUtils;
import elasta.orm.entity.core.Entity;
import elasta.orm.entity.core.Field;
import elasta.orm.entity.core.columnmapping.*;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;
/**
* Created by sohan on 4/14/2017.
*/
/**
 * Builds, for a collection of entities, a map from each referencing table name
 * to the entities/fields that depend on it, plus an entity-name lookup map.
 * (The misspelled class name "Dependeny" is kept — renaming would break callers.)
 */
public class TableToTableDependenyMapBuilder {
    /** Referencing table name -> dependencies of entities pointing at it. */
    final Map<String, TableDependency> dependencyMap = new HashMap<>();
    /** Entity name -> entity, populated during build(). */
    final Map<String, Entity> entityNameToEntityMap = new HashMap<>();
    /** Lazily built per-entity index of field name -> field (see getField). */
    final Map<String, Map<String, Field>> entityToFieldNameToFieldIndexMap = new HashMap<>();

    /**
     * Scans every relation mapping of every entity and records, per referenced
     * table, which entity/field depends on it.
     *
     * @param entities the entities to index
     * @return the table-dependency map together with the entity lookup map
     */
    public EntityUtils.TableMapAndDependencyMappingInfo build(Collection<Entity> entities) {
        entities.forEach(entity -> {
            entityNameToEntityMap.put(entity.getName(), entity);
            // Enhanced for-loop: the former index variable was never used.
            for (RelationMapping dbColumnMapping : entity.getDbMapping().getRelationMappings()) {
                // The per-case casts are preserved so a mapping whose column type
                // disagrees with its concrete class still fails fast (CCE), as before.
                switch (dbColumnMapping.getColumnType()) {
                    case DIRECT: {
                        DirectRelationMapping mapping = (DirectRelationMapping) dbColumnMapping;
                        putInTable(
                                mapping.getReferencingTable(),
                                entity,
                                new DependencyInfo(
                                        getField(entity, mapping.getField()),
                                        mapping
                                )
                        );
                    }
                    break;
                    case INDIRECT: {
                        IndirectRelationMapping mapping = (IndirectRelationMapping) dbColumnMapping;
                        putInTable(
                                mapping.getReferencingTable(),
                                entity,
                                new DependencyInfo(
                                        getField(entity, mapping.getField()),
                                        mapping
                                )
                        );
                    }
                    break;
                    case VIRTUAL: {
                        VirtualRelationMapping mapping = (VirtualRelationMapping) dbColumnMapping;
                        putInTable(
                                mapping.getReferencingTable(),
                                entity,
                                new DependencyInfo(
                                        getField(entity, mapping.getField()),
                                        mapping
                                )
                        );
                    }
                    break;
                }
            }
        });
        return new EntityUtils.TableMapAndDependencyMappingInfo(
                dependencyMap,
                entityNameToEntityMap
        );
    }

    /** Registers a dependency under {@code referencingTable}, creating the bucket on demand. */
    private void putInTable(String referencingTable, Entity entity, DependencyInfo dependencyInfo) {
        // computeIfAbsent replaces the manual get/null-check/put sequence.
        dependencyMap
                .computeIfAbsent(referencingTable, table -> new TableDependency(new HashMap<>()))
                .add(entity, dependencyInfo);
    }

    /** Looks up a field by name, building and caching the entity's field index on first use. */
    private Field getField(Entity entity, String fieldName) {
        Map<String, Field> fieldsByName = entityToFieldNameToFieldIndexMap.computeIfAbsent(
                entity.getName(),
                entityName -> {
                    Map<String, Field> indexMap = new HashMap<>();
                    for (Field field : entity.getFields()) {
                        Objects.requireNonNull(field, "Null in fields in entity '" + entityName + "'");
                        indexMap.put(field.getName(), field);
                    }
                    return indexMap;
                });
        return fieldsByName.get(fieldName);
    }
}
|
#!/bin/bash
# Copyright 2018 The KubeSphere Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# docker login -u guest -p guest dockerhub.qingcloud.com
# Images required by the KubeSphere v2.1.1 installation, grouped by subsystem
# (core, monitoring, notification/alerting, OpenPitrix, DevOps, logging,
# service mesh, examples).
ks_images=(kubesphere/ks-console:v2.1.1 \
kubesphere/kubectl:v1.0.0 \
kubesphere/ks-account:v2.1.1 \
kubesphere/ks-devops:flyway-v2.1.0 \
kubesphere/ks-apigateway:v2.1.1 \
kubesphere/ks-apiserver:v2.1.1 \
kubesphere/ks-controller-manager:v2.1.1 \
kubesphere/cloud-controller-manager:v1.4.0 \
kubesphere/ks-installer:v2.1.1 \
quay.azk8s.cn/kubernetes-ingress-controller/nginx-ingress-controller:0.24.1 \
mirrorgooglecontainers/defaultbackend-amd64:1.4 \
gcr.azk8s.cn/google_containers/metrics-server-amd64:v0.3.1 \
kubesphere/configmap-reload:v0.3.0 \
kubesphere/prometheus:v2.5.0 \
kubesphere/prometheus-config-reloader:v0.34.0 \
kubesphere/prometheus-operator:v0.34.0 \
kubesphere/kube-rbac-proxy:v0.4.1 \
kubesphere/kube-state-metrics:v1.7.2 \
kubesphere/node-exporter:ks-v0.16.0 \
kubesphere/addon-resizer:1.8.4 \
kubesphere/k8s-prometheus-adapter-amd64:v0.4.1 \
grafana/grafana:5.2.4 \
redis:5.0.5-alpine \
haproxy:2.0.4 \
alpine:3.10.4 \
quay.azk8s.cn/coreos/etcd:v3.2.18 \
mysql:8.0.11 \
nginx:1.14-alpine \
postgres:9.6.8 \
osixia/openldap:1.3.0 \
minio/minio:RELEASE.2019-08-07T01-59-21Z \
minio/mc:RELEASE.2019-08-07T23-14-43Z \
\
kubesphere/notification:v2.1.0 \
kubesphere/notification:flyway_v2.1.0 \
kubesphere/alerting-dbinit:v2.1.0 \
kubesphere/alerting:v2.1.0 \
kubesphere/alert_adapter:v2.1.0 \
\
openpitrix/release-app:v0.4.3 \
openpitrix/openpitrix:flyway-v0.4.8 \
openpitrix/openpitrix:v0.4.8 \
openpitrix/runtime-provider-kubernetes:v0.1.3 \
\
kubesphere/jenkins-uc:v2.1.1 \
jenkins/jenkins:2.176.2 \
jenkins/jnlp-slave:3.27-1 \
kubesphere/builder-base:v2.1.0 \
kubesphere/builder-nodejs:v2.1.0 \
kubesphere/builder-maven:v2.1.0 \
kubesphere/builder-go:v2.1.0 \
sonarqube:7.4-community \
kubesphere/s2ioperator:v2.1.1 \
kubesphere/s2irun:v2.1.1 \
kubesphere/s2i-binary:v2.1.0 \
kubesphere/tomcat85-java11-centos7:v2.1.0 \
kubesphere/tomcat85-java11-runtime:v2.1.0 \
kubesphere/tomcat85-java8-centos7:v2.1.0 \
kubesphere/tomcat85-java8-runtime:v2.1.0 \
kubesphere/java-11-centos7:v2.1.0 \
kubesphere/java-8-centos7:v2.1.0 \
kubesphere/java-8-runtime:v2.1.0 \
kubesphere/java-11-runtime:v2.1.0 \
kubesphere/nodejs-8-centos7:v2.1.0 \
kubesphere/nodejs-6-centos7:v2.1.0 \
kubesphere/nodejs-4-centos7:v2.1.0 \
kubesphere/python-36-centos7:v2.1.0 \
kubesphere/python-35-centos7:v2.1.0 \
kubesphere/python-34-centos7:v2.1.0 \
kubesphere/python-27-centos7:v2.1.0 \
\
kubesphere/elasticsearch-curator:v5.7.6 \
kubesphere/elasticsearch-oss:6.7.0-1 \
kubesphere/fluent-bit:v1.3.2-reload \
docker.elastic.co/kibana/kibana-oss:6.7.0 \
dduportal/bats:0.4.0 \
docker:19.03 \
kubesphere/fluentbit-operator:v0.1.0 \
kubesphere/fluent-bit:v1.3.5-reload \
kubesphere/configmap-reload:v0.0.1 \
kubesphere/log-sidecar-injector:1.0 \
\
istio/kubectl:1.3.3 \
istio/proxy_init:1.3.3 \
istio/proxyv2:1.3.3 \
istio/citadel:1.3.3 \
istio/pilot:1.3.3 \
istio/mixer:1.3.3 \
istio/galley:1.3.3 \
istio/sidecar_injector:1.3.3 \
istio/node-agent-k8s:1.3.3 \
jaegertracing/jaeger-operator:1.13.1 \
jaegertracing/jaeger-agent:1.13 \
jaegertracing/jaeger-collector:1.13 \
jaegertracing/jaeger-query:1.13 \
kubesphere/examples-bookinfo-productpage-v1:1.13.0 \
kubesphere/examples-bookinfo-reviews-v1:1.13.0 \
kubesphere/examples-bookinfo-reviews-v2:1.13.0 \
kubesphere/examples-bookinfo-reviews-v3:1.13.0 \
kubesphere/examples-bookinfo-details-v1:1.13.0 \
kubesphere/examples-bookinfo-ratings-v1:1.13.0 \
\
busybox:1.31.1 \
joosthofman/wget:1.0 \
kubesphere/netshoot:v1.0 \
nginxdemos/hello:plain-text \
wordpress:4.8-apache \
mirrorgooglecontainers/hpa-example:latest \
java:openjdk-8-jre-alpine \
fluent/fluentd:v1.4.2-2.0 \
perl:latest \
)

# Pre-pull every image so the installer can run without live registry access.
# BUG FIX: the array expansion and loop variable are now quoted so glob/space
# characters in an entry cannot be word-split or expanded (shellcheck SC2068/SC2086).
for image in "${ks_images[@]}"; do
    docker pull "$image"
done
|
// Data-transfer model for a workflow parameter row ("parametro").
// Field names mirror backend table columns; exact business meaning is not
// visible in this file — NOTE(review): confirm semantics against the API/DB schema.
export class Wf_Pmeto {
  AUD_ESTA: string;  // audit: record state flag — TODO confirm values
  AUD_USUA: string;  // audit: user who last modified the record
  AUD_UFAC: Date;    // audit: timestamp of last modification
  EMP_CODI: number;  // company code
  WEB_CONT: number;  // web counter/id — presumably a FK; verify against caller
  MWE_CONT: number;  // related counter/id — TODO confirm
  PME_SECU: number;  // parameter sequence number
  PME_CODI: string;  // parameter code
  PME_TIPO: string;  // parameter type
  PME_VALO: string;  // parameter value (stored as string)
  PME_CLAS: string;  // parameter class/category
  PME_CONT: number;  // parameter counter/id
  PME_PADR: number;  // parent parameter id (hierarchy link)
  PME_NDET: string;  // parameter detail/description text
}
#!/bin/sh
# CocoaPods-generated "Embed Pods Frameworks" build phase script.
# Fail on the first error.
set -e
# Announce and create the destination frameworks folder inside the build dir.
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
# Location of the Swift standard libraries for the current platform/toolchain.
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copies one framework ($1) into the app bundle's Frameworks folder,
# strips architectures not in VALID_ARCHS, re-signs it, and (on Xcode < 7)
# also embeds the Swift runtime dylibs it links against.
install_framework()
{
  # Resolve the source: prefer the full path under BUILT_PRODUCTS_DIR,
  # then just the basename there, then the argument taken literally.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi
  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
  # Follow a symlinked framework to its real location before copying.
  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi
  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"
  local basename
  basename="$(basename -s .framework "$1")"
  # Locate the executable inside the copied bundle; fall back to a bare
  # binary (dylib-style pod) when no .framework layout is present.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi
  # Strip invalid architectures so "fat" simulator / device frameworks work on device.
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi
  # Resign the code if required by the build settings to avoid unstable apps.
  code_sign_if_enabled "${destination}/$(basename "$1")"
  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # List the @rpath libswift* dylibs the binary links, keeping only
    # unique names; propagate otool's exit status through the pipeline.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# Signs a framework ($1) with the build's expanded code-sign identity,
# unless signing is disabled or not required by the build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code sign identity.
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# Removes, in place, every architecture slice of $1 that is not listed in
# VALID_ARCHS, then reports what was stripped.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file (the list after the ':' in lipo -info output).
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the Pods frameworks. The framework list is identical for Debug
# and Release, so both configurations share one branch; any other
# configuration embeds nothing (same as before).
if [[ "$CONFIGURATION" == "Debug" || "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/Alamofire/Alamofire.framework"
  install_framework "$BUILT_PRODUCTS_DIR/AlamofireObjectMapper/AlamofireObjectMapper.framework"
  install_framework "$BUILT_PRODUCTS_DIR/ObjectMapper/ObjectMapper.framework"
  install_framework "$BUILT_PRODUCTS_DIR/TalNet/TalNet.framework"
fi
|
#!/bin/bash
# Copyright 2019 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Default kind verbosity; raised when DEBUG is set in the environment.
KIND_LOG_LEVEL="1"

# Enable shell tracing and verbose kind logging when DEBUG is non-empty.
# The expansion is quoted and defaulted so the test is well-formed even
# when DEBUG is unset (the original unquoted `[ -z $DEBUG ]` only worked
# by accident, collapsing to `[ -z ]`).
if [ -n "${DEBUG:-}" ]; then
  set -x
  KIND_LOG_LEVEL="6"
fi

set -o errexit
set -o nounset
set -o pipefail
# Exit handler: export cluster logs when running under kubetest-in-docker,
# then always delete the kind cluster. Variable expansions are quoted so a
# cluster name or artifacts path with spaces cannot break the commands.
cleanup() {
  if [[ "${KUBETEST_IN_DOCKER:-}" == "true" ]]; then
    kind "export" logs --name "${KIND_CLUSTER_NAME}" "${ARTIFACTS}/logs" || true
  fi

  kind delete cluster \
    --verbosity="${KIND_LOG_LEVEL}" \
    --name "${KIND_CLUSTER_NAME}"
}
trap cleanup EXIT

# Fail fast when required tooling is missing.
if ! command -v parallel &> /dev/null; then
  if [[ "$OSTYPE" == "linux-gnu" ]]; then
    echo "Parallel is not installed. Use the package manager to install it"
  elif [[ "$OSTYPE" == "darwin"* ]]; then
    echo "Parallel is not installed. Install it running brew install parallel"
  fi
  exit 1
fi

# Bug fix: `command -v` must receive only the command name. The original
# `command -v kind --version` also looked up `--version` as a command
# name; since that is never found, the check failed (and the script
# aborted) even when kind WAS installed.
if ! command -v kind &> /dev/null; then
  echo "kind is not installed. Use the package manager or visit the official site https://kind.sigs.k8s.io/"
  exit 1
fi
# Absolute directory of this script, used to anchor all make invocations.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Use 1.0.0-dev to make sure we use the latest configuration in the helm template
export TAG=1.0.0-dev
export ARCH=amd64
export REGISTRY=ingress-controller
# Pinned kind node image (digest included so the cluster is reproducible).
export K8S_VERSION=${K8S_VERSION:-v1.18.0@sha256:0e20578828edd939d25eb98496a685c76c98d54084932f76069f886ec315d694}
export DOCKER_CLI_EXPERIMENTAL=enabled
export KIND_CLUSTER_NAME=${KIND_CLUSTER_NAME:-ingress-nginx-dev}
echo "[dev-env] creating Kubernetes cluster with kind"
# Dedicated kubeconfig so the dev cluster does not touch the user's default one.
export KUBECONFIG="${HOME}/.kube/kind-config-${KIND_CLUSTER_NAME}"
kind create cluster \
  --verbosity=${KIND_LOG_LEVEL} \
  --name ${KIND_CLUSTER_NAME} \
  --config ${DIR}/kind.yaml \
  --retain \
  --image "kindest/node:${K8S_VERSION}"
echo "Kubernetes cluster:"
kubectl get nodes -o wide
echo "[dev-env] building image"
# Sentinel: stays -1 unless one of the parallel jobs below fails.
export EXIT_CODE=-1
# Build all images concurrently; `parallel` records per-job status in /tmp/log.
echo "
make -C ${DIR}/../../ clean-image build image
make -C ${DIR}/../e2e-image image
make -C ${DIR}/../../images/fastcgi-helloserver/ GO111MODULE=\"on\" build image
make -C ${DIR}/../../images/httpbin/ image
make -C ${DIR}/../../images/echo/ image
make -C ${DIR}/../../images/cfssl/ image
" | parallel --joblog /tmp/log {} || EXIT_CODE=$?
# EXIT_CODE is -1 (untouched sentinel) or 0 when every build job succeeded.
if [ ${EXIT_CODE} -eq 0 ] || [ ${EXIT_CODE} -eq -1 ];
then
  echo "Image builds were ok! Log:"
  cat /tmp/log
  unset EXIT_CODE
else
  echo "Image builds were not ok! Log:"
  cat /tmp/log
  exit 1
fi
# Preload images used in e2e tests
docker pull moul/grpcbin
docker pull quay.io/kubernetes-ingress-controller/nginx:e3c49c52f4b74fe47ad65d6f3266a02e8b6b622f
# Comma-separated list of the cluster's worker node names (skips control plane).
KIND_WORKERS=$(kind get nodes --name="${KIND_CLUSTER_NAME}" | grep worker | awk '{printf (NR>1?",":"") $1}')
echo "[dev-env] copying docker images to cluster..."
# Sentinel: stays -1 unless one of the parallel load jobs below fails.
export EXIT_CODE=-1
# Load every image into the worker nodes concurrently.
echo "
kind load docker-image --name="${KIND_CLUSTER_NAME}" --nodes=${KIND_WORKERS} nginx-ingress-controller:e2e
kind load docker-image --name="${KIND_CLUSTER_NAME}" --nodes=${KIND_WORKERS} ${REGISTRY}/nginx-ingress-controller:${TAG}
kind load docker-image --name="${KIND_CLUSTER_NAME}" --nodes=${KIND_WORKERS} ${REGISTRY}/fastcgi-helloserver:${TAG}
kind load docker-image --name="${KIND_CLUSTER_NAME}" --nodes=${KIND_WORKERS} ${REGISTRY}/httpbin:${TAG}
kind load docker-image --name="${KIND_CLUSTER_NAME}" --nodes=${KIND_WORKERS} ${REGISTRY}/echo:${TAG}
kind load docker-image --name="${KIND_CLUSTER_NAME}" --nodes=${KIND_WORKERS} quay.io/kubernetes-ingress-controller/nginx:e3c49c52f4b74fe47ad65d6f3266a02e8b6b622f
kind load docker-image --name="${KIND_CLUSTER_NAME}" --nodes=${KIND_WORKERS} moul/grpcbin
kind load docker-image --name="${KIND_CLUSTER_NAME}" --nodes=${KIND_WORKERS} ${REGISTRY}/cfssl:${TAG}
" | parallel --joblog /tmp/log {} || EXIT_CODE=$?
# EXIT_CODE is -1 (untouched sentinel) or 0 when every load job succeeded.
if [ ${EXIT_CODE} -eq 0 ] || [ ${EXIT_CODE} -eq -1 ];
then
  echo "Image loads were ok! Log:"
  cat /tmp/log
  unset EXIT_CODE
else
  echo "Image loads were not ok! Log:"
  cat /tmp/log
  # Bug fix: the original bare `exit` returned the status of the last
  # command (`cat`, normally 0), so image-load failures did NOT fail the
  # script. Exit 1 explicitly, matching the build-failure branch above.
  exit 1
fi

echo "[dev-env] running e2e tests..."
make -C ${DIR}/../../ e2e-test
|
import { Repository } from "typeorm";
import { Image } from "../models/image";
// Ambient declaration (.d.ts style): a TypeORM repository specialized for
// Image entities. No members are added — the class exists so callers can
// inject/type the repository as ImageRepository.
export declare class ImageRepository extends Repository<Image> {
}
|
#!/bin/bash
# Test harness: start a throwaway redis-server, run the mocha suite against
# it, and kill any redis-server processes when the script exits.
trap 'pgrep redis-server |xargs kill' EXIT;
# Launch redis in the background, discarding its startup output.
redis-server > /dev/null &
# The suite under test reads PORT and REDIS_URL from the environment.
PORT=8080 REDIS_URL=127.0.0.1 mocha --exit --timeout 1000
|
<reponame>MccreeFei/jframe<filename>jframe-demo/demo-plugin/jframe-demo-cache/src/test/java/ehcache/TestEventListener.java
/**
*
*/
package ehcache;
import org.junit.Test;
/**
* @author dzh
* @date Nov 12, 2016 11:36:16 AM
* @since 1.0
*/
public class TestEventListener {

    /**
     * Placeholder for an Ehcache cache-event-listener configuration test.
     *
     * <p>The entire body is commented out, so this test currently passes as
     * a no-op. The retained snippet sketches the intended scenario: build a
     * {@code CacheEventListenerConfiguration} for CREATED/UPDATED events,
     * attach it to a cache, and exercise put/update/remove.
     * NOTE(review): either restore and finish this test or delete it.
     */
    @Test
    public void eventListenerTest() {
        // CacheEventListenerConfigurationBuilder cacheEventListenerConfiguration =
        // CacheEventListenerConfigurationBuilder
        // .newEventListenerConfiguration(new ListenerObject(), EventType.CREATED,
        // EventType.UPDATED).unordered().asynchronous();
        //
        // final CacheManager manager = CacheManagerBuilder.newCacheManagerBuilder()
        // .withCache("foo",
        // CacheConfigurationBuilder.newCacheConfigurationBuilder(String.class, String.class,
        // ResourcePoolsBuilder.heap(10))
        // .add(cacheEventListenerConfiguration))
        // .build(true);
        //
        // CacheConfiguration<Long, String> cacheConfiguration =
        // CacheConfigurationBuilder.newCacheConfigurationBuilder(Long.class, String.class,
        // ResourcePoolsBuilder.heap(5L))
        // .withDispatcherConcurrency(10).withEventListenersThreadPool("listeners-pool").build();
        //
        // final Cache<String, String> cache = manager.getCache("foo", String.class, String.class);
        // cache.put("Hello", "World");
        // cache.put("Hello", "Everyone");
        // cache.remove("Hello");
    }

    /**
     * Placeholder for a runtime (de)registration test of a cache event
     * listener.
     *
     * <p>Also fully commented out and therefore a no-op. The sketch shows
     * registering a listener, mutating the cache, deregistering, then
     * mutating again to verify no further events are delivered.
     * NOTE(review): either restore and finish this test or delete it.
     */
    @Test
    public void runtimeEventListener() {
        // ListenerObject listener = new ListenerObject();
        // cache.getRuntimeConfiguration().registerCacheEventListener(listener, EventOrdering.ORDERED,
        // EventFiring.ASYNCHRONOUS,
        // EnumSet.of(EventType.CREATED, EventType.REMOVED));
        //
        // cache.put(1L, "one");
        // cache.put(2L, "two");
        // cache.remove(1L);
        // cache.remove(2L);
        //
        // cache.getRuntimeConfiguration().deregisterCacheEventListener(listener);
        //
        // cache.put(1L, "one again");
        // cache.remove(1L);
    }
}
|
// Arguments identifying a single cell within a grid component
// (typical SlickGrid-style cell event payload).
export interface CellArgs {
  row: number;   // row index of the cell — presumably zero-based; confirm against grid
  cell: number;  // column (cell) index within the row
  grid: any;     // owning grid instance — NOTE(review): untyped; tighten if the grid API is known
  item?: any;    // optional data item bound to the row, when available
}
|
##
# TypeError ISO Test
# Verifies (ISO section 15.2.29) that the TypeError constant is itself a
# Class object in this mruby build.
assert('TypeError', '15.2.29') do
  assert_equal Class, TypeError.class
end
|
def lcm(m, n):
    """Return the least common multiple of two positive integers.

    Uses the identity ``lcm(m, n) = m * n // gcd(m, n)``, which runs in
    O(log(min(m, n))) instead of the previous brute-force scan through
    multiples (which was O(lcm / max(m, n)) iterations and looped forever
    for non-positive input).

    Args:
        m: First positive integer.
        n: Second positive integer.

    Returns:
        The smallest positive integer divisible by both ``m`` and ``n``.

    Raises:
        ValueError: If either argument is not positive (the original
            implementation hung in an infinite loop in that case).
    """
    import math

    if m <= 0 or n <= 0:
        raise ValueError("lcm() requires positive integers")
    return m * n // math.gcd(m, n)
# Combine two lists, preserving order: all of list1 followed by all of list2.
list1 = [8, 5, 7]
list2 = [2, 4, 9]

# Iterable-unpacking form of list concatenation — equivalent to list1 + list2.
merged_list = [*list1, *list2]
print(merged_list)  # [8, 5, 7, 2, 4, 9]
#!/bin/bash
# Run dieharder RNG test -d 101 against generator 401 with a fixed seed
# (-S) so the run is reproducible.
dieharder -d 101 -g 401 -S 3826679031
/**
 * Returns true when the two strings are anagrams of each other,
 * ignoring case.
 *
 * @param {string} str1 - first string
 * @param {string} str2 - second string
 * @returns {boolean} true if both strings contain exactly the same
 *   characters (case-insensitively), false otherwise
 */
function isAnagram(str1, str2) {
  // Cheap rejection first: strings of different lengths can never be
  // anagrams. The original version paid for two sorts before making
  // this O(1) check.
  if (str1.length !== str2.length) {
    return false;
  }

  // Normalize case, then compare the alphabetically sorted characters;
  // the comparison result IS the answer (no redundant if/else chain).
  var normalize = function (s) {
    return s.toLowerCase().split("").sort().join("");
  };
  return normalize(str1) === normalize(str2);
}
import { Component, OnInit } from '@angular/core';
import { FormControl } from '@angular/forms';
import { Observable } from 'rxjs';
import { startWith, map } from 'rxjs/operators';
@Component({
selector: 'app-search-box',
templateUrl: './search-box.component.html',
styleUrls: ['./search-box.component.css']
})
export class SearchBoxComponent implements OnInit {
searchControl = new FormControl();
options: string[] = ['One', 'Two', 'Three'];
filteredOptions: Observable<string[]>;
ngOnInit() {
this.filteredOptions = this.searchControl.valueChanges
.pipe(
startWith(''),
map(value => this._filter(value))
);
}
private _filter(value: string): string[] {
const filterValue = value.toLowerCase();
return this.options.filter(option => option.toLowerCase().includes(filterValue));
}
} |
<gh_stars>0
import Layout from '@/layout'
// Route table for the "shouka" (card-selling) admin app. Every top-level
// entry renders inside the shared Layout; `functionCode` appears to gate
// menu visibility by permission — NOTE(review): confirm against the
// permission filter that consumes these routes.
// NOTE(review): several child routes share the same `name` ('update' x3,
// 'add' x2, 'list' x2). Vue Router requires unique route names — later
// registrations shadow earlier ones for router.push({ name }) navigation.
// Renaming would affect callers, so it is only flagged here.
export default [
  // Withdraw page, hidden from the menu.
  {
    path: '/withdraw',
    component: Layout,
    children: [
      {
        path: '',
        name: 'withdraw',
        component: () => import('@/views/shouka/withdraw/index'),
        meta: { title: '提现', icon: 'icon0' },
        hidden:true
      }
    ],
    hidden:true,
    meta: { title: '', icon: '' }
  },
  // Root: redirects to the dashboard (home).
  {
    path: '/',
    component: Layout,
    redirect: '/dashboard',
    functionCode: 'f-home',
    children: [
      {
        path: 'dashboard',
        name: 'Dashboard',
        functionCode: 'f-home',
        component: () => import('@/views/shouka/dashboard/index'),
        meta: { title: '首页', icon: 'icon0' }
      }
    ],
    icon:require('../assets/icon/sy.png'),
    meta: { title: '', icon: '' }
  },
  // Site settings / basic information section.
  {
    path: '/base',
    component: Layout,
    redirect: '/base/loginLog',
    name: 'base',
    functionCode: 'f-base',
    meta: { title: '基础信息', icon: 'icon1' },
    icon:require('../assets/icon/sz.png'),
    children: [
      {
        path: 'loginLog',
        name: 'loginLog',
        functionCode: 'f-base-log',
        component: () => import('@/views/shouka/base/login-log/index'),
        meta: { title: '登录日志', icon: '' },
        // hidden:true
      },
      {
        path: 'message',
        name: 'message',
        functionCode: 'f-base-mes',
        component: () => import('@/views/shouka/base/message/index'),
        meta: { title: '站内消息', icon: '' },
        // hidden:true
      },
      {
        path: 'wxnotice',
        name: 'wxnotice',
        functionCode: 'f-base-wnotice',
        component: () => import('@/views/shouka/base/wxnotice/index'),
        meta: { title: '微信通知', icon: '' },
        // hidden:true
      },
      {
        path: 'settings',
        name: 'settings',
        functionCode: 'f-base-set',
        component: () => import('@/views/shouka/base/settings/index'),
        meta: { title: '商家设置', icon: '' },
        // hidden:true
      },
      {
        path: 'password',
        name: 'password',
        functionCode: 'f-base-password',
        component: () => import('@/views/shouka/base/password/index'),
        meta: { title: '修改密码', icon: '' },
        // hidden:true
      },
      {
        path: 'link',
        name: 'link',
        functionCode: 'f-base-link',
        component: () => import('@/views/shouka/base/link/index'),
        meta: { title: '店铺链接', icon: '' },
        // hidden:true
      },
      {
        path: 'apply',
        name: 'apply',
        functionCode: 'f-base-finance',
        component: () => import('@/views/shouka/base/apply/index'),
        meta: { title: '商户提现', icon: '' },
        // hidden:true
      },
    ]
  },
  // Goods (product) management section.
  {
    path: '/goodsManager',
    component: Layout,
    redirect: '/goodsManager/goods',
    name: 'goodsManager',
    functionCode: 'f-goods',
    meta: { title: '商品管理', icon: 'icon1' },
    icon:require('../assets/icon/sz.png'),
    children: [
      {
        path: 'types',
        name: 'types',
        functionCode: 'f-goods-catalog',
        component: () => import('@/views/shouka/goods/types/list'),
        meta: { title: '商品分类', icon: '' },
        // hidden:true
      },
      {
        path: 'add',
        name: 'add',
        functionCode: 'f-goods-addGoods',
        component: () => import('@/views/shouka/goods/goods/add'),
        meta: { title: '添加商品', icon: '' },
        // hidden:true
      },
      {
        path: 'goods',
        name: 'goods',
        functionCode: 'f-goods-self',
        component: () => import('@/views/shouka/goods/goods/list'),
        meta: { title: '商品列表', icon: '' },
        // hidden:true
      },
      {
        path: 'goods-inter',
        name: 'goods-inter',
        functionCode: 'f-goods-join',
        component: () => import('@/views/shouka/goods/inter/list'),
        meta: { title: '对接商品', icon: '' },
        // hidden:true
      },
      {
        path: 'network-goods-inter',
        name: 'network-goods-inter',
        functionCode: 'f-goods-alljoin',
        component: () => import('@/views/shouka/goods/networkInner/list'),
        meta: { title: '全网对接商品', icon: '' },
      },
      {
        path: 'daili',
        name: 'daili',
        component: () => import('@/views/shouka/goods/inter/daili'),
        meta: { title: '代理商品', icon: '' },
        hidden:true
      },
      {
        path: 'update',
        name: 'update',
        component: () => import('@/views/shouka/goods/goods/update'),
        meta: { title: '编辑商品', icon: '' },
        hidden:true
      },
      {
        // NOTE(review): duplicate route name 'update' (see note at top).
        path: 'interUpdate',
        name: 'update',
        component: () => import('@/views/shouka/goods/inter/update'),
        meta: { title: '编辑商品', icon: '' },
        hidden:true
      },
      {
        // NOTE(review): duplicate route name 'update' (see note at top).
        path: 'netUpdate',
        name: 'update',
        component: () => import('@/views/shouka/goods/networkInner/docking'),
        meta: { title: '对接商品', icon: '' },
        hidden:true
      },
    ]
  },
  // Card-secret (card key) management section.
  {
    path: '/cardManager',
    component: Layout,
    redirect: '/cardManager/card',
    name: 'cardManager',
    functionCode: 'f-card',
    meta: { title: '卡密管理', icon: 'icon1' },
    icon:require('../assets/icon/sz.png'),
    children: [
      {
        path: 'cardAdd',
        name: 'cardAdd',
        functionCode: 'f-card-add',
        component: () => import('@/views/shouka/card/add'),
        meta: { title: '添加卡密', icon: '' },
        // hidden:true
      },
      {
        path: 'card',
        name: 'card',
        functionCode: 'f-card-list',
        component: () => import('@/views/shouka/card/card'),
        meta: { title: '卡密列表', icon: '' },
        // hidden:true
      },
      {
        path: 'cardSold',
        name: 'cardSold',
        functionCode: 'f-card-sale',
        component: () => import('@/views/shouka/card/sold'),
        meta: { title: '已售卡密', icon: '' },
        // hidden:true
      },
      {
        path: 'rubbish',
        name: 'rubbish',
        functionCode: 'f-card-recover',
        component: () => import('@/views/shouka/card/rubbish'),
        meta: { title: '回收站', icon: '' },
        // hidden:true
      },
    ]
  },
  // Order management section.
  {
    path: '/orderManager',
    component: Layout,
    redirect: '/orderManager/zhixiao',
    name: 'orderManager',
    functionCode: 'f-order',
    meta: { title: '订单管理', icon: 'icon1' },
    icon:require('../assets/icon/sz.png'),
    children: [
      {
        path: 'allOrder',
        name: 'allOrder',
        functionCode: 'f-order-allOrder',
        component: () => import('@/views/shouka/order/allOrder'),
        meta: { title: '全部订单', icon: '' },
        // hidden:true
      },
      {
        path: 'zhixiao',
        name: 'zhixiao',
        functionCode: 'f-order-order',
        component: () => import('@/views/shouka/order/zhixiao'),
        meta: { title: '直销订单', icon: '' },
        // hidden:true
      },
      {
        path: 'duijie',
        name: 'duijie',
        functionCode: 'f-order-join',
        component: () => import('@/views/shouka/order/duijie'),
        meta: { title: '对接订单', icon: '' },
        // hidden:true
      },
    ]
  },
  // Complaint management section.
  {
    path: '/complaint',
    component: Layout,
    redirect: '/complaint/complaint',
    name: 'complaint',
    functionCode: 'f-complaint',
    meta: { title: '投诉管理', icon: 'icon1' },
    icon:require('../assets/icon/sz.png'),
    children: [
      {
        path: 'not-handle',
        name: 'not-handle',
        functionCode: 'f-complaint-list',
        component: () => import('@/views/shouka/complaint/not-handle/index'),
        meta: { title: '未投诉处理', icon: '' },
        // hidden:true
      },
      {
        path: 'complaint',
        name: 'complaint',
        functionCode: 'f-complaint-all',
        component: () => import('@/views/shouka/complaint/complaint/index'),
        meta: { title: '全部投诉', icon: '' },
        // hidden:true
      },
    ]
  },
  // Finance management section.
  {
    path: '/finance',
    component: Layout,
    redirect: '/finance/finance',
    name: 'finance',
    functionCode: 'f-finance',
    meta: { title: '财务管理', icon: 'icon1' },
    icon:require('../assets/icon/sz.png'),
    children: [
      {
        path: 'charge',
        name: 'charge',
        functionCode: 'f-finance-attr',
        component: () => import('@/views/shouka/finance/charge/index'),
        meta: { title: '充值接口', icon: '' },
        // hidden:true
      },
      {
        path: 'rate',
        name: 'rate',
        functionCode: 'f-recharge-rate',
        component: () => import('@/views/shouka/finance/rate/index'),
        meta: { title: '接口费率', icon: '' },
        // hidden:true
      },
      {
        path: 'tempFrozen',
        name: 'tempFrozen',
        functionCode: 'f-finance-tempFrozen',
        component: () => import('@/views/shouka/finance/tempFrozen/index'),
        meta: { title: '临时冻结详情', icon: '' },
        // hidden:true
      },
    ]
  },
  // Coupon management section.
  {
    path: '/discount',
    component: Layout,
    redirect: '/discount/list',
    name: 'discount',
    functionCode: 'f-discount',
    meta: { title: '优惠券管理', icon: 'icon1' },
    icon:require('../assets/icon/sz.png'),
    children: [
      {
        // NOTE(review): duplicate route name 'add' (see note at top).
        path: 'add',
        name: 'add',
        functionCode: 'f-discount-add',
        component: () => import('@/views/shouka/discount/add'),
        meta: { title: '添加优惠券', icon: '' },
        // hidden:true
      },
      {
        path: 'list',
        name: 'list',
        functionCode: 'f-discount-list',
        component: () => import('@/views/shouka/discount/list'),
        meta: { title: '优惠券列表', icon: '' },
        // hidden:true
      },
      {
        path: 'useable',
        name: 'useable',
        functionCode: 'f-discount-used',
        component: () => import('@/views/shouka/discount/useable'),
        meta: { title: '已使用', icon: '' },
        // hidden:true
      },
    ]
  },
  // Promotion management section.
  {
    path: '/menu1',
    component: Layout,
    redirect: '/menu1/1',
    name: 'menu1',
    functionCode: 'f-push',
    meta: { title: '推广管理', icon: 'icon1' },
    icon:require('../assets/icon/sz.png'),
    children: [
      {
        path: 'sub1',
        name: 'menu111',
        functionCode: 'f-push',
        component: () => import('@/views/menu1/menu1-1/sub1'),
        meta: { title: '推广管理', icon: '' },
        // hidden:true
      },
    ]
  },
]
#!/bin/bash
# v1.2.1
#------------------------------------------------------------------------------
# creates the full package as component of larger product platform
#------------------------------------------------------------------------------
# Builds the full deployment zip for a run unit: reads an include file
# listing the files to package, filters ignore patterns out, zips the
# result plus the dated data dir, and optionally copies it to a network
# backup dir. Relies on many externally-set vars (product_instance_dir,
# env_type, run_unit, org_base_dir, product_dir, host_name, ...).
# NOTE(review): `$livy-rester_project` (used below) is parsed by bash as
# the expansion of `$livy` followed by the literal "-rester_project" —
# this looks like it was meant to be a single variable name; confirm and
# fix separately, as renaming may affect other scripts sourcing this one.
doCreateFullPackage(){
  doLog "INFO START create-full-package.func.sh" ;
  #define default vars
  test -z ${include_file:-} && \
  include_file="$product_instance_dir/met/.$env_type.$run_unit"
  # relative file path is passed turn it to absolute one
  [[ $include_file == /* ]] || include_file=$product_instance_dir/$include_file
  if [ ! -f "$include_file" ]; then
    msg="the deployment file: "'"'"$include_file"'" does not exist !!!'
    export exit_code=1 ;
    doExit "$msg"
    exit 1
  fi
  # target env type is the second dot-separated token of the file name
  tgt_env_type=$(echo `basename "$include_file"`|cut -d'.' -f2)
  # start: add the perl_ignore_file_pattern
  # Collect every "# perl_ignore_file_pattern=..." directive from the
  # include file into one |-joined pattern.
  while read -r line ; do \
    got=$(echo $line|perl -ne 'm|^\s*#\s*perl_ignore_file_pattern\s*=(.*)$|g;print $1'); \
    test -z "$got" || perl_ignore_file_pattern="$got|${perl_ignore_file_pattern:-}" ;
  done < <(cat $include_file)
  # or how-to remove the last char from a string
  perl_ignore_file_pattern=$(echo "$perl_ignore_file_pattern"|sed 's/.$//')
  # default ignore pattern when none configured
  # NOTE(review): the unquoted `test -z $perl_ignore_file_pattern` will
  # misbehave when the pattern contains spaces — confirm and quote.
  test -z $perl_ignore_file_pattern && perl_ignore_file_pattern='.*\.swp$|.*\.log|$.*\.swo$'
  echo perl_ignore_file_pattern::: $perl_ignore_file_pattern
  # note: | egrep -v "$perl_ignore_file_pattern" | egrep -v '^\s*#'
  cd $org_base_dir
  timestamp=`date "+%Y%m%d_%H%M%S"`
  # the last token of the include_file with . token separator - thus no points in names
  zip_file_name=$(echo $include_file | rev | cut -d'.' -f 1 | rev)
  test $zip_file_name != $run_unit && zip_file_name="$zip_file_name"'--'"$livy-rester_project"
  zip_file_name="$zip_file_name.$product_version.$tgt_env_type.$timestamp.$host_name.zip"
  zip_file="$product_dir/$zip_file_name"
  mkdir -p $product_instance_dir/dat/$run_unit/tmp
  # remember the produced zip path for later steps
  echo $zip_file>$product_instance_dir/dat/$run_unit/tmp/zip_file
  # zip MM ops
  # -MM --must-match
  # All input patterns must match at least one file and all input files found must be readable.
  set -x ; ret=1
  # Feed the filtered, NUL-delimited file list to zip -MM.
  cat $include_file | egrep -v "$perl_ignore_file_pattern" | sed '/^#/ d' | perl -ne 's|\n|\000|g;print'| \
  xargs -0 -I "{}" zip -MM $zip_file "$org_name/$run_unit/$environment_name/{}"
  ret=$?
  set +x
  # On failure, log exactly which listed entries are missing/not files.
  test $ret -gt 0 && (
    while IFS='' read f ; do (
      test -d "$product_instance_dir/$f" && continue ;
      test -f "$product_instance_dir/$f" && continue ;
      test -f "$product_instance_dir/$f" || doLog 'ERROR not a file: "'"$f"'"' ;
      test -f "$product_instance_dir/$f" || ret=1 && exit 1
    );
    done < <(cat $include_file | egrep -v "$perl_ignore_file_pattern" | sed '/^#/ d')
  );
  if [ ! $ret -eq 0 ]; then
    msg="deleted $zip_file , because of packaging errors $! !!!"
    rm -fv $zip_file
    export exit_code=1 ; doExit "$msg" ;
    exit 1
  fi
  # backup the project data dir if not running on the product itself ...
  test -d $mix_data_dir/$(date "+%Y")/$(date "+%Y-%m")/$(date "+%Y-%m-%d") || doIncreaseDate
  # and zip the project data dir
  if [ ! $run_unit == $livy-rester_project ]; then
    cd $mix_data_dir
    for i in {1..3} ; do cd .. ; done ;
    zip -r $zip_file $livy-rester_project/dat/mix/$(date "+%Y")/$(date "+%Y-%m")/$(date "+%Y-%m-%d")
    cd $org_base_dir
  else
    zip -r $zip_file $org_name/$run_unit/$environment_name/dat/mix/$(date "+%Y")/$(date "+%Y-%m")/$(date "+%Y-%m-%d")
  fi
  msg="created the following full development package:"
  doLog "INFO $msg"
  msg="`stat -c \"%y %n\" $zip_file`"
  doLog "INFO $msg"
  # Optional copy to a configured network backup directory.
  if [[ ${network_backup_dir+x} && -n $network_backup_dir ]] ; then
    if [ -d "$network_backup_dir" ] ; then
      doRunCmdAndLog "cp -v $zip_file $network_backup_dir/"
      msg=" with the following network dir backup :
      ""$(stat -c "%y %n" "$network_backup_dir/$zip_file_name")"
      doLog "INFO $msg"
    else
      msg="skip backup as network_backup_dir does not exist"
      doLog "ERROR $msg"
    fi
  else
    msg="skip the creation of the network backup as no network_backup_dir is configured"
    doLog "INFO $msg"
  fi
  doLog "INFO STOP create-full-package.func.sh"
}
#eof func doCreateFullPackage
|
<reponame>sinapsel/fuckthelogic
// UI entry point: reads the program text and argument string from the
// form, runs the interpreter, and appends its output to the #output area.
function run(){
  var program = document.forms[0].codebox.value;
  var result = document.getElementById("output");
  var params = document.getElementById("argv").value;
  result.innerHTML += interpret(program, params);
  result.innerHTML += "<br>";
}
// Clears one of the three UI areas named by `frm`. Returns false (so it
// can cancel default behaviour when used as an event handler) on success,
// or 0 when the name is not recognised — same contract as before.
function _clear(frm) {
  if (frm === 'codebox') {
    document.forms[0].codebox.value = '';
  } else if (frm === 'argv') {
    document.forms[0].argv.value = '';
  } else if (frm === 'output') {
    document.getElementById("output").innerHTML = "";
  } else {
    return 0;
  }
  return false;
}
// Maps each '{' index in src to its matching '}' index and vice versa,
// using a stack of currently-open brace positions.
function getLoops(src) {
  var stack = [];
  var pairs = {};
  var pos;
  for (pos = 0; pos < src.length; pos++) {
    var ch = src.charAt(pos);
    if (ch === '{') {
      stack.push(pos);
    } else if (ch === '}') {
      var openPos = stack.pop();
      pairs[pos] = openPos;
      pairs[openPos] = pos;
    }
  }
  return pairs;
}
// Maps each '(' index in src to its matching ')' index and vice versa,
// mirroring getLoops but for condition brackets.
function getConditions(src) {
  var pending = [];
  var map = {};
  var i = 0;
  while (i < src.length) {
    var c = src.charAt(i);
    if (c === '(') {
      pending.push(i);
    } else if (c === ')') {
      var start = pending.pop();
      map[i] = start;
      map[start] = i;
    }
    i += 1;
  }
  return map;
}
// Pairs each ')' index with the following matching '\' (else marker)
// index and vice versa, stack-based like getLoops/getConditions.
function getElses(src) {
  var closers = [];
  var table = {};
  Array.prototype.forEach.call(src, function (ch, idx) {
    if (ch === ')') {
      closers.push(idx);
    } else if (ch === '\\') {
      var from = closers.pop();
      table[idx] = from;
      table[from] = idx;
    }
  });
  return table;
}
// Interpreter for the "FuckTheLogic" esolang: executes `prog` over a tape
// of 16-bit cells, reading hex arguments from the space-separated
// `params` string. Returns the accumulated output as an HTML string.
function interpret(prog, params){
  var max_val = 1<<16;//65536=2^16 — cell modulus
  var IterCounter = 1<<14;//16384=2^14 — step budget to stop runaway loops
  var FuckTheLogicdict = [];      // the tape
  FuckTheLogicdict[0] = 0;
  var x = 0;                      // tape pointer
  var l = 0;                      // NOTE(review): unused
  var argi = 0;                   // next argument index for '?'
  // Parse one hex token; non-numeric tokens become 0.
  var filtArgs = function(st){
    if(isNaN(parseInt(st, 16))) return 0;
    else return parseInt(st, 16);
  };
  // Keep only characters in the language's alphabet.
  var filtCode = function(p){
    if('><+-!?{};:/~@()\\'.indexOf(p) >=0) return p;
    else return '';
  };
  params = (params.split(' ')).map(filtArgs);
  console.log('Got arguments:\t' + params);
  var result = '';
  prog = ((prog.split('')).map(filtCode)).join('');
  console.log('Got code:\t' + prog);
  // Precompute matching-bracket tables for loops, conditionals and elses.
  var LoopBlocks = getLoops(prog);
  var CondBlocks = getConditions(prog);
  var ElseBlocks = getElses(prog);
  // NOTE(review): `i` is assigned without var/let — an implicit global.
  for (i = 0; i < prog.length; i++) {
    switch (prog.charAt(i)) {
      case ">":  // move right, zero-initialising new cells
        x++;
        if(FuckTheLogicdict[x]==undefined) FuckTheLogicdict[x] = 0;
        break;
      case "<":  // move left, clamped at cell 0
        x--; if(x<0) x = 0;
        break;
      case "+":
        FuckTheLogicdict[x]++;
        break;
      case "-":
        FuckTheLogicdict[x]--;
        break;
      case ";":  // shift left (double)
        FuckTheLogicdict[x] = FuckTheLogicdict[x] << 1;
        break;
      case ":":  // shift right (halve)
        FuckTheLogicdict[x] = FuckTheLogicdict[x] >> 1;
      // NOTE(review): no break above — ':' falls through to '~' and then
      // to '!'; likewise '~' falls through to '!'. This may be missing
      // breaks rather than intended language semantics — confirm against
      // the language spec before changing.
      case "~":  // copy the cell to the left into the current cell
        if(x>0)
        FuckTheLogicdict[x] = FuckTheLogicdict[x-1];
      case "!":  // emit current cell as a character
        result += String.fromCharCode(FuckTheLogicdict[x]);
        break;
      case "@":  // emit current cell as uppercase hex plus a line break
        result += (FuckTheLogicdict[x].toString(16)).toUpperCase() + "<br/>";
        break;
      case "?":  // read the next argument into the current cell
        FuckTheLogicdict[x] = params[argi];
        argi++;
        break;
      case "{":  // loop start: skip to matching '}' when cell is falsy
        if(!FuckTheLogicdict[x])
        i = LoopBlocks[i];
        break;
      case "}":  // loop end: jump back to matching '{' when cell is truthy
        if(FuckTheLogicdict[x])
        i = LoopBlocks[i];
        break;
      case "(":  // conditional: skip to matching ')' when cell is falsy
        if(!FuckTheLogicdict[x])
        i = CondBlocks[i];
        break;
      case ")":  // end of then-branch: jump past the else marker when truthy
        if(FuckTheLogicdict[x])
        i = ElseBlocks[i];
        break;
    }
    // Wrap the cell into the 16-bit range after every step.
    FuckTheLogicdict[x] %= max_val;
    IterCounter--;
    // Abort (returning partial output) once the step budget is exhausted.
    if(!IterCounter){
      console.log('Iteration overflow');
      console.log('Output:\n'+result.replace(new RegExp('<br/>',"g"),'\n'));
      return result;
    }
  }
  console.log('Output:\n'+result.replace(new RegExp('<br/>',"g"),'\n'));
  return result;
}
// Opens the hidden file picker by programmatically clicking #fileInput.
function showFileInput() {
  var fileInput = document.getElementById("fileInput");
  fileInput.click();
}
// Loads the first selected file as text into the code editor textarea.
function processFiles(files){
  var file = files[0];
  var reader = new FileReader();
  reader.onload = function (e) {
    document.forms[0].codebox.value = e.target.result;
  };
  reader.readAsText(file);
}
// Prepares the current editor content for download by pointing the
// download anchor at a fresh Blob URL.
// NOTE(review): `ww` and `dl` are undeclared (implicit global / element-id
// global); `dl` is presumably the id of a download <a> element — confirm
// against the page markup. Each call also leaks the previous object URL
// (no URL.revokeObjectURL).
function SaveFile(){
  var text = document.forms[0].codebox.value;
  var BlobBlob = new Blob([text], {type : 'text/plain'});
  ww = URL.createObjectURL(BlobBlob);
  dl.href = ww;
}
// Resets the editor, arguments and output areas for a fresh program.
// Returns false so it can cancel default behaviour as an event handler.
function newFile(){
  ['codebox', 'argv', 'output'].forEach(function (area) {
    _clear(area);
  });
  return false;
}
|
# Habitat plan metadata for building MySQL 5.7.17 from source.
pkg_name=mysql
pkg_origin=starkandwayne
pkg_version=5.7.17
pkg_maintainer="Ramon Makkelie <makkelie@starkandwayne.com>"
pkg_license=('GPL-2.0')
# Upstream tarball and its expected SHA-256 checksum.
pkg_source=http://dev.mysql.com/get/Downloads/MySQL-5.7/${pkg_name}-${pkg_version}.tar.gz
pkg_shasum=cebf23e858aee11e354c57d30de7a079754bdc2ef85eb684782458332a4b9651
pkg_upstream_url=https://www.mysql.com/
pkg_description=$(cat << EOF
Starts MySQL with a basic configuration. Configurable at run time:
* root_password: the password for the mysql root user, empty by default
* app_username: the username for an application that will connect to the database server, false by default
* app_password: the password for the app user
* bind: the bind address to listen for connections, default 127.0.0.1
Set the app_username and app_password at runtime to have that user created, it will not be otherwise.
EOF
)
# Runtime dependencies.
pkg_deps=(
  core/bash
  core/coreutils
  core/gawk
  core/gcc-libs
  core/glibc
  core/grep
  core/inetutils
  core/ncurses
  core/openssl
  core/pcre
  core/perl
  core/procps-ng
  core/sed
)
# Build-time-only dependencies.
pkg_build_deps=(
  core/bison
  core/boost159
  core/cmake
  core/diffutils
  core/gcc
  core/make
  core/patch
)
pkg_svc_user="hab"
pkg_bin_dirs=(bin)
pkg_include_dirs=(include)
pkg_lib_dirs=(lib)
# Values other services can bind to: port plus the app user's credentials.
pkg_exports=(
  [port]=port
  [password]=app_password
  [username]=app_username
)
# Configure with CMake against Habitat-provided boost/ncurses/openssl and
# compile; the embedded server variants are disabled.
do_build() {
  cmake . -DLOCAL_BOOST_DIR="$(pkg_path_for core/boost159)" \
    -DBOOST_INCLUDE_DIR="$(pkg_path_for core/boost159)/include" \
    -DWITH_BOOST="$(pkg_path_for core/boost159)" \
    -DCURSES_INCLUDE_PATH="$(pkg_path_for core/ncurses)/include" \
    -DCURSES_LIBRARY="$(pkg_path_for core/ncurses)/lib/libcurses.so" \
    -DWITH_SSL=yes \
    -DOPENSSL_INCLUDE_DIR="$(pkg_path_for core/openssl)/include" \
    -DOPENSSL_LIBRARY="$(pkg_path_for core/openssl)/lib/libssl.so" \
    -DCRYPTO_LIBRARY="$(pkg_path_for core/openssl)/lib/libcrypto.so" \
    -DCMAKE_INSTALL_PREFIX="$pkg_prefix" \
    -DWITH_EMBEDDED_SERVER=no \
    -DWITH_EMBEDDED_SHARED_LIBRARY=no
  make
}
# Standard install, then trim docs/tests/static libs and repoint the two
# Perl scripts at Habitat's perl interpreter.
do_install() {
  do_default_install
  # Remove static libraries, tests, and other things we don't need
  rm -rf "$pkg_prefix/docs" "$pkg_prefix/man" "$pkg_prefix/mysql-test" \
    "$pkg_prefix"/lib/*.a
  fix_interpreter "$pkg_prefix/bin/mysqld_multi" core/perl bin/perl
  fix_interpreter "$pkg_prefix/bin/mysqldumpslow" core/perl bin/perl
}
# Run the CMake-registered test suite.
do_check() {
  ctest
}
|
<reponame>Purlemon/oatpp
/***************************************************************************
*
* Project _____ __ ____ _ _
* ( _ ) /__\ (_ _)_| |_ _| |_
* )(_)( /(__)\ )( (_ _)(_ _)
* (_____)(__)(__)(__) |_| |_|
*
*
* Copyright 2018-present, <NAME> <<EMAIL>>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
***************************************************************************/
#include "Server.hpp"
#include <thread>
#include <chrono>
namespace oatpp { namespace network {
// Server lifecycle states, advanced via setStatus():
// CREATED -> STARTING -> RUNNING -> STOPPING -> DONE.
const v_int32 Server::STATUS_CREATED = 0;
const v_int32 Server::STATUS_STARTING = 1;
const v_int32 Server::STATUS_RUNNING = 2;
const v_int32 Server::STATUS_STOPPING = 3;
const v_int32 Server::STATUS_DONE = 4;
// Constructs a server in the CREATED state; the provider yields incoming
// connections and the handler processes each accepted connection.
Server::Server(const std::shared_ptr<ConnectionProvider> &connectionProvider,
               const std::shared_ptr<ConnectionHandler> &connectionHandler)
  : m_status(STATUS_CREATED)
  , m_connectionProvider(connectionProvider)
  , m_connectionHandler(connectionHandler)
  , m_threaded(false) {}
// This isn't implemented as static since threading is dropped and therefore static isn't needed anymore.
// Accept loop used by run(conditional): m_condition() is consulted before
// each accept and again before dispatching, so the caller can stop the
// loop between connections. Transitions STARTING -> RUNNING on entry and
// ends in DONE.
void Server::conditionalMainLoop() {
  setStatus(STATUS_STARTING, STATUS_RUNNING);
  std::shared_ptr<const std::unordered_map<oatpp::String, oatpp::String>> params;
  while (getStatus() == STATUS_RUNNING) {
    if (m_condition()) {
      auto connectionHandle = m_connectionProvider->get();
      // NOTE(review): this checks connectionHandle.object while mainLoop()
      // below checks connectionHandle directly — confirm both are the
      // intended validity tests for the handle type.
      if (connectionHandle.object) {
        if (getStatus() == STATUS_RUNNING) {
          if (m_condition()) {
            m_connectionHandler->handleConnection(connectionHandle, params /* null params */);
          } else {
            // Condition turned false between accept and dispatch: stop.
            setStatus(STATUS_STOPPING);
          }
        } else {
          OATPP_LOGD("[oatpp::network::server::mainLoop()]", "Error. Server already stopped - closing connection...");
        }
      }
    } else {
      setStatus(STATUS_STOPPING);
    }
  }
  setStatus(STATUS_DONE);
}
// Unconditional accept loop (static so it can be handed to std::thread by
// run(bool)). Blocks in get() for each connection and dispatches it to the
// handler while the status remains RUNNING.
void Server::mainLoop(Server *instance) {
  instance->setStatus(STATUS_STARTING, STATUS_RUNNING);
  std::shared_ptr<const std::unordered_map<oatpp::String, oatpp::String>> params;
  while (instance->getStatus() == STATUS_RUNNING) {
    auto connectionHandle = instance->m_connectionProvider->get();
    if (connectionHandle) {
      if (instance->getStatus() == STATUS_RUNNING) {
        instance->m_connectionHandler->handleConnection(connectionHandle, params /* null params */);
      } else {
        // Status changed while blocked in get(): drop the connection.
        OATPP_LOGD("[oatpp::network::server::mainLoop()]", "Error. Server already stopped - closing connection...");
      }
    }
  }
  instance->setStatus(STATUS_DONE);
}
// Run the accept loop on the CALLING thread. If `conditional` is provided it is
// polled each iteration and the server stops once it returns false.
// Throws if the server was already started. Blocks until the server stops.
void Server::run(std::function<bool()> conditional) {
  std::unique_lock<std::mutex> ul(m_mutex);
  switch (getStatus()) {
    case STATUS_STARTING:
      throw std::runtime_error("[oatpp::network::server::run()] Error. Server already starting");
    case STATUS_RUNNING:
      throw std::runtime_error("[oatpp::network::server::run()] Error. Server already started");
  }
  m_threaded = false;
  setStatus(STATUS_CREATED, STATUS_STARTING);
  if (conditional) {
    m_condition = std::move(conditional);
    // Unlock before entering the (potentially long-lived) loop so stop() can
    // acquire the mutex while the server is running.
    ul.unlock(); // early unlock
    conditionalMainLoop();
  } else {
    ul.unlock();
    mainLoop(this);
  }
}
// Deprecated overload: optionally runs the accept loop on a new std::thread.
// When startAsNewThread is false this blocks the calling thread like run().
void Server::run(bool startAsNewThread) {
  std::unique_lock<std::mutex> ul(m_mutex);
  OATPP_LOGW("[oatpp::network::server::run(bool)]", "Using oatpp::network::server::run(bool) is deprecated and will be removed in the next release. Please implement your own threading (See https://github.com/oatpp/oatpp-threaded-starter).")
  switch (getStatus()) {
    case STATUS_STARTING:
      throw std::runtime_error("[oatpp::network::server::run()] Error. Server already starting");
    case STATUS_RUNNING:
      throw std::runtime_error("[oatpp::network::server::run()] Error. Server already started");
  }
  m_threaded = startAsNewThread;
  setStatus(STATUS_CREATED, STATUS_STARTING);
  if (m_threaded) {
    // Thread is joined later by stop(); the mutex stays held only briefly here.
    m_thread = std::thread(mainLoop, this);
  } else {
    ul.unlock(); // early unlock
    mainLoop(this);
  }
}
// Request the server to stop and, when it was started threaded, join its
// thread. Safe to call multiple times; a never-started server is a no-op.
void Server::stop() {
  std::lock_guard<std::mutex> lg(m_mutex);
  switch (getStatus()) {
    case STATUS_CREATED:
      return;
    case STATUS_STARTING:
    case STATUS_RUNNING:
      setStatus(STATUS_STOPPING);
      break;
  }
  // NOTE(review): the loop only notices STOPPING after its blocking get()
  // returns, so this join can wait for one more incoming connection.
  if (m_threaded && m_thread.joinable()) {
    m_thread.join();
  }
}
/**
 * Atomically transition m_status from expectedStatus to newStatus.
 * @return true when the transition was applied, false when the current
 *         status did not equal expectedStatus.
 */
bool Server::setStatus(v_int32 expectedStatus, v_int32 newStatus) {
  v_int32 expected = expectedStatus;
  // Use compare_exchange_strong: callers invoke this once, not in a retry
  // loop, and the weak variant is allowed to fail spuriously even when the
  // value matches — which would silently drop a valid state transition.
  return m_status.compare_exchange_strong(expected, newStatus);
}
// Unconditionally overwrite the server status.
void Server::setStatus(v_int32 status) {
  m_status.store(status);
}
// Read the current server status (atomic load).
v_int32 Server::getStatus() {
  return m_status.load();
}
// Ensure the accept loop is stopped (and a threaded loop joined) on destruction.
Server::~Server() {
  stop();
}
}}
|
#!/bin/bash
# Integration tests for FMU type conversion: each run pipes fmigo-mpi output
# into check.py, which validates the expected values (args: scale k, offset m).
set -e
FMU="${FMUS_DIR}/tests/typeconvtest/typeconvtest.fmu"
# Simplest test - check that the default output is all zeroes
mpiexec -np 2 fmigo-mpi -t 0.2 $FMU | python check.py
# Check basic type conversion, including float->int truncation
mpiexec -np 2 fmigo-mpi -t 0.2 -p r,0,8,10.9 -c r,0,0,i,0,5 -c r,0,0,b,0,6 $FMU | python check.py
mpiexec -np 2 fmigo-mpi -t 0.2 -p i,0,9,10 -c i,0,1,r,0,4 -c i,0,1,b,0,6 $FMU | python check.py
mpiexec -np 2 fmigo-mpi -t 0.2 -p b,0,10,true -c b,0,2,r,0,4 -c b,0,2,i,0,5 $FMU | python check.py
# Negative values
mpiexec -np 2 fmigo-mpi -t 0.2 -p r,0,8,-10.9 -c r,0,0,i,0,5 -c r,0,0,b,0,6 $FMU | python check.py
mpiexec -np 2 fmigo-mpi -t 0.2 -p i,0,9,-10 -c i,0,1,r,0,4 -c i,0,1,b,0,6 $FMU | python check.py
# Scaling (k=10, m=1)
mpiexec -np 2 fmigo-mpi -t 0.2 -p r,0,8,10.9 -c r,0,0,i,0,5,10,1 -c r,0,0,b,0,6,10,1 $FMU | python check.py 10 1
mpiexec -np 2 fmigo-mpi -t 0.2 -p i,0,9,10 -c i,0,1,r,0,4,10,1 -c i,0,1,b,0,6,10,1 $FMU | python check.py 10 1
mpiexec -np 2 fmigo-mpi -t 0.2 -p b,0,10,true -c b,0,2,r,0,4,10,1 -c b,0,2,i,0,5,10,1 $FMU | python check.py 10 1
echo Type conversion seems to work fine
# The following runs use named variable references (r0/i0/b0, *_in/*_out)
# instead of numeric value references.
mpiexec -np 2 fmigo-mpi -t 0.2 -p 0,r0,10.9 -c 0,r_out,0,i_in,10,1 -c 0,r_out,0,b_in,10,1 $FMU | python check.py 10 1
mpiexec -np 2 fmigo-mpi -t 0.2 -p 0,i0,10 -c 0,i_out,0,r_in,10,1 -c 0,i_out,0,b_in,10,1 $FMU | python check.py 10 1
mpiexec -np 2 fmigo-mpi -t 0.2 -p 0,b0,true -c 0,b_out,0,r_in,10,1 -c 0,b_out,0,i_in,10,1 $FMU | python check.py 10 1
# NOTE(review): this message says "String input" but the runs above exercise
# string-NAMED variable references, not string-typed values — confirm intent.
echo String input seems to work fine
|
var postcss = require('postcss');
var plugin = require('./');
// Process `input` through the plugin under test and assert that exactly
// `output` is produced with zero warnings. Returns the promise so the test
// runner waits for the async processing to finish.
function run(input, output, opts) {
    var processor = postcss([plugin(opts)]);
    return processor.process(input).then(function (result) {
        expect(result.css).toEqual(output);
        expect(result.warnings().length).toBe(0);
    });
}
// A 1-value -ms-flex shorthand omits flex-basis, so the plugin must add the
// explicit -ms-flex-preferred-size fallback.
it('morph -ms-flex with a single number declaration', () => {
    return run(
        'a{ -ms-flex: 1; flex: 1; }',
        'a{ -ms-flex: 1; flex: 1; -ms-flex-preferred-size: auto; }',
        { }
    );
});
// Same for the 2-value shorthand (grow shrink) — basis is still implicit.
it('morph -ms-flex with a double number declaration', () => {
    return run(
        'a{ -ms-flex: 0 1; flex: 1; }',
        'a{ -ms-flex: 0 1; flex: 1; -ms-flex-preferred-size: auto; }',
        { }
    );
});
// A 3-value shorthand already specifies the basis, so no fallback is added.
it('don\'t morph -ms-flex with triple value declaration', () => {
    return run(
        'a{ -ms-flex: 1 1 10px; flex: 1 1 10px; }',
        'a{ -ms-flex: 1 1 10px; flex: 1 1 10px; }',
        { }
    );
});
|
#!/bin/bash
# ==============================================================================
# Copyright (C) 2018-2019 Intel Corporation
#
# SPDX-License-Identifier: MIT
# ==============================================================================
set -e
BASEDIR=$(dirname "$0")/../..
# Bug fix: the variable must be quoted. With GST_SAMPLES_DIR unset,
# `[ -n ${GST_SAMPLES_DIR} ]` expands to `[ -n ]`, which is always true,
# so setup_env.sh was sourced unconditionally.
if [ -n "${GST_SAMPLES_DIR}" ]
then
    source "$BASEDIR/scripts/setup_env.sh"
fi
source "$BASEDIR/scripts/setlocale.sh"
#import GET_MODEL_PATH
source "$BASEDIR/scripts/path_extractor.sh"
# Quoted so a missing or whitespace-containing first argument is detected safely.
if [ -z "${1}" ]; then
    echo "ERROR set path to video"
    echo "Usage: ./vehicle_detection_2sources_cpu.sh <path/to/your/video/sample>"
    exit
fi
FILE=${1}
MODEL=vehicle-license-plate-detection-barrier-0106
DETECT_MODEL_PATH=$(GET_MODEL_PATH $MODEL )
# Note that two pipelines create instances of singleton element 'inf0', so we can specify parameters only in first instance
gst-launch-1.0 --gst-plugin-path ${GST_PLUGIN_PATH} \
filesrc location=$FILE ! decodebin ! video/x-raw ! videoconvert ! \
gvadetect inference-id=inf0 model=$DETECT_MODEL_PATH device=CPU every-nth-frame=1 batch-size=1 ! queue ! \
gvawatermark ! videoconvert ! fpsdisplaysink video-sink=xvimagesink sync=false \
filesrc location=${FILE} ! decodebin ! video/x-raw ! videoconvert ! gvadetect inference-id=inf0 ! \
queue ! gvawatermark ! videoconvert ! fpsdisplaysink video-sink=xvimagesink sync=false
|
// NON-EXECUTABLE CODE (illustrative pseudo-code only)
// Procedural style: data is passed into a free-standing function.
processamento(valor1, valor2, valor3)
// OO style: data and behavior live together on the object.
objeto = {
valor1,
valor2,
valor3,
processamento () {
//...
}
}
objeto.processamento() // The focus shifts to the object itself
// Key OO principles
//1. abstraction: take an object from the real world and model it inside your system
//2. encapsulation: keep implementation details hidden and expose only a simple interface for interacting with the object
//3. inheritance (prototype-based in JS): an object receives attributes and behaviors from another
//4. polymorphism ("many forms"): does not fully apply to plain JS since it is weakly typed; TypeScript is the typed variant
|
<reponame>gunpowder1473/CycleGan_Tensorflow
import tensorflow as tf
def flatten(x):
    """Collapse the spatial dimensions of a 4-D tensor into one axis.

    Reshapes a (batch, height, width, channels) tensor into
    (batch, height*width, channels). Assumes NHWC layout — TODO confirm.
    """
    batch_size, _, _, num_channels = [dim.value for dim in x.get_shape()]
    return tf.reshape(x, shape=[batch_size, -1, num_channels])
def convLayer(net, num_filters, filter_size, name, relu, strides=1, bias=False, pad='REFLECT', Norm='INSTANCE',
              training=True):
    """2-D convolution with optional padding mode, normalization and activation.

    Args:
        net: Input 4-D tensor (NHWC assumed -- TODO confirm against callers).
        num_filters: Number of output channels.
        filter_size: Square kernel size.
        name: Prefix for the variables created by this layer.
        relu: 'RELU' for ReLU, a float for leaky-ReLU slope, or False for linear.
        strides: Spatial stride (applied to both dimensions).
        bias: When True, add a learned per-channel bias.
        pad: One of 'REFLECT', 'SAME', 'VALID'.
        Norm: Flag string; may contain 'SPECTRAL', 'INSTANCE', 'BATCH' and/or
            'NOT' ('NOT' disables instance/batch normalization).
        training: Forwarded to spectral and batch normalization.

    Returns:
        The transformed tensor.
    """
    weights_init = convInit(net, num_filters, filter_size, name=name)
    if 'SPECTRAL' in Norm:
        weights_init = spectral(weights_init, name, training)
    strides_shape = [1, strides, strides, 1]
    if pad == 'REFLECT':
        # Reflection-pad manually, then convolve VALID so output size matches SAME.
        net = tf.pad(net, [[0, 0], [filter_size // 2, filter_size // 2],
                           [filter_size // 2, filter_size // 2], [0, 0]], mode="REFLECT")
        net = tf.nn.conv2d(net, weights_init, strides_shape, padding='VALID')
    elif pad == 'SAME':
        net = tf.nn.conv2d(net, weights_init, strides_shape, padding='SAME')
    elif pad == 'VALID':
        net = tf.nn.conv2d(net, weights_init, strides_shape, padding='VALID')
    if bias:
        net = net + tf.get_variable(name + '_bias', [num_filters])
    if 'NOT' not in Norm:
        if 'INSTANCE' in Norm:
            net = instanceNorm(net, name=name)
        elif 'BATCH' in Norm:
            net = batchNorm(net, training, name=name)
    # Bug fix: the original used `relu is 'RELU'`, an identity comparison with a
    # string literal — implementation-dependent and a SyntaxWarning on 3.8+.
    if relu == 'RELU':
        net = tf.nn.relu(net)
    elif isinstance(relu, float):
        net = tf.nn.leaky_relu(net, relu)
    return net
def residualBlock(net, filter_num, filter_size, name, pad, relu='RELU', Norm='INSTANCE', training=True):
    """Standard two-convolution residual block: returns net + F(net).

    The second convolution is linear (relu=False) so the activation is applied
    only inside the block, before the skip connection is added.
    """
    tmp = convLayer(net, filter_num, filter_size, name, strides=1, relu=relu, pad=pad, Norm=Norm, training=training)
    return net + convLayer(tmp, filter_num, filter_size, name + '_1', strides=1, relu=False, pad=pad, Norm=Norm,
                           training=training)
def newResidualBlock(net, filter_num, filter_size, name, pad, training=True):
    """Pre-activation residual block (BN -> leaky-ReLU -> conv -> conv) + skip.

    Uses spectral weight normalization on both convolutions; the input spatial
    size and channel count must match filter_num for the skip addition.
    """
    x = batchNorm(net, training=training, name=name + '_bn')
    x = tf.nn.leaky_relu(x)
    x = convLayer(x, filter_num, filter_size, pad=pad, strides=1, Norm='SPECTRAL,BATCH', training=training,
                  name=name + '_deconv1', relu=.2, bias=False)
    x = convLayer(x, filter_num, filter_size, pad=pad, Norm='SPECTRAL,NOT', name=name + '_conv1', relu=False,
                  training=training, bias=True)
    return x + net
def residualBlockUp(net, filter_num, filter_size, name, pad, training=True):
    """Upsampling residual block: both the main path and the shortcut are
    upsampled x2 with (spectrally normalized) transposed convolutions so their
    shapes match for the final addition.
    """
    x = batchNorm(net, training=training, name=name + '_bn')
    x = tf.nn.leaky_relu(x)
    x = transposeConv(x, filter_num, filter_size, pad=pad, strides=2, Norm='SPECTRAL,BATCH', training=training,
                      name=name + '_deconv1', relu=.2, bias=False)
    x = convLayer(x, filter_num, filter_size, pad=pad, Norm='SPECTRAL,NOT', name=name + '_conv1', relu=False,
                  training=training, bias=True)
    # Projection shortcut: upsample the input so it can be added to the main path.
    s = transposeConv(net, filter_num, filter_size, strides=2, Norm='SPECTRAL,NOT', pad=pad, name=name + '_deconv2',
                      relu=False, training=training, bias=True)
    return x + s
def residualBlockDown(net, filter_num, filter_size, name, pad, training=True):
    """Downsampling residual block: main path and projection shortcut both use
    stride-2 (spectrally normalized) convolutions so the shapes match for the
    final addition.
    """
    x = batchNorm(net, training=training, name=name)
    x = tf.nn.leaky_relu(x)
    x = convLayer(x, filter_num, filter_size, strides=2, pad=pad, Norm='SPECTRAL,BATCH', training=training,
                  name=name + '_conv1', relu=.2, bias=False)
    x = convLayer(x, filter_num, filter_size, pad=pad, Norm='SPECTRAL,NOT', name=name + '_conv2', relu=False,
                  training=training, bias=True)
    # Projection shortcut: downsample the input to the output resolution/channels.
    s = convLayer(net, filter_num, filter_size, strides=2, pad=pad, Norm='SPECTRAL,NOT', name=name + '_conv3',
                  relu=False, training=training, bias=True)
    return x + s
def instanceNorm(net, name, training=True):
    """Instance normalization: normalize each sample over its spatial axes
    (H, W) per channel, then apply a learned per-channel scale and shift.

    `training` is accepted for signature parity with batchNorm but unused.
    """
    # Only `channels` is used; batch/rows/cols are unpacked for clarity.
    batch, rows, cols, channels = [i.value for i in net.get_shape()]
    var_shape = [channels]
    # Per-instance mean/variance over the spatial dimensions (axes 1 and 2).
    mu, sigma_sq = tf.nn.moments(net, [1, 2], keep_dims=True)
    shift = tf.get_variable(initializer=tf.zeros(var_shape), name=name + "_shift")
    scale = tf.get_variable(shape=var_shape, initializer=tf.random_normal_initializer(1.0, 0.02, dtype=tf.float32),
                            name=name + "_scale")
    # Small constant to avoid division by zero for near-constant features.
    epsilon = 1e-9
    normalized = (net - mu) / (sigma_sq + epsilon) ** (.5)
    return scale * normalized + shift
def batchNorm(x, training, name, decay=0.9):
    """Batch normalization — thin wrapper over tf.layers.batch_normalization.

    Args:
        x: Input tensor.
        training: True to normalize with batch statistics and update the
            moving averages; False to use the stored moving averages.
        name: Layer/variable-scope name.
        decay: Momentum for the moving mean/variance.

    Returns:
        The normalized tensor.

    NOTE(review): tf.layers.batch_normalization places its moving-average
    updates in tf.GraphKeys.UPDATE_OPS; confirm the training loop runs them.
    """
    # A hand-rolled implementation duplicating this call used to live here as
    # commented-out code; it has been removed in favor of the library layer.
    return tf.layers.batch_normalization(x,
                                         momentum=decay,
                                         epsilon=1e-05,
                                         training=training,
                                         name=name)
def spectral(weight, name, is_training, iter=1):
    """Spectral normalization: divide `weight` by an estimate of its largest
    singular value, computed with `iter` rounds of power iteration.

    The auxiliary vector `u` persists across steps (non-trainable variable) and
    is only updated when is_training is true.
    """
    _, _, _, out_channels = [i.value for i in weight.get_shape()]
    # Flatten the kernel to a 2-D matrix so the power iteration applies.
    w = tf.reshape(weight, [-1, out_channels])
    u = tf.get_variable(name + "u", [1, out_channels], initializer=tf.truncated_normal_initializer(),
                        trainable=False)  # [1, output_filters]
    u_norm = u
    v_norm = None
    # Power iteration: alternately estimate the left/right singular vectors.
    for i in range(iter):
        v_ = tf.matmul(u_norm, w, transpose_b=True)  # [1, N]
        v_norm = tf.nn.l2_normalize(v_)
        u_ = tf.matmul(v_norm, w)  # [1, output_filters]
        u_norm = tf.nn.l2_normalize(u_)
    # Approximate largest singular value of w.
    sigma = tf.matmul(tf.matmul(v_norm, w), u_norm, transpose_b=True)  # [1,1]
    w_norm = w / sigma
    # Persist the updated u only during training; at inference keep it frozen.
    with tf.control_dependencies([tf.cond(tf.cast(is_training, tf.bool),
                                          true_fn=lambda: u.assign(u_norm), false_fn=lambda: u.assign(u))]):
        w_norm = tf.reshape(w_norm, [i.value for i in weight.get_shape()])
    return w_norm
def convInit(net, out_channels, filter_size, name, transpose=False):
    """Create a convolution kernel variable sized for `net`.

    For a regular conv the kernel is [k, k, in, out]; for a transposed conv
    TensorFlow expects [k, k, out, in], hence the swapped order.
    """
    _, rows, cols, in_channels = [i.value for i in net.get_shape()]
    if not transpose:
        weights_shape = [filter_size, filter_size, in_channels, out_channels]
    else:
        weights_shape = [filter_size, filter_size, out_channels, in_channels]
    weights_init = tf.get_variable(initializer=tf.truncated_normal(weights_shape, stddev=0.02),
                                   dtype=tf.float32, name=name)
    return weights_init
def transposeConv(net, num_filters, filter_size, strides, name, relu, pad="VALID", Norm='INSTANCE', training=True,
                  bias=True):
    """Transposed (fractionally strided) 2-D convolution with optional
    normalization and activation; mirrors convLayer's options.

    Args:
        net: Input 4-D tensor (NHWC assumed -- TODO confirm against callers).
        num_filters: Number of output channels.
        filter_size: Square kernel size.
        strides: Upsampling factor.
        name: Variable name prefix.
        relu: 'RELU' for ReLU, a float for leaky-ReLU slope, or False for linear.
        pad: 'SAME' or 'VALID'; VALID grows the output by the kernel overlap.
        Norm: Flag string ('SPECTRAL', 'INSTANCE', 'BATCH', 'NOT'; see convLayer).
        training: Forwarded to spectral and batch normalization.
        bias: When True, add a learned per-channel bias.

    Returns:
        The upsampled tensor.
    """
    weights_init = convInit(net, num_filters, filter_size, transpose=True, name=name)
    if 'SPECTRAL' in Norm:
        weights_init = spectral(weights_init, name, training)
    batch_size, rows, cols, _ = [i.value for i in net.get_shape()]
    # conv2d_transpose requires the output shape to be given explicitly.
    if pad == 'SAME':
        new_shape = [batch_size, rows * strides, cols * strides, num_filters]
    else:
        new_shape = [batch_size, rows * strides + max(filter_size - strides, 0),
                     cols * strides + max(filter_size - strides, 0), num_filters]
    strides_shape = [1, strides, strides, 1]
    net = tf.nn.conv2d_transpose(net, weights_init, new_shape, strides_shape, padding=pad)
    if bias:
        net = net + tf.get_variable(name + '_bias', [num_filters])
    if 'NOT' not in Norm:
        if 'INSTANCE' in Norm:
            net = instanceNorm(net, name=name)
        elif 'BATCH' in Norm:
            net = batchNorm(net, training, name=name)
    # Bug fix: the original used `relu is 'RELU'`, an identity comparison with a
    # string literal — implementation-dependent and a SyntaxWarning on 3.8+.
    if relu == 'RELU':
        net = tf.nn.relu(net)
    elif isinstance(relu, float):
        net = tf.nn.leaky_relu(net, relu)
    return net
def resizeConv2D(net, num_filters, filter_size, name, relu, strides=1, bias=False, pad='VALID', Norm='INSTANCE',
                 training=True):
    '''
    An alternative to transposed convolution where we first resize, then convolve.
    See http://distill.pub/2016/deconv-checkerboard/
    For some reason the shape needs to be statically known for gradient propagation
    through tf.image.resize_images, but we only know that for fixed image size, so we
    plumb through a "training" argument
    '''
    # Here `strides` acts as the upsampling factor for the resize step; the
    # convolution itself always runs with stride 1.
    height = net.get_shape()[1].value
    width = net.get_shape()[2].value
    new_height = int(height * strides)
    new_width = int(width * strides)
    # Nearest-neighbor resize avoids the checkerboard artifacts of deconvolution.
    net_resized = tf.image.resize_images(net, [new_height, new_width], tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    return convLayer(net_resized, num_filters=num_filters, filter_size=filter_size, name=name, strides=1,
                     bias=bias, relu=relu, pad=pad, Norm=Norm, training=training)
def attention(net, num_filters, is_training, name):
    """Self-attention block (SAGAN-style): computes an attention map over all
    spatial positions and mixes it into the input via a learned gate `gamma`.

    gamma starts at 0, so the block is initially an identity mapping.
    """
    # f/g form the query/key projections (reduced channels), h the value projection.
    f = convLayer(net, num_filters // 8, filter_size=1, strides=1, bias=True, pad='VALID', training=is_training,
                  name=name + '_f', Norm='SPECTRAL,NOT', relu=False)
    g = convLayer(net, num_filters // 8, filter_size=1, strides=1, bias=True, pad='VALID', training=is_training,
                  name=name + '_g', Norm='SPECTRAL,NOT', relu=False)
    h = convLayer(net, num_filters, filter_size=1, strides=1, bias=True, pad='VALID', training=is_training,
                  name=name + '_h', Norm='SPECTRAL,NOT', relu=False)
    f_flatten = flatten(f)
    g_flatten = flatten(g)
    # Pairwise similarity between all spatial positions.
    s = tf.matmul(g_flatten, f_flatten, transpose_b=True)  # [bs, N, N]
    beta = tf.nn.softmax(s, axis=-1)  # attention map
    o = tf.matmul(beta, flatten(h))  # [bs, N, N]*[bs, N, c]->[bs, N, c]
    gamma = tf.get_variable(name + "gamma", [1], initializer=tf.constant_initializer(0.0))
    o = tf.reshape(o, shape=[i.value for i in net.get_shape()])  # [bs, h, w, c]
    # Residual mix: gamma is learned, starting from pure identity.
    net = gamma * o + net
    return net
|
<reponame>Ancool/headers
#import "PSListController.h"
|
<reponame>mortennobel/KickJS_website
// Destroy every game object in the active scene that has a MeshRenderer.
// Iterates backwards — presumably because destroy() mutates the scene's
// object list, which would skip entries in a forward loop; verify in KICK docs.
function destroyAllMeshRenderersInScene(){
    var scene = engine.activeScene;
    for (var i=scene.getNumberOfGameObjects()-1;i>=0;i--){
        var gameObject = scene.getGameObject(i);
        if (gameObject.getComponentOfType(KICK.scene.MeshRenderer)){
            gameObject.destroy();
        }
    }
}
// Import a COLLADA document into the scene, replacing any previous meshes.
// The duck sample models are authored at a much larger scale, so they are
// scaled down; every imported mesh gets the shared duckMaterial.
function load(xmlDom,url){
    destroyAllMeshRenderersInScene();
    var gameObjectsCreated = KICK.importer.ColladaImporter.loadCollada(xmlDom,engine,null,true);
    for (var i=0;i<gameObjectsCreated.length;i++){
        var gameObject = gameObjectsCreated[i];
        var isDuck = url==="duck.dae" || url==="duck_triangulate.dae";
        if (isDuck){
            gameObject.transform.localScale = [0.01,0.01,0.01];
        }
        var meshRenderer = gameObject.getComponentOfType(KICK.scene.MeshRenderer);
        if (meshRenderer){
            meshRenderer.material = duckMaterial;
        }
    }
}
// Fetch a COLLADA file by URL (async XHR) and hand the parsed XML to load().
function loadCollada(url){
    var oReq = new XMLHttpRequest();
    function handler()
    {
        // readyState 4 = request complete; only import on HTTP 200.
        if (oReq.readyState == 4 /* complete */) {
            if (oReq.status == 200) {
                var xmlDom = oReq.responseXML;
                load(xmlDom,url);
            }
        }
    }
    oReq.open("GET", url, true);
    oReq.onreadystatechange = handler;
    oReq.send();
}
// Shared materials, assigned in initKick().
var material;
var duckMaterial;
// Button handlers: load the bundled COLLADA sample models.
function duckClicked(){
    loadCollada("duck.dae");
}
function cubeClicked(){
    loadCollada("cube.dae");
}
// Read a user-selected COLLADA file (from an <input type="file">) as text,
// parse it as XML and import it into the scene.
function loadClicked(file){
    var reader = new FileReader();
    reader.onload = function(event) {
        var txt = event.target.result;
        var parser=new DOMParser();
        // Bug fix: File.fileName was a non-standard property removed from the
        // File API long ago (always undefined in modern browsers); the
        // standard property is File.name.
        load(parser.parseFromString(txt,"text/xml"),file.name);
    };
    reader.onerror = function() {
        alert("Error reading file");
    };
    reader.readAsText(file);
}
// Globals initialized by initKick().
var engine;
var meshRenderer;
// Replace the displayed mesh using one of the KICK mesh factory functions.
function setMesh(meshFactoryFunc,subdivisions){
    meshRenderer.mesh = meshFactoryFunc(engine,subdivisions);
}
// Build a material from the GLSL sources held in two <textarea>/<input>
// elements. Returns undefined if the current mesh lacks attributes the
// shader requires.
// NOTE(review): the local `material` shadows the module-level `material`
// variable — confirm that is intentional.
function createMaterial(vertexShaderId, fragmentShaderId){
    var vs = document.getElementById(vertexShaderId).value;
    var fs = document.getElementById(fragmentShaderId).value;
    var shader = new KICK.material.Shader(engine);
    shader.vertexShaderSrc = vs;
    shader.fragmentShaderSrc = fs;
    shader.errorLog = console.log;
    shader.updateShader();
    // Verify the mesh supplies every vertex attribute the shader declares.
    var missingAttributes = meshRenderer.mesh.verify(shader);
    if (missingAttributes){
        console.log("Missing attributes in mesh "+JSON.stringify(missingAttributes));
        return;
    }
    var material = new KICK.material.Material({
        name:"Some material",
        shader:shader
    });
    return material;
}
// Recompute the current mesh's vertex normals and re-upload its vertex data.
function recalculateNormals(){
    var currentMesh = meshRenderer.mesh;
    currentMesh.recalculateNormals();
    currentMesh.updateData();
}
// Recompute the current mesh's tangents and re-upload its vertex data.
function recalculateTangents(){
    var currentMesh = meshRenderer.mesh;
    currentMesh.recalculateTangents();
    currentMesh.updateData();
}
// Attach a per-frame update component that orbits the game object (the
// camera) around the scene center on a radius-5 circle while bobbing on Y,
// keeping it turned toward the center.
function addRotatorComponent(gameObject){
    var time = engine.time,
        transform = gameObject.transform,
        rotationSpeed = 0.001,
        translation = transform.localPosition,
        rotVec = transform.localRotationEuler,
        radius = 5,
        radianToDegree = KICK.core.Constants._RADIAN_TO_DEGREE,
        res = document.getElementById("res"); // NOTE(review): unused — confirm and remove.
    gameObject.addComponent({
        update: function(){
            var timeTotal = time.time,
                rot = timeTotal*rotationSpeed;
            if (window.rot){ // todo remove - let you easily control rotation from console
                rot = window.rot*KICK.core.Constants._RADIAN_TO_DEGREE;
                transform.localRotationEuler = window.rot;
                transform.localPosition = window.pos;
            } else {
                // Circular orbit in XZ, gentle sine bob on Y, yaw follows the orbit.
                translation[0] = Math.sin(rot)*radius;
                translation[1] = Math.sin(rot*3);
                translation[2] = Math.cos(rot)*radius;
                rotVec[1] = rot*radianToDegree;
                transform.localRotationEuler = rotVec;
                transform.localPosition = translation;
            }
        }
    });
}
// Bind a placeholder texture to duckMaterial's sampler, then swap in the
// real duck texture asynchronously once the image finishes loading.
function initDuckTexture(){
    var texture = new KICK.texture.Texture(engine);
    texture.setTemporaryTexture();
    duckMaterial.uniforms.tex = {
        value:texture,
        type: KICK.core.Constants.GL_SAMPLER_2D
    };
    var image = new Image();
    var imageName = "duckCM.jpg";
    image.onload = function() {
        texture.setImage(image, imageName);
        console.log("Duck image loaded");
    };
    image.src = imageName;
}
// Add a dim ambient light plus one white directional light to the scene.
function initLights(){
    var ambientlightGameObject = engine.activeScene.createGameObject();
    ambientlightGameObject.name = "ambient light";
    var ambientLight = new KICK.scene.Light({type :KICK.core.Constants._LIGHT_TYPE_AMBIENT});
    ambientLight.color = [0.1,0.1,0.1,1];
    ambientlightGameObject.addComponent(ambientLight);
    var lightGameObject = engine.activeScene.createGameObject();
    lightGameObject.name = "directional light";
    var light = new KICK.scene.Light(
        {
            type:KICK.core.Constants._LIGHT_TYPE_DIRECTIONAL,
            color:[1,1,1,1],
            intensity:1
        }
    );
    lightGameObject.transform.position = [1,1,1];
    lightGameObject.addComponent(light);
}
// Bootstrap the KickJS engine on the page canvas: create an orbiting camera,
// the lights, the materials, and a default UV-sphere mesh with duckMaterial.
function initKick() {
    engine = new KICK.core.Engine('canvas',{
        enableDebugContext: true
    });
    var cameraObject = engine.activeScene.createGameObject();
    cameraObject.name = "Camera";
    var camera = new KICK.scene.Camera({
        clearColor: [0,0,0,1],
        fieldOfView:60
    });
    cameraObject.addComponent(camera);
    addRotatorComponent(cameraObject);
    var gameObject = engine.activeScene.createGameObject();
    gameObject.name = "Mesh";
    meshRenderer = new KICK.scene.MeshRenderer();
    meshRenderer.mesh = engine.resourceManager.getMesh("kickjs://mesh/uvsphere/?radius=0.5");
    // Both materials are built now; duckMaterial is the one shown by default.
    material = createMaterial('vertexShaderColor','fragmentShader');
    duckMaterial = createMaterial('vertexShaderColorImg','fragmentShaderImg');
    meshRenderer.material = duckMaterial;
    initDuckTexture();
    initLights();
    gameObject.addComponent(meshRenderer);
}
// Toggle the engine's pause state and update the button label to match
// (`this` is the clicked button element).
function pauseResume(){
    engine.paused = !engine.paused;
    this.innerHTML = engine.paused? "Play":"Pause";
}
// Entry point: start the engine once the page has loaded and wire up the UI.
window.addEventListener("load",function(){
    initKick();
    document.getElementById("duckButton").addEventListener("click", duckClicked,false);
    document.getElementById("cubeButton").addEventListener("click", cubeClicked,false);
    document.getElementById("pauseButton").addEventListener("click", pauseResume,false);
    document.getElementById("file").onchange = function() {
        loadClicked(this.files[0]);
    };
},false);
|
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
import org.scalajs.sbtplugin.cross.CrossProject
import org.scalajs.sbtplugin.{AbstractJSDep, ScalaJSPlugin}
import sbt.Keys._
import sbt._
import sbt.complete.Parser
//import scoverage.ScoverageKeys.coverageExcludedPackages
// sbt build definition for the Atlast project: shared compiler settings,
// a helper to disable tests on the aggregate project, and the two projects.
object AtlastBuild {
  // Settings shared by every module: Typelevel Scala 2.12.1 with strict
  // compiler flags and the kind-projector plugin.
  val baseSettings = Seq(
    version := "0.0.1",
    scalaVersion := "2.12.1",
    scalaOrganization := "org.typelevel",
    scalacOptions ++= Seq(
      "-encoding", "UTF-8",
      "-Xlint",
      "-deprecation",
      "-feature",
      "-language:implicitConversions",
      "-language:higherKinds",
      "-language:existentials",
      "-unchecked",
      "-Xfatal-warnings",
      "-Yno-adapted-args",
      "-Ywarn-unused-import",
      "-Ywarn-adapted-args",
      "-Ywarn-inaccessible",
      "-Ywarn-infer-any",
      "-Ywarn-nullary-override",
      "-Yinduction-heuristics",
      "-Ypartial-unification",
      "-Yliteral-types",
      "-Ywarn-nullary-unit",
      "-Xfuture"
    ),
    scalacOptions in Compile += "-Ywarn-value-discard",
    persistLauncher in Compile := true,
    persistLauncher in Test := false,
    Dependencies.kindProjector
  )
  // amazingly hard to do
  // An input task that accepts any arguments and does nothing — used below so
  // testQuick/testOnly become harmless no-ops on the root project.
  def emptyInputTask: Def.Initialize[InputTask[Unit]] =
    InputTask.createDyn[String, Unit](
      InputTask.parserAsInput(
        Parser.zeroOrMore(
          Parser.charClass(_ => true)).map(_.mkString))
    )(Def.task { (_: String) => Def.task(()) })
  // Turn all test-related tasks into no-ops (tests run in the sub-projects).
  private val disableTests: Seq[Def.Setting[_]] = Seq(
    test in Test := (),
    testQuick in Test := emptyInputTask.inputTaskValue,
    testOnly in Test := emptyInputTask.inputTaskValue
  )
  // The actual library code lives in core.
  lazy val core: Project = project.in(file("core"))
    .settings(baseSettings: _*)
    .settings(Dependencies.cats: _*)
    .settings(Dependencies.scalatest: _*)
  // Root aggregate: builds core, with its own tests disabled.
  lazy val atlast: Project = project.in(file("."))
    .aggregate(core)
    .settings(Defaults.projectCore)
    .settings(baseSettings: _*)
    .settings(disableTests: _*)
}
|
import Foundation
/// Immutable value describing a single course.
struct CoursesModel {
    let id: Int
    let name: String
    let date: Date

    // Other properties and methods
}
class CoursesService: CoursesServiceProtocol {
    /// Asynchronously fetches the courses for the given date.
    /// - Parameters:
    ///   - onDate: Date to fetch courses for; `nil` produces a failure result.
    ///   - completion: Invoked on a background queue with the result and a
    ///     human-readable status message.
    func getCourses(onDate: Date?, completion: @escaping ((Result<[CoursesModel], Error>, String) -> Void)) {
        // Simulate asynchronous network request
        DispatchQueue.global().async {
            // Replace with actual network request to retrieve courses based on the provided date
            if let date = onDate {
                // Bug fix: inside an @escaping closure Swift requires explicit
                // `self.` to call an instance method; the bare call did not compile.
                let coursesData: [CoursesModel] = self.fetchCoursesFromServer(forDate: date)
                completion(.success(coursesData), "Courses retrieved successfully")
            } else {
                // Handle error for missing date
                let error = NSError(domain: "com.example.CoursesService", code: 400, userInfo: [NSLocalizedDescriptionKey: "Invalid date provided"])
                completion(.failure(error), "Error: Invalid date provided")
            }
        }
    }

    /// Placeholder for the real network call; returns canned courses for `date`.
    private func fetchCoursesFromServer(forDate date: Date) -> [CoursesModel] {
        let courses: [CoursesModel] = [
            CoursesModel(id: 1, name: "Course A", date: date),
            CoursesModel(id: 2, name: "Course B", date: date),
            CoursesModel(id: 3, name: "Course C", date: date)
        ]
        return courses
    }
}
// Usage example
// Fetch today's courses and print either the list or the error, together
// with the service's status message (delivered on a background queue).
let coursesService = CoursesService()
let currentDate = Date()
coursesService.getCourses(onDate: currentDate) { result, message in
    switch result {
    case .success(let courses):
        print("Courses retrieved: \(courses)")
        print("Message: \(message)")
    case .failure(let error):
        print("Error: \(error.localizedDescription)")
        print("Message: \(message)")
    }
}
<reponame>xzfn/toy<gh_stars>0
#include "script_runner.h"
#define SOL_ALL_SAFETIES_ON 1
#include <sol/sol.hpp>
#include <pybind11/eval.h>
namespace py = pybind11;
#include "lua_util.h"
// Execute a Lua chunk in the process-wide Lua state (from luautil).
void ScriptRunner::lua_script(std::string code) {
  sol::state_view lua(luautil::get_global_state());
  lua.script(code);
}
// Execute a Python snippet via pybind11 in the embedded interpreter's
// current global scope.
void ScriptRunner::py_script(std::string code) {
  py::exec(code);
}
|
def count_character(string, character):
    """Count how many characters of `string` equal `character`.

    Args:
        string: Text to scan.
        character: The character to count. A multi-character string never
            equals a single character, so it yields 0 (matching the original
            per-character comparison semantics).

    Returns:
        Number of positions in `string` equal to `character`.
    """
    # Generator expression replaces the manual counter loop.
    return sum(1 for char in string if char == character)
# Demo: count occurrences of "l" in a sample string (expected: 3).
string = "Hello, world!"
character = "l"
count = count_character(string, character)
print(f"The character {character} appears {count} times in the string.")
# The Book of Ruby - http://www.sapphiresteel.com
# Print aClass followed by each of its ancestors, recursing up the
# superclass chain until the root (whose superclass is nil) is reached.
def showFamily( aClass )
  return if aClass.nil?
  puts( aClass.to_s )
  showFamily( aClass.superclass )
end
# Deliberately raise ZeroDivisionError, then print the exception and the
# full class ancestry of its type.
begin
  x = 1/0
rescue Exception => exc
  x = 0
  puts( exc )
  puts( "Family Tree of this exception..." )
  showFamily( exc.class )
end
|
class Stack:
    """LIFO stack backed by a Python list; the top is the end of the list."""

    def __init__(self):
        self.items = []

    def push(self, item):
        """Place item on top of the stack."""
        self.items.append(item)

    def pop(self):
        """Remove and return the top item; raise IndexError when empty."""
        if self.is_empty():
            raise IndexError("Cannot pop from an empty stack")
        return self.items.pop()

    def peek(self):
        """Return the top item without removing it, or None when empty.

        NOTE(review): peek() returns None on empty while pop() raises —
        asymmetric, but preserved for compatibility with existing callers.
        """
        return None if self.is_empty() else self.items[-1]

    def is_empty(self):
        """Return True when the stack holds no items."""
        return len(self.items) == 0
/****************************************************************************
*
* Copyright (c) 2020 Vivante Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
*****************************************************************************/
/** @file */
#ifndef _VSI_NN_UTIL_H
#define _VSI_NN_UTIL_H
/*-------------------------------------------
Includes
-------------------------------------------*/
#include "vsi_nn_platform.h"
#include "vsi_nn_tensor.h"
#include "vsi_nn_types.h"
#include "vsi_nn_context.h"
#ifdef __cplusplus
extern "C" {
#endif
/*-------------------------------------------
Macros and Variables
-------------------------------------------*/
/* Number of elements in a statically sized array. */
#ifndef _cnt_of_array
#define _cnt_of_array( arr ) (sizeof( arr )/sizeof( arr[0] ))
#endif
/* free() a pointer and NULL it; no-op for NULL pointers. */
#define vsi_nn_safe_free( _PTR ) if( _PTR ){ \
    free( _PTR ); _PTR = NULL; }
/* Sentinel value terminating variadic argument lists read by FOREACH_ARGS. */
#define END_OF_VARIADIC_ARGUMENTS 0xbadcaffe
/* Iterate va_arg values of _arg_type into _next until the sentinel appears. */
#define FOREACH_ARGS(_args, _next, _arg_type) \
    while(((_arg_type)END_OF_VARIADIC_ARGUMENTS) != (_next = va_arg(_args, _arg_type)))
/*-------------------------------------------
Functions
-------------------------------------------*/
/**
* Load binary data from file
* Load binary data from file, it will malloc the buffer to store
* the data, user need to free it with vsi_nn_Free().
* @see vsi_nn_Free
*
* @param[in] filename Binary data file path.
* @param[out] sz Size(bytes) of data.
*
* @return Data buffer on success, or NULL otherwise.
*/
OVXLIB_API uint8_t * vsi_nn_LoadBinaryData
(
const char * filename,
uint32_t * sz
);
/**
 * Compute per-dimension strides for the tensor described by attr.
 *
 * @param[in]  attr   Tensor attributes (shape, dim count, dtype).
 * @param[out] stride Receives one stride per dimension.
 * @return Total size of the tensor. NOTE(review): units (bytes vs elements)
 *         are not visible from this header — confirm in the implementation.
 */
OVXLIB_API uint32_t vsi_nn_GetStrideSize
    (
    vsi_nn_tensor_attr_t * attr,
    uint32_t * stride
    );
/**
 * Same as vsi_nn_GetStrideSize, but takes the raw shape/dim/type instead of
 * a full attribute structure.
 */
OVXLIB_API uint32_t vsi_nn_GetStrideSizeBySize
    (
    uint32_t * size,
    uint32_t dim_num,
    vsi_nn_type_e type,
    uint32_t * stride
    );
/**
 * Total byte count of a tensor with the given shape, dim count and dtype.
 */
OVXLIB_API uint32_t vsi_nn_GetTotalBytesBySize
    (
    uint32_t * size,
    uint32_t dim_num,
    vsi_nn_type_e type
    );
/**
* Convert data to float32
* Convert data from any type to float32.
*
* @param[in] data The scalar data address.
* @param[in] type Data type.
*
* @return Converted float32 data.
*/
OVXLIB_API float vsi_nn_DataAsFloat32
(
uint8_t * data,
vsi_nn_type_e type
);
OVXLIB_API void vsi_nn_UpdateTensorDims
(
vsi_nn_tensor_attr_t * attr
);
OVXLIB_API uint32_t vsi_nn_ComputeFilterSize
(
uint32_t i_size,
uint32_t ksize,
uint32_t * pad,
uint32_t stride,
uint32_t dilation,
vsi_nn_round_type_e rounding
);
OVXLIB_API void vsi_nn_InitTensorsId
(
vsi_nn_tensor_id_t * ids,
int num
);
OVXLIB_API void vsi_nn_ComputePadWithPadType
(
uint32_t * in_shape,
uint32_t in_dim_num,
uint32_t * ksize,
uint32_t * stride,
vsi_nn_pad_e pad_type,
vsi_nn_round_type_e rounding,
uint32_t * out_pad
);
OVXLIB_API void vsi_nn_ComputePadWithPadTypeForConv1D
(
uint32_t * in_shape,
uint32_t in_dim_num,
uint32_t * ksize,
uint32_t * stride,
vsi_nn_pad_e pad_type,
vsi_nn_round_type_e rounding,
uint32_t * out_pad
);
OVXLIB_API void vsi_nn_GetPadForOvx
(
uint32_t * in_pad,
uint32_t * out_pad
);
OVXLIB_API vsi_bool vsi_nn_CreateTensorGroup
(
vsi_nn_graph_t * graph,
vsi_nn_tensor_t * in_tensor,
uint32_t axis,
vsi_nn_tensor_t ** out_tensors,
uint32_t group_number
);
OVXLIB_API uint32_t vsi_nn_ShapeToString
(
uint32_t * shape,
uint32_t dim_num,
char * buf,
uint32_t buf_sz,
vsi_bool for_print
);
OVXLIB_API int32_t vsi_nn_Access
(
const char *path,
int32_t mode
);
OVXLIB_API int32_t vsi_nn_Mkdir
(
const char *path,
int32_t mode
);
OVXLIB_API vsi_bool vsi_nn_CheckFilePath
(
const char *path
);
OVXLIB_API void vsi_nn_GetFP32MultiAndPostShift
(
vx_float32 mult,
vx_uint16 *M0,
vx_int8 *N
);
/**
* Malloc aligned buffer
* Malloc address and size aligned buffer.
*
* @param[in] mem_size Buffer size to malloc.
* @param[in] align_start_size Address aligned bytes.
* @param[in] align_block_size Buffer size aligned bytes.
*
* @return The aligned buffer address on success, or NULL otherwise.
*/
OVXLIB_API uint8_t * vsi_nn_MallocAlignedBuffer
(
uint32_t mem_size,
uint32_t align_start_size,
uint32_t align_block_size
);
/**
* Free aligned buffer
* Free aligend buffer malloc with vsi_nn_MallocAlignedBuffer().
*
* @param[in] handle Buffer handle to free.
* @see vsi_nn_MallocAlignedBuffer
*/
OVXLIB_API void vsi_nn_FreeAlignedBuffer
(
uint8_t* handle
);
OVXLIB_API vsi_bool vsi_nn_IsBufferAligned
(
uint8_t * buf,
uint32_t align_start_size
);
OVXLIB_API void vsi_nn_FormatToString
(
vsi_nn_tensor_t *tensor,
char *buf,
uint32_t buf_sz
);
OVXLIB_API const char* vsi_nn_DescribeStatus
(
vsi_status status
);
uint32_t vsi_nn_compute_filter_shape
(
vsi_nn_pad_e padding_type,
uint32_t image_size,
uint32_t ksize,
uint32_t stride,
uint32_t dilation_rate
);
void vsi_nn_compute_padding
(
uint32_t * in_shape,
uint32_t * ksize,
uint32_t * stride,
uint32_t * dilation,
vsi_nn_pad_e pad_type,
uint32_t * out_pad
);
void vsi_nn_compute_padding_conv1d
(
uint32_t * in_shape,
uint32_t * ksize,
uint32_t * stride,
uint32_t * dilation,
vsi_nn_pad_e pad_type,
uint32_t * out_pad
);
void vsi_nn_OptimizedEltOPShape
(
vsi_nn_tensor_t * input,
uint32_t sizes[VSI_NN_MAX_DIM_NUM],
uint32_t * num_of_dims
);
vsi_bool vsi_nn_OptimizedEltWiseOPShape
(
vsi_nn_tensor_t * input0,
vsi_nn_tensor_t * input1,
vsi_nn_tensor_t * output,
uint32_t sizes0[VSI_NN_MAX_DIM_NUM],
uint32_t sizes1[VSI_NN_MAX_DIM_NUM],
uint32_t sizes2[VSI_NN_MAX_DIM_NUM],
uint32_t * dim_num
);
vsi_bool vsi_nn_IsEVISFeatureAvaiable
(
vsi_nn_context_t context
);
int32_t vsi_nn_compareVersion
(
vsi_nn_graph_t * graph,
uint32_t version_major,
uint32_t version_minor,
uint32_t version_patch
);
typedef uint32_t(*comp_func)(void* data, int32_t left, int32_t right);
/**
* the meta function for sort/partial sort
* This function is the key meta function of qsort, which can be used in sort/partial sort.
* But you can NOT use this function directly to sort/partial sort.
* This function do NOT sort data itself, but sort its index.
*
* @param[in] buffer of data which will be sorted.
* @param[in] the left(start) index of data.
* @param[in] the right(end) index of data.
* @param[in] compare function. the meaning of return value is as same as std::sort.
* @param[in] recursively execute vsi_nn_partition.
* @param[out] the sorted index of data.
*/
OVXLIB_API int32_t vsi_nn_partition
(
void* data,
int32_t left,
int32_t right,
comp_func func,
vsi_bool is_recursion,
uint32_t* indices
);
/**
 * Reorder tensors
 *
 * Copies tensor pointers from `tensors` into `out_tensors` following the
 * permutation in `order`, i.e. out_tensors[i] = tensors[order[i]].
 *
 * @param[in] tensors Tensor list to reorder.
 * @param[in] order New orders (must contain `num` valid indices).
 * @param[in] num Number of tensors.
 * @param[out] out_tensors Ordered tensors
 * */
static inline void vsi_nn_reorder_tensor
    (
    vsi_nn_tensor_t** tensors,
    const int32_t* order,
    size_t num,
    vsi_nn_tensor_t** out_tensors
    )
{
    size_t idx = 0;
    while( idx < num )
    {
        out_tensors[idx] = tensors[order[idx]];
        idx++;
    }
}
void vsi_nn_print_int_array( int32_t* array, size_t size );
float vsi_nn_activation
(
float value,
vsi_nn_activation_e activation
);
#ifdef __cplusplus
}
#endif
#endif
|
<filename>node_modules/webcrypto-core/build/types/aes/ecb.d.ts<gh_stars>1-10
import { KeyUsages } from "../types";
import { AesProvider } from "./base";
/**
 * Base provider for the non-standard "AES-ECB" algorithm.
 * Declares the algorithm name and key usages; concrete subclasses supply
 * the actual cipher operations via onEncrypt/onDecrypt.
 */
export declare abstract class AesEcbProvider extends AesProvider {
    readonly name = "AES-ECB";
    usages: KeyUsages;
    /** Low-level encrypt hook implemented by a concrete crypto backend. */
    abstract onEncrypt(algorithm: Algorithm, key: CryptoKey, data: ArrayBuffer): Promise<ArrayBuffer>;
    /** Low-level decrypt hook implemented by a concrete crypto backend. */
    abstract onDecrypt(algorithm: Algorithm, key: CryptoKey, data: ArrayBuffer): Promise<ArrayBuffer>;
}
|
<gh_stars>0
""" Alarm handler module."""
from django.utils import timezone
from agents.models import Alarm
def add_alarm(profile, alarm_type, test):
    """Raise or clear an active alarm of ``alarm_type`` for ``profile``.

    ``test`` is the test outcome: a falsy value (test failed) raises a new
    alarm unless one of this type is already active; a truthy value (test
    passed) deactivates the currently active alarm, if any.
    """
    try:
        active_alarm = Alarm.objects.get(
            profile=profile, is_active=True,
            alarm_type=alarm_type
        )
    except Alarm.DoesNotExist:
        active_alarm = None

    if test:
        # Test passed: clear the alarm if one is currently active.
        if active_alarm:
            active_alarm.is_active = False
            active_alarm.save()
        return

    # Test failed: raise a new alarm unless one is already active.
    if active_alarm is None:
        new_alarm = Alarm()
        new_alarm.profile = profile
        new_alarm.event_time = timezone.now()
        new_alarm.alarm_type = alarm_type
        new_alarm.save()
def add_availability_alarm(profile, availability_test_passed):
    # Convenience wrapper: raise/clear an 'Availability Failure' alarm
    # based on the availability test outcome.
    add_alarm(
        profile, 'Availability Failure', availability_test_passed
    )
def add_performance_alarm(profile, performance_test_passed, total_time):
    # Convenience wrapper: raise/clear a 'Performance Failure' alarm
    # based on the performance test outcome.
    # NOTE(review): `total_time` is accepted but never used here — confirm
    # whether it should be recorded on the alarm.
    add_alarm(
        profile, 'Performance Failure', performance_test_passed
    )
|
package com.example.corespringsecurity.domain.entity;
import javax.persistence.*;
/**
 * Join entity mapping a {@link Role} to a {@link Resources} row
 * (table {@code role_resources}).
 *
 * <p>Read accessors are provided so the association can actually be
 * consumed by callers; the original class exposed no way to read its
 * fields.</p>
 */
@Entity
@Table(name = "role_resources")
public class RoleResource {

    /** Surrogate primary key, database-generated. */
    @Id
    @Column(name = "id")
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    /** Owning role; fetched lazily. */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "role_id", foreignKey = @ForeignKey(name = "FK__role_resources__role_id"))
    private Role role;

    /** Linked resource; fetched lazily. */
    @ManyToOne(fetch = FetchType.LAZY)
    @JoinColumn(name = "resource_id", foreignKey = @ForeignKey(name = "FK__role_resources__resource_id"))
    private Resources resource;

    /** @return surrogate primary key, or {@code null} if not yet persisted */
    public Long getId() {
        return id;
    }

    /** @return owning role (lazy — access inside an open persistence context) */
    public Role getRole() {
        return role;
    }

    /** @return linked resource (lazy — access inside an open persistence context) */
    public Resources getResource() {
        return resource;
    }
}
|
<reponame>twinstone/open-anonymizer
package openanonymizer.datasource.neo4j;
import openanonymizer.core.storage.TransformationStorage;
import openanonymizer.datasource.DataSource;
import openanonymizer.datasource.PagedDataSource;
import openanonymizer.model.dataset.*;
import openanonymizer.model.describer.EntityDescriber;
import openanonymizer.model.describer.RelationEntityDescriber;
import openanonymizer.model.describer.RelationFieldDescriber;
import openanonymizer.model.mapper.EntityWrapperMapper;
import openanonymizer.model.mapper.Neo4jEntityMapper;
import openanonymizer.model.wrapper.EntityWrapper;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.log4j.Logger;
import org.neo4j.driver.*;
import org.neo4j.driver.exceptions.Neo4jException;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
/**
 * Data source for executing CRUD operations on Neo4j database.
 *
 * @version 0.1
 * @since Open Anonymizer 1.0.0
 */
public class Neo4jDataSource implements DataSource, PagedDataSource {

    /** Maps Neo4j {@link Record} rows to {@link EntityWrapper} instances. */
    private static final EntityWrapperMapper<Record> mapper = new Neo4jEntityMapper();
    private static final Logger logger = Logger.getLogger(Neo4jDataSource.class);

    /** Underlying driver; owned by this data source and closed in {@link #close()}. */
    private final Driver driver;

    public Neo4jDataSource(Driver driver) {
        Validate.notNull(driver, "Driver must be not null.");
        this.driver = driver;
    }

    /**
     * Reads all nodes matching the describer into a data set.
     * Returns an empty data set if the driver raises an exception.
     */
    @Override
    public DataSet readDataSet(EntityDescriber describer) {
        Validate.notNull(describer, "Describer must be not null.");
        try (Session session = driver.session()) {
            Result result = session.run(Neo4jUtils.entityMatchQuery(describer));
            List<EntityWrapper> list = result.list(r -> mapper.getFromEntity(r, describer));
            logger.info(String.format("Reading from Neo4j database. %s nodes with label [%s] found.", list.size(), describer.getSource()));
            return new DataSetImpl(list, describer);
        } catch (Neo4jException e) {
            logger.error("Exception reading from Neo4j.", e);
            return EmptyDataSet.build();
        }
    }

    /**
     * Creates one node per wrapper, records the old-to-new id mapping in
     * {@link TransformationStorage}, and creates outgoing relations when the
     * describer declares relation fields.
     */
    @Override
    public void saveEntities(EntityDescriber describer, DataSet dataSet) {
        Validate.notNull(describer, "Describer must be not null.");
        // fixed message typo (was "nit null")
        Validate.notNull(dataSet, "Data set must be not null.");
        try (Session session = driver.session()) {
            for (final EntityWrapper wrapper : dataSet) {
                String query = Neo4jUtils.entityCreateQuery(wrapper);
                Result result = session.run(query, wrapper.getEntityAsMap());
                long id = result.single().get(0).asLong();
                TransformationStorage.insertValue(describer.getSource(), wrapper.getId(), id);
                logger.info(String.format("Created new node with id [%s].", id));
                if (describer.getRelationFields() != null) createRelations(wrapper, id, session);
            }
        } catch (Neo4jException e) {
            logger.error(String.format("Exception creating new nodes with label [%s].", describer.getName()), e);
        }
    }

    /** Updates every wrapper's node in place; errors are logged, not rethrown. */
    @Override
    public void updateEntities(EntityDescriber describer, DataSet dataSet) {
        Validate.notNull(describer, "Describer must be not null.");
        Validate.notNull(dataSet, "Data set must be not null.");
        try (Session session = driver.session()) {
            for (final EntityWrapper wrapper : dataSet) {
                String query = Neo4jUtils.entityUpdateQuery(wrapper);
                session.run(query, wrapper.getEntityAsMap());
                logger.info(String.format("Updating node with id [%s].", wrapper.getId()));
            }
        } catch (Neo4jException e) {
            logger.error(String.format("Exception updating nodes with label [%s].", describer.getSource()), e);
        }
    }

    /**
     * Reads a single page of nodes matching the describer.
     * Returns an empty data set if the driver raises an exception.
     */
    @Override
    public PagedDataSet readPage(EntityDescriber describer, long offset, int limit) {
        // added for consistency with the other public methods
        Validate.notNull(describer, "Describer must be not null.");
        try (Session session = driver.session()) {
            Result result = session.run(Neo4jUtils.entityPagedMatchQuery(describer, offset, limit));
            List<EntityWrapper> list = result.list(r -> mapper.getFromEntity(r, describer));
            logger.info(String.format("Reading from Neo4j database. %s nodes with label [%s] found.", list.size(), describer.getSource()));
            return new PagedDataSetImpl(list, describer, offset, 0, limit, getTotalItemsCount(describer));
        } catch (Neo4jException e) {
            logger.error("Exception reading from Neo4j.", e);
            return EmptyDataSet.build();
        }
    }

    /**
     * Creates relation edges between already-migrated nodes inside a single
     * transaction; the transaction is rolled back on any driver error.
     * Wrappers whose endpoints have no recorded id transformation are
     * skipped with a warning.
     */
    @Override
    public void saveRelationEntity(RelationEntityDescriber describer, DataSet dataSet) {
        Validate.notNull(describer, "Describer must be not null.");
        Validate.notNull(dataSet, "Data set must be not null.");
        // try-with-resources guarantees the session is released even when
        // beginTransaction() itself throws (the previous version leaked it).
        try (Session session = driver.session()) {
            Transaction transaction = session.beginTransaction();
            try {
                for (final EntityWrapper wrapper : dataSet) {
                    Optional<Pair<?, ?>> pairLeft = TransformationStorage.findByLeft(describer.getLeft().getParentSource(), wrapper.getValue(describer.getLeft().getName()));
                    Optional<Pair<?, ?>> pairRight = TransformationStorage.findByLeft(describer.getRight().getParentSource(), wrapper.getValue(describer.getRight().getName()));
                    if (pairLeft.isPresent() && pairRight.isPresent()) {
                        EntityDescriber left = new EntityDescriber();
                        left.setSource(describer.getLeft().getParentSource());
                        left.setName(describer.getLeft().getName());
                        EntityDescriber right = new EntityDescriber();
                        right.setSource(describer.getRight().getParentSource());
                        right.setName(describer.getRight().getName());
                        transaction.run(Neo4jUtils.relationCreateQuery(left, right, pairLeft.get().getRight(), pairRight.get().getRight(), describer.getName()));
                        logger.info(String.format("Created new relation [%s]<-(%s)->[%s].", describer.getLeft().getParentSource(), describer.getName(), describer.getRight().getParentSource()));
                    } else {
                        if (!pairLeft.isPresent()) {
                            logger.warn(String.format("Could not create relation [%s]<-(%s)->[%s]. Transformation for node with label [%s] and previous id [%s] not found.",
                                    describer.getLeft().getParentSource(), describer.getSource(), describer.getRight().getParentSource(), describer.getLeft().getParentSource(), wrapper.getValue(describer.getLeft().getName())));
                        }
                        if (!pairRight.isPresent()) {
                            logger.warn(String.format("Could not create relation [%s]<-(%s)->[%s]. Transformation for node with label [%s] and previous id [%s] not found.",
                                    describer.getLeft().getParentSource(), describer.getSource(), describer.getRight().getParentSource(), describer.getRight().getParentSource(), wrapper.getValue(describer.getRight().getName())));
                        }
                    }
                }
                transaction.commit();
            } catch (Neo4jException e) {
                logger.error(String.format("Exception saving relation between nodes with label [%s] and [%s].", describer.getLeft().getParentSource(), describer.getRight().getParentSource()), e);
                transaction.rollback();
            } finally {
                transaction.close();
            }
        }
    }

    /** Counts nodes matching the describer; returns 0 on driver errors. */
    @Override
    public long getTotalItemsCount(EntityDescriber describer) {
        try (Session session = driver.session()) {
            Result result = session.run(Neo4jUtils.countEntitiesQuery(describer));
            return result.single().get(0).asLong();
        } catch (Neo4jException e) {
            return 0;
        }
    }

    @Override
    public void close() throws IOException {
        driver.close();
    }

    /**
     * Creates MANY_TO_ONE relations for a freshly created node, resolving
     * target ids through {@link TransformationStorage}. Missing
     * transformations are logged and skipped.
     */
    private void createRelations(EntityWrapper wrapper, Object leftId, Session current) {
        Validate.notNull(wrapper, "Wrapper must be not null.");
        Validate.notNull(leftId, "Id must be not null.");
        EntityDescriber describer = wrapper.describeEntity();
        for (final RelationFieldDescriber relation : describer.getRelationFields()) {
            if (RelationFieldDescriber.RelationType.MANY_TO_ONE.equals(relation.getRelationType())) {
                EntityDescriber r = new EntityDescriber();
                r.setSource(relation.getTargetSource());
                r.setName(relation.getName());
                try {
                    Optional<Pair<?, ?>> pair = TransformationStorage.findByLeft(relation.getTargetSource(), wrapper.getValue(relation.getName()));
                    if (pair.isPresent()) {
                        current.run(Neo4jUtils.relationCreateQuery(describer, r, leftId, pair.get().getRight(), relation.getName()));
                        logger.info(String.format("Created new relation [%s]-(%s)->[%s].", describer.getSource(), relation.getName(), r.getSource()));
                    } else {
                        logger.warn(String.format("Could not create relation [%s]-(%s)->[%s]. Transformation for node with label [%s] and previous id [%s] not found.",
                                describer.getSource(), relation.getName(), r.getSource(), r.getSource(), wrapper.getValue(relation.getName())));
                    }
                } catch (Neo4jException e) {
                    logger.error(String.format("Exception saving relation between nodes with label [%s] and [%s].", describer.getSource(), r.getSource()), e);
                }
            }
        }
    }
}
|
public int[][][] generateSample() {
int[][][] sample = new int[probabilities.length][][];
for (int i = 0; i < probabilities.length; i++) {
sample[i] = new int[probabilities[i].length][];
for (int j = 0; j < probabilities[i].length; j++) {
sample[i][j] = new int[probabilities[i][j].length];
double rand = random.nextDouble();
double cumulativeProb = 0.0;
for (int k = 0; k < probabilities[i][j].length; k++) {
cumulativeProb += probabilities[i][j][k];
if (rand < cumulativeProb) {
sample[i][j][k] = 1;
break;
}
}
}
}
return sample;
} |
<reponame>ChristopherChudzicki/mathbox
// TODO: This file was created by bulk-decaffeinate.
// Sanity-check the conversion and remove this comment.
/*
* decaffeinate suggestions:
* DS206: Consider reworking classes to avoid initClass
* Full docs: https://github.com/decaffeinate/decaffeinate/blob/master/docs/suggestions.md
*/
import { Parent } from "./parent.js";
// Node that delegates all unit handling to the shared `unit` helper.
export class Unit extends Parent {
  static initClass() {
    this.traits = ["node", "unit"];
  }

  make() {
    const helper = this._helpers.unit;
    return helper.make();
  }

  unmake() {
    const helper = this._helpers.unit;
    return helper.unmake();
  }

  // Current unit value, as reported by the helper.
  getUnit() {
    const helper = this._helpers.unit;
    return helper.get();
  }

  // Shader uniforms derived from the unit, as reported by the helper.
  getUnitUniforms() {
    const helper = this._helpers.unit;
    return helper.uniforms();
  }
}
Unit.initClass();
|
<reponame>estekhin/personal-stream-expression-transformer
package com.github.estekhin.set.ast;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
 * Immutable expression-tree leaf holding a single integer literal.
 */
public final class NumberNode extends ExpressionNode {

    static final @NotNull String UNARY_MINUS = "-";

    private final long value;

    NumberNode(long value) {
        this.value = value;
    }

    /** @return the literal value */
    public long getValue() {
        return value;
    }

    @Override
    public @NotNull ExpressionType type() {
        return ExpressionType.INTEGER;
    }

    @Override
    public <R> @Nullable R visit(@NotNull NodeVisitor<R> visitor) {
        return visitor.visitNumberNode(this);
    }

    @Override
    public int hashCode() {
        return Long.hashCode(value);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        // Exact-class comparison, consistent with the rest of the hierarchy.
        return obj != null
                && obj.getClass() == getClass()
                && value == ((NumberNode) obj).value;
    }

    @Override
    public @NotNull String toString() {
        return Long.toString(value);
    }
}
|
import network from './network';
import eventBus from '../core/event-bus';
import config from '../core/config/config';
import crypto from 'crypto';
import _ from 'lodash';
import database from '../database/database';
import async from 'async';
import walletSync from '../core/wallet/wallet-sync';
import peerRotation from './peer-rotation';
class Peer {
    constructor() {
        // No-op callback for call sites that require a handler but do nothing.
        this.noop = () => {
        };
        // transaction id -> true for syncs currently in flight; used to
        // de-duplicate concurrent requests of each kind.
        this.pendingTransactionSync = {};
        this.pendingTransactionSpendSync = {};
        this.pendingTransactionIncludePathSync = {};
    }
sendNodeAddress(ipAddress, messageID, ws) {
if (!ws) {
return;
}
let payload = {
type : 'node_address_response:' + messageID,
content: {ip_address: ipAddress}
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
ws.nodeConnectionReady && ws.send(data);
}
    /**
     * Broadcasts a node_address_request to every registered client and
     * resolves with the first node_address_response; rejects with
     * 'get_address_timeout' after NETWORK_LONG_TIME_WAIT_MAX ms if no
     * answer arrived.
     */
    getNodeAddress() {
        return new Promise((resolve, reject) => {
            let id      = crypto.randomBytes(20).toString('hex');
            let payload = {
                type   : 'node_address_request',
                content: {request_id: id}
            };
            eventBus.emit('node_event_log', payload);
            eventBus.once('node_address_response:' + id, (data) => {
                resolve(data);
            });
            setTimeout(() => {
                // Listener still attached means no response arrived in time.
                if (eventBus.listenerCount('node_address_response:' + id) > 0) {
                    reject('get_address_timeout');
                    eventBus.removeAllListeners('node_address_response:' + id);
                }
            }, config.NETWORK_LONG_TIME_WAIT_MAX);
            let data = JSON.stringify(payload);
            network.registeredClients.forEach(ws => {
                try {
                    ws.nodeConnectionReady && ws.send(data);
                }
                catch (e) {
                    console.log('[WARN]: try to send data over a closed connection.');
                }
            });
        });
    }
sendNodeList(ws) {
return database.getRepository('node')
.listNodes()
.then(nodes => {
nodes = _.map(nodes, node => _.pick(node, [
'node_prefix',
'node_ip_address',
'node_port',
'node_id'
]));
nodes.push({
node_prefix : config.WEBSOCKET_PROTOCOL,
node_ip_address: network.nodePublicIp,
node_port : config.NODE_PORT,
node_id : network.nodeID
}); // add self
if (nodes.length === 0) {
return;
}
let payload = {
type : 'node_list',
content: nodes
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
if (ws) { // send to a single node
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
else {
network.registeredClients.forEach(ws => {
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
});
}
});
}
transactionSend(transaction, excludeWS) {
let payload = {
type : 'transaction_new',
content: {transaction}
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
network.registeredClients.forEach(ws => {
try {
if (excludeWS !== ws) {
ws.nodeConnectionReady && ws.send(data);
}
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
});
return transaction;
}
transactionSendToNode(transaction, ws) {
let payload = {
type : 'transaction_new',
content: transaction
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
return transaction;
}
auditPointValidationResponse(transactions, auditPointID, ws) {
let payload = {
type : 'audit_point_validation_response:' + auditPointID,
content: {transaction_id_list: transactions}
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
return transactions;
}
    /**
     * Asks peers, one at a time, for an inclusion path of a transaction.
     * Resolves with [responseData, ws] from the first node that answers;
     * rejects immediately if a request for the same transaction is already
     * pending, or after every peer timed out without answering.
     */
    transactionIncludePathRequest(transactionID, excludeTransactions) {
        if (this.pendingTransactionIncludePathSync[transactionID]) {
            return Promise.reject();
        }
        this.pendingTransactionIncludePathSync[transactionID] = true;
        return new Promise((resolve, reject) => {
            let payload = {
                type   : 'transaction_include_path_request',
                content: {
                    transaction_id             : transactionID,
                    transaction_id_exclude_list: excludeTransactions
                }
            };
            let data = JSON.stringify(payload);
            eventBus.emit('node_event_log', payload);
            // Shuffle so the load is spread across peers between calls.
            let nodesWS = _.shuffle(network.registeredClients);
            async.eachSeries(nodesWS, (ws, callback) => {
                let callbackCalled = false;
                let nodeID = ws.nodeID;
                try {
                    if (ws.nodeConnectionReady) {
                        eventBus.removeAllListeners('transaction_include_path_response:' + transactionID);
                        eventBus.once('transaction_include_path_response:' + transactionID, function(eventData, eventWS) {
                            console.log('[peer] stopping transaction spend sync for transaction id ', transactionID, 'because data was received from node ', nodeID);
                            if (!callbackCalled) {
                                resolve([
                                    eventData,
                                    eventWS
                                ]);
                                callbackCalled = true;
                                // A truthy value aborts the eachSeries walk.
                                callback(true);
                            }
                        });
                        ws.send(data);
                        setTimeout(function() {
                            if (!callbackCalled) {
                                callbackCalled = true;
                                callback();
                            }
                        }, config.NETWORK_SHORT_TIME_WAIT_MAX);
                    }
                    else {
                        callback();
                    }
                }
                catch (e) {
                    console.log('[WARN]: try to send data over a closed connection.');
                    if (!callbackCalled) {
                        callbackCalled = true;
                        callback();
                    }
                }
            }, (done) => {
                eventBus.removeAllListeners('transaction_include_path_response:' + transactionID);
                delete this.pendingTransactionIncludePathSync[transactionID];
                if (!done) {
                    console.log('[peer] transaction_include_path_response:' + transactionID + ' not received. skip...');
                    reject();
                }
            });
        });
    }
transactionSpendResponse(transactionID, transactions, ws) {
let payload = {
type : 'transaction_spend_response:' + transactionID,
content: {transaction_id_list: transactions}
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
    /**
     * Asks peers, one at a time, which transactions spend the given
     * transaction. Resolves with the first response payload; rejects
     * immediately if a request for the same transaction is already pending,
     * or after every peer timed out without answering.
     */
    transactionSpendRequest(transactionID) {
        if (this.pendingTransactionSpendSync[transactionID]) {
            return Promise.reject();
        }
        this.pendingTransactionSpendSync[transactionID] = true;
        return new Promise((resolve, reject) => {
            let payload = {
                type   : 'transaction_spend_request',
                content: {transaction_id: transactionID}
            };
            let data = JSON.stringify(payload);
            eventBus.emit('node_event_log', payload);
            // Shuffle so the load is spread across peers between calls.
            let nodesWS = _.shuffle(network.registeredClients);
            async.eachSeries(nodesWS, (ws, callback) => {
                let callbackCalled = false;
                let nodeID = ws.nodeID;
                try {
                    if (ws.nodeConnectionReady) {
                        eventBus.removeAllListeners('transaction_spend_response:' + transactionID);
                        eventBus.once('transaction_spend_response:' + transactionID, function(eventData) {
                            console.log('[peer] stopping transaction spend sync for transaction id ', transactionID, 'because data was received from node ', nodeID);
                            if (!callbackCalled) {
                                resolve(eventData);
                                callbackCalled = true;
                                // A truthy value aborts the eachSeries walk.
                                callback(true);
                            }
                        });
                        ws.send(data);
                        setTimeout(function() {
                            if (!callbackCalled) {
                                callbackCalled = true;
                                callback();
                            }
                        }, config.NETWORK_SHORT_TIME_WAIT_MAX);
                    }
                    else {
                        callback();
                    }
                }
                catch (e) {
                    console.log('[WARN]: try to send data over a closed connection.');
                    if (!callbackCalled) {
                        callbackCalled = true;
                        callback();
                    }
                }
            }, (done) => {
                eventBus.removeAllListeners('transaction_spend_response:' + transactionID);
                delete this.pendingTransactionSpendSync[transactionID];
                if (!done) {
                    console.log('[peer] transaction_spend_response:' + transactionID + ' not received. skip...');
                    reject();
                }
            });
        });
    }
transactionIncludePathResponse(message, ws) {
let payload = {
type : 'transaction_include_path_response:' + message.transaction_id,
content: message
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
return message;
}
transactionValidationRequest(content, ws) {
return new Promise((resolve, reject) => {
let payload = {
type: 'transaction_validation_request',
content
};
eventBus.emit('node_event_log', payload);
console.log('[peer] validation request from node ', ws.node);
let data = JSON.stringify(payload);
try {
let callbackCalled = false;
if (ws.nodeConnectionReady) {
const messageID = 'transaction_validation_response:' + content.transaction_id + ':' + content.consensus_round + ':' + ws.nodeID;
eventBus.removeAllListeners(messageID);
eventBus.once(messageID, function(eventData, eventWS) {
console.log('[peer] received validation response for ', eventData.transaction_id, ' from node ', eventWS.node);
if (eventWS.nodeID !== ws.nodeID || eventWS.connectionID !== ws.connectionID) {
return;
}
console.log('[peer] received validation response for ', eventData.transaction_id, ' from node ', eventWS.node);
if (!callbackCalled) {
console.log('[peer] received validation response for ', eventData.transaction_id, ' from node ', eventWS.node, 'success');
callbackCalled = true;
resolve([
eventData,
eventWS
]);
}
});
ws.send(data);
setTimeout(function() {
if (!callbackCalled) {
console.log('[peer] validation response from node ', ws.node, 'timeout');
callbackCalled = true;
reject();
}
}, config.CONSENSUS_VALIDATION_WAIT_TIME_MAX);
}
else {
reject();
}
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
reject();
}
});
}
    /**
     * Asks a peer whether it can be allocated to validate a transaction.
     * Resolves with [responseData, ws] from the matching node/connection;
     * rejects on timeout (NETWORK_SHORT_TIME_WAIT_MAX), when the connection
     * is not ready, or when sending fails.
     */
    allocateNodeToValidateTransaction(content, ws) {
        return new Promise((resolve, reject) => {
            const payload = {
                type: 'transaction_validation_node_allocate',
                content
            };
            eventBus.emit('node_event_log', payload);
            let data = JSON.stringify(payload);
            try {
                let callbackCalled = false;
                if (ws.nodeConnectionReady) {
                    const messageID = 'transaction_validation_node_allocate_response:' + ws.nodeID;
                    eventBus.removeAllListeners(messageID);
                    eventBus.once(messageID, function(eventData, eventWS) {
                        // Only accept the response from the node/connection asked.
                        if (eventWS.nodeID !== ws.nodeID || eventWS.connectionID !== ws.connectionID) {
                            return;
                        }
                        console.log('[peer] received allocation response for ', eventData.transaction_id, ' from node ', eventWS.node);
                        if (!callbackCalled) {
                            callbackCalled = true;
                            resolve([
                                eventData,
                                eventWS
                            ]);
                        }
                    });
                    ws.send(data);
                    setTimeout(function() {
                        if (!callbackCalled) {
                            callbackCalled = true;
                            reject();
                        }
                    }, config.NETWORK_SHORT_TIME_WAIT_MAX);
                }
                else {
                    reject();
                }
            }
            catch (e) {
                console.log('[WARN]: try to send data over a closed connection.');
                reject();
            }
        });
    }
    /**
     * Replies to a node allocation request and waits for the requester's
     * acknowledge. Resolves (with no value) once the matching acknowledge
     * arrives; rejects on timeout (NETWORK_SHORT_TIME_WAIT_MAX), when the
     * connection is not ready, or when sending fails.
     */
    replyNodeAllocationRequest(content, ws) {
        return new Promise((resolve, reject) => {
            let payload = {
                type: 'transaction_validation_node_allocate_response:' + network.nodeID,
                content
            };
            eventBus.emit('node_event_log', payload);
            let data = JSON.stringify(payload);
            try {
                let callbackCalled = false;
                if (ws.nodeConnectionReady) {
                    const messageID = 'transaction_validation_node_allocate_acknowledge:' + ws.nodeID;
                    eventBus.removeAllListeners(messageID);
                    eventBus.once(messageID, function(eventData, eventWS) {
                        // Only accept the acknowledge from the node/connection replied to.
                        if (eventWS.nodeID !== ws.nodeID || eventWS.connectionID !== ws.connectionID) {
                            return;
                        }
                        console.log('[peer] received allocation acknowledge for consensus round of ', eventData.transaction_id, ' from node ', eventWS.nodeID);
                        if (!callbackCalled) {
                            callbackCalled = true;
                            resolve();
                        }
                    });
                    ws.send(data);
                    setTimeout(function() {
                        if (!callbackCalled) {
                            callbackCalled = true;
                            reject();
                        }
                    }, config.NETWORK_SHORT_TIME_WAIT_MAX);
                }
                else {
                    reject();
                }
            }
            catch (e) {
                console.log('[WARN]: try to send data over a closed connection.');
                reject();
            }
        });
    }
acknowledgeAllocateNodeToValidateTransaction(content, ws) {
let payload = {
type: 'transaction_validation_node_allocate_acknowledge:' + network.nodeID,
content
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
releaseNodeToValidateTransaction(content, ws) {
let payload = {
type: 'transaction_validation_node_release',
content
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
auditPointValidationRequest(content, ws) {
let payload = {
type: 'audit_point_validation_request',
content
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
return content;
}
transactionValidationResponse(message, ws) {
let payload = {
type : 'transaction_validation_response:' + message.transaction_id + ':' + message.consensus_round + ':' + network.nodeID,
content: message
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
return message;
}
shardSyncResponse(content, ws) {
let payload = {
type: 'shard_sync_response:' + content.shard_id,
content
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
    /**
     * Requests a shard sync from a single node; resolves with the
     * shard_sync_response payload or rejects with
     * 'shard_sync_response_timeout' after NETWORK_SHORT_TIME_WAIT_MAX ms.
     * NOTE(review): when the connection is not ready the promise never
     * settles — confirm callers guard against that.
     */
    shardSync(shardID, ws) {
        return new Promise((resolve, reject) => {
            let payload = {
                type   : 'shard_sync_request',
                content: {shard_id: shardID}
            };
            eventBus.emit('node_event_log', payload);
            let data = JSON.stringify(payload);
            try {
                if (ws.nodeConnectionReady) {
                    let timeoutID;
                    let listener = function(data) {
                        resolve(data);
                        clearTimeout(timeoutID);
                    };
                    eventBus.once('shard_sync_response:' + shardID, listener);
                    ws.send(data);
                    ws = null;
                    timeoutID = setTimeout(() => {
                        eventBus.removeListener('shard_sync_response:' + shardID, listener);
                        reject('shard_sync_response_timeout');
                    }, config.NETWORK_SHORT_TIME_WAIT_MAX);
                }
            }
            catch (e) {
                reject('[WARN]: try to send data over a closed connection.');
            }
        });
    }
addressTransactionSync(address, updated, ws) {
if (network.registeredClients.length === 0) {
return address;
}
console.log('[peer] requesting transaction sync for address:', address, ' from ', updated);
let payload = {
type : 'address_transaction_sync',
content: {
address,
updated
}
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
if (ws) {
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
else {
network.registeredClients.forEach(ws => {
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
});
}
return address;
}
transactionSyncResponse(content, ws) {
let payload = {
type: 'transaction_sync_response:' + content.transaction.transaction_id,
content
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
return content;
}
    /**
     * Schedules a transaction for sync (via walletSync) and, when
     * dispatch_request is set, asks peers one at a time for it. The first
     * response is forwarded as a 'transaction_new' event. Rejects when no
     * clients are registered or a sync is already pending; otherwise
     * resolves once dispatching finishes (or immediately when not
     * dispatching).
     */
    transactionSyncRequest(transactionID, options = {}) {
        const {depth: currentDepth, request_node_id: requestNodeID, routing, priority, dispatch_request: dispatchRequest} = options;
        return new Promise((resolve, reject) => {
            // Always queue the transaction; the delay backs off when we are
            // about to dispatch the request ourselves.
            walletSync.add(transactionID, {
                delay: !dispatchRequest ? 0 : config.NETWORK_LONG_TIME_WAIT_MAX * 10,
                priority
            });
            if (network.registeredClients.length === 0 || this.pendingTransactionSync[transactionID]) {
                return reject();
            }
            else if (!dispatchRequest) {
                return resolve();
            }
            console.log('[peer] requesting transaction sync for :', transactionID);
            let payload = {
                type   : 'transaction_sync',
                content: {
                    transaction_id         : transactionID,
                    depth                  : currentDepth || 0,
                    routing                : routing,
                    routing_request_node_id: requestNodeID
                }
            };
            eventBus.emit('node_event_log', payload);
            let data = JSON.stringify(payload);
            this.pendingTransactionSync[transactionID] = true;
            // Shuffle so the load is spread across peers between calls.
            let nodesWS = _.shuffle(network.registeredClients);
            async.eachSeries(nodesWS, (ws, callback) => {
                let callbackCalled = false;
                let nodeID = ws.nodeID;
                try {
                    if (ws.nodeConnectionReady) {
                        eventBus.removeAllListeners('transaction_sync_response:' + transactionID);
                        eventBus.once('transaction_sync_response:' + transactionID, function(eventData, eventWS) {
                            console.log('[peer] stopping transaction sync for transaction id ', transactionID, 'because data was received from node ', nodeID);
                            eventBus.emit('transaction_new', eventData, eventWS, true);
                            if (!callbackCalled) {
                                callbackCalled = true;
                                // A truthy value aborts the eachSeries walk.
                                callback(true);
                            }
                        });
                        ws.send(data);
                        setTimeout(function() {
                            if (!callbackCalled) {
                                callbackCalled = true;
                                callback();
                            }
                        }, config.NETWORK_SHORT_TIME_WAIT_MAX);
                    }
                    else {
                        callback();
                    }
                }
                catch (e) {
                    console.log('[WARN]: try to send data over a closed connection.');
                    if (!callbackCalled) {
                        callbackCalled = true;
                        callback();
                    }
                }
            }, (done) => {
                eventBus.removeAllListeners('transaction_sync_response:' + transactionID);
                delete this.pendingTransactionSync[transactionID];
                if (!done) {
                    console.log('[peer] transaction_sync_response:' + transactionID + ' not received. skip...');
                    // Re-queue without delay so the sync is retried later.
                    walletSync.add(transactionID, {priority});
                }
                resolve();
            });
        });
    }
    /**
     * Requests a transaction sync from one specific connection. The
     * response, if any, is forwarded as a 'transaction_new' event; the
     * promise always resolves immediately after sending (it does not wait
     * for the response).
     */
    transactionSyncByWebSocket(transactionID, ws, currentDepth) {
        return new Promise((resolve) => {
            if (this.pendingTransactionSync[transactionID]) {
                return resolve();
            }
            console.log('[peer] requesting transaction sync for :', transactionID);
            let payload = {
                type   : 'transaction_sync',
                content: {
                    transaction_id: transactionID,
                    depth         : currentDepth || 0
                }
            };
            eventBus.emit('node_event_log', payload);
            let data = JSON.stringify(payload);
            try {
                if (ws.nodeConnectionReady) {
                    eventBus.removeAllListeners('transaction_sync_response:' + transactionID);
                    eventBus.once('transaction_sync_response:' + transactionID, function(data, eventWS) {
                        eventBus.emit('transaction_new', data, eventWS, true);
                    });
                    ws.send(data);
                    ws = null;
                    setTimeout(() => {
                        // Drop the listener unless a full sync took over meanwhile.
                        if (!this.pendingTransactionSync[transactionID]) {
                            eventBus.removeAllListeners('transaction_sync_response:' + transactionID);
                        }
                    }, config.NETWORK_SHORT_TIME_WAIT_MAX);
                }
            }
            catch (e) {
                console.log('[WARN]: try to send data over a closed connection.');
            }
            resolve();
        });
    }
_onNodeList(nodes, ws) {
eventBus.emit('node_event_log', {
type : 'node_list',
content: nodes,
from : ws.node
});
const nodeRepository = database.getRepository('node');
async.eachSeries(nodes, (data, callback) => {
data.node_port_api = data.node_port_api || config.NODE_PORT_API;
if (network.addNode(data.node_prefix, data.node_ip_address, data.node_port, data.node_port_api, data.node_id)) {
nodeRepository.addNode(data)
.then(() => callback())
.catch(() => callback());
}
else {
callback();
}
}, () => {
nodeRepository.addNodeAttribute(ws.nodeID, 'peer_count', nodes.length)
.then(_ => _)
.catch(_ => _);
});
}
sendConnectionReady(ws) {
ws.nodeConnectionState = !ws.nodeConnectionState ? 'waiting' : 'open';
if (ws.nodeConnectionState === 'open') {
ws.nodeConnectionReady = true;
}
let payload = {
type: 'connection_ready'
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
nodeAttributeRequest(content, ws) {
let payload = {
type: 'node_attribute_request',
content
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
nodeAttributeResponse(content, ws) {
let payload = {
type: 'node_attribute_response',
content
};
eventBus.emit('node_event_log', payload);
let data = JSON.stringify(payload);
try {
ws.nodeConnectionReady && ws.send(data);
}
catch (e) {
console.log('[WARN]: try to send data over a closed connection.');
}
}
_onNodeAttributeRequest(content, ws) {
eventBus.emit('node_event_log', {
type: 'node_attribute_request',
from: ws.node,
content
});
database.getRepository('node')
.getNodeAttribute(content.node_id, content.attribute_type)
.then(attributeValue => {
this.nodeAttributeResponse({
node_id : content.node_id,
attribute_type: content.attribute_type,
value : attributeValue
}, ws);
})
.catch(_ => _);
}
_onNodeAttributeResponse(content, ws) {
eventBus.emit('node_event_log', {
type: 'node_attribute_response',
from: ws.node,
content
});
if (content.node_id && content.attribute_type && content.value !== undefined) {
const nodeRepository = database.getRepository('node');
nodeRepository.addNodeAttribute(content.node_id, content.attribute_type, content.value)
.then(_ => _)
.catch(_ => _);
}
}
// Delegate a full peer-rotation cycle to the peerRotation service;
// returns its promise so callers can await completion.
_doPeerRotation() {
    return peerRotation.doPeerRotation();
}
// Wire inbound peer messages to their handlers; bind() preserves this
// Peer instance as `this` inside each handler.
initialize() {
    eventBus.on('node_list', this._onNodeList.bind(this));
    eventBus.on('node_attribute_request', this._onNodeAttributeRequest.bind(this));
    eventBus.on('node_attribute_response', this._onNodeAttributeResponse.bind(this));
}
stop() {
eventBus.removeAllListeners('node_list');
}
}
export default new Peer();
|
package com.foxconn.iot.sso.dto;
import javax.validation.constraints.NotBlank;
/**
 * Request body for the change-password endpoint: identifies the user by
 * employee number and carries both the current and the new password.
 * Bean-validation messages are user-facing and intentionally localized
 * (zh-CN); they are runtime strings and must not be translated here.
 */
public class ChangePwdDto {
    @NotBlank(message = "工号不能为空")
    private String username;
    @NotBlank(message = "旧密码不能为空")
    private String password;
    @NotBlank(message = "新密码不能为空")
    private String newpwd;

    /** @return the employee number identifying the user */
    public String getUsername() {
        return username;
    }
    public void setUsername(String username) {
        this.username = username;
    }
    /** @return the user's current (old) password */
    public String getPassword() {
        return password;
    }
    public void setPassword(String password) {
        this.password = password;
    }
    /** @return the new password to set */
    public String getNewpwd() {
        return newpwd;
    }
    public void setNewpwd(String newpwd) {
        this.newpwd = newpwd;
    }
}
|
<reponame>nimbus-cloud/cli
package api_test
import (
. "cf/api"
"code.google.com/p/go.net/websocket"
"code.google.com/p/gogoprotobuf/proto"
"github.com/cloudfoundry/loggregatorlib/logmessage"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"net/http/httptest"
"strings"
testapi "testhelpers/api"
testconfig "testhelpers/configuration"
"time"
)
// Integration-style spec for LoggregatorLogsRepository: a fake TLS
// websocket server replays three pre-marshalled log messages, and the
// specs assert which endpoint was contacted and what arrived on the
// channel.
var _ = Describe("loggregator logs repository", func() {
	var (
		logChan        chan *logmessage.Message
		testServer     *httptest.Server
		requestHandler *requestHandlerWithExpectedPath
		logsRepo       *LoggregatorLogsRepository
		messagesToSend [][]byte
	)

	BeforeEach(func() {
		startTime := time.Now()
		messagesToSend = [][]byte{
			marshalledLogMessageWithTime("My message 1", startTime.UnixNano()),
			marshalledLogMessageWithTime("My message 2", startTime.UnixNano()),
			marshalledLogMessageWithTime("My message 3", startTime.UnixNano()),
		}
		// buffered so the repository can write before the spec reads
		logChan = make(chan *logmessage.Message, 1000)
		testServer, requestHandler, logsRepo = setupTestServerAndLogsRepo(messagesToSend...)
	})

	AfterEach(func() {
		testServer.Close()
	})

	Describe("RecentLogsFor", func() {
		BeforeEach(func() {
			err := logsRepo.RecentLogsFor("my-app-guid", func() {}, logChan)
			Expect(err).NotTo(HaveOccurred())
			// closing lets the specs below range over the channel
			close(logChan)
		})

		It("connects to the dump endpoint", func() {
			Expect(requestHandler.lastPath).To(Equal("/dump/"))
		})

		It("writes log messages onto the provided channel", func() {
			dumpedMessages := []*logmessage.Message{}
			for msg := range logChan {
				dumpedMessages = append(dumpedMessages, msg)
			}

			Expect(len(dumpedMessages)).To(Equal(3))
			Expect(dumpedMessages[0]).To(Equal(parseMessage(messagesToSend[0])))
			Expect(dumpedMessages[1]).To(Equal(parseMessage(messagesToSend[1])))
			Expect(dumpedMessages[2]).To(Equal(parseMessage(messagesToSend[2])))
		})
	})

	Describe("TailLogsFor", func() {
		BeforeEach(func() {
			err := logsRepo.TailLogsFor("my-app-guid", func() {}, logChan, make(chan bool), time.Duration(1*time.Second))
			Expect(err).NotTo(HaveOccurred())
			close(logChan)
		})

		It("connects to the tailing endpoint", func() {
			Expect(requestHandler.lastPath).To(Equal("/tail/"))
		})

		It("writes log messages on the channel in the correct order", func() {
			var messages []string
			for msg := range logChan {
				messages = append(messages, string(msg.GetLogMessage().Message))
			}

			Expect(messages).To(Equal([]string{"My message 1", "My message 2", "My message 3"}))
		})
	})
})
// parseMessage decodes msgBytes into a logmessage.Message, failing the
// current spec if the bytes are not a valid marshalled message.
func parseMessage(msgBytes []byte) *logmessage.Message {
	parsed, err := logmessage.ParseMessage(msgBytes)
	Expect(err).ToNot(HaveOccurred())
	return parsed
}
// requestHandlerWithExpectedPath wraps a websocket handler and records
// the URL path of the last request it served, so specs can assert which
// loggregator endpoint (/dump/ or /tail/) was contacted.
type requestHandlerWithExpectedPath struct {
	handlerFunc func(conn *websocket.Conn)
	lastPath    string
}
// setupTestServerAndLogsRepo starts a TLS websocket server that asserts
// the expected request shape (query, method, auth header), replays the
// given messages, and returns a logs repository whose loggregator
// endpoint points at that server (https:// rewritten to wss://).
func setupTestServerAndLogsRepo(messages ...[]byte) (testServer *httptest.Server, requestHandler *requestHandlerWithExpectedPath, logsRepo *LoggregatorLogsRepository) {
	requestHandler = new(requestHandlerWithExpectedPath)
	requestHandler.handlerFunc = func(conn *websocket.Conn) {
		request := conn.Request()
		// record the path so specs can assert /dump/ vs /tail/
		requestHandler.lastPath = request.URL.Path

		Expect(request.URL.RawQuery).To(Equal("app=my-app-guid"))
		Expect(request.Method).To(Equal("GET"))
		Expect(request.Header.Get("Authorization")).To(ContainSubstring("BEARER my_access_token"))

		for _, msg := range messages {
			conn.Write(msg)
		}
		// give the client a moment to drain before closing the socket
		time.Sleep(time.Duration(50) * time.Millisecond)
		conn.Close()
	}

	testServer = httptest.NewTLSServer(websocket.Handler(requestHandler.handlerFunc))

	configRepo := testconfig.NewRepositoryWithDefaults()
	configRepo.SetApiEndpoint("https://localhost")

	endpointRepo := &testapi.FakeEndpointRepo{}
	endpointRepo.LoggregatorEndpointReturns.Endpoint = strings.Replace(testServer.URL, "https", "wss", 1)

	repo := NewLoggregatorLogsRepository(configRepo, endpointRepo)
	logsRepo = &repo
	return
}
// marshalledLogMessageWithTime builds a serialized OUT-type LogMessage
// for app "my-app-guid" carrying messageString and the given timestamp.
func marshalledLogMessageWithTime(messageString string, timestamp int64) []byte {
	msgType := logmessage.LogMessage_OUT
	source := "DEA"

	marshalled, err := proto.Marshal(&logmessage.LogMessage{
		Message:     []byte(messageString),
		AppId:       proto.String("my-app-guid"),
		MessageType: &msgType,
		SourceName:  &source,
		Timestamp:   proto.Int64(timestamp),
	})
	Expect(err).ToNot(HaveOccurred())

	return marshalled
}
|
<filename>multiprocessing/mp1.py<gh_stars>1-10
from __future__ import unicode_literals
import os,json,sys,csv
if __name__ == '__main__' and __package__ is None:
from os import sys, path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
import youtube_dl
import multiprocessing as mp
output = mp.Queue()
class MyLogger(object):
    """Logger handed to youtube_dl: silences debug/warning output and
    prints only errors."""
    def debug(self, msg):
        # suppress youtube_dl debug chatter
        pass
    def warning(self, msg):
        # suppress warnings as well
        pass
    def error(self, msg):
        # errors are the only messages surfaced to the user
        print(msg)
def my_hook(d):
    # youtube_dl progress hook: d is the progress-status dict for the
    # current download; simply echo it
    print(d)
# Shared youtube_dl options: prefer 720p with fallback to best quality,
# no post-processing, and the quiet logger / progress hook defined above.
ydl_opts ={
    'format': '720p_HD/720p/best',
    'postprocessors': [],
    'logger': MyLogger(),
    'progress_hooks': [my_hook],
}
def dwnl(kworker):
    """Download one half of the URLs listed in the LIST file.

    Lines of LIST are split alternately into two lists so that two
    worker processes can share the downloads without overlap: worker 1
    takes even-numbered lines, worker 2 takes odd-numbered lines.

    Args:
        kworker: worker number (1 or 2) selecting which half to fetch.
    """
    fab = 'LIST'
    url_1 = []
    url_2 = []
    # close the file deterministically instead of leaking the handle
    with open(fab, "r") as fr:
        for k, line in enumerate(fr, start=1):
            if k % 2 == 0:
                url_1.append(line)
            else:
                url_2.append(line)
    # both original branches ran the same loop over a different list;
    # unknown worker numbers download nothing (same as before)
    urls = url_1 if kworker == 1 else (url_2 if kworker == 2 else [])
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        for item in urls:
            msg = "downloading " + str(item)
            print(msg)
            # FIX: worker 2 previously ran os.system('notify-send'+msg)
            # without a space, invoking the nonexistent command
            # "notify-senddownloading..."; both workers now notify alike
            os.system('notify-send ' + msg)
            ydl.download([item])
# Run the two download workers in parallel, one per half of the URL list.
pool = mp.Pool(processes=2)
results = [pool.apply_async(dwnl, args=(x,)) for x in range(1,3)]
# dwnl returns None, so this only blocks until both workers finish
output = [p.get() for p in results]
print(output)
|
import { ExtensionContext } from "vscode";
import * as vscode from 'vscode';
import { ICommandService } from "./ICommandService";
import { injectable, multiInject } from "inversify";
import TYPES from "../../Types";
import { ICommand } from "../../commands/ICommand";
@injectable()
export class CommandService implements ICommandService {
constructor(
@multiInject(TYPES.command) private commands: ICommand[]
) {}
registerCommands(context: ExtensionContext): void {
for (const c of this.commands) {
const cmd = vscode.commands.registerCommand(c.id, c.execute, c);
context.subscriptions.push(cmd);
}
}
} |
#!/bin/bash
# Deployment helper: replace the "metal" binary with a freshly uploaded
# one, pull the latest sources, and restart the pm2-managed service.
rm metal
echo "上传文件......"
# rz: interactively receive the new binary over ZMODEM
rz
echo "文件执行权限"
chmod u+x metal
echo "git pull"
git pull origin master
echo "重启服务"
pm2 restart metal
# pm2 startOrRestart pm2.json
#!/bin/bash
# SLURM batch job: runs the PE-my.py sequence-tagging experiment with a
# linear activation configuration on a single core.
#SBATCH -J Act_linear_1
#SBATCH --mail-user=eger@ukp.informatik.tu-darmstadt.de
#SBATCH --mail-type=FAIL
#SBATCH -e /work/scratch/se55gyhe/log/output.err.%j
#SBATCH -o /work/scratch/se55gyhe/log/output.out.%j
#SBATCH -n 1     # Number of cores
#SBATCH --mem-per-cpu=6000
#SBATCH -t 23:59:00             # Hours, minutes and seconds, or '#SBATCH -t 10' -only mins
#module load intel python/3.5
# positional args: activation, seed/size, optimizer, depth, then
# hyperparameter values — presumably consumed by PE-my.py's CLI; confirm
# against that script's argument parser
python3 /home/se55gyhe/Act_func/sequence_tagging/arg_min/PE-my.py linear 263 sgd 3 0.37534246970479646 0.010914214951220788 he_uniform 0.05
|
<gh_stars>0
# -*- coding: utf-8 -*-
import sys
from abc import abstractmethod, ABC
import torch
import torch.nn as nn
import pytrol.util.argsparser as parser
from pytrol.control.agent.StatModelAgent import StatModelAgent
from pytrol.model import Paths
from pytrol.model.knowledge.EnvironmentKnowledge import EnvironmentKnowledge
from pytrol.util import misc
from pytrol.util.net.Connection import Connection
# MAPTrainer project
import maptrainer.pathformatter as pf
import maptrainer.model as model_pckg
# Pytorch's model agent
class MAPTrainerModelAgent(StatModelAgent, ABC):
    """Patrolling agent whose decisions are backed by a PyTorch model
    trained with the MAPTrainer project.

    The checkpoint to load is resolved from the environment knowledge
    (graph name, number of agents) plus the model/variant identifiers
    configured on this class.
    """

    def __init__(self,
                 id_: int,
                 original_id: str,
                 env_knl: EnvironmentKnowledge,
                 connection: Connection,
                 agts_addrs: list,
                 model_type: str,
                 model_variant: str,
                 datasrc: str,
                 interaction: bool = False,
                 variant: str = '',
                 gpu: bool = False,
                 depth: float = 3.0):
        r"""
        Args:
            id_ (int):
            original_id (str):
            env_knl (EnvironmentKnowledge):
            connection (Connection):
            agts_addrs (list):
            model_type (str): The type of the model used to make predictions
            model_variant (str): The variant of the model used to make
                predictions
            datasrc (str):
            interaction (bool):
            variant (str):
            gpu (bool): whether to move the model to CUDA
            depth (float):
        """
        self.gpu = gpu
        # self.model_variant_2 = "Adagrad-pre"
        self.model_variant_2 = ''  # TODO: passing this attribute as argument
        # of the constructor
        # NOTE(review): "1-50--1" parses to nlayers=1, nhid=50 and, via
        # the negative branch of load_bptt, bptt=-1 — confirm intended
        self.variant_2 = "1-50--1"  # TODO: passing this attribute as argument
        # of the constructor
        self.nlayers = self.load_nlayers(self.variant_2)
        self.nhid = self.load_nhid(self.variant_2)
        self.bptt = self.load_bptt(self.variant_2)
        super().__init__(id_=id_, original_id=original_id,
                         env_knl=env_knl, connection=connection,
                         agts_addrs=agts_addrs, model_type=model_type,
                         model_variant=model_variant, variant=variant,
                         depth=depth, interaction=interaction,
                         datasrc=datasrc)
        # self.model is created by the superclass (via load_model);
        # move it to GPU if requested
        self.model = self.cuda(self.model, self.gpu)

    def load_nlayers(self, variant: str):
        r"""Extract the number of layers (first field) from `variant`.

        Args:
            variant (str): `variant` is in the form of
                `<nlayers>-<nhid>-<bptt>`
        """
        return int(variant.split('-')[0])

    def load_nhid(self, variant: str):
        r"""Extract the hidden size (second field) from `variant`.

        Args:
            variant (str): `variant` is in the form of
                `<nlayers>-<nhid>-<bptt>`
        """
        return int(variant.split('-')[1])

    def load_bptt(self, variant: str):
        r"""Extract the BPTT length (last field) from `variant`.

        A doubled dash before the last field (e.g. "1-50--1") yields an
        empty third token, selecting the negated-value branch below.

        Args:
            variant (str): `variant` is in the form of
                `rlpm-<nlayers>-<nhid>-<bptt>`
        """
        splt_variant = variant.split('-')
        return int(splt_variant[2]) if len(splt_variant) == 3 \
            else -int(splt_variant[3])

    def load_model(self, **kwargs) -> nn.Module:
        r"""Load the most recent checkpoint for this agent's model.

        The checkpoint directory is derived from the environment (graph
        name, number of agents) and the model/variant configuration.

        Returns:
            The deserialized PyTorch model.
        """
        args = parser.parse_args()
        print(self.model_variant_2)
        # Directory path of the model
        dirpathmodel = pf. \
            generate_savefile_dirpath(type_="model",
                                      graph=self.env_knl.ntw_name,
                                      nlayers=self.nlayers,
                                      nhid=self.nhid,
                                      bptt=self.bptt,
                                      nagts=self.env_knl.nagts,
                                      model_type=self.model_type,
                                      model_variant=self.model_variant,
                                      suffix=self.model_variant_2,
                                      datasrc=self.datasrc,
                                      log_rep_dir=args.dirpath_models)
        # Path of the model's latest version
        # model_path = misc.get_latest_pytorch_model(dirpathmodel)
        model_path = pf. \
            entire_model_path(misc.get_latest_pytorch_model(dirpathmodel))
        with open(model_path, "rb") as s:
            if self.gpu:
                # Loading on GPU if trained with CUDA
                model = torch.load(s)
            else:
                # Loading on CPU if trained with CUDA: map storages back
                # to the CPU so CUDA-trained checkpoints still load
                model = torch.load(s,
                                   map_location=lambda storage, loc: storage)
        print("Path of the loaded model: {}".format(model_path), '\n')
        return model

    def prepare_input(self, input_):
        r"""Convert `input_` to a tensor on the configured device.

        Args:
            input_: raw network input convertible to a torch.Tensor
        """
        # Use of the LSTM network
        input_ = torch.Tensor(input_)
        return self.cuda(input_, self.gpu)

    @staticmethod
    def cuda(o, gpu):
        r"""Move `o` to CUDA when `gpu` is True, otherwise return as-is.

        Args:
            o: tensor or module exposing .cuda()
            gpu: whether CUDA should be used
        """
        return o.cuda() if gpu else o
|
#!/bin/sh
# CI helper: builds every example sketch (plus the PMF_test harness)
# with PlatformIO for each requested target environment.
TARGETS=${1:-avr atmega2560 esp32 atmelsam}
echo "execute for ${TARGETS}"
if [ "$GITHUB_WORKSPACE" != "" ]
then
   # Make sure we are inside the github workspace
   cd $GITHUB_WORKSPACE
fi
# install platformio, if needed
which pio
if [ $? -ne 0 ]
then
   # Install PlatformIO CLI
   export PATH=$PATH:~/.platformio/penv/bin
   curl -fsSL https://raw.githubusercontent.com/platformio/platformio-core-installer/master/get-platformio.py -o get-platformio.py
   python3 get-platformio.py
   # Use automated install from pio run
   # pio platform install "atmelavr"
   # pio platform install "atmelsam"
   # pio platform install "espressif32"
fi
# arduino libraries are not allowed to contain symbolic links in the repository.
# So create the pio_dirs-directory during the github action
rm -fR pio_dirs
mkdir pio_dirs
# one PlatformIO project per example, with sources symlinked in
for i in `ls examples`
do
	mkdir -p pio_dirs/$i/src
	cd pio_dirs/$i
	ln -s ../../ci/platformio.ini .
	cd src
	FILES=`cd ../../../examples/$i;find . -type f`
	for f in $FILES;do ln -s ../../../examples/$i/$f .;done
	cd ../../..
done
# Make one directory to test PoorManFloat no device
mkdir pio_dirs/PMF_test
mkdir pio_dirs/PMF_test/src
cd pio_dirs/PMF_test
ln -s ../../ci/platformio.ini .
cd src
#sed -e 's/%d/%ld/g' <../../../tests/test_03.h >test_03.h
ln -s ../../../tests/pc_based/test_03.h .
ln -s ../../../tests/pc_based/PMF_test.ino PMF_test.ino
cd ../../..
# abort on the first failed build
set -e
for i in pio_dirs/*
do
	for p in ${TARGETS}
	do
		echo $p: $i
		(cd $i;pio run -s -e $p)
	done
done
/*
*
*/
package net.community.chest.swing.component.frame;
import java.lang.reflect.Method;
import javax.swing.JInternalFrame;
import javax.swing.JMenuBar;
import net.community.chest.awt.menu.MenuReflectiveProxy;
import net.community.chest.convert.ValueStringInstantiator;
import net.community.chest.dom.DOMUtils;
import net.community.chest.dom.proxy.XmlProxyConvertible;
import net.community.chest.swing.WindowCloseOptionsValueStringInstantiator;
import net.community.chest.swing.component.JComponentReflectiveProxy;
import net.community.chest.swing.component.menu.JMenuBarReflectiveProxy;
import org.w3c.dom.Element;
/**
* <P>Copyright 2008 as per GPLv2</P>
*
* @param <F> The reflected {@link JInternalFrame}
* @author <NAME>.
* @since Aug 27, 2008 3:51:58 PM
*/
public class JInternalFrameReflectiveProxy<F extends JInternalFrame> extends JComponentReflectiveProxy<F> {
    public JInternalFrameReflectiveProxy (Class<F> objClass) throws IllegalArgumentException
    {
        this(objClass, false);
    }

    protected JInternalFrameReflectiveProxy (Class<F> objClass, boolean registerAsDefault)
        throws IllegalArgumentException, IllegalStateException
    {
        super(objClass, registerAsDefault);
    }

    // XML attribute names handled specially by this proxy
    public static final String    DEFAULT_CLOSE_OPER_ATTR="DefaultCloseOperation",
                                  ICON_ATTR="frameIcon";
    /*
     * @see net.community.chest.awt.dom.UIReflectiveAttributesProxy#resolveAttributeInstantiator(java.lang.String, java.lang.Class)
     */
    @SuppressWarnings("unchecked")
    @Override
    protected <C> ValueStringInstantiator<C> resolveAttributeInstantiator (String name, Class<C> type) throws Exception
    {
        // close-operation values need the dedicated window-close-options parser
        if (DEFAULT_CLOSE_OPER_ATTR.equalsIgnoreCase(name))
            return (ValueStringInstantiator<C>) WindowCloseOptionsValueStringInstantiator.DEFAULT;

        return super.resolveAttributeInstantiator(name, type);
    }
    /*
     * @see net.community.chest.dom.transform.ReflectiveAttributesProxy#updateObjectAttribute(java.lang.Object, java.lang.String, java.lang.String, java.lang.reflect.Method)
     */
    @Override
    protected F updateObjectAttribute (F src, String name, String value, Method setter) throws Exception
    {
        // the frame icon is loaded as a resource rather than parsed from text
        if (ICON_ATTR.equalsIgnoreCase(name))
            return updateObjectResourceAttribute(src, name, value, setter);

        return super.updateObjectAttribute(src, name, value, setter);
    }

    /** @return true if the element is the menu-bar child of the frame's XML */
    public boolean isMenuBarElement (final Element elem, final String tagName)
    {
        return isMatchingElement(elem, tagName, MenuReflectiveProxy.MENU_ELEMNAME);
    }

    /** @return the converter used to build a {@link JMenuBar} from XML (null element yields null) */
    public XmlProxyConvertible<? extends JMenuBar> getMenuBarConverter (final Element elem) throws Exception
    {
        return (null == elem) ? null : JMenuBarReflectiveProxy.BAR;
    }

    /**
     * Builds (or updates in place) the frame's menu bar from the XML element.
     * If the frame already has a bar the converter must reconstruct into the
     * same instance; otherwise an {@link IllegalStateException} is thrown.
     */
    public JMenuBar setMenuBar (final F src, final Element elem) throws Exception
    {
        final XmlProxyConvertible<? extends JMenuBar>    inst=getMenuBarConverter(elem);
        @SuppressWarnings("unchecked")
        final JMenuBar    org=src.getJMenuBar(),
                          bar=
            (null == org) ? inst.fromXml(elem) : ((XmlProxyConvertible<JMenuBar>) inst).fromXml(org, elem);
        if (bar != null)
        {
            if (null == org)
                src.setJMenuBar(bar);
            else if (org != bar)    // expected to have re-constructed the same instance
                throw new IllegalStateException("setMenuBar(" + DOMUtils.toString(elem) + ") mismatched reconstructed instances");
        }

        return bar;
    }
    /*
     * @see net.community.chest.swing.component.JComponentReflectiveProxy#fromXmlChild(javax.swing.JComponent, org.w3c.dom.Element)
     */
    @Override
    public F fromXmlChild (F src, Element elem) throws Exception
    {
        final String    tagName=elem.getTagName();
        // intercept the menu-bar child; all other children go to the superclass
        if (isMenuBarElement(elem, tagName))
        {
            setMenuBar(src, elem);
            return src;
        }

        return super.fromXmlChild(src, elem);
    }

    public static final JInternalFrameReflectiveProxy<JInternalFrame>    INTFRAME=
        new JInternalFrameReflectiveProxy<JInternalFrame>(JInternalFrame.class, true);
}
|
import os
import logging
import time
from parser import split_explores, parse_explores, split_views, parse_views, clean_defolderize, has_child_folder
def main():
    """Split model/view LookML files and parse out their metadata.

    Explores are split and parsed first; any views nested one folder
    deep are flattened before the views themselves are split and parsed.
    """
    # FIX: time.process_time() counts CPU time only and excludes time
    # spent waiting on I/O, so the reported duration of this file-heavy
    # job was wrong; perf_counter() measures elapsed wall-clock time.
    start = time.perf_counter()
    split_explores()
    logging.info("Split up Model files to Explore paylods.")
    parse_explores()
    logging.info("Completed parsing explores and retrieving explore metadata.")
    for sub_folder in next(os.walk('../views'))[1]:
        if has_child_folder(f'../views/{sub_folder}'):
            logging.info(f"Extracting views from sub folder {sub_folder}...")
            clean_defolderize(f'../views/{sub_folder}')
    split_views()
    logging.info("Split up View files to base views.")
    parse_views()
    logging.info("Completed parsing base views and retrieving view metadata.")
    end = time.perf_counter()
    logging.info(f'Completed process in {end-start} seconds.')
if __name__ == '__main__':
main()
|
# Load the necessary libraries
library(tidyverse)
library(caret)
# FIX: initial_split()/training()/testing() come from rsample, which is
# attached by neither tidyverse nor caret, so the script previously
# stopped with "could not find function 'initial_split'".
library(rsample)

# Load the data
data <- read.csv("data.csv")

# Split the data into train and test sets (80/20, reproducible seed)
set.seed(123)
data_split <- initial_split(data, prop=0.8)
train_data <- training(data_split)
test_data <- testing(data_split)

# Create the model: first column is the target, the rest are predictors,
# fitted with 3-fold cross-validation
model <- train(x=train_data[, -1],
               y=train_data[,1],
               method='lm',
               trControl=trainControl(method='cv',
                                      number=3))

# Make predictions
predictions <- predict(model, test_data[,-1])

# Evaluate the accuracy
# NOTE(review): confusionMatrix() expects factor predictions from a
# classifier, but method='lm' fits a regression. Confirm whether the
# target is binary and either switch to a classification method (e.g.
# method='glm' with family=binomial) or use regression metrics (RMSE).
confusionMatrix(data=predictions,
                reference=test_data[,1], positive='1')
import { Application, Context } from "probot"; // eslint-disable-line no-unused-vars
import git from "simple-git/promise";
import rimraf from "rimraf";
import { system, deferred } from "./utils";
import { execSync } from "child_process";
import { mkdirSync, exists } from "fs";
import { promisify } from "util";
// Prefix all bot output so it is easy to spot among probot's own logs.
function botLog(...msg: string[]): void {
  console.log(">> DENO TEST BOT:", ...msg);
}
// Work paths (relative to the bot's cwd) for the deno toolchain and clones.
const INSTALL_PATH = ".deno";
const INSTALLED_DENO_PATH = ".deno/bin/deno";
const CLONE_PATH = ".cloned";
// Monotonic counter giving each PR test run its own clone directory.
let counter = 0;
// Lifecycle of one test run; mapped to GitHub commit-status states
// by statusToString below.
enum TestStatus {
  PENDING,
  SUCCESS,
  FAILED,
  ERROR,
};

// String form accepted by GitHub's commit-status API.
type TestStatusString = "pending" | "success" | "failure" | "error";
/**
 * Maps a TestStatus to the commit-status string expected by the GitHub
 * status API; any unrecognized value falls back to "error".
 */
function statusToString(s: TestStatus): TestStatusString {
  if (s === TestStatus.PENDING) {
    return "pending";
  }
  if (s === TestStatus.SUCCESS) {
    return "success";
  }
  if (s === TestStatus.FAILED) {
    return "failure";
  }
  return "error";
}
// Outcome of one `deno test` run. stdout/stderr are captured process
// output; errorLog is populated when the run failed or errored.
interface TestInfo {
  status: TestStatus,
  stdout: string | null,
  stderr: string | null,
  errorLog: string | null,
};
/**
 * Clones `repoPath` at `branchName` into `localPath`, runs `deno test -A`
 * there, and returns the captured result. The clone directory is removed
 * both before and after the run; deno is installed on demand.
 */
async function cloneAndTest(
  repoPath: string,
  localPath: string,
  branchName: string,
): Promise<TestInfo> {
  // Remove existing cloned
  const p0 = deferred();
  rimraf(localPath, (_) => p0.resolve());
  await p0;

  botLog("Check if deno is installed...");
  if (!(await promisify(exists)(INSTALLED_DENO_PATH))) {
    botLog("deno not installed. Installing...");
    installDeno();
  }
  let output: TestInfo | undefined;
  try {
    const GIT = git();
    botLog("Cloning", repoPath, "branch", branchName, "to", localPath);
    await GIT.clone(repoPath, localPath, ["-b", branchName]);
    botLog(`Starting tests using ${process.cwd()}/${INSTALLED_DENO_PATH}...`);
    // NO_COLOR keeps the captured output free of ANSI escape codes
    const { code, stdout, stderr, error } = await system(`${process.cwd()}/${INSTALLED_DENO_PATH} test -A`, {
      cwd: localPath,
      env: {"NO_COLOR": 1}
    });
    // exit code 0 → tests passed; anything else → test failure
    output = {
      status: code === 0 ? TestStatus.SUCCESS : TestStatus.FAILED,
      stdout,
      stderr,
      errorLog: code === 0 ? null : error,
    };
    botLog("Test output is", JSON.stringify(output));
  } catch (e) {
    // clone/spawn failure (not a test failure) is reported as ERROR
    output = {
      status: TestStatus.ERROR,
      stdout: null,
      stderr: null,
      errorLog: e.message,
    }
  }
  botLog("Clean up", localPath);
  // Cleanup cloned
  const p = deferred();
  rimraf(localPath, (_) => p.resolve());
  await p;
  return output!;
}
// Renders the PR comment body: a status line followed by the captured
// stdout/stderr, or the error log when the run itself errored. The
// ${"```"} interpolations emit literal markdown code fences; template
// content is intentionally left at column 0 to keep the markdown clean.
function getCommentBody(info: TestInfo): string {
  let s = "\`deno test\` status: " + statusToString(info.status) + ".\n\n";
  if (info.status !== TestStatus.ERROR) {
    s += `stdout:
${"```"}
${info.stdout}
${"```"}
stderr:
${"```"}
${info.stderr}
${"```"}
`
  } else {
    s += `error log:
${"```"}
${info.errorLog}
${"```"}
`
  }
  return s;
}
/**
 * Builds the payload for GitHub's commit-status API for commit `sha`.
 * The fixed `context` identifies this bot among other status checks.
 */
function createPRStatus(
  sha: string,
  state: TestStatus,
  description?: string
): {
  sha: string,
  state: TestStatusString,
  [key: string]: string | undefined,
} {
  return {
    sha,
    state: statusToString(state),
    description,
    context: 'Deno Test Bot'
  }
}
// Creates the install and clone work directories. mkdirSync throws when
// the directory already exists; that case is expected and deliberately
// ignored by the empty catch blocks.
function createDirectories(): void {
  try {
    mkdirSync(INSTALL_PATH); // called only once on startup
  } catch {}
  try {
    mkdirSync(CLONE_PATH); // called only once on startup
  } catch {}
}
// Sync install.
// Downloads and runs the official deno installer, installing the
// toolchain into INSTALL_PATH; blocks until the install finishes.
function installDeno(): void {
  execSync(`curl -fsSL https://deno.land/x/install/install.sh | DENO_INSTALL=${INSTALL_PATH} sh`);
}
// One-time process setup: create the work directories and make sure the
// deno toolchain is installed before any webhook is handled.
function runOnceOnStartup(): void {
  createDirectories();
  installDeno();
}
runOnceOnStartup();
/**
 * Handles a pull-request webhook: marks the head commit pending, clones
 * and tests the PR branch, posts the result as a comment, and finally
 * updates the commit status to match the test outcome.
 */
async function handlePullRequest(context: Context): Promise<void> {
  // https://developer.github.com/v3/activity/events/types/#pullrequestevent
  const { head } = context.payload.pull_request;
  const sha = head.sha;
  const cloneUrl: string = head.repo.clone_url;
  const branchName: string = head.ref;

  botLog("Update commit status to PENDING...");
  // Update PR status.
  const prPreStatus = createPRStatus(sha, TestStatus.PENDING, "Test Pending");
  // TODO: check failures
  await context.github.repos.createStatus(context.repo(prPreStatus));

  botLog("Start testing...");
  // TODO: use config files to control behavior (e.g. running in directory)
  // each run gets its own clone directory so concurrent PRs don't collide
  const uniqueLocalPath = `${CLONE_PATH}/${counter++}`;
  const info = await cloneAndTest(cloneUrl, uniqueLocalPath, branchName);

  botLog("Testing complete. Creating comments...");
  // Add a comment.
  await context.github.issues.createComment(context.issue({
    body: getCommentBody(info),
  }));

  botLog("Update commit status to " + statusToString(info.status));
  // FIX: the final status description was hard-coded to "Test Failed"
  // even when the tests passed; derive it from the actual outcome.
  const description = info.status === TestStatus.SUCCESS ? "Test Passed" : "Test Failed";
  const prStatus = createPRStatus(sha, info.status, description);
  // TODO: check failures
  await context.github.repos.createStatus(context.repo(prStatus));
  botLog("DONE");
}
// Probot entry point: re-run the tests whenever a pull request is
// opened, edited, or receives new commits (synchronize).
export = (app: Application) => {
  // TODO
  app.on([
    "pull_request.opened",
    "pull_request.edited",
    "pull_request.synchronize"
  ], async (context) => {
    await handlePullRequest(context);
  })
  // For more information on building apps:
  // https://probot.github.io/docs/

  // To get your app running against GitHub, see:
  // https://probot.github.io/docs/development/
}
|
<gh_stars>1-10
require 'spree_core'
require 'spree_limit_quantity/engine'
|
<gh_stars>1-10
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.modules.taxonomy.ui;
import java.util.List;
import org.olat.core.commons.persistence.SortKey;
import org.olat.core.gui.components.form.flexible.impl.elements.table.DefaultFlexiTableDataModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiSortableColumnDef;
import org.olat.core.gui.components.form.flexible.impl.elements.table.FlexiTableColumnModel;
import org.olat.core.gui.components.form.flexible.impl.elements.table.SortableFlexiTableDataModel;
/**
*
* Initial date: 3 Oct 2017<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
public class TaxonomyLevelCompetenceTableModel extends DefaultFlexiTableDataModel<TaxonomyLevelCompetenceRow>
implements SortableFlexiTableDataModel<TaxonomyLevelCompetenceRow> {

	public TaxonomyLevelCompetenceTableModel(FlexiTableColumnModel columnsModel) {
		super(columnsModel);
	}

	@Override
	public void sort(SortKey orderBy) {
		// delegate sorting to the dedicated sort delegate, then replace
		// the model's rows with the sorted list; null orderBy is a no-op
		if(orderBy != null) {
			List<TaxonomyLevelCompetenceRow> views = new TaxonomyLevelCompetenceTableModelSortDelegate(orderBy, this, null).sort();
			super.setObjects(views);
		}
	}

	@Override
	public Object getValueAt(int row, int col) {
		TaxonomyLevelCompetenceRow competence = getObject(row);
		return getValueAt(competence, col);
	}

	@Override
	public Object getValueAt(TaxonomyLevelCompetenceRow row, int col) {
		// columns below USER_PROPS_OFFSET are competence columns; columns at or
		// above it are dynamic user-property columns addressed by offset
		if(col < TaxonomyLevelCompetenceController.USER_PROPS_OFFSET) {
			switch(CompetenceCols.values()[col]) {
				case key: return row.getKey();
				case type: return row.getCompetenceType();
				case achievement: return row.getAchievement();
				case reliability: return row.getReliability();
				case expiration: return row.getExpiration();
			}
		} else {
			int propPos = col - TaxonomyLevelCompetenceController.USER_PROPS_OFFSET;
			return row.getIdentityProp(propPos);
		}
		return null;
	}

	/** Fixed competence columns; ordinal positions match the table's column indices. */
	public enum CompetenceCols implements FlexiSortableColumnDef {
		key("table.header.key"),
		type("table.header.competence.type"),
		achievement("table.header.competence.achievement"),
		reliability("table.header.competence.reliability"),
		expiration("table.header.competence.expiration");

		private final String i18nHeaderKey;

		private CompetenceCols(String i18nHeaderKey) {
			this.i18nHeaderKey = i18nHeaderKey;
		}

		@Override
		public boolean sortable() {
			return true;
		}

		@Override
		public String sortKey() {
			return name();
		}

		@Override
		public String i18nHeaderKey() {
			return i18nHeaderKey;
		}
	}
}
|
#!/bin/sh
# Builds the full shofEL2 Nintendo Switch boot chain: exploit, u-boot,
# coreboot and the Linux kernel, then assembles the final boot image.
#Let clone fail0verflow repositories
cd /opt/
#Grab a coffee... downloading quite massive amount of bytes!
git clone https://github.com/fail0verflow/shofel2.git && \
git clone --recursive https://github.com/fail0verflow/switch-coreboot.git coreboot && \
git clone https://github.com/fail0verflow/switch-u-boot.git u-boot && \
git clone https://github.com/fail0verflow/switch-linux.git linux && \
#Albeit we are not going to compile the usb-loader, we just clone it from the container by exploiting the git command \
git clone https://github.com/boundarydevices/imx_usb_loader.git
#Build the shofEL2
cd /opt/shofel2/exploit
make
#Build the u-boot (aarch64 cross toolchain required)
cd ../../u-boot
export CROSS_COMPILE=aarch64-linux-gnu-
make nintendo-switch_defconfig
make -j4
#Then the coreboot turn
cd ../coreboot
make nintendo_switch_defconfig
make iasl
make -j4
#Grab a shorter coffee... it's the time for the linux kernel
cd ../linux
export ARCH=arm64
make nintendo-switch_defconfig
make -j4
#Make the final image (wrap the boot script into a u-boot image)
cd ../shofel2/usb_loader
../../u-boot/tools/mkimage -A arm64 -T script -C none -n "boot.scr" -d switch.scr switch.scr.img
|
#!/bin/bash
# Collection of training invocations for different GPU setups.
# NOTE(review): executed as-is, ALL of these commands run sequentially;
# confirm whether this file is meant as a runnable script or as a
# copy/paste reference of per-machine commands.
# you can set the hparams by using --hparams=xxx
CUDA_VISIBLE_DEVICES=3 python train.py \
    -l logdir \
    -o outdir \
    --n_gpus=1 \
    --hparams=speaker_adversial_loss_w=20.,ce_loss=False,speaker_classifier_loss_w=0.1,contrastive_loss_w=30.

# Multi GPUs
CUDA_VISIBLE_DEVICES=0,1 python -m multiproc train.py \
    -l logdir \
    -o outdir/vctk/test_wgan_bs16 \
    --n_gpus=2 \
    --hparams=distributed_run=True,batch_size=16,speaker_adversial_loss_w=20.,ce_loss=False,speaker_classifier_loss_w=0.1,contrastive_loss_w=30.

# Multi GPUs on aipool-linux14 (resumes from an earlier checkpoint)
python -m multiproc train.py \
    -l logdir \
    -o outdir/vctk/test_wgan_bs16 \
    -c outdir/vctk/test_wgan_bs32/checkpoint_50000 \
    --n_gpus=4 \
    --hparams=distributed_run=True,batch_size=16,epochs=2000,iters_per_checkpoint=2000,speaker_adversial_loss_w=20.,ce_loss=False,speaker_classifier_loss_w=0.1,contrastive_loss_w=30.

# Single GPU
CUDA_VISIBLE_DEVICES=1 python train.py \
    -l logdir \
    -o outdir/vctk/test_wgan_bs32 \
    --n_gpus=1 \
    --hparams=distributed_run=False,batch_size=32,epochs=2000,iters_per_checkpoint=5000,speaker_adversial_loss_w=20.,ce_loss=False,speaker_classifier_loss_w=0.1,contrastive_loss_w=30.

# Single GPU, original features (resumes from an earlier checkpoint)
python train_orig.py \
    -l logdir \
    -o outdir/vctk/test_orig_bs16 \
    -c outdir/vctk/test_orig_bs16/checkpoint_434000 \
    --n_gpus=1 \
    --hparams=distributed_run=False,batch_size=16,epochs=2000,iters_per_checkpoint=5000,speaker_adversial_loss_w=20.,ce_loss=False,speaker_classifier_loss_w=0.1,contrastive_loss_w=30.
|
<gh_stars>0
/*
Copyright 2019 <NAME> - Fuzzview
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef FV_METADATA_H
#define FV_METADATA_H
#include "llvm/IR/Module.h"
#include "llvm/IR/DebugInfoMetadata.h"
#define METADATA_BLOCK_ID "fv.block.id"
#define GET_MD_STR(MD) \
llvm::cast<llvm::MDString>(MD->getOperand(0))->getString().str()
namespace fv {
// Helper to set and get metadata, since
// every metadata access involves several
// non-obvious calls to get to the value.
// Helper to set and get metadata, since
// every metadata access involves several
// non-obvious calls to get to the value.
class Metadata {

public:

    // Set metadata to an instruction, something like:
    //      item.metadata[metadata_key] = data
    // The string is wrapped in an MDString inside a single-operand MDNode,
    // matching what get() unwraps via GET_MD_STR.
    static void set(
        llvm::Instruction *item,
        const std::string &metadata_key,
        const std::string &data
    ) {
        item->setMetadata(metadata_key,
            llvm::MDNode::get(item->getContext(),
                llvm::MDString::get(item->getContext(), data)));
    }

    // Get metadata of an instruction, something like:
    //      return item.metadata[metadata_key]
    // Aborts via Error::fatal when the instruction carries no metadata
    // under metadata_key, so callers may assume a non-empty result.
    static std::string get(
        const llvm::Instruction *item,
        const std::string &metadata_key
    ) {
        auto *MD = item->getMetadata(metadata_key);

        if (!MD)
            Error::fatal<llvm::Instruction>(item, "No " + metadata_key + " found.");

        return GET_MD_STR(MD);
    }
};
}
#endif
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.